author    | kjnash <k.j.nash@usa.net> | 2022-09-05 22:42:13 (GMT)
committer | kjnash <k.j.nash@usa.net> | 2022-09-05 22:42:13 (GMT)
commit    | e388abbb03ee1e1fba8bcceb1bbb9dde7e54d63b (patch)
tree      | fd34480b0ef994873e28763b760b70eb782d3213
parent    | 254c149a17d2fe55e3297734a3a8c089e9bf8581 (diff)
parent    | 57807a30bb766ff5e18b94270ac3d92041b006af (diff)
Merge 8.7
-rw-r--r-- | .gitignore                 |    2
-rw-r--r-- | doc/Eval.3                 |    2
-rw-r--r-- | doc/TraceVar.3             |    4
-rw-r--r-- | generic/tclBasic.c         |   32
-rw-r--r-- | generic/tclCmdIL.c         |   43
-rw-r--r-- | generic/tclExecute.c       |  107
-rw-r--r-- | generic/tclInt.decls       |   11
-rw-r--r-- | generic/tclInt.h           |  508
-rw-r--r-- | generic/tclIntDecls.h      |   20
-rw-r--r-- | generic/tclInterp.c        |   14
-rw-r--r-- | generic/tclListObj.c       | 3714
-rw-r--r-- | generic/tclStubInit.c      |    4
-rw-r--r-- | generic/tclTest.c          |  316
-rw-r--r-- | tests-perf/comparePerf.tcl |  371
-rw-r--r-- | tests-perf/listPerf.tcl    | 1290
-rw-r--r-- | tests/apply.test           |    2
-rw-r--r-- | tests/env.test             |   40
-rw-r--r-- | tests/listRep.test         | 2538
-rw-r--r-- | unix/tclUnixSock.c         |    4
-rw-r--r-- | win/tclWinInit.c           |    9
-rw-r--r-- | win/tclWinSock.c           |    8
21 files changed, 7475 insertions, 1564 deletions
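The bulk of this merge is the TIP 625 work: generic/tclListObj.c is rewritten around the ListStore, ListSpan and ListRep structures added to generic/tclInt.h, so that a list's element storage can hold free slots before and after the in-use area and can be shared (via a span) by several lists instead of being copied. As a reading aid only, here is a standalone sketch of that idea; it is not Tcl source. Store, Span and Rep are simplified stand-ins for ListStore, ListSpan and ListRep, and plain int payloads stand in for Tcl_Obj pointers.

```c
/*
 * Standalone sketch of the TIP 625 store/span idea (simplified names,
 * int payloads instead of Tcl_Obj pointers). Reading aid only.
 */
#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>

typedef struct Store {      /* cf. ListStore in generic/tclInt.h */
    size_t firstUsed;       /* index of first slot in use */
    size_t numUsed;         /* number of slots in use from firstUsed */
    size_t numAllocated;    /* total slots allocated */
    size_t refCount;        /* how many lists reference this store */
    int slots[1];           /* flexible array, grown as needed */
} Store;

typedef struct Span {       /* cf. ListSpan: a sub-range of the store */
    size_t spanStart;
    size_t spanLength;
    size_t refCount;
} Span;

typedef struct Rep {        /* cf. ListRep: store plus optional span */
    Store *storePtr;
    Span *spanPtr;          /* NULL => whole in-use area of the store */
} Rep;

static size_t RepStart(const Rep *r) {
    return r->spanPtr ? r->spanPtr->spanStart : r->storePtr->firstUsed;
}
static size_t RepLength(const Rep *r) {
    return r->spanPtr ? r->spanPtr->spanLength : r->storePtr->numUsed;
}

int main(void) {
    /* One store holding 0..9, shared by a full list and a spanned range. */
    Store *s = malloc(offsetof(Store, slots) + 10 * sizeof(int));
    int i;

    s->firstUsed = 0; s->numUsed = 10; s->numAllocated = 10; s->refCount = 2;
    for (i = 0; i < 10; i++) {
        s->slots[i] = i;
    }

    {
        Span sp = { 3, 4, 1 };               /* elements 3..6 */
        Rep whole = { s, NULL };
        Rep range = { s, &sp };

        printf("whole: start=%zu len=%zu\n", RepStart(&whole), RepLength(&whole));
        printf("range: start=%zu len=%zu first=%d\n",
               RepStart(&range), RepLength(&range), s->slots[RepStart(&range)]);
    }
    free(s);
    return 0;
}
```

In the real structures the store additionally carries the LISTSTORE_CANONICAL flag, and the free slots before firstUsed are what make prepending cheap without reallocation; the LIST_SPAN_THRESHOLD build knob controls when spans are used at all.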
@@ -26,7 +26,7 @@ manifest.uuid _FOSSIL_ */tclConfig.sh */tclsh* -*/tcltest* +*/tcltest */versions.vc */version.vc */libtcl.vfs @@ -54,7 +54,7 @@ ORed combination of flag bits that specify additional options. .AP "const char" *fileName in Name of a file containing a Tcl script. .AP int objc in -The number of values in the array pointed to by \fIobjPtr\fR; +The number of values in the array pointed to by \fIobjv\fR; this is also the number of words in the command. .AP Tcl_Obj **objv in Points to an array of pointers to values; each value holds the diff --git a/doc/TraceVar.3 b/doc/TraceVar.3 index 7751cf7..5de6a44 100644 --- a/doc/TraceVar.3 +++ b/doc/TraceVar.3 @@ -126,8 +126,8 @@ It should have arguments and result that match the type typedef char *\fBTcl_VarTraceProc\fR( ClientData \fIclientData\fR, Tcl_Interp *\fIinterp\fR, - char *\fIname1\fR, - char *\fIname2\fR, + const char *\fIname1\fR, + const char *\fIname2\fR, int \fIflags\fR); .CE .PP diff --git a/generic/tclBasic.c b/generic/tclBasic.c index 645a581..4bacba6 100644 --- a/generic/tclBasic.c +++ b/generic/tclBasic.c @@ -2690,26 +2690,30 @@ Tcl_CreateCommand( */ typedef struct { - void *clientData; /* Arbitrary value to pass to object function. */ Tcl_ObjCmdProc2 *proc; - Tcl_ObjCmdProc2 *nreProc; + void *clientData; /* Arbitrary value to pass to proc function. */ Tcl_CmdDeleteProc *deleteProc; + void *deleteData; /* Arbitrary value to pass to deleteProc function. */ + Tcl_ObjCmdProc2 *nreProc; } CmdWrapperInfo; static int cmdWrapperProc(void *clientData, - Tcl_Interp *interp, - int objc, + Tcl_Interp *interp, + int objc, Tcl_Obj * const *objv) { CmdWrapperInfo *info = (CmdWrapperInfo *)clientData; + if (objc < 0) { + objc = -1; + } return info->proc(info->clientData, interp, objc, objv); } static void cmdWrapperDeleteProc(void *clientData) { CmdWrapperInfo *info = (CmdWrapperInfo *)clientData; - clientData = info->clientData; + clientData = info->deleteData; Tcl_CmdDeleteProc *deleteProc = info->deleteProc; ckfree(info); if (deleteProc != NULL) { @@ -2736,8 +2740,9 @@ Tcl_CreateObjCommand2( { CmdWrapperInfo *info = (CmdWrapperInfo *)ckalloc(sizeof(CmdWrapperInfo)); info->proc = proc; - info->deleteProc = deleteProc; info->clientData = clientData; + info->deleteProc = deleteProc; + info->deleteData = clientData; return Tcl_CreateObjCommand(interp, cmdName, (proc ? 
cmdWrapperProc : NULL), @@ -3380,7 +3385,7 @@ Tcl_SetCommandInfoFromToken( if (cmdPtr->deleteProc == cmdWrapperDeleteProc) { CmdWrapperInfo *info = (CmdWrapperInfo *)cmdPtr->deleteData; info->deleteProc = infoPtr->deleteProc; - info->clientData = infoPtr->deleteData; + info->deleteData = infoPtr->deleteData; } else { cmdPtr->deleteProc = infoPtr->deleteProc; cmdPtr->deleteData = infoPtr->deleteData; @@ -3464,7 +3469,7 @@ Tcl_GetCommandInfoFromToken( if (cmdPtr->deleteProc == cmdWrapperDeleteProc) { CmdWrapperInfo *info = (CmdWrapperInfo *)cmdPtr->deleteData; infoPtr->deleteProc = info->deleteProc; - infoPtr->deleteData = info->clientData; + infoPtr->deleteData = info->deleteData; } else { infoPtr->deleteProc = cmdPtr->deleteProc; infoPtr->deleteData = cmdPtr->deleteData; @@ -9187,6 +9192,11 @@ Tcl_NRCallObjProc2( size_t objc, Tcl_Obj *const objv[]) { + if (objc > INT_MAX) { + Tcl_WrongNumArgs(interp, 1, objv, "?args?"); + return TCL_ERROR; + } + NRE_callback *rootPtr = TOP_CB(interp); CmdWrapperInfo *info = (CmdWrapperInfo *)ckalloc(sizeof(CmdWrapperInfo)); info->clientData = clientData; @@ -9232,6 +9242,9 @@ static int cmdWrapperNreProc( Tcl_Obj *const objv[]) { CmdWrapperInfo *info = (CmdWrapperInfo *)clientData; + if (objc < 0) { + objc = -1; + } return info->nreProc(info->clientData, interp, objc, objv); } @@ -9256,9 +9269,10 @@ Tcl_NRCreateCommand2( { CmdWrapperInfo *info = (CmdWrapperInfo *)ckalloc(sizeof(CmdWrapperInfo)); info->proc = proc; + info->clientData = clientData; info->nreProc = nreProc; info->deleteProc = deleteProc; - info->clientData = clientData; + info->deleteData = clientData; return Tcl_NRCreateCommand(interp, cmdName, (proc ? cmdWrapperProc : NULL), (nreProc ? cmdWrapperNreProc : NULL), diff --git a/generic/tclCmdIL.c b/generic/tclCmdIL.c index 1197b92..cdc302c 100644 --- a/generic/tclCmdIL.c +++ b/generic/tclCmdIL.c @@ -19,6 +19,7 @@ #include "tclInt.h" #include "tclRegexp.h" +#include <assert.h> /* * During execution of the "lsort" command, structures of the following type @@ -2898,10 +2899,15 @@ Tcl_LrepeatObjCmd( listPtr = Tcl_NewListObj(totalElems, NULL); if (totalElems) { - List *listRepPtr = ListRepPtr(listPtr); - - listRepPtr->elemCount = elementCount*objc; - dataArray = listRepPtr->elements; + ListRep listRep; + ListObjGetRep(listPtr, &listRep); + dataArray = ListRepElementsBase(&listRep); + listRep.storePtr->numUsed = totalElems; + if (listRep.spanPtr) { + /* Future proofing in case Tcl_NewListObj returns a span */ + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = listRep.storePtr->numUsed; + } } /* @@ -3081,14 +3087,21 @@ Tcl_LreverseObjCmd( } if (Tcl_IsShared(objv[1]) - || (ListRepPtr(objv[1])->refCount > 1)) { /* Bug 1675044 */ + || ListObjRepIsShared(objv[1])) { /* Bug 1675044 */ Tcl_Obj *resultObj, **dataArray; - List *listRepPtr; + ListRep listRep; resultObj = Tcl_NewListObj(elemc, NULL); - listRepPtr = ListRepPtr(resultObj); - listRepPtr->elemCount = elemc; - dataArray = listRepPtr->elements; + + /* Modify the internal rep in-place */ + ListObjGetRep(resultObj, &listRep); + listRep.storePtr->numUsed = elemc; + dataArray = ListRepElementsBase(&listRep); + if (listRep.spanPtr) { + /* Future proofing */ + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = listRep.storePtr->numUsed; + } for (i=0,j=elemc-1 ; i<elemc ; i++,j--) { dataArray[j] = elemv[i]; @@ -4409,12 +4422,12 @@ Tcl_LsortObjCmd( */ if (sortInfo.resultCode == TCL_OK) { - List *listRepPtr; + ListRep listRep; 
Tcl_Obj **newArray, *objPtr; resultPtr = Tcl_NewListObj(sortInfo.numElements * groupSize, NULL); - listRepPtr = ListRepPtr(resultPtr); - newArray = listRepPtr->elements; + ListObjGetRep(resultPtr, &listRep); + newArray = ListRepElementsBase(&listRep); if (group) { for (i=0; elementPtr!=NULL ; elementPtr=elementPtr->nextPtr) { idx = elementPtr->payload.index; @@ -4443,7 +4456,11 @@ Tcl_LsortObjCmd( Tcl_IncrRefCount(objPtr); } } - listRepPtr->elemCount = i; + listRep.storePtr->numUsed = i; + if (listRep.spanPtr) { + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = listRep.storePtr->numUsed; + } Tcl_SetObjResult(interp, resultPtr); } diff --git a/generic/tclExecute.c b/generic/tclExecute.c index 3fb7e07..8aa3bb2 100644 --- a/generic/tclExecute.c +++ b/generic/tclExecute.c @@ -169,11 +169,11 @@ static BuiltinFunc const tclBuiltinFuncTable[] = { typedef struct TEBCdata { ByteCode *codePtr; /* Constant until the BC returns */ /* -----------------------------------------*/ - ptrdiff_t *catchTop; /* These fields are used on return TO this */ + Tcl_Obj **catchTop; /* These fields are used on return TO this */ Tcl_Obj *auxObjList; /* this level: they record the state when a */ CmdFrame cmdFrame; /* new codePtr was received for NR */ /* execution. */ - void *stack[1]; /* Start of the actual combined catch and obj + Tcl_Obj *stack[1]; /* Start of the actual combined catch and obj * stacks; the struct will be expanded as * necessary */ } TEBCdata; @@ -424,7 +424,7 @@ VarHashCreateVar( #define OBJ_AT_DEPTH(n) *(tosPtr-(n)) -#define CURR_DEPTH ((ptrdiff_t) (tosPtr - initTosPtr)) +#define CURR_DEPTH ((size_t)(tosPtr - initTosPtr)) #define STACK_BASE(esPtr) ((esPtr)->stackWords - 1) @@ -437,9 +437,9 @@ VarHashCreateVar( #ifdef TCL_COMPILE_DEBUG # define TRACE(a) \ while (traceInstructions) { \ - fprintf(stdout, "%2d: %2d (%u) %s ", iPtr->numLevels, \ - (int) CURR_DEPTH, \ - (unsigned) (pc - codePtr->codeStart), \ + fprintf(stdout, "%2d: %2" TCL_Z_MODIFIER "u (%" TCL_Z_MODIFIER "u) %s ", iPtr->numLevels, \ + CURR_DEPTH, \ + (size_t)(pc - codePtr->codeStart), \ GetOpcodeName(pc)); \ printf a; \ break; \ @@ -453,9 +453,9 @@ VarHashCreateVar( TRACE_APPEND(("ERROR: %.30s\n", O2S(Tcl_GetObjResult(interp)))); # define TRACE_WITH_OBJ(a, objPtr) \ while (traceInstructions) { \ - fprintf(stdout, "%2d: %2d (%u) %s ", iPtr->numLevels, \ - (int) CURR_DEPTH, \ - (unsigned) (pc - codePtr->codeStart), \ + fprintf(stdout, "%2d: %2" TCL_Z_MODIFIER "u (%" TCL_Z_MODIFIER "u) %s ", iPtr->numLevels, \ + CURR_DEPTH, \ + (size_t)(pc - codePtr->codeStart), \ GetOpcodeName(pc)); \ printf a; \ TclPrintObject(stdout, objPtr, 30); \ @@ -678,7 +678,7 @@ static const char * GetOpcodeName(const unsigned char *pc); static void PrintByteCodeInfo(ByteCode *codePtr); static const char * StringForResultCode(int result); static void ValidatePcAndStackTop(ByteCode *codePtr, - const unsigned char *pc, int stackTop, + const unsigned char *pc, size_t stackTop, int checkStack); #endif /* TCL_COMPILE_DEBUG */ static ByteCode * CompileExprObj(Tcl_Interp *interp, Tcl_Obj *objPtr); @@ -1935,8 +1935,8 @@ ArgumentBCEnter( *---------------------------------------------------------------------- */ #define bcFramePtr (&TD->cmdFrame) -#define initCatchTop ((ptrdiff_t *) (TD->stack-1)) -#define initTosPtr ((Tcl_Obj **) (initCatchTop+codePtr->maxExceptDepth)) +#define initCatchTop (TD->stack-1) +#define initTosPtr (initCatchTop+codePtr->maxExceptDepth) #define esPtr (iPtr->execEnvPtr->execStackPtr) int @@ -2007,7 
+2007,7 @@ TclNRExecuteByteCode( */ TclNRAddCallback(interp, TEBCresume, TD, /* pc */ NULL, - /* cleanup */ INT2PTR(0), INT2PTR(iPtr->evalFlags)); + /* cleanup */ NULL, INT2PTR(iPtr->evalFlags)); /* * Reset discard result flag - because it is applicable for this call only, @@ -2123,7 +2123,7 @@ TEBCresume( #ifdef TCL_COMPILE_DEBUG if (!pc && (tclTraceExec >= 2)) { PrintByteCodeInfo(codePtr); - fprintf(stdout, " Starting stack top=%d\n", (int) CURR_DEPTH); + fprintf(stdout, " Starting stack top=%" TCL_Z_MODIFIER "u\n", CURR_DEPTH); fflush(stdout); } #endif @@ -2327,7 +2327,7 @@ TEBCresume( CHECK_STACK(); if (traceInstructions) { - fprintf(stdout, "%2d: %2d ", iPtr->numLevels, (int) CURR_DEPTH); + fprintf(stdout, "%2d: %2" TCL_Z_MODIFIER "u ", iPtr->numLevels, CURR_DEPTH); TclPrintInstruction(codePtr, pc); fflush(stdout); } @@ -2694,10 +2694,10 @@ TEBCresume( */ TclNewObj(objPtr); - objPtr->internalRep.twoPtrValue.ptr2 = INT2PTR(CURR_DEPTH); + objPtr->internalRep.twoPtrValue.ptr2 = UINT2PTR(CURR_DEPTH); objPtr->length = 0; PUSH_TAUX_OBJ(objPtr); - TRACE(("=> mark depth as %d\n", (int) CURR_DEPTH)); + TRACE(("=> mark depth as %" TCL_Z_MODIFIER "u\n", CURR_DEPTH)); NEXT_INST_F(1, 0, 0); break; @@ -2709,7 +2709,7 @@ TEBCresume( */ CLANG_ASSERT(auxObjList); - objc = CURR_DEPTH - PTR2INT(auxObjList->internalRep.twoPtrValue.ptr2); + objc = CURR_DEPTH - PTR2UINT(auxObjList->internalRep.twoPtrValue.ptr2); POP_TAUX_OBJ(); #ifdef TCL_COMPILE_DEBUG /* Ugly abuse! */ @@ -2720,7 +2720,8 @@ TEBCresume( case INST_EXPAND_STKTOP: { int i; - ptrdiff_t moved; + TEBCdata *newTD; + ptrdiff_t oldCatchTopOff, oldTosPtrOff; /* * Make sure that the element at stackTop is a list; if not, just @@ -2749,19 +2750,21 @@ TEBCresume( + codePtr->maxStackDepth /* Beyond the original max */ - CURR_DEPTH; /* Relative to where we are */ DECACHE_STACK_INFO(); - moved = GrowEvaluationStack(iPtr->execEnvPtr, length, 1) - - (Tcl_Obj **) TD; - if (moved) { + oldCatchTopOff = catchTop - initCatchTop; + oldTosPtrOff = tosPtr - initTosPtr; + newTD = (TEBCdata *) + GrowEvaluationStack(iPtr->execEnvPtr, length, 1); + if (newTD != TD) { /* * Change the global data to point to the new stack: move the * TEBCdataPtr TD, recompute the position of every other * stack-allocated parameter, update the stack pointers. */ - TD = (TEBCdata *) (((Tcl_Obj **)TD) + moved); + TD = newTD; - catchTop += moved; - tosPtr += moved; + catchTop = initCatchTop + oldCatchTopOff; + tosPtr = initTosPtr + oldTosPtrOff; } } @@ -2813,7 +2816,7 @@ TEBCresume( case INST_INVOKE_EXPANDED: CLANG_ASSERT(auxObjList); - objc = CURR_DEPTH - PTR2INT(auxObjList->internalRep.twoPtrValue.ptr2); + objc = CURR_DEPTH - PTR2UINT(auxObjList->internalRep.twoPtrValue.ptr2); POP_TAUX_OBJ(); if (objc) { pcAdjustment = 1; @@ -3516,7 +3519,7 @@ TEBCresume( varPtr->value.objPtr = objResultPtr = newValue; Tcl_IncrRefCount(newValue); } - if (Tcl_ListObjReplace(interp, objResultPtr, len, 0, objc, objv) + if (TclListObjAppendElements(interp, objResultPtr, objc, objv) != TCL_OK) { TRACE_ERROR(interp); goto gotError; @@ -3574,7 +3577,7 @@ TEBCresume( } else { valueToAssign = objResultPtr; } - if (Tcl_ListObjReplace(interp, valueToAssign, len, 0, + if (TclListObjAppendElements(interp, valueToAssign, objc, objv) != TCL_OK) { if (createdNewObj) { TclDecrRefCount(valueToAssign); @@ -5998,7 +6001,7 @@ TEBCresume( * Handle shifts within the native long range. */ - if (((size_t) shift < CHAR_BIT*sizeof(long)) + if (((size_t)shift < CHAR_BIT*sizeof(long)) && !((w1>0 ? 
w1 : ~w1) & -(1UL<<(CHAR_BIT*sizeof(long) - 1 - shift)))) { wResult = (Tcl_WideUInt)w1 << shift; @@ -6805,10 +6808,10 @@ TEBCresume( * stack. */ - *(++catchTop) = CURR_DEPTH; - TRACE(("%u => catchTop=%d, stackTop=%d\n", - TclGetUInt4AtPtr(pc+1), (int) (catchTop - initCatchTop - 1), - (int) CURR_DEPTH)); + *(++catchTop) = (Tcl_Obj *)UINT2PTR(CURR_DEPTH); + TRACE(("%u => catchTop=%" TCL_Z_MODIFIER "u, stackTop=%" TCL_Z_MODIFIER "u\n", + TclGetUInt4AtPtr(pc+1), (size_t)(catchTop - initCatchTop - 1), + CURR_DEPTH)); NEXT_INST_F(5, 0, 0); break; @@ -6818,7 +6821,7 @@ TEBCresume( Tcl_ResetResult(interp); CACHE_STACK_INFO(); result = TCL_OK; - TRACE(("=> catchTop=%d\n", (int) (catchTop - initCatchTop - 1))); + TRACE(("=> catchTop=%" TCL_Z_MODIFIER "u\n", (size_t)(catchTop - initCatchTop - 1))); NEXT_INST_F(1, 0, 0); break; @@ -7717,8 +7720,8 @@ TEBCresume( while (auxObjList) { if ((catchTop != initCatchTop) - && (*catchTop > (ptrdiff_t) - auxObjList->internalRep.twoPtrValue.ptr2)) { + && (PTR2UINT(*catchTop) > + PTR2UINT(auxObjList->internalRep.twoPtrValue.ptr2))) { break; } POP_TAUX_OBJ(); @@ -7793,16 +7796,16 @@ TEBCresume( */ processCatch: - while (CURR_DEPTH > *catchTop) { + while (CURR_DEPTH > PTR2UINT(*catchTop)) { valuePtr = POP_OBJECT(); TclDecrRefCount(valuePtr); } #ifdef TCL_COMPILE_DEBUG if (traceInstructions) { - fprintf(stdout, " ... found catch at %d, catchTop=%d, " - "unwound to %ld, new pc %u\n", - rangePtr->codeOffset, (int) (catchTop - initCatchTop - 1), - (long)*catchTop, (unsigned) rangePtr->catchOffset); + fprintf(stdout, " ... found catch at %d, catchTop=%" TCL_Z_MODIFIER "u, " + "unwound to %" TCL_Z_MODIFIER "u, new pc %" TCL_Z_MODIFIER "u\n", + rangePtr->codeOffset, (size_t)(catchTop - initCatchTop - 1), + PTR2UINT(*catchTop), (size_t)rangePtr->catchOffset); } #endif pc = (codePtr->codeStart + rangePtr->catchOffset); @@ -7838,10 +7841,10 @@ TEBCresume( if (tosPtr < initTosPtr) { fprintf(stderr, - "\nTclNRExecuteByteCode: abnormal return at pc %u: " - "stack top %d < entry stack top %d\n", - (unsigned)(pc - codePtr->codeStart), - (unsigned) CURR_DEPTH, (unsigned) 0); + "\nTclNRExecuteByteCode: abnormal return at pc %" TCL_Z_MODIFIER "u: " + "stack top %" TCL_Z_MODIFIER "u < entry stack top %d\n", + (size_t)(pc - codePtr->codeStart), + CURR_DEPTH, 0); Tcl_Panic("TclNRExecuteByteCode execution failure: end stack top < start stack top"); } CLANG_ASSERT(bcFramePtr); @@ -9122,21 +9125,21 @@ ValidatePcAndStackTop( * stdout. */ const unsigned char *pc, /* Points to first byte of a bytecode * instruction. The program counter. */ - int stackTop, /* Current stack top. Must be between + size_t stackTop, /* Current stack top. Must be between * stackLowerBound and stackUpperBound * (inclusive). */ int checkStack) /* 0 if the stack depth check should be * skipped. */ { - int stackUpperBound = codePtr->maxStackDepth; + size_t stackUpperBound = codePtr->maxStackDepth; /* Greatest legal value for stackTop. 
*/ - size_t relativePc = (size_t) (pc - codePtr->codeStart); - size_t codeStart = (size_t) codePtr->codeStart; + size_t relativePc = (size_t)(pc - codePtr->codeStart); + size_t codeStart = (size_t)codePtr->codeStart; size_t codeEnd = (size_t) (codePtr->codeStart + codePtr->numCodeBytes); unsigned char opCode = *pc; - if (((size_t) pc < codeStart) || ((size_t) pc > codeEnd)) { + if ((PTR2UINT(pc) < codeStart) || (PTR2UINT(pc) > codeEnd)) { fprintf(stderr, "\nBad instruction pc 0x%p in TclNRExecuteByteCode\n", pc); Tcl_Panic("TclNRExecuteByteCode execution failure: bad pc"); @@ -9147,11 +9150,11 @@ ValidatePcAndStackTop( Tcl_Panic("TclNRExecuteByteCode execution failure: bad opcode"); } if (checkStack && - ((stackTop < 0) || (stackTop > stackUpperBound))) { + (stackTop > stackUpperBound)) { int numChars; const char *cmd = GetSrcInfoForPc(pc, codePtr, &numChars, NULL, NULL); - fprintf(stderr, "\nBad stack top %d at pc %" TCL_Z_MODIFIER "u in TclNRExecuteByteCode (min 0, max %i)", + fprintf(stderr, "\nBad stack top %" TCL_Z_MODIFIER "u at pc %" TCL_Z_MODIFIER "u in TclNRExecuteByteCode (min 0, max %" TCL_Z_MODIFIER "u)", stackTop, relativePc, stackUpperBound); if (cmd != NULL) { Tcl_Obj *message; diff --git a/generic/tclInt.decls b/generic/tclInt.decls index 5a9f4f0..8d9ef6c 100644 --- a/generic/tclInt.decls +++ b/generic/tclInt.decls @@ -1025,6 +1025,7 @@ declare 256 { int TclPtrUnsetVar(Tcl_Interp *interp, Tcl_Var varPtr, Tcl_Var arrayPtr, Tcl_Obj *part1Ptr, Tcl_Obj *part2Ptr, int flags) } + declare 257 { void TclStaticLibrary(Tcl_Interp *interp, const char *prefix, Tcl_LibraryInitProc *initProc, Tcl_LibraryInitProc *safeInitProc) @@ -1036,8 +1037,14 @@ declare 258 { Tcl_Obj *basenameObj) } -declare 259 { - void TclUnusedStubEntry(void) +# TIP 625: for unit testing - create list objects with span +declare 260 { + Tcl_Obj *TclListTestObj(int length, int leadingSpace, int endSpace) +} + +# TIP 625: for unit testing - check list invariants +declare 261 { + void TclListObjValidate(Tcl_Interp *interp, Tcl_Obj *listObj) } ############################################################################## diff --git a/generic/tclInt.h b/generic/tclInt.h index f032efb..a7c5c2c 100644 --- a/generic/tclInt.h +++ b/generic/tclInt.h @@ -1439,7 +1439,7 @@ typedef int (CompileProc)(Tcl_Interp *interp, Tcl_Parse *parsePtr, */ typedef int (CompileHookProc)(Tcl_Interp *interp, - struct CompileEnv *compEnvPtr, ClientData clientData); + struct CompileEnv *compEnvPtr, void *clientData); /* * The data structure for a (linked list of) execution stacks. @@ -2437,59 +2437,211 @@ typedef enum TclEolTranslation { #define TCL_INVOKE_NO_TRACEBACK (1<<2) /* - * The structure used as the internal representation of Tcl list objects. This - * struct is grown (reallocated and copied) as necessary to hold all the - * list's element pointers. The struct might contain more slots than currently - * used to hold all element pointers. This is done to make append operations - * faster. + * ListSizeT is the type for holding list element counts. It's defined + * simplify sharing source between Tcl8 and Tcl9. */ +#if TCL_MAJOR_VERSION > 8 -typedef struct List { - int refCount; - int maxElemCount; /* Total number of element array slots. */ - int elemCount; /* Current number of list elements. */ - int canonicalFlag; /* Set if the string representation was - * derived from the list representation. 
May - * be ignored if there is no string rep at - * all.*/ - Tcl_Obj *elements[TCLFLEXARRAY]; /* First list element; the struct is grown to - * accommodate all elements. */ -} List; +typedef size_t ListSizeT; -#define LIST_MAX \ - ((int)(((size_t)UINT_MAX - offsetof(List, elements))/sizeof(Tcl_Obj *))) -#define LIST_SIZE(numElems) \ - (TCL_HASH_TYPE)(offsetof(List, elements) + ((numElems) * sizeof(Tcl_Obj *))) +/* + * SSIZE_MAX, NOT SIZE_MAX as negative differences need to be expressed + * between values of the ListSizeT type so limit the range to signed + */ +#define ListSizeT_MAX ((ListSizeT)PTRDIFF_MAX) + +#else + +typedef int ListSizeT; +#define ListSizeT_MAX INT_MAX + +#endif /* - * Macro used to get the elements of a list object. + * ListStore -- + * + * A Tcl list's internal representation is defined through three structures. + * + * A ListStore struct is a structure that includes a variable size array that + * serves as storage for a Tcl list. A contiguous sequence of slots in the + * array, the "in-use" area, holds valid pointers to Tcl_Obj values that + * belong to one or more Tcl lists. The unused slots before and after these + * are free slots that may be used to prepend and append without having to + * reallocate the struct. The ListStore may be shared amongst multiple lists + * and reference counted. + * + * A ListSpan struct defines a sequence of slots within a ListStore. This sequence + * always lies within the "in-use" area of the ListStore. Like ListStore, the + * structure may be shared among multiple lists and is reference counted. + * + * A ListRep struct holds the internal representation of a Tcl list as stored + * in a Tcl_Obj. It is composed of a ListStore and a ListSpan that together + * define the content of the list. The ListSpan specifies the range of slots + * within the ListStore that hold elements for this list. The ListSpan is + * optional in which case the list includes all the "in-use" slots of the + * ListStore. + * */ +typedef struct ListStore { + ListSizeT firstUsed; /* Index of first slot in use within slots[] */ + ListSizeT numUsed; /* Number of slots in use (starting firstUsed) */ + ListSizeT numAllocated; /* Total number of slots[] array slots. */ + size_t refCount; /* Number of references to this instance */ + int flags; /* LISTSTORE_* flags */ + Tcl_Obj *slots[TCLFLEXARRAY]; /* Variable size array. 
Grown as needed */ +} ListStore; + +#define LISTSTORE_CANONICAL 0x1 /* All Tcl_Obj's referencing this + store have their string representation + derived from the list representation */ + +/* Max number of elements that can be contained in a list */ +#define LIST_MAX \ + ((ListSizeT)(((size_t)ListSizeT_MAX - offsetof(ListStore, slots)) \ + / sizeof(Tcl_Obj *))) +/* Memory size needed for a ListStore to hold numSlots_ elements */ +#define LIST_SIZE(numSlots_) \ + ((int)(offsetof(ListStore, slots) + ((numSlots_) * sizeof(Tcl_Obj *)))) + +/* + * ListSpan -- + * See comments above for ListStore + */ +typedef struct ListSpan { + ListSizeT spanStart; /* Starting index of the span */ + ListSizeT spanLength; /* Number of elements in the span */ + size_t refCount; /* Count of references to this span record */ +} ListSpan; +#ifndef LIST_SPAN_THRESHOLD /* May be set on build line */ +#define LIST_SPAN_THRESHOLD 101 +#endif -#define ListRepPtr(listPtr) \ - ((List *) (listPtr)->internalRep.twoPtrValue.ptr1) +/* + * ListRep -- + * See comments above for ListStore + */ +typedef struct ListRep { + ListStore *storePtr;/* element array shared amongst different lists */ + ListSpan *spanPtr; /* If not NULL, the span holds the range of slots + within *storePtr that contain this list elements. */ +} ListRep; -#define ListObjGetElements(listPtr, objc, objv) \ - ((objv) = ListRepPtr(listPtr)->elements, \ - (objc) = ListRepPtr(listPtr)->elemCount) +/* + * Macros used to get access list internal representations. + * + * Naming conventions: + * ListRep* - expect a pointer to a valid ListRep + * ListObj* - expect a pointer to a Tcl_Obj whose internal type is known to + * be a list (tclListType). Will crash otherwise. + * TclListObj* - expect a pointer to a Tcl_Obj whose internal type may or may not + * be tclListType. These will convert as needed and return error if + * conversion not possible. + */ + +/* Returns the starting slot for this listRep in the contained ListStore */ +#define ListRepStart(listRepPtr_) \ + ((listRepPtr_)->spanPtr ? (listRepPtr_)->spanPtr->spanStart \ + : (listRepPtr_)->storePtr->firstUsed) + +/* Returns the number of elements in this listRep */ +#define ListRepLength(listRepPtr_) \ + ((listRepPtr_)->spanPtr ? (listRepPtr_)->spanPtr->spanLength \ + : (listRepPtr_)->storePtr->numUsed) + +/* Returns a pointer to the first slot containing this ListRep elements */ +#define ListRepElementsBase(listRepPtr_) \ + (&(listRepPtr_)->storePtr->slots[ListRepStart(listRepPtr_)]) + +/* Stores the number of elements and base address of the element array */ +#define ListRepElements(listRepPtr_, objc_, objv_) \ + (((objv_) = ListRepElementsBase(listRepPtr_)), \ + ((objc_) = ListRepLength(listRepPtr_))) + +/* Returns 1/0 whether the ListRep's ListStore is shared. */ +#define ListRepIsShared(listRepPtr_) ((listRepPtr_)->storePtr->refCount > 1) + +/* Returns a pointer to the ListStore component */ +#define ListObjStorePtr(listObj_) \ + ((ListStore *)((listObj_)->internalRep.twoPtrValue.ptr1)) + +/* Returns a pointer to the ListSpan component */ +#define ListObjSpanPtr(listObj_) \ + ((ListSpan *)((listObj_)->internalRep.twoPtrValue.ptr2)) + +/* Returns the ListRep internal representaton in a Tcl_Obj */ +#define ListObjGetRep(listObj_, listRepPtr_) \ + do { \ + (listRepPtr_)->storePtr = ListObjStorePtr(listObj_); \ + (listRepPtr_)->spanPtr = ListObjSpanPtr(listObj_); \ + } while (0) + +/* Returns the length of the list */ +#define ListObjLength(listObj_, len_) \ + ((len_) = ListObjSpanPtr(listObj_) ? 
ListObjSpanPtr(listObj_)->spanLength \ + : ListObjStorePtr(listObj_)->numUsed) + +/* Returns the starting slot index of this list's elements in the ListStore */ +#define ListObjStart(listObj_) \ + (ListObjSpanPtr(listObj_) ? ListObjSpanPtr(listObj_)->spanStart \ + : ListObjStorePtr(listObj_)->firstUsed) -#define ListObjLength(listPtr, len) \ - ((len) = ListRepPtr(listPtr)->elemCount) +/* Stores the element count and base address of this list's elements */ +#define ListObjGetElements(listObj_, objc_, objv_) \ + (((objv_) = &ListObjStorePtr(listObj_)->slots[ListObjStart(listObj_)]), \ + (ListObjLength(listObj_, (objc_)))) -#define ListObjIsCanonical(listPtr) \ - (((listPtr)->bytes == NULL) || ListRepPtr(listPtr)->canonicalFlag) +/* + * Returns 1/0 whether the internal representation (not the Tcl_Obj itself) + * is shared. Note by intent this only checks for sharing of ListStore, + * not spans. + */ +#define ListObjRepIsShared(listObj_) (ListObjStorePtr(listObj_)->refCount > 1) + +/* + * Certain commands like concat are optimized if an existing string + * representation of a list object is known to be in canonical format (i.e. + * generated from the list representation). There are three conditions when + * this will be the case: + * (1) No string representation exists which means it will obviously have + * to be generated from the list representation when needed + * (2) The ListStore flags is marked canonical. This is done at the time + * the string representation is generated from the list IF the list + * representation does not have a span (see comments in UpdateStringOfList). + * (3) The list representation does not have a span component. This is + * because list Tcl_Obj's with spans are always created from existing lists + * and never from strings (see SetListFromAny) and thus their string + * representation will always be canonical. + */ +#define ListObjIsCanonical(listObj_) \ + (((listObj_)->bytes == NULL) \ + || (ListObjStorePtr(listObj_)->flags & LISTSTORE_CANONICAL) \ + || ListObjSpanPtr(listObj_) != NULL) -#define TclListObjGetElementsM(interp, listPtr, objcPtr, objvPtr) \ - (((listPtr)->typePtr == &tclListType) \ - ? ((ListObjGetElements((listPtr), *(objcPtr), *(objvPtr))), TCL_OK)\ - : Tcl_ListObjGetElements((interp), (listPtr), (objcPtr), (objvPtr))) +/* + * Converts the Tcl_Obj to a list if it isn't one and stores the element + * count and base address of this list's elements in objcPtr_ and objvPtr_. + * Return TCL_OK on success or TCL_ERROR if the Tcl_Obj cannot be + * converted to a list. + */ +#define TclListObjGetElementsM(interp_, listObj_, objcPtr_, objvPtr_) \ + (((listObj_)->typePtr == &tclListType) \ + ? ((ListObjGetElements((listObj_), *(objcPtr_), *(objvPtr_))), \ + TCL_OK) \ + : Tcl_ListObjGetElements( \ + (interp_), (listObj_), (objcPtr_), (objvPtr_))) -#define TclListObjLengthM(interp, listPtr, lenPtr) \ - (((listPtr)->typePtr == &tclListType) \ - ? ((ListObjLength((listPtr), *(lenPtr))), TCL_OK)\ - : Tcl_ListObjLength((interp), (listPtr), (lenPtr))) +/* + * Converts the Tcl_Obj to a list if it isn't one and stores the element + * count in lenPtr_. Returns TCL_OK on success or TCL_ERROR if the + * Tcl_Obj cannot be converted to a list. + */ +#define TclListObjLengthM(interp_, listObj_, lenPtr_) \ + (((listObj_)->typePtr == &tclListType) \ + ? ((ListObjLength((listObj_), *(lenPtr_))), TCL_OK) \ + : Tcl_ListObjLength((interp_), (listObj_), (lenPtr_))) -#define TclListObjIsCanonical(listPtr) \ - (((listPtr)->typePtr == &tclListType) ? 
ListObjIsCanonical((listPtr)) : 0) +#define TclListObjIsCanonical(listObj_) \ + (((listObj_)->typePtr == &tclListType) ? ListObjIsCanonical((listObj_)) : 0) /* * Modes for collecting (or not) in the implementations of TclNRForeachCmd, @@ -2595,7 +2747,7 @@ typedef struct List { */ #define TCL_FILESYSTEM_VERSION_2 ((Tcl_FSVersion) 0x2) -typedef ClientData (TclFSGetCwdProc2)(ClientData clientData); +typedef void *(TclFSGetCwdProc2)(void *clientData); typedef int (Tcl_FSLoadFileProc2) (Tcl_Interp *interp, Tcl_Obj *pathPtr, Tcl_LoadHandle *handlePtr, Tcl_FSUnloadFileProc **unloadProcPtr, int flags); @@ -2757,7 +2909,7 @@ MODULE_SCOPE Tcl_Encoding tclIdentityEncoding; MODULE_SCOPE Tcl_GetTimeProc *tclGetTimeProcPtr; MODULE_SCOPE Tcl_ScaleTimeProc *tclScaleTimeProcPtr; -MODULE_SCOPE ClientData tclTimeClientData; +MODULE_SCOPE void *tclTimeClientData; /* * Variables denoting the Tcl object types defined in the core. @@ -2933,7 +3085,7 @@ MODULE_SCOPE void TclArgumentBCRelease(Tcl_Interp *interp, MODULE_SCOPE void TclArgumentGet(Tcl_Interp *interp, Tcl_Obj *obj, CmdFrame **cfPtrPtr, int *wordPtr); MODULE_SCOPE int TclAsyncNotifier(int sigNumber, Tcl_ThreadId threadId, - ClientData clientData, int *flagPtr, int value); + void *clientData, int *flagPtr, int value); MODULE_SCOPE void TclAsyncMarkFromNotifier(void); MODULE_SCOPE double TclBignumToDouble(const void *bignum); MODULE_SCOPE int TclByteArrayMatch(const unsigned char *string, @@ -2964,7 +3116,7 @@ MODULE_SCOPE int TclConvertElement(const char *src, int length, char *dst, int flags); MODULE_SCOPE Tcl_Command TclCreateObjCommandInNs(Tcl_Interp *interp, const char *cmdName, Tcl_Namespace *nsPtr, - Tcl_ObjCmdProc *proc, ClientData clientData, + Tcl_ObjCmdProc *proc, void *clientData, Tcl_CmdDeleteProc *deleteProc); MODULE_SCOPE Tcl_Command TclCreateEnsembleInNs(Tcl_Interp *interp, const char *name, Tcl_Namespace *nameNamespacePtr, @@ -2989,9 +3141,9 @@ MODULE_SCOPE Tcl_ObjCmdProc TclFileRenameCmd; MODULE_SCOPE Tcl_ObjCmdProc TclFileTempDirCmd; MODULE_SCOPE Tcl_ObjCmdProc TclFileTemporaryCmd; MODULE_SCOPE void TclCreateLateExitHandler(Tcl_ExitProc *proc, - ClientData clientData); + void *clientData); MODULE_SCOPE void TclDeleteLateExitHandler(Tcl_ExitProc *proc, - ClientData clientData); + void *clientData); MODULE_SCOPE char * TclDStringAppendObj(Tcl_DString *dsPtr, Tcl_Obj *objPtr); MODULE_SCOPE char * TclDStringAppendDString(Tcl_DString *dsPtr, @@ -3046,7 +3198,7 @@ MODULE_SCOPE int TclGetCompletionCodeFromObj(Tcl_Interp *interp, MODULE_SCOPE Proc * TclGetLambdaFromObj(Tcl_Interp *interp, Tcl_Obj *objPtr, Tcl_Obj **nsObjPtrPtr); MODULE_SCOPE int TclGetNumberFromObj(Tcl_Interp *interp, - Tcl_Obj *objPtr, ClientData *clientDataPtr, + Tcl_Obj *objPtr, void **clientDataPtr, int *typePtr); MODULE_SCOPE int TclGetOpenModeEx(Tcl_Interp *interp, const char *modeString, int *seekFlagPtr, @@ -3068,16 +3220,16 @@ MODULE_SCOPE int TclIncrObj(Tcl_Interp *interp, Tcl_Obj *valuePtr, Tcl_Obj *incrPtr); MODULE_SCOPE Tcl_Obj * TclIncrObjVar2(Tcl_Interp *interp, Tcl_Obj *part1Ptr, Tcl_Obj *part2Ptr, Tcl_Obj *incrPtr, int flags); -MODULE_SCOPE int TclInfoExistsCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int TclInfoExistsCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclInfoCoroutineCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int TclInfoCoroutineCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Obj * TclInfoFrame(Tcl_Interp *interp, CmdFrame *framePtr); 
-MODULE_SCOPE int TclInfoGlobalsCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int TclInfoGlobalsCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclInfoLocalsCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int TclInfoLocalsCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclInfoVarsCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int TclInfoVarsCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE void TclInitAlloc(void); MODULE_SCOPE void TclInitDbCkalloc(void); @@ -3107,6 +3259,9 @@ MODULE_SCOPE void TclListLines(Tcl_Obj *listObj, int line, int n, MODULE_SCOPE Tcl_Obj * TclListObjCopy(Tcl_Interp *interp, Tcl_Obj *listPtr); MODULE_SCOPE Tcl_Obj * TclListObjRange(Tcl_Obj *listPtr, int fromIdx, int toIdx); +MODULE_SCOPE int TclListObjAppendElements(Tcl_Interp *interp, + Tcl_Obj *toObj, int elemCount, + Tcl_Obj *const elemObjv[]); MODULE_SCOPE Tcl_Obj * TclLsetList(Tcl_Interp *interp, Tcl_Obj *listPtr, Tcl_Obj *indexPtr, Tcl_Obj *valuePtr); MODULE_SCOPE Tcl_Obj * TclLsetFlat(Tcl_Interp *interp, Tcl_Obj *listPtr, @@ -3149,18 +3304,18 @@ MODULE_SCOPE Tcl_Obj * TclpTempFileNameForLibrary(Tcl_Interp *interp, Tcl_Obj* pathPtr); MODULE_SCOPE Tcl_Obj * TclNewFSPathObj(Tcl_Obj *dirPtr, const char *addStrRep, int len); -MODULE_SCOPE void TclpAlertNotifier(ClientData clientData); -MODULE_SCOPE ClientData TclpNotifierData(void); +MODULE_SCOPE void TclpAlertNotifier(void *clientData); +MODULE_SCOPE void *TclpNotifierData(void); MODULE_SCOPE void TclpServiceModeHook(int mode); MODULE_SCOPE void TclpSetTimer(const Tcl_Time *timePtr); MODULE_SCOPE int TclpWaitForEvent(const Tcl_Time *timePtr); MODULE_SCOPE void TclpCreateFileHandler(int fd, int mask, - Tcl_FileProc *proc, ClientData clientData); + Tcl_FileProc *proc, void *clientData); MODULE_SCOPE int TclpDeleteFile(const void *path); MODULE_SCOPE void TclpDeleteFileHandler(int fd); MODULE_SCOPE void TclpFinalizeCondition(Tcl_Condition *condPtr); MODULE_SCOPE void TclpFinalizeMutex(Tcl_Mutex *mutexPtr); -MODULE_SCOPE void TclpFinalizeNotifier(ClientData clientData); +MODULE_SCOPE void TclpFinalizeNotifier(void *clientData); MODULE_SCOPE void TclpFinalizePipes(void); MODULE_SCOPE void TclpFinalizeSockets(void); MODULE_SCOPE int TclCreateSocketAddress(Tcl_Interp *interp, @@ -3168,13 +3323,13 @@ MODULE_SCOPE int TclCreateSocketAddress(Tcl_Interp *interp, const char *host, int port, int willBind, const char **errorMsgPtr); MODULE_SCOPE int TclpThreadCreate(Tcl_ThreadId *idPtr, - Tcl_ThreadCreateProc *proc, ClientData clientData, + Tcl_ThreadCreateProc *proc, void *clientData, int stackSize, int flags); MODULE_SCOPE int TclpFindVariable(const char *name, int *lengthPtr); MODULE_SCOPE void TclpInitLibraryPath(char **valuePtr, TCL_HASH_TYPE *lengthPtr, Tcl_Encoding *encodingPtr); MODULE_SCOPE void TclpInitLock(void); -MODULE_SCOPE ClientData TclpInitNotifier(void); +MODULE_SCOPE void *TclpInitNotifier(void); MODULE_SCOPE void TclpInitPlatform(void); MODULE_SCOPE void TclpInitUnlock(void); MODULE_SCOPE Tcl_Obj * TclpObjListVolumes(void); @@ -3193,7 +3348,7 @@ MODULE_SCOPE int TclCrossFilesystemCopy(Tcl_Interp *interp, MODULE_SCOPE int TclpMatchInDirectory(Tcl_Interp *interp, Tcl_Obj *resultPtr, Tcl_Obj *pathPtr, const char *pattern, Tcl_GlobTypeData *types); -MODULE_SCOPE ClientData TclpGetNativeCwd(ClientData clientData); +MODULE_SCOPE void *TclpGetNativeCwd(void *clientData); MODULE_SCOPE Tcl_FSDupInternalRepProc 
TclNativeDupInternalRep; MODULE_SCOPE Tcl_Obj * TclpObjLink(Tcl_Obj *pathPtr, Tcl_Obj *toPtr, int linkType); @@ -3282,7 +3437,7 @@ MODULE_SCOPE int TclUtfCount(int ch); MODULE_SCOPE int TclUniCharToUCS4(const Tcl_UniChar *, int *); MODULE_SCOPE const Tcl_UniChar *TclUCS4Prev(const Tcl_UniChar *, const Tcl_UniChar *); #endif -MODULE_SCOPE Tcl_Obj * TclpNativeToNormalized(ClientData clientData); +MODULE_SCOPE Tcl_Obj * TclpNativeToNormalized(void *clientData); MODULE_SCOPE Tcl_Obj * TclpFilesystemPathType(Tcl_Obj *pathPtr); MODULE_SCOPE int TclpDlopen(Tcl_Interp *interp, Tcl_Obj *pathPtr, Tcl_LoadHandle *loadHandle, @@ -3378,60 +3533,60 @@ MODULE_SCOPE int TclIsSpaceProc(int byte); *---------------------------------------------------------------- */ -MODULE_SCOPE int Tcl_AfterObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_AfterObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_AppendObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_AppendObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ApplyObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ApplyObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitArrayCmd(Tcl_Interp *interp); MODULE_SCOPE Tcl_Command TclInitBinaryCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_BreakObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_BreakObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); #if !defined(TCL_NO_DEPRECATED) -MODULE_SCOPE int Tcl_CaseObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_CaseObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); #endif -MODULE_SCOPE int Tcl_CatchObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_CatchObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_CdObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_CdObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitChanCmd(Tcl_Interp *interp); -MODULE_SCOPE int TclChanCreateObjCmd(ClientData clientData, +MODULE_SCOPE int TclChanCreateObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclChanPostEventObjCmd(ClientData clientData, +MODULE_SCOPE int TclChanPostEventObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclChanPopObjCmd(ClientData clientData, +MODULE_SCOPE int TclChanPopObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclChanPushObjCmd(ClientData clientData, +MODULE_SCOPE int TclChanPushObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE void TclClockInit(Tcl_Interp *interp); MODULE_SCOPE int TclClockOldscanObjCmd( void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_CloseObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_CloseObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ConcatObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ConcatObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ContinueObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ContinueObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_TimerToken TclCreateAbsoluteTimerHandler( Tcl_Time *timePtr, Tcl_TimerProc 
*proc, - ClientData clientData); + void *clientData); MODULE_SCOPE int TclDefaultBgErrorHandlerObjCmd( - ClientData clientData, Tcl_Interp *interp, + void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitDictCmd(Tcl_Interp *interp); MODULE_SCOPE int TclDictWithFinish(Tcl_Interp *interp, Var *varPtr, @@ -3440,236 +3595,236 @@ MODULE_SCOPE int TclDictWithFinish(Tcl_Interp *interp, Var *varPtr, Tcl_Obj *const pathv[], Tcl_Obj *keysPtr); MODULE_SCOPE Tcl_Obj * TclDictWithInit(Tcl_Interp *interp, Tcl_Obj *dictPtr, int pathc, Tcl_Obj *const pathv[]); -MODULE_SCOPE int Tcl_DisassembleObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_DisassembleObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); /* Assemble command function */ -MODULE_SCOPE int Tcl_AssembleObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_AssembleObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int TclNRAssembleObjCmd(ClientData clientData, +MODULE_SCOPE int TclNRAssembleObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitEncodingCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_EofObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_EofObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ErrorObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ErrorObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_EvalObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_EvalObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ExecObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ExecObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ExitObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ExitObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ExprObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ExprObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_FblockedObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_FblockedObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int Tcl_FconfigureObjCmd( - ClientData clientData, Tcl_Interp *interp, + void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_FcopyObjCmd(ClientData dummy, +MODULE_SCOPE int Tcl_FcopyObjCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitFileCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_FileEventObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_FileEventObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_FlushObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_FlushObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ForObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ForObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ForeachObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ForeachObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_FormatObjCmd(ClientData dummy, +MODULE_SCOPE int Tcl_FormatObjCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int 
Tcl_GetsObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_GetsObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_GlobalObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_GlobalObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_GlobObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_GlobObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_IfObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_IfObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_IncrObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_IncrObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitInfoCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_InterpObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_InterpObjCmd(void *clientData, Tcl_Interp *interp, int argc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_JoinObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_JoinObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LappendObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LappendObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LassignObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LassignObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LindexObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LindexObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LinsertObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LinsertObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LlengthObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LlengthObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ListObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ListObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LmapObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LmapObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LoadObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LoadObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LpopObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LpopObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LrangeObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LrangeObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LremoveObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LremoveObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LrepeatObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LrepeatObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LreplaceObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LreplaceObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LreverseObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LreverseObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LsearchObjCmd(ClientData clientData, 
+MODULE_SCOPE int Tcl_LsearchObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LsetObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LsetObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_LsortObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_LsortObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitNamespaceCmd(Tcl_Interp *interp); -MODULE_SCOPE int TclNamespaceEnsembleCmd(ClientData dummy, +MODULE_SCOPE int TclNamespaceEnsembleCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_OpenObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_OpenObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_PackageObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_PackageObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_PidObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_PidObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command TclInitPrefixCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_PutsObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_PutsObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_PwdObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_PwdObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ReadObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ReadObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_RegexpObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_RegexpObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_RegsubObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_RegsubObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_RenameObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_RenameObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_RepresentationCmd(ClientData clientData, +MODULE_SCOPE int Tcl_RepresentationCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ReturnObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ReturnObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ScanObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_ScanObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SeekObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SeekObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SetObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SetObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SplitObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SplitObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SocketObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SocketObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SourceObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SourceObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE Tcl_Command 
TclInitStringCmd(Tcl_Interp *interp); -MODULE_SCOPE int Tcl_SubstObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SubstObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_SwitchObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_SwitchObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_TellObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_TellObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_ThrowObjCmd(ClientData dummy, Tcl_Interp *interp, +MODULE_SCOPE int Tcl_ThrowObjCmd(void *dummy, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_TimeObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_TimeObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_TimeRateObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_TimeRateObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_TraceObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_TraceObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_TryObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_TryObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_UnloadObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_UnloadObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_UnsetObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_UnsetObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_UpdateObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_UpdateObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_UplevelObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_UplevelObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_UpvarObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_UpvarObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_VariableObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_VariableObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_VwaitObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_VwaitObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); -MODULE_SCOPE int Tcl_WhileObjCmd(ClientData clientData, +MODULE_SCOPE int Tcl_WhileObjCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); @@ -4001,103 +4156,103 @@ MODULE_SCOPE int TclCompileBasicMin2ArgCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclInvertOpCmd(ClientData clientData, +MODULE_SCOPE int TclInvertOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileInvertOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclNotOpCmd(ClientData clientData, +MODULE_SCOPE int TclNotOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileNotOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclAddOpCmd(ClientData clientData, +MODULE_SCOPE int TclAddOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); 
MODULE_SCOPE int TclCompileAddOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclMulOpCmd(ClientData clientData, +MODULE_SCOPE int TclMulOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileMulOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclAndOpCmd(ClientData clientData, +MODULE_SCOPE int TclAndOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileAndOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclOrOpCmd(ClientData clientData, +MODULE_SCOPE int TclOrOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileOrOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclXorOpCmd(ClientData clientData, +MODULE_SCOPE int TclXorOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileXorOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclPowOpCmd(ClientData clientData, +MODULE_SCOPE int TclPowOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompilePowOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclLshiftOpCmd(ClientData clientData, +MODULE_SCOPE int TclLshiftOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileLshiftOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclRshiftOpCmd(ClientData clientData, +MODULE_SCOPE int TclRshiftOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileRshiftOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclModOpCmd(ClientData clientData, +MODULE_SCOPE int TclModOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileModOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclNeqOpCmd(ClientData clientData, +MODULE_SCOPE int TclNeqOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileNeqOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclStrneqOpCmd(ClientData clientData, +MODULE_SCOPE int TclStrneqOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileStrneqOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclInOpCmd(ClientData clientData, +MODULE_SCOPE int TclInOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileInOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclNiOpCmd(ClientData clientData, +MODULE_SCOPE int TclNiOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileNiOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclMinusOpCmd(ClientData clientData, +MODULE_SCOPE int 
TclMinusOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileMinusOpCmd(Tcl_Interp *interp, Tcl_Parse *parsePtr, Command *cmdPtr, struct CompileEnv *envPtr); -MODULE_SCOPE int TclDivOpCmd(ClientData clientData, +MODULE_SCOPE int TclDivOpCmd(void *clientData, Tcl_Interp *interp, int objc, Tcl_Obj *const objv[]); MODULE_SCOPE int TclCompileDivOpCmd(Tcl_Interp *interp, @@ -5181,7 +5336,7 @@ void Tcl_Panic(const char *, ...) __attribute__((analyzer_noreturn)); typedef struct NRE_callback { Tcl_NRPostProc *procPtr; - ClientData data[4]; + void *data[4]; struct NRE_callback *nextPtr; } NRE_callback; @@ -5196,10 +5351,10 @@ typedef struct NRE_callback { NRE_callback *_callbackPtr; \ TCLNR_ALLOC((interp), (_callbackPtr)); \ _callbackPtr->procPtr = (postProcPtr); \ - _callbackPtr->data[0] = (ClientData)(data0); \ - _callbackPtr->data[1] = (ClientData)(data1); \ - _callbackPtr->data[2] = (ClientData)(data2); \ - _callbackPtr->data[3] = (ClientData)(data3); \ + _callbackPtr->data[0] = (void *)(data0); \ + _callbackPtr->data[1] = (void *)(data1); \ + _callbackPtr->data[2] = (void *)(data2); \ + _callbackPtr->data[3] = (void *)(data3); \ _callbackPtr->nextPtr = TOP_CB(interp); \ TOP_CB(interp) = _callbackPtr; \ } while (0) @@ -5223,6 +5378,7 @@ typedef struct NRE_callback { #include "tclIntDecls.h" #include "tclIntPlatDecls.h" + #if !defined(USE_TCL_STUBS) && !defined(TCL_MEM_DEBUG) #define Tcl_AttemptAlloc(size) TclpAlloc(size) #define Tcl_AttemptRealloc(ptr, size) TclpRealloc((ptr), (size)) @@ -5254,8 +5410,8 @@ typedef struct NRE_callback { * Other externals. */ -MODULE_SCOPE size_t TclEnvEpoch; /* Epoch of the tcl environment - * (if changed with tcl-env). */ +MODULE_SCOPE size_t TclEnvEpoch; /* Epoch of the tcl environment + * (if changed with tcl-env). 
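Since the NRE_callback record above now carries plain void * data words, a callback pushed through the public Tcl_NRAddCallback API receives them the same way. A small sketch, assuming the standard 8.6-era NRE API; MyPostProc and the pushed message object are purely illustrative:

    #include <tcl.h>

    /* Runs after the inner evaluation completes; data[] holds the four
     * words that were pushed with Tcl_NRAddCallback. */
    static int
    MyPostProc(
        void *data[],
        Tcl_Interp *interp,
        int result)
    {
        Tcl_Obj *msgObj = (Tcl_Obj *) data[0];

        if (result == TCL_OK) {
            Tcl_SetObjResult(interp, msgObj);
        }
        Tcl_DecrRefCount(msgObj);    /* Balance the ref taken when pushing. */
        return result;
    }

    /* Inside an NRE-enabled command procedure one might write:
     *
     *    Tcl_IncrRefCount(msgObj);
     *    Tcl_NRAddCallback(interp, MyPostProc, msgObj, NULL, NULL, NULL);
     *    return Tcl_NREvalObj(interp, scriptObj, 0);
     */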
*/ #endif /* _TCLINT */ diff --git a/generic/tclIntDecls.h b/generic/tclIntDecls.h index 33b6883..69aee7c 100644 --- a/generic/tclIntDecls.h +++ b/generic/tclIntDecls.h @@ -656,8 +656,13 @@ EXTERN void TclStaticLibrary(Tcl_Interp *interp, /* 258 */ EXTERN Tcl_Obj * TclpCreateTemporaryDirectory(Tcl_Obj *dirObj, Tcl_Obj *basenameObj); -/* 259 */ -EXTERN void TclUnusedStubEntry(void); +/* Slot 259 is reserved */ +/* 260 */ +EXTERN Tcl_Obj * TclListTestObj(int length, int leadingSpace, + int endSpace); +/* 261 */ +EXTERN void TclListObjValidate(Tcl_Interp *interp, + Tcl_Obj *listObj); typedef struct TclIntStubs { int magic; @@ -922,7 +927,9 @@ typedef struct TclIntStubs { int (*tclPtrUnsetVar) (Tcl_Interp *interp, Tcl_Var varPtr, Tcl_Var arrayPtr, Tcl_Obj *part1Ptr, Tcl_Obj *part2Ptr, int flags); /* 256 */ void (*tclStaticLibrary) (Tcl_Interp *interp, const char *prefix, Tcl_LibraryInitProc *initProc, Tcl_LibraryInitProc *safeInitProc); /* 257 */ Tcl_Obj * (*tclpCreateTemporaryDirectory) (Tcl_Obj *dirObj, Tcl_Obj *basenameObj); /* 258 */ - void (*tclUnusedStubEntry) (void); /* 259 */ + void (*reserved259)(void); + Tcl_Obj * (*tclListTestObj) (int length, int leadingSpace, int endSpace); /* 260 */ + void (*tclListObjValidate) (Tcl_Interp *interp, Tcl_Obj *listObj); /* 261 */ } TclIntStubs; extern const TclIntStubs *tclIntStubsPtr; @@ -1368,8 +1375,11 @@ extern const TclIntStubs *tclIntStubsPtr; (tclIntStubsPtr->tclStaticLibrary) /* 257 */ #define TclpCreateTemporaryDirectory \ (tclIntStubsPtr->tclpCreateTemporaryDirectory) /* 258 */ -#define TclUnusedStubEntry \ - (tclIntStubsPtr->tclUnusedStubEntry) /* 259 */ +/* Slot 259 is reserved */ +#define TclListTestObj \ + (tclIntStubsPtr->tclListTestObj) /* 260 */ +#define TclListObjValidate \ + (tclIntStubsPtr->tclListObjValidate) /* 261 */ #endif /* defined(USE_TCL_STUBS) */ diff --git a/generic/tclInterp.c b/generic/tclInterp.c index 4ce2f31..d51b289 100644 --- a/generic/tclInterp.c +++ b/generic/tclInterp.c @@ -12,6 +12,7 @@ */ #include "tclInt.h" +#include <assert.h> /* * A pointer to a string that holds an initialization script that if non-NULL @@ -1822,7 +1823,7 @@ AliasNRCmd( int prefc, cmdc, i; Tcl_Obj **prefv, **cmdv; Tcl_Obj *listPtr; - List *listRep; + ListRep listRep; int flags = TCL_EVAL_INVOKE; /* @@ -1834,10 +1835,15 @@ AliasNRCmd( prefv = &aliasPtr->objPtr; cmdc = prefc + objc - 1; + /* TODO - encapsulate this into tclListObj.c */ listPtr = Tcl_NewListObj(cmdc, NULL); - listRep = ListRepPtr(listPtr); - listRep->elemCount = cmdc; - cmdv = listRep->elements; + ListObjGetRep(listPtr, &listRep); + cmdv = ListRepElementsBase(&listRep); + listRep.storePtr->numUsed = cmdc; + if (listRep.spanPtr) { + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = listRep.storePtr->numUsed; + } prefv = &aliasPtr->objPtr; memcpy(cmdv, prefv, prefc * sizeof(Tcl_Obj *)); diff --git a/generic/tclListObj.c b/generic/tclListObj.c index c24809e..7a702e0 100644 --- a/generic/tclListObj.c +++ b/generic/tclListObj.c @@ -3,9 +3,7 @@ * * This file contains functions that implement the Tcl list object type. * - * Copyright © 1995-1997 Sun Microsystems, Inc. - * Copyright © 1998 Scriptics Corporation. - * Copyright © 2001 Kevin B. Kenny. All rights reserved. + * Copyright © 2022 Ashok P. Nadkarni. All rights reserved. * * See the file "license.terms" for information on usage and redistribution of * this file, and for a DISCLAIMER OF ALL WARRANTIES. 
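For comparison with the AliasNRCmd change above, which writes straight into the freshly allocated ListStore slots, a public-API version of the same prefix-plus-arguments concatenation would look roughly like this (BuildAliasWords is a hypothetical helper name):

    #include <tcl.h>

    static Tcl_Obj *
    BuildAliasWords(
        Tcl_Interp *interp,
        Tcl_Obj *prefixListObj,    /* List holding the alias prefix words. */
        int objc,                  /* Arguments of the alias invocation. */
        Tcl_Obj *const objv[])
    {
        int prefc, i;
        Tcl_Obj **prefv;
        Tcl_Obj *cmdObj;

        if (Tcl_ListObjGetElements(interp, prefixListObj, &prefc, &prefv) != TCL_OK) {
            return NULL;
        }
        cmdObj = Tcl_NewListObj(prefc, prefv);    /* Copy of the prefix words */
        for (i = 1; i < objc; i++) {
            Tcl_ListObjAppendElement(NULL, cmdObj, objv[i]);
        }
        return cmdObj;                            /* refCount 0 */
    }

The in-tree code avoids the element-by-element appends (and any reallocation they could trigger) by sizing the store once with Tcl_NewListObj(cmdc, NULL) and filling the slots directly.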
@@ -15,28 +13,140 @@ #include <assert.h> /* - * Prototypes for functions defined later in this file: + * TODO - memmove is fast. Measure at what size we should prefer memmove + * (for unshared objects only) in lieu of range operations. On the other + * hand, more cache dirtied? */ -static List * AttemptNewList(Tcl_Interp *interp, int objc, - Tcl_Obj *const objv[]); -static List * NewListInternalRep(int objc, Tcl_Obj *const objv[], int p); -static void DupListInternalRep(Tcl_Obj *srcPtr, Tcl_Obj *copyPtr); -static void FreeListInternalRep(Tcl_Obj *listPtr); -static int SetListFromAny(Tcl_Interp *interp, Tcl_Obj *objPtr); -static void UpdateStringOfList(Tcl_Obj *listPtr); +/* + * Macros for validation and bug checking. + */ + +/* + * Control whether asserts are enabled. Always enable in debug builds. In non-debug + * builds, can be set with cdebug="-DENABLE_LIST_ASSERTS" on the nmake command line. + */ +#ifdef ENABLE_LIST_ASSERTS +# ifdef NDEBUG +# undef NDEBUG /* Activate assert() macro */ +# endif +#else +# ifndef NDEBUG +# define ENABLE_LIST_ASSERTS /* Always activate list asserts in debug mode */ +# endif +#endif + +#ifdef ENABLE_LIST_ASSERTS + +#define LIST_ASSERT(cond_) assert(cond_) /* TODO - is there a Tcl-specific one? */ +/* + * LIST_INDEX_ASSERT is to catch errors with negative indices and counts + * being passed AFTER validation. On Tcl9 length types are unsigned hence + * the checks against LIST_MAX. On Tcl8 length types are signed hence the + * also checks against 0. + */ +#define LIST_INDEX_ASSERT(idxarg_) \ + do { \ + ListSizeT idx_ = (idxarg_); /* To guard against ++ etc. */ \ + LIST_ASSERT(idx_ >= 0 && idx_ < LIST_MAX); \ + } while (0) +/* Ditto for counts except upper limit is different */ +#define LIST_COUNT_ASSERT(countarg_) \ + do { \ + ListSizeT count_ = (countarg_); /* To guard against ++ etc. */ \ + LIST_ASSERT(count_ >= 0 && count_ <= LIST_MAX); \ + } while (0) + +#else + +#define LIST_ASSERT(cond_) ((void) 0) +#define LIST_INDEX_ASSERT(idx_) ((void) 0) +#define LIST_COUNT_ASSERT(count_) ((void) 0) + +#endif + +/* Checks for when caller should have already converted to internal list type */ +#define LIST_ASSERT_TYPE(listObj_) \ + LIST_ASSERT((listObj_)->typePtr == &tclListType); + + +/* + * If ENABLE_LIST_INVARIANTS is enabled (-DENABLE_LIST_INVARIANTS from the + * command line), the entire list internal representation is checked for + * inconsistencies. This has a non-trivial cost so has to be separately + * enabled and not part of assertions checking. However, the test suite does + * invoke ListRepValidate directly even without ENABLE_LIST_INVARIANTS. + */ +#ifdef ENABLE_LIST_INVARIANTS +#define LISTREP_CHECK(listRepPtr_) ListRepValidate(listRepPtr_, __FILE__, __LINE__) +#else +#define LISTREP_CHECK(listRepPtr_) (void) 0 +#endif + +/* + * Flags used for controlling behavior of allocation of list + * internal representations. + * + * If the LISTREP_PANIC_ON_FAIL bit is set, the function will panic if + * list is too large or memory cannot be allocated. Without the flag + * a NULL pointer is returned. + * + * The LISTREP_SPACE_FAVOR_NONE, LISTREP_SPACE_FAVOR_FRONT, + * LISTREP_SPACE_FAVOR_BACK, LISTREP_SPACE_ONLY_BACK flags are used to + * control additional space when allocating. + * - If none of these flags is present, the exact space requested is + * allocated, nothing more. + * - Otherwise, if only LISTREP_FAVOR_FRONT is present, extra space is + * allocated with more towards the front. 
+ * - Conversely, if only LISTREP_FAVOR_BACK is present extra space is allocated + * with more to the back. + * - If both flags are present (LISTREP_SPACE_FAVOR_NONE), the extra space + * is equally apportioned. + * - Finally if LISTREP_SPACE_ONLY_BACK is present, ALL extra space is at + * the back. + */ +#define LISTREP_PANIC_ON_FAIL 0x00000001 +#define LISTREP_SPACE_FAVOR_FRONT 0x00000002 +#define LISTREP_SPACE_FAVOR_BACK 0x00000004 +#define LISTREP_SPACE_ONLY_BACK 0x00000008 +#define LISTREP_SPACE_FAVOR_NONE \ + (LISTREP_SPACE_FAVOR_FRONT | LISTREP_SPACE_FAVOR_BACK) +#define LISTREP_SPACE_FLAGS \ + (LISTREP_SPACE_FAVOR_FRONT | LISTREP_SPACE_FAVOR_BACK \ + | LISTREP_SPACE_ONLY_BACK) + +/* + * Prototypes for non-inline static functions defined later in this file: + */ +static int MemoryAllocationError(Tcl_Interp *, size_t size); +static int ListLimitExceededError(Tcl_Interp *); +static ListStore *ListStoreNew(ListSizeT objc, Tcl_Obj *const objv[], int flags); +static int ListRepInit(ListSizeT objc, Tcl_Obj *const objv[], int flags, ListRep *); +static int ListRepInitAttempt(Tcl_Interp *, + ListSizeT objc, + Tcl_Obj *const objv[], + ListRep *); +static void ListRepClone(ListRep *fromRepPtr, ListRep *toRepPtr, int flags); +static void ListRepUnsharedFreeUnreferenced(const ListRep *repPtr); +static int TclListObjGetRep(Tcl_Interp *, Tcl_Obj *listPtr, ListRep *repPtr); +static void ListRepRange(ListRep *srcRepPtr, + ListSizeT rangeStart, + ListSizeT rangeEnd, + int preserveSrcRep, + ListRep *rangeRepPtr); +static ListStore *ListStoreReallocate(ListStore *storePtr, ListSizeT numSlots); +static void ListRepValidate(const ListRep *repPtr, const char *file, + int lineNum); +static void DupListInternalRep(Tcl_Obj *srcPtr, Tcl_Obj *copyPtr); +static void FreeListInternalRep(Tcl_Obj *listPtr); +static int SetListFromAny(Tcl_Interp *interp, Tcl_Obj *objPtr); +static void UpdateStringOfList(Tcl_Obj *listPtr); /* * The structure below defines the list Tcl object type by means of functions * that can be invoked by generic object code. * - * The internal representation of a list object is a two-pointer - * representation. The first pointer designates a List structure that contains - * an array of pointers to the element objects, together with integers that - * represent the current element count and the allocated size of the array. - * The second pointer is normally NULL; during execution of functions in this - * file that operate on nested sublists, it is occasionally used as working - * storage to avoid an auxiliary stack. + * The internal representation of a list object is ListRep defined in tcl.h. 
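A standalone toy model of the two-pointer representation described here may help: a ListStore owns the slot array, while an optional ListSpan marks the window that a particular Tcl_Obj exposes. Field names follow the diff, but these structures and the start/length accessors are simplified stand-ins, not the real tclInt.h definitions:

    #include <stdio.h>

    typedef struct { int firstUsed, numUsed, numAllocated, refCount; } ToyStore;
    typedef struct { int spanStart, spanLength, refCount; } ToySpan;
    typedef struct { ToyStore *storePtr; ToySpan *spanPtr; } ToyRep;

    /* Index of the first exposed slot within the store. */
    static int ToyRepStart(const ToyRep *repPtr)
    {
        return repPtr->spanPtr ? repPtr->spanPtr->spanStart
                               : repPtr->storePtr->firstUsed;
    }

    /* Number of elements the representation exposes. */
    static int ToyRepLength(const ToyRep *repPtr)
    {
        return repPtr->spanPtr ? repPtr->spanPtr->spanLength
                               : repPtr->storePtr->numUsed;
    }

    int main(void)
    {
        ToyStore store = { 0, 10, 16, 2 };  /* occupied run: slots 0..9 of 16 */
        ToySpan span = { 4, 3, 1 };         /* window over slots 4..6 */
        ToyRep whole = { &store, NULL };    /* full list, no span */
        ToyRep slice = { &store, &span };   /* [lrange]-style view, no copying */

        printf("whole: start=%d length=%d\n", ToyRepStart(&whole), ToyRepLength(&whole));
        printf("slice: start=%d length=%d\n", ToyRepStart(&slice), ToyRepLength(&slice));
        return 0;
    }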
*/ const Tcl_ObjType tclListType = { @@ -48,142 +158,905 @@ const Tcl_ObjType tclListType = { }; /* Macros to manipulate the List internal rep */ +#define ListRepIncrRefs(repPtr_) \ + do { \ + (repPtr_)->storePtr->refCount++; \ + if ((repPtr_)->spanPtr) \ + (repPtr_)->spanPtr->refCount++; \ + } while (0) + +/* Returns number of free unused slots at the back of the ListRep's ListStore */ +#define ListRepNumFreeTail(repPtr_) \ + ((repPtr_)->storePtr->numAllocated \ + - ((repPtr_)->storePtr->firstUsed + (repPtr_)->storePtr->numUsed)) -#define ListSetInternalRep(objPtr, listRepPtr) \ - do { \ - Tcl_ObjInternalRep ir; \ - ir.twoPtrValue.ptr1 = (listRepPtr); \ - ir.twoPtrValue.ptr2 = NULL; \ - (listRepPtr)->refCount++; \ - Tcl_StoreInternalRep((objPtr), &tclListType, &ir); \ +/* Returns number of free unused slots at the front of the ListRep's ListStore */ +#define ListRepNumFreeHead(repPtr_) ((repPtr_)->storePtr->firstUsed) + +/* Returns a pointer to the slot corresponding to list index listIdx_ */ +#define ListRepSlotPtr(repPtr_, listIdx_) \ + (&(repPtr_)->storePtr->slots[ListRepStart(repPtr_) + (listIdx_)]) + +/* + * Macros to replace the internal representation in a Tcl_Obj. There are + * subtle differences in each so make sure to use the right one to avoid + * memory leaks, access to freed memory and the like. + * + * ListObjStompRep - assumes the Tcl_Obj internal representation can be + * overwritten AND that the passed ListRep already has reference counts that + * include the reference from the Tcl_Obj. Basically just copies the pointers + * and sets the internal Tcl_Obj type to list + * + * ListObjOverwriteRep - like ListObjOverwriteRep but additionally + * increments reference counts on the passed ListRep. Generally used when + * the string representation of the Tcl_Obj is not to be modified. + * + * ListObjReplaceRepAndInvalidate - Like ListObjOverwriteRep but additionally + * assumes the Tcl_Obj internal rep is valid (and possibly even same as + * passed ListRep) and frees it first. Additionally invalidates the string + * representation. Generally used when modifying a Tcl_Obj value. + */ +#define ListObjStompRep(objPtr_, repPtr_) \ + do { \ + (objPtr_)->internalRep.twoPtrValue.ptr1 = (repPtr_)->storePtr; \ + (objPtr_)->internalRep.twoPtrValue.ptr2 = (repPtr_)->spanPtr; \ + (objPtr_)->typePtr = &tclListType; \ } while (0) -#define ListGetInternalRep(objPtr, listRepPtr) \ - do { \ - const Tcl_ObjInternalRep *irPtr; \ - irPtr = TclFetchInternalRep((objPtr), &tclListType); \ - (listRepPtr) = irPtr ? (List *)irPtr->twoPtrValue.ptr1 : NULL; \ +#define ListObjOverwriteRep(objPtr_, repPtr_) \ + do { \ + ListRepIncrRefs(repPtr_); \ + ListObjStompRep(objPtr_, repPtr_); \ } while (0) -#define ListResetInternalRep(objPtr, listRepPtr) \ - TclFetchInternalRep((objPtr), &tclListType)->twoPtrValue.ptr1 = (listRepPtr) +#define ListObjReplaceRepAndInvalidate(objPtr_, repPtr_) \ + do { \ + /* Note order important, don't use ListObjOverwriteRep! */ \ + ListRepIncrRefs(repPtr_); \ + TclFreeInternalRep(objPtr_); \ + TclInvalidateStringRep(objPtr_); \ + ListObjStompRep(objPtr_, repPtr_); \ + } while (0) + +/* + *------------------------------------------------------------------------ + * + * ListSpanNew -- + * + * Allocates and initializes memory for a new ListSpan. The reference + * count on the returned struct is 0. + * + * Results: + * Non-NULL pointer to the allocated ListSpan. + * + * Side effects: + * The function will panic on memory allocation failure. 
+ * + *------------------------------------------------------------------------ + */ +static inline ListSpan * +ListSpanNew( + ListSizeT firstSlot, /* Starting slot index of the span */ + ListSizeT numSlots) /* Number of slots covered by the span */ +{ + ListSpan *spanPtr = (ListSpan *) ckalloc(sizeof(*spanPtr)); + spanPtr->refCount = 0; + spanPtr->spanStart = firstSlot; + spanPtr->spanLength = numSlots; + return spanPtr; +} + +/* + *------------------------------------------------------------------------ + * + * ListSpanDecrRefs -- + * + * Decrements the reference count on a span, freeing the memory if + * it drops to zero or less. + * + * Results: + * None. + * + * Side effects: + * The memory may be freed. + * + *------------------------------------------------------------------------ + */ +static inline void +ListSpanDecrRefs(ListSpan *spanPtr) +{ + if (spanPtr->refCount <= 1) { + ckfree(spanPtr); + } else { + spanPtr->refCount -= 1; + } +} + +/* + *------------------------------------------------------------------------ + * + * ListSpanMerited -- + * + * Creation of a new list may sometimes be done as a span on existing + * storage instead of allocating new. The tradeoff is that if the + * original list is released, the new span-based list may hold on to + * more memory than desired. This function implements heuristics for + * deciding which option is better. + * + * Results: + * Returns non-0 if a span-based list is likely to be more optimal + * and 0 if not. + * + * Side effects: + * None. + * + *------------------------------------------------------------------------ + */ +static inline int +ListSpanMerited( + ListSizeT length, /* Length of the proposed span */ + ListSizeT usedStorageLength, /* Number of slots currently in used */ + ListSizeT allocatedStorageLength) /* Length of the currently allocation */ +{ + /* + TODO + - heuristics thresholds need to be determined + - currently, information about the sharing (ref count) of existing + storage is not passed. Perhaps it should be. For example if the + existing storage has a "large" ref count, then it might make sense + to do even a small span. + */ -#ifndef TCL_MIN_ELEMENT_GROWTH -#define TCL_MIN_ELEMENT_GROWTH TCL_MIN_GROWTH/sizeof(Tcl_Obj *) -#endif + if (length < LIST_SPAN_THRESHOLD) { + return 0;/* No span for small lists */ + } + if (length < (allocatedStorageLength / 2 - allocatedStorageLength / 8)) { + return 0; /* No span if less than 3/8 of allocation */ + } + if (length < usedStorageLength / 2) { + return 0; /* No span if less than half current storage */ + } + + return 1; +} /* - *---------------------------------------------------------------------- + *------------------------------------------------------------------------ * - * NewListInternalRep -- + * ListStoreUpSize -- * - * Creates a 'List' structure with space for 'objc' elements. 'objc' must - * be > 0. If 'objv' is not NULL, The list is initialized with first - * 'objc' values in that array. Otherwise the list is initialized to have - * 0 elements, with space to add 'objc' more. Flag value 'p' indicates - * how to behave on failure. + * For reasons of efficiency, extra space is allocated for a ListStore + * compared to what was requested. This function calculates how many + * slots should actually be allocated for a given request size. * - * Value + * Results: + * Number of slots to allocate. * - * A new 'List' structure with refCount 0. If some failure - * prevents this NULL is returned if 'p' is 0 , and 'Tcl_Panic' - * is called if it is not. 
+ * Side effects: + * None. * - * Effect + *------------------------------------------------------------------------ + */ +static inline ListSizeT +ListStoreUpSize(ListSizeT numSlotsRequested) { + /* TODO -how much extra? May be double only for smaller requests? */ + return numSlotsRequested < (LIST_MAX / 2) ? 2 * numSlotsRequested + : LIST_MAX; +} + +/* + *------------------------------------------------------------------------ * - * The refCount of each value in 'objv' is incremented as it is added - * to the list. + * ListRepFreeUnreferenced -- * - *---------------------------------------------------------------------- + * Inline wrapper for ListRepUnsharedFreeUnreferenced that does quick checks + * before calling it. + * + * IMPORTANT: this function must not be called on an internal + * representation of a Tcl_Obj that is itself shared. + * + * Results: + * None. + * + * Side effects: + * See comments for ListRepUnsharedFreeUnreferenced. + * + *------------------------------------------------------------------------ + */ +static inline void +ListRepFreeUnreferenced(const ListRep *repPtr) +{ + if (! ListRepIsShared(repPtr) && repPtr->spanPtr) { + /* T:listrep-1.5.1 */ + ListRepUnsharedFreeUnreferenced(repPtr); + } +} + +/* + *------------------------------------------------------------------------ + * + * ObjArrayIncrRefs -- + * + * Increments the reference counts for Tcl_Obj's in a subarray. + * + * Results: + * None. + * + * Side effects: + * As above. + * + *------------------------------------------------------------------------ + */ +static inline void +ObjArrayIncrRefs( + Tcl_Obj * const *objv, /* Pointer to the array */ + ListSizeT startIdx, /* Starting index of subarray within objv */ + ListSizeT count) /* Number of elements in the subarray */ +{ + Tcl_Obj * const *end; + LIST_INDEX_ASSERT(startIdx); + LIST_COUNT_ASSERT(count); + objv += startIdx; + end = objv + count; + while (objv < end) { + Tcl_IncrRefCount(*objv); + ++objv; + } +} + +/* + *------------------------------------------------------------------------ + * + * ObjArrayDecrRefs -- + * + * Decrements the reference counts for Tcl_Obj's in a subarray. + * + * Results: + * None. + * + * Side effects: + * As above. + * + *------------------------------------------------------------------------ + */ +static inline void +ObjArrayDecrRefs( + Tcl_Obj * const *objv, /* Pointer to the array */ + ListSizeT startIdx, /* Starting index of subarray within objv */ + ListSizeT count) /* Number of elements in the subarray */ +{ + Tcl_Obj * const *end; + LIST_INDEX_ASSERT(startIdx); + LIST_COUNT_ASSERT(count); + objv += startIdx; + end = objv + count; + while (objv < end) { + Tcl_DecrRefCount(*objv); + ++objv; + } +} + +/* + *------------------------------------------------------------------------ + * + * ObjArrayCopy -- + * + * Copies an array of Tcl_Obj* pointers. + * + * Results: + * None. + * + * Side effects: + * Reference counts on copied Tcl_Obj's are incremented. + * + *------------------------------------------------------------------------ + */ +static inline void +ObjArrayCopy( + Tcl_Obj **to, /* Destination */ + ListSizeT count, /* Number of pointers to copy */ + Tcl_Obj *const from[]) /* Source array of Tcl_Obj* */ +{ + Tcl_Obj **end; + LIST_COUNT_ASSERT(count); + end = to + count; + /* TODO - would memmove followed by separate IncrRef loop be faster? 
*/ + while (to < end) { + Tcl_IncrRefCount(*from); + *to++ = *from++; + } +} + +/* + *------------------------------------------------------------------------ + * + * MemoryAllocationError -- + * + * Generates a memory allocation failure error. + * + * Results: + * Always TCL_ERROR. + * + * Side effects: + * Error message and code are stored in the interpreter if not NULL. + * + *------------------------------------------------------------------------ */ +static int +MemoryAllocationError( + Tcl_Interp *interp, /* Interpreter for error message. May be NULL */ + size_t size) /* Size of attempted allocation that failed */ +{ + if (interp != NULL) { + Tcl_SetObjResult( + interp, + Tcl_ObjPrintf( + "list construction failed: unable to alloc %" TCL_LL_MODIFIER + "u bytes", + (Tcl_WideInt)size)); + Tcl_SetErrorCode(interp, "TCL", "MEMORY", NULL); + } + return TCL_ERROR; +} + +/* + *------------------------------------------------------------------------ + * + * ListLimitExceeded -- + * + * Generates an error for exceeding maximum list size. + * + * Results: + * Always TCL_ERROR. + * + * Side effects: + * Error message and code are stored in the interpreter if not NULL. + * + *------------------------------------------------------------------------ + */ +static int +ListLimitExceededError(Tcl_Interp *interp) +{ + if (interp != NULL) { + Tcl_SetObjResult( + interp, + Tcl_NewStringObj("max length of a Tcl list exceeded", -1)); + Tcl_SetErrorCode(interp, "TCL", "MEMORY", NULL); + } + return TCL_ERROR; +} + +/* + *------------------------------------------------------------------------ + * + * ListRepUnsharedShiftDown -- + * + * Shifts the "in-use" contents in the ListStore for a ListRep down + * by the given number of slots. The ListStore must be unshared and + * the free space at the front of the storage area must be big enough. + * It is the caller's responsibility to check. + * + * Results: + * None. + * + * Side effects: + * The contents of the ListRep's ListStore area are shifted down in the + * storage area. The ListRep's ListSpan is updated accordingly. + * + *------------------------------------------------------------------------ + */ +static inline void +ListRepUnsharedShiftDown(ListRep *repPtr, ListSizeT shiftCount) +{ + ListStore *storePtr; -static List * -NewListInternalRep( - int objc, - Tcl_Obj *const objv[], - int p) + LISTREP_CHECK(repPtr); + LIST_ASSERT(!ListRepIsShared(repPtr)); + + storePtr = repPtr->storePtr; + + LIST_COUNT_ASSERT(shiftCount); + LIST_ASSERT(storePtr->firstUsed >= shiftCount); + + memmove(&storePtr->slots[storePtr->firstUsed - shiftCount], + &storePtr->slots[storePtr->firstUsed], + storePtr->numUsed * sizeof(Tcl_Obj *)); + storePtr->firstUsed -= shiftCount; + if (repPtr->spanPtr) { + repPtr->spanPtr->spanStart -= shiftCount; + LIST_ASSERT(repPtr->spanPtr->spanLength == storePtr->numUsed); + } else { + /* + * If there was no span, firstUsed must have been 0 (Invariant) + * AND shiftCount must have been 0 (<= firstUsed on call) + * In other words, this would have been a no-op + */ + + LIST_ASSERT(storePtr->firstUsed == 0); + LIST_ASSERT(shiftCount == 0); + } + + LISTREP_CHECK(repPtr); +} + +/* + *------------------------------------------------------------------------ + * + * ListRepUnsharedShiftUp -- + * + * Shifts the "in-use" contents in the ListStore for a ListRep up + * by the given number of slots. The ListStore must be unshared and + * the free space at the back of the storage area must be big enough. + * It is the caller's responsibility to check. 
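The shift operations here are plain memmove bookkeeping over the slot array. A throwaway illustration of the "shift down" case, in ordinary C with strings standing in for Tcl_Obj pointers:

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        /* Eight slots; the occupied run "a b c" currently starts at index 3. */
        const char *slots[8] = { NULL, NULL, NULL, "a", "b", "c", NULL, NULL };
        int firstUsed = 3, numUsed = 3;
        int shiftCount = 2;                 /* Must not exceed firstUsed. */

        memmove(&slots[firstUsed - shiftCount], &slots[firstUsed],
                numUsed * sizeof(slots[0]));
        firstUsed -= shiftCount;

        printf("run now starts at %d: %s %s %s\n", firstUsed,
               slots[firstUsed], slots[firstUsed + 1], slots[firstUsed + 2]);
        return 0;
    }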
+ * TODO - this function is not currently used. + * + * Results: + * None. + * + * Side effects: + * The contents of the ListRep's ListStore area are shifted up in the + * storage area. The ListRep's ListSpan is updated accordingly. + * + *------------------------------------------------------------------------ + */ +#if 0 +static inline void +ListRepUnsharedShiftUp(ListRep *repPtr, ListSizeT shiftCount) { - List *listRepPtr; + ListStore *storePtr; + + LISTREP_CHECK(repPtr); + LIST_ASSERT(!ListRepIsShared(repPtr)); + LIST_COUNT_ASSERT(shiftCount); + + storePtr = repPtr->storePtr; + LIST_ASSERT((storePtr->firstUsed + storePtr->numUsed + shiftCount) + <= storePtr->numAllocated); + + memmove(&storePtr->slots[storePtr->firstUsed + shiftCount], + &storePtr->slots[storePtr->firstUsed], + storePtr->numUsed * sizeof(Tcl_Obj *)); + storePtr->firstUsed += shiftCount; + if (repPtr->spanPtr) { + repPtr->spanPtr->spanStart += shiftCount; + } else { + /* No span means entire original list is span */ + /* Should have been zero before shift - Invariant TBD */ + LIST_ASSERT(storePtr->firstUsed == shiftCount); + repPtr->spanPtr = ListSpanNew(shiftCount, storePtr->numUsed); + } - if (objc <= 0) { - Tcl_Panic("NewListInternalRep: expects postive element count"); + LISTREP_CHECK(repPtr); +} +#endif + +/* + *------------------------------------------------------------------------ + * + * ListRepValidate -- + * + * Checks all invariants for a ListRep and panics on failure. + * Note this is independent of NDEBUG, assert etc. + * + * Results: + * None. + * + * Side effects: + * Panics if any invariant is not met. + * + *------------------------------------------------------------------------ + */ +static void +ListRepValidate(const ListRep *repPtr, const char *file, int lineNum) +{ + ListStore *storePtr = repPtr->storePtr; + const char *condition; + + (void)storePtr; /* To stop gcc from whining about unused vars */ + +#define INVARIANT(cond_) \ + do { \ + if (!(cond_)) { \ + condition = #cond_; \ + goto failure; \ + } \ + } while (0) + + /* Separate each condition so line number gives exact reason for failure */ + INVARIANT(storePtr != NULL); + INVARIANT(storePtr->numAllocated >= 0); + INVARIANT(storePtr->numAllocated <= LIST_MAX); + INVARIANT(storePtr->firstUsed >= 0); + INVARIANT(storePtr->firstUsed < storePtr->numAllocated); + INVARIANT(storePtr->numUsed >= 0); + INVARIANT(storePtr->numUsed <= storePtr->numAllocated); + INVARIANT(storePtr->firstUsed <= (storePtr->numAllocated - storePtr->numUsed)); + + if (! ListRepIsShared(repPtr)) { + /* + * If this is the only reference and there is no span, then store + * occupancy must begin at 0 + */ + INVARIANT(repPtr->spanPtr || repPtr->storePtr->firstUsed == 0); } + INVARIANT(ListRepStart(repPtr) >= storePtr->firstUsed); + INVARIANT(ListRepLength(repPtr) <= storePtr->numUsed); + INVARIANT(ListRepStart(repPtr) <= (storePtr->firstUsed + storePtr->numUsed - ListRepLength(repPtr))); + +#undef INVARIANT + + return; + +failure: + Tcl_Panic("List internal failure in %s line %d. Condition: %s", + file, + lineNum, + condition); +} + +/* + *------------------------------------------------------------------------ + * + * TclListObjValidate -- + * + * Wrapper around ListRepValidate. Primarily used from test suite. + * + * Results: + * None. + * + * Side effects: + * Will panic if internal structure is not consistent or if object + * cannot be converted to a list object. 
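The INVARIANT() device used by ListRepValidate above (stringize the condition, jump to a single failure label) is easy to lift into other validators. A self-contained sketch, with made-up field values and a plain abort() standing in for Tcl_Panic:

    #include <stdio.h>
    #include <stdlib.h>

    static void
    ValidateWindow(int firstUsed, int numUsed, int numAllocated,
                   const char *file, int line)
    {
        const char *condition;

    #define INVARIANT(cond_) \
        do { if (!(cond_)) { condition = #cond_; goto failure; } } while (0)

        /* One condition per check so the stringized text pinpoints the failure. */
        INVARIANT(numAllocated > 0);
        INVARIANT(firstUsed >= 0 && firstUsed < numAllocated);
        INVARIANT(numUsed >= 0 && numUsed <= numAllocated);
        INVARIANT(firstUsed <= numAllocated - numUsed);

    #undef INVARIANT
        return;

    failure:
        fprintf(stderr, "validation failed at %s:%d: %s\n", file, line, condition);
        abort();
    }

    int main(void)
    {
        ValidateWindow(2, 3, 8, __FILE__, __LINE__);    /* Passes */
        ValidateWindow(6, 4, 8, __FILE__, __LINE__);    /* Fails the last check */
        return 0;
    }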
+ * + *------------------------------------------------------------------------ + */ +void +TclListObjValidate(Tcl_Interp *interp, Tcl_Obj *listObj) +{ + ListRep listRep; + if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK) { + Tcl_Panic("Object passed to TclListObjValidate cannot be converted to " + "a list object."); + } + ListRepValidate(&listRep, __FILE__, __LINE__); +} + +/* + *---------------------------------------------------------------------- + * + * ListStoreNew -- + * + * Allocates a new ListStore with space for at least objc elements. objc + * must be > 0. If objv!=NULL, initializes with the first objc values + * in that array. If objv==NULL, initalize 0 elements, with space + * to add objc more. + * + * Normally the function allocates the exact space requested unless + * the flags arguments has any LISTREP_SPACE_* + * bits set. See the comments for those #defines. + * + * Results: + * On success, a pointer to the allocated ListStore is returned. + * On allocation failure, panics if LISTREP_PANIC_ON_FAIL is set in + * flags; otherwise returns NULL. + * + * Side effects: + * The ref counts of the elements in objv are incremented on success + * since the returned ListStore references them. + * + *---------------------------------------------------------------------- + */ +static ListStore * +ListStoreNew( + ListSizeT objc, + Tcl_Obj *const objv[], + int flags) +{ + ListStore *storePtr; + ListSizeT capacity; + /* * First check to see if we'd overflow and try to allocate an object - * larger than our memory allocator allows. Note that this is actually a - * fairly small value when you're on a serious 64-bit machine, but that - * requires API changes to fix. See [Bug 219196] for a discussion. + * larger than our memory allocator allows. 
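ListStoreNew, continued just below, first attempts an allocation with growth headroom and, if that attempt fails, retries with the exact request before giving up. The same pattern in isolation, using plain malloc in place of attemptckalloc:

    #include <stdio.h>
    #include <stdlib.h>

    static void *
    AllocWithHeadroom(
        size_t exactBytes,        /* Minimum the caller needs. */
        size_t preferredBytes,    /* Larger size that leaves room to grow. */
        size_t *gotBytesPtr)      /* Out: how much was actually obtained. */
    {
        void *ptr = malloc(preferredBytes);

        if (ptr == NULL && preferredBytes != exactBytes) {
            preferredBytes = exactBytes;    /* Fall back to the exact size. */
            ptr = malloc(preferredBytes);
        }
        *gotBytesPtr = (ptr != NULL) ? preferredBytes : 0;
        return ptr;
    }

    int main(void)
    {
        size_t got;
        void *ptr = AllocWithHeadroom(100 * sizeof(void *), 200 * sizeof(void *), &got);

        printf("obtained %zu bytes\n", got);
        free(ptr);
        return 0;
    }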
*/ - - if ((size_t)objc > LIST_MAX) { - if (p) { - Tcl_Panic("max length of a Tcl list (%d elements) exceeded", - LIST_MAX); + if (objc > LIST_MAX) { + if (flags & LISTREP_PANIC_ON_FAIL) { + Tcl_Panic("max length of a Tcl list exceeded"); } return NULL; } - listRepPtr = (List *)attemptckalloc(LIST_SIZE(objc)); - if (listRepPtr == NULL) { - if (p) { + if (flags & LISTREP_SPACE_FLAGS) { + capacity = ListStoreUpSize(objc); + } else { + capacity = objc; + } + + storePtr = (ListStore *)attemptckalloc(LIST_SIZE(capacity)); + if (storePtr == NULL && capacity != objc) { + capacity = objc; /* Try allocating exact size */ + storePtr = (ListStore *)attemptckalloc(LIST_SIZE(capacity)); + } + if (storePtr == NULL) { + if (flags & LISTREP_PANIC_ON_FAIL) { Tcl_Panic("list creation failed: unable to alloc %u bytes", LIST_SIZE(objc)); } return NULL; } - listRepPtr->canonicalFlag = 0; - listRepPtr->refCount = 0; - listRepPtr->maxElemCount = objc; + storePtr->refCount = 0; + storePtr->flags = 0; + storePtr->numAllocated = capacity; + if (capacity == objc) { + storePtr->firstUsed = 0; + } else { + ListSizeT extra = capacity - objc; + int spaceFlags = flags & LISTREP_SPACE_FLAGS; + if (spaceFlags == LISTREP_SPACE_ONLY_BACK) { + storePtr->firstUsed = 0; + } else if (spaceFlags == LISTREP_SPACE_FAVOR_FRONT) { + /* Leave more space in the front */ + storePtr->firstUsed = + extra - (extra / 4); /* NOT same as 3*extra/4 */ + } else if (spaceFlags == LISTREP_SPACE_FAVOR_BACK) { + /* Leave more space in the back */ + storePtr->firstUsed = extra / 4; + } else { + /* Apportion equally */ + storePtr->firstUsed = extra / 2; + } + } if (objv) { - Tcl_Obj **elemPtrs; - int i; - - listRepPtr->elemCount = objc; - elemPtrs = listRepPtr->elements; - for (i = 0; i < objc; i++) { - elemPtrs[i] = objv[i]; - Tcl_IncrRefCount(elemPtrs[i]); - } + storePtr->numUsed = objc; + ObjArrayCopy(&storePtr->slots[storePtr->firstUsed], objc, objv); } else { - listRepPtr->elemCount = 0; + storePtr->numUsed = 0; } - return listRepPtr; + + return storePtr; +} + +/* + *------------------------------------------------------------------------ + * + * ListStoreReallocate -- + * + * Reallocates the memory for a ListStore. + * + * Results: + * Pointer to the ListStore which may be the same as storePtr or pointer + * to a new block of memory. On reallocation failure, NULL is returned. + * + * + * Side effects: + * The memory pointed to by storePtr is freed if it a new block has to + * be returned. + * + * + *------------------------------------------------------------------------ + */ +ListStore * +ListStoreReallocate (ListStore *storePtr, ListSizeT numSlots) +{ + ListSizeT newCapacity; + ListStore *newStorePtr; + + newCapacity = ListStoreUpSize(numSlots); + newStorePtr = + (ListStore *)attemptckrealloc(storePtr, LIST_SIZE(newCapacity)); + if (newStorePtr == NULL) { + newCapacity = numSlots; + newStorePtr = (ListStore *)attemptckrealloc(storePtr, + LIST_SIZE(newCapacity)); + if (newStorePtr == NULL) + return NULL; + } + /* Only the capacity has changed, fix it in the header */ + newStorePtr->numAllocated = newCapacity; + return newStorePtr; } /* *---------------------------------------------------------------------- * - * AttemptNewList -- + * ListRepInit -- + * + * Initializes a ListRep to hold a list internal representation + * with space for objc elements. + * + * objc must be > 0. If objv!=NULL, initializes with the first objc + * values in that array. If objv==NULL, initalize list internal rep to + * have 0 elements, with space to add objc more. 
+ * + * Normally the function allocates the exact space requested unless + * the flags argument has one of the LISTREP_SPACE_* bits set. + * See the comments for those #defines. * - * Like NewListInternalRep, but additionally sets an error message on failure. + * The reference counts of the ListStore and ListSpan (if present) + * pointed to by the initialized repPtr are set to zero. + * Caller has to manage them as necessary. + * + * Results: + * On success, TCL_OK is returned with *listRepPtr initialized. + * On failure, panics if LISTREP_PANIC_ON_FAIL is set in flags; otherwise + * returns TCL_ERROR with *listRepPtr fields set to NULL. + * + * Side effects: + * The ref counts of the elements in objv are incremented since the + * resulting list now refers to them. * *---------------------------------------------------------------------- */ +static int +ListRepInit( + ListSizeT objc, + Tcl_Obj *const objv[], + int flags, + ListRep *repPtr + ) +{ + ListStore *storePtr; -static List * -AttemptNewList( + storePtr = ListStoreNew(objc, objv, flags); + if (storePtr) { + repPtr->storePtr = storePtr; + if (storePtr->firstUsed == 0) { + repPtr->spanPtr = NULL; + } else { + repPtr->spanPtr = + ListSpanNew(storePtr->firstUsed, storePtr->numUsed); + } + return TCL_OK; + } + /* + * Initialize to keep gcc happy at the call site. Else it complains + * about possibly uninitialized use. + */ + repPtr->storePtr = NULL; + repPtr->spanPtr = NULL; + return TCL_ERROR; +} + +/* + *---------------------------------------------------------------------- + * + * ListRepInitAttempt -- + * + * Creates a list internal rep with space for objc elements. See + * ListRepInit for requirements for parameters (in particular objc must + * be > 0). This function only adds error messages to the interpreter if + * it is not NULL. + * + * The reference counts of the ListStore and ListSpan (if present) + * pointed to by the initialized repPtr are set to zero. + * Caller has to manage them as necessary. + * + * Results: + * On success, TCL_OK is returned with *listRepPtr initialized. + * On allocation failure, returns TCL_ERROR with an error message + * in the interpreter if non-NULL. + * + * Side effects: + * The ref counts of the elements in objv are incremented since the + * resulting list now refers to them. + * + *---------------------------------------------------------------------- + */ +static int +ListRepInitAttempt( Tcl_Interp *interp, - int objc, - Tcl_Obj *const objv[]) + ListSizeT objc, + Tcl_Obj *const objv[], + ListRep *repPtr) { - List *listRepPtr = NewListInternalRep(objc, objv, 0); + int result = ListRepInit(objc, objv, 0, repPtr); - if (interp != NULL && listRepPtr == NULL) { + if (result != TCL_OK && interp != NULL) { if (objc > LIST_MAX) { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "max length of a Tcl list (%d elements) exceeded", - LIST_MAX)); + ListLimitExceededError(interp); } else { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "list creation failed: unable to alloc %u bytes", - LIST_SIZE(objc))); + MemoryAllocationError(interp, LIST_SIZE(objc)); } - Tcl_SetErrorCode(interp, "TCL", "MEMORY", NULL); } - return listRepPtr; + return result; +} + +/* + *------------------------------------------------------------------------ + * + * ListRepClone -- + * + * Does a deep clone of an existing ListRep. + * + * Normally the function allocates the exact space needed unless + * the flags argument has one of the LISTREP_SPACE_* bits set. + * See the comments for those #defines. + * + * Results: + * None.
+ * + * Side effects: + * The toRepPtr location is initialized with the ListStore and ListSpan + * (if needed) containing a copy of the list elements in fromRepPtr. + * The function will panic if memory cannot be allocated. + * + *------------------------------------------------------------------------ + */ +static void +ListRepClone(ListRep *fromRepPtr, ListRep *toRepPtr, int flags) +{ + Tcl_Obj **fromObjs; + ListSizeT numFrom; + + ListRepElements(fromRepPtr, numFrom, fromObjs); + ListRepInit(numFrom, fromObjs, flags | LISTREP_PANIC_ON_FAIL, toRepPtr); +} + +/* + *------------------------------------------------------------------------ + * + * ListRepUnsharedFreeUnreferenced -- + * + * Frees any Tcl_Obj's from the "in-use" area of the ListStore for a + * ListRep that are not actually references from any lists. + * + * IMPORTANT: this function must not be called on a shared internal + * representation or the internal representation of a shared Tcl_Obj. + * + * Results: + * None. + * + * Side effects: + * The firstUsed and numUsed fields of the ListStore are updated to + * reflect the new "in-use" extent. + * + *------------------------------------------------------------------------ + */ +static void ListRepUnsharedFreeUnreferenced(const ListRep *repPtr) +{ + ListSizeT count; + ListStore *storePtr; + ListSpan *spanPtr; + + LIST_ASSERT(!ListRepIsShared(repPtr)); + LISTREP_CHECK(repPtr); + + storePtr = repPtr->storePtr; + spanPtr = repPtr->spanPtr; + if (spanPtr == NULL) { + LIST_ASSERT(storePtr->firstUsed == 0); /* Invariant TBD */ + return; + } + + /* Collect garbage at front */ + count = spanPtr->spanStart - storePtr->firstUsed; + LIST_COUNT_ASSERT(count); + if (count > 0) { + /* T:listrep-1.5.1,6.{1:8} */ + ObjArrayDecrRefs(storePtr->slots, storePtr->firstUsed, count); + storePtr->firstUsed = spanPtr->spanStart; + LIST_ASSERT(storePtr->numUsed >= count); + storePtr->numUsed -= count; + } + + /* Collect garbage at back */ + count = (storePtr->firstUsed + storePtr->numUsed) + - (spanPtr->spanStart + spanPtr->spanLength); + LIST_COUNT_ASSERT(count); + if (count > 0) { + /* T:listrep-6.{1:8} */ + ObjArrayDecrRefs( + storePtr->slots, spanPtr->spanStart + spanPtr->spanLength, count); + LIST_ASSERT(storePtr->numUsed >= count); + storePtr->numUsed -= count; + } + + LIST_ASSERT(ListRepStart(repPtr) == storePtr->firstUsed); + LIST_ASSERT(ListRepLength(repPtr) == storePtr->numUsed); + LISTREP_CHECK(repPtr); } /* @@ -191,20 +1064,23 @@ AttemptNewList( * * Tcl_NewListObj -- * - * Creates a new list object and adds values to it. When TCL_MEM_DEBUG is - * defined, 'Tcl_DbNewListObj' is called instead. - * - * Value + * This function is normally called when not debugging: i.e., when + * TCL_MEM_DEBUG is not defined. It creates a new list object from an + * (objc,objv) array: that is, each of the objc elements of the array + * referenced by objv is inserted as an element into a new Tcl object. * - * A new list 'Tcl_Obj' to which is appended values from 'objv', or if - * 'objc' is less than or equal to zero, a list 'Tcl_Obj' having no - * elements. The string representation of the new 'Tcl_Obj' is set to - * NULL. The refCount of the list is 0. + * When TCL_MEM_DEBUG is defined, this function just returns the result + * of calling the debugging version Tcl_DbNewListObj. * - * Effect + * Results: + * A new list object is returned that is initialized from the object + * pointers in objv. If objc is less than or equal to zero, an empty + * object is returned. 
The new object's string representation is left + * NULL. The resulting new list object has ref count 0. * - * The refCount of each elements in 'objv' is incremented as it is added - * to the list. + * Side effects: + * The ref counts of the elements in objv are incremented since the + * resulting list now refers to them. * *---------------------------------------------------------------------- */ @@ -214,7 +1090,7 @@ AttemptNewList( Tcl_Obj * Tcl_NewListObj( - int objc, /* Count of objects referenced by objv. */ + ListSizeT objc, /* Count of objects referenced by objv. */ Tcl_Obj *const objv[]) /* An array of pointers to Tcl objects. */ { return Tcl_DbNewListObj(objc, objv, "unknown", 0); @@ -224,45 +1100,50 @@ Tcl_NewListObj( Tcl_Obj * Tcl_NewListObj( - int objc, /* Count of objects referenced by objv. */ + ListSizeT objc, /* Count of objects referenced by objv. */ Tcl_Obj *const objv[]) /* An array of pointers to Tcl objects. */ { - List *listRepPtr; - Tcl_Obj *listPtr; + ListRep listRep; + Tcl_Obj *listObj; - TclNewObj(listPtr); + TclNewObj(listObj); if (objc <= 0) { - return listPtr; + return listObj; } - /* - * Create the internal rep. - */ - - listRepPtr = NewListInternalRep(objc, objv, 1); + ListRepInit(objc, objv, LISTREP_PANIC_ON_FAIL, &listRep); + ListObjReplaceRepAndInvalidate(listObj, &listRep); - /* - * Now create the object. - */ - - TclInvalidateStringRep(listPtr); - ListSetInternalRep(listPtr, listRepPtr); - return listPtr; + return listObj; } #endif /* if TCL_MEM_DEBUG */ /* *---------------------------------------------------------------------- * - * Tcl_DbNewListObj -- + * Tcl_DbNewListObj -- + * + * This function is normally called when debugging: i.e., when + * TCL_MEM_DEBUG is defined. It creates new list objects. It is the same + * as the Tcl_NewListObj function above except that it calls + * Tcl_DbCkalloc directly with the file name and line number from its + * caller. This simplifies debugging since then the [memory active] + * command will report the correct file name and line number when + * reporting objects that haven't been freed. * - * Like 'Tcl_NewListObj', but it calls Tcl_DbCkalloc directly with the - * file name and line number from its caller. This simplifies debugging - * since the [memory active] command will report the correct file - * name and line number when reporting objects that haven't been freed. + * When TCL_MEM_DEBUG is not defined, this function just returns the + * result of calling Tcl_NewListObj. * - * When TCL_MEM_DEBUG is not defined, 'Tcl_NewListObj' is called instead. + * Results: + * A new list object is returned that is initialized from the object + * pointers in objv. If objc is less than or equal to zero, an empty + * object is returned. The new object's string representation is left + * NULL. The new list object has ref count 0. + * + * Side effects: + * The ref counts of the elements in objv are incremented since the + * resulting list now refers to them. * *---------------------------------------------------------------------- */ @@ -271,43 +1152,33 @@ Tcl_NewListObj( Tcl_Obj * Tcl_DbNewListObj( - int objc, /* Count of objects referenced by objv. */ + ListSizeT objc, /* Count of objects referenced by objv. */ Tcl_Obj *const objv[], /* An array of pointers to Tcl objects. */ const char *file, /* The name of the source file calling this * function; used for debugging. */ int line) /* Line number in the source file; used for * debugging. 
*/ { - Tcl_Obj *listPtr; - List *listRepPtr; + Tcl_Obj *listObj; + ListRep listRep; - TclDbNewObj(listPtr, file, line); + TclDbNewObj(listObj, file, line); if (objc <= 0) { - return listPtr; + return listObj; } - /* - * Create the internal rep. - */ - - listRepPtr = NewListInternalRep(objc, objv, 1); - - /* - * Now create the object. - */ - - TclInvalidateStringRep(listPtr); - ListSetInternalRep(listPtr, listRepPtr); + ListRepInit(objc, objv, LISTREP_PANIC_ON_FAIL, &listRep); + ListObjReplaceRepAndInvalidate(listObj, &listRep); - return listPtr; + return listObj; } #else /* if not TCL_MEM_DEBUG */ Tcl_Obj * Tcl_DbNewListObj( - int objc, /* Count of objects referenced by objv. */ + ListSizeT objc, /* Count of objects referenced by objv. */ Tcl_Obj *const objv[], /* An array of pointers to Tcl objects. */ TCL_UNUSED(const char *) /*file*/, TCL_UNUSED(int) /*line*/) @@ -317,45 +1188,152 @@ Tcl_DbNewListObj( #endif /* TCL_MEM_DEBUG */ /* + *------------------------------------------------------------------------ + * + * TclNewListObj2 -- + * + * Create a new Tcl_Obj list comprising of the concatenation of two + * Tcl_Obj* arrays. + * TODO - currently this function is not used within tclListObj but + * need to see if it would be useful in other files that preallocate + * lists and then append. + * + * Results: + * Non-NULL pointer to the allocate Tcl_Obj. + * + * Side effects: + * None. + * + *------------------------------------------------------------------------ + */ +Tcl_Obj * +TclNewListObj2( + ListSizeT objc1, /* Count of objects referenced by objv1. */ + Tcl_Obj *const objv1[], /* First array of pointers to Tcl objects. */ + ListSizeT objc2, /* Count of objects referenced by objv2. */ + Tcl_Obj *const objv2[] /* Second array of pointers to Tcl objects. */ +) +{ + Tcl_Obj *listObj; + ListStore *storePtr; + ListSizeT objc = objc1 + objc2; + + listObj = Tcl_NewListObj(objc, NULL); + if (objc == 0) { + return listObj; /* An empty object */ + } + LIST_ASSERT_TYPE(listObj); + + storePtr = ListObjStorePtr(listObj); + + LIST_ASSERT(ListObjSpanPtr(listObj) == NULL); + LIST_ASSERT(storePtr->firstUsed == 0); + LIST_ASSERT(storePtr->numUsed == 0); + LIST_ASSERT(storePtr->numAllocated >= objc); + + if (objc1) { + ObjArrayCopy(storePtr->slots, objc1, objv1); + } + if (objc2) { + ObjArrayCopy(&storePtr->slots[objc1], objc2, objv2); + } + storePtr->numUsed = objc; + return listObj; +} + +/* *---------------------------------------------------------------------- * - * Tcl_SetListObj -- + * TclListObjGetRep -- * - * Like 'Tcl_NewListObj', but operates on an existing 'Tcl_Obj'instead of - * creating a new one. + * This function returns a copy of the ListRep stored + * as the internal representation of an object. The reference + * counts of the (ListStore, ListSpan) contained in the representation + * are NOT incremented. * + * Results: + * The return value is normally TCL_OK; in this case *listRepP + * is set to a copy of the descriptor stored as the internal + * representation of the Tcl_Obj containing a list. if listPtr does not + * refer to a list object and the object can not be converted to one, + * TCL_ERROR is returned and an error message will be left in the + * interpreter's result if interp is not NULL. + * + * Side effects: + * The possible conversion of the object referenced by listPtr + * to a list object. *repPtr is initialized to the internal rep + * if result is TCL_OK, or set to NULL on error. 
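As a point of reference for TclNewListObj2 above, the equivalent result through the public API needs a second pass of appends (ConcatTwoArrays is an illustrative helper name, not part of the patch):

    #include <tcl.h>

    static Tcl_Obj *
    ConcatTwoArrays(
        int objc1, Tcl_Obj *const objv1[],
        int objc2, Tcl_Obj *const objv2[])
    {
        Tcl_Obj *listObj = Tcl_NewListObj(objc1, objv1);
        int i;

        for (i = 0; i < objc2; i++) {
            Tcl_ListObjAppendElement(NULL, listObj, objv2[i]);
        }
        return listObj;    /* refCount 0; element refCounts were incremented */
    }

TclNewListObj2 skips the appends by allocating the store for objc1 + objc2 slots up front and copying both arrays straight into it.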
*---------------------------------------------------------------------- */ +static int +TclListObjGetRep( + Tcl_Interp *interp, /* Used to report errors if not NULL. */ + Tcl_Obj *listObj, /* List object for which an element array is + * to be returned. */ + ListRep *repPtr) /* Location to store descriptor */ +{ + if (!TclHasInternalRep(listObj, &tclListType)) { + int result; + result = SetListFromAny(interp, listObj); + if (result != TCL_OK) { + /* Init to keep gcc happy wrt uninitialized fields at call site */ + repPtr->storePtr = NULL; + repPtr->spanPtr = NULL; + return result; + } + } + ListObjGetRep(listObj, repPtr); + LISTREP_CHECK(repPtr); + return TCL_OK; +} + +/* + *---------------------------------------------------------------------- + * + * Tcl_SetListObj -- + * + * Modify an object to be a list containing each of the objc elements of + * the object array referenced by objv. + * + * Results: + * None. + * + * Side effects: + * The object is made a list object and is initialized from the object + * pointers in objv. If objc is less than or equal to zero, an empty + * object is returned. The new object's string representation is left + * NULL. The ref counts of the elements in objv are incremented since the + * list now refers to them. The object's old string and internal + * representations are freed and its type is set NULL. + * + *---------------------------------------------------------------------- + */ void Tcl_SetListObj( Tcl_Obj *objPtr, /* Object whose internal rep to init. */ - int objc, /* Count of objects referenced by objv. */ + ListSizeT objc, /* Count of objects referenced by objv. */ Tcl_Obj *const objv[]) /* An array of pointers to Tcl objects. */ { - List *listRepPtr; - if (Tcl_IsShared(objPtr)) { Tcl_Panic("%s called with shared object", "Tcl_SetListObj"); } /* - * Free any old string rep and any internal rep for the old type. - */ - - TclFreeInternalRep(objPtr); - TclInvalidateStringRep(objPtr); - - /* * Set the object's type to "list" and initialize the internal rep. * However, if there are no elements to put in the list, just give the - * object an empty string rep and a NULL type. + * object an empty string rep and a NULL type. NOTE ListRepInit must + * not be called with objc == 0! */ if (objc > 0) { - listRepPtr = NewListInternalRep(objc, objv, 1); - ListSetInternalRep(objPtr, listRepPtr); + ListRep listRep; + /* TODO - perhaps ask for extra space? */ + ListRepInit(objc, objv, LISTREP_PANIC_ON_FAIL, &listRep); + ListObjReplaceRepAndInvalidate(objPtr, &listRep); } else { + TclFreeInternalRep(objPtr); + TclInvalidateStringRep(objPtr); Tcl_InitStringRep(objPtr, NULL, 0); } } @@ -365,20 +1343,18 @@ Tcl_SetListObj( * * TclListObjCopy -- * - * Creates a new 'Tcl_Obj' which is a pure copy of a list value. This - * provides for the C level a counterpart of the [lrange $list 0 end] - * command, while using internals details to be as efficient as possible. - * - * Value + * Makes a "pure list" copy of a list value. This provides for the C + * level a counterpart of the [lrange $list 0 end] command, while using + * internals details to be as efficient as possible. * - * The address of the new 'Tcl_Obj' which shares its internal - * representation with 'listPtr', and whose refCount is 0. If 'listPtr' - * is not actually a list, the value is NULL, and an error message is left - * in 'interp' if it is not NULL. - * - * Effect + * Results: + * Normally returns a pointer to a new Tcl_Obj, that contains the same + * list value as *listPtr does. 
The returned Tcl_Obj has a refCount of + * zero. If *listPtr does not hold a list, NULL is returned, and if + * interp is non-NULL, an error message is recorded there. * - * 'listPtr' is converted to a list if it isn't one already. + * Side effects: + * None. * *---------------------------------------------------------------------- */ @@ -386,137 +1362,290 @@ Tcl_SetListObj( Tcl_Obj * TclListObjCopy( Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr) /* List object for which an element array is + Tcl_Obj *listObj) /* List object for which an element array is * to be returned. */ { - Tcl_Obj *copyPtr; - List *listRepPtr; + Tcl_Obj *copyObj; - ListGetInternalRep(listPtr, listRepPtr); - if (NULL == listRepPtr) { - if (SetListFromAny(interp, listPtr) != TCL_OK) { + if (!TclHasInternalRep(listObj, &tclListType)) { + if (SetListFromAny(interp, listObj) != TCL_OK) { return NULL; } } - TclNewObj(copyPtr); - TclInvalidateStringRep(copyPtr); - DupListInternalRep(listPtr, copyPtr); - return copyPtr; + TclNewObj(copyObj); + TclInvalidateStringRep(copyObj); + DupListInternalRep(listObj, copyObj); + return copyObj; } /* - *---------------------------------------------------------------------- + *------------------------------------------------------------------------ * - * TclListObjRange -- + * ListRepRange -- * - * Makes a slice of a list value. - * *listPtr must be known to be a valid list. + * Initializes a ListRep as a range within the passed ListRep. + * The range limits are clamped to the list boundaries. * * Results: - * Returns a pointer to the sliced list. - * This may be a new object or the same object if not shared. + * None. * * Side effects: - * The possible conversion of the object referenced by listPtr - * to a list object. - * - *---------------------------------------------------------------------- + * The ListStore and ListSpan referenced by in the returned ListRep + * may or may not be the same as those passed in. For example, the + * ListStore may differ because the range is small enough that a new + * ListStore is more memory-optimal. The ListSpan may differ because + * it is NULL or shared. Regardless, reference counts on the returned + * values are not incremented. Generally, ListObjReplaceRepAndInvalidate + * may be used to store the new ListRep back into an object or a + * ListRepIncrRefs followed by ListRepDecrRefs to free in case of errors. + * Any other use should be carefully reconsidered. + * TODO WARNING:- this is an awkward interface and easy for caller + * to get wrong. Mostly due to refcount combinations. Perhaps passing + * in the source listObj instead of source listRep might simplify. + * + *------------------------------------------------------------------------ */ - -Tcl_Obj * -TclListObjRange( - Tcl_Obj *listPtr, /* List object to take a range from. */ - int fromIdx, /* Index of first element to include. */ - int toIdx) /* Index of last element to include. */ +static void +ListRepRange( + ListRep *srcRepPtr, /* Contains source of the range */ + ListSizeT rangeStart, /* Index of first element to include */ + ListSizeT rangeEnd, /* Index of last element to include */ + int preserveSrcRep, /* If true, srcRepPtr contents must not be + modified (generally because a shared Tcl_Obj + references it) */ + ListRep *rangeRepPtr) /* Output. 
Must NOT be == srcRepPtr */ { - Tcl_Obj **elemPtrs; - int listLen, i, newLen; - List *listRepPtr; - - TclListObjGetElementsM(NULL, listPtr, &listLen, &elemPtrs); - - if (fromIdx < 0) { - fromIdx = 0; + Tcl_Obj **srcElems; + ListSizeT numSrcElems = ListRepLength(srcRepPtr); + ListSizeT rangeLen; + ListSizeT numAfterRangeEnd; + + LISTREP_CHECK(srcRepPtr); + + /* Take the opportunity to garbage collect */ + /* TODO - we probably do not need the preserveSrcRep here unlike later */ + if (!preserveSrcRep) { + /* T:listrep-1.{4,5,8,9},2.{4:7},3.{15:18},4.{7,8} */ + ListRepFreeUnreferenced(srcRepPtr); + } /* else T:listrep-2.{4.2,4.3,5.2,5.3,6.2,7.2,8.1} */ + + if (rangeStart < 0) { + rangeStart = 0; } - if (toIdx >= listLen) { - toIdx = listLen-1; + if (rangeEnd >= numSrcElems) { + rangeEnd = numSrcElems - 1; } - if (fromIdx > toIdx) { - Tcl_Obj *obj; - TclNewObj(obj); - return obj; - } - - newLen = toIdx - fromIdx + 1; - - if (Tcl_IsShared(listPtr) || - ((ListRepPtr(listPtr)->refCount > 1))) { - return Tcl_NewListObj(newLen, &elemPtrs[fromIdx]); + if (rangeStart > rangeEnd) { + /* Empty list of capacity 1. */ + ListRepInit(1, NULL, LISTREP_PANIC_ON_FAIL, rangeRepPtr); + return; } - /* - * In-place is possible. - */ + rangeLen = rangeEnd - rangeStart + 1; /* - * Even if nothing below cause any changes, we still want the - * string-canonizing effect of [lrange 0 end]. + * We can create a range one of four ways: + * (0) Range encapsulates entire list + * (1) Special case: deleting in-place from end of an unshared object + * (2) Use a ListSpan referencing the current ListStore + * (3) Creating a new ListStore + * (4) Removing all elements outside the range in the current ListStore + * Option (4) may only be done if caller has not disallowed it AND + * the ListStore is not shared. + * + * The choice depends on heuristics related to speed and memory. + * TODO - heuristics below need to be measured and tuned. + * + * Note: Even if nothing below cause any changes, we still want the + * string-canonizing effect of [lrange 0 end] so the Tcl_Obj should not + * be returned as is even if the range encompasses the whole list. */ + if (rangeStart == 0 && rangeEnd == (numSrcElems-1)) { + /* Option 0 - entire list. 
This may be used to canonicalize */ + /* T:listrep-1.10.1,2.8.1 */ + *rangeRepPtr = *srcRepPtr; /* Not ref counts not incremented */ + } else if (rangeStart == 0 && (!preserveSrcRep) + && (!ListRepIsShared(srcRepPtr) && srcRepPtr->spanPtr == NULL)) { + /* Option 1 - Special case unshared, exclude end elements, no span */ + LIST_ASSERT(srcRepPtr->storePtr->firstUsed == 0); /* If no span */ + ListRepElements(srcRepPtr, numSrcElems, srcElems); + numAfterRangeEnd = numSrcElems - (rangeEnd + 1); + /* Assert: Because numSrcElems > rangeEnd earlier */ + LIST_ASSERT(numAfterRangeEnd >= 0); + if (numAfterRangeEnd != 0) { + /* T:listrep-1.{8,9} */ + ObjArrayDecrRefs(srcElems, rangeEnd + 1, numAfterRangeEnd); + } + /* srcRepPtr->storePtr->firstUsed,numAllocated unchanged */ + srcRepPtr->storePtr->numUsed = rangeLen; + srcRepPtr->storePtr->flags = 0; + rangeRepPtr->storePtr = srcRepPtr->storePtr; /* Note no incr ref */ + rangeRepPtr->spanPtr = NULL; + } else if (ListSpanMerited(rangeLen, + srcRepPtr->storePtr->numUsed, + srcRepPtr->storePtr->numAllocated)) { + /* Option 2 - because span would be most efficient */ + ListSizeT spanStart = ListRepStart(srcRepPtr) + rangeStart; + if (!preserveSrcRep && srcRepPtr->spanPtr + && srcRepPtr->spanPtr->refCount <= 1) { + /* If span is not shared reuse it */ + /* T:listrep-2.7.3,3.{16,18} */ + srcRepPtr->spanPtr->spanStart = spanStart; + srcRepPtr->spanPtr->spanLength = rangeLen; + *rangeRepPtr = *srcRepPtr; + } else { + /* Span not present or is shared. */ + /* T:listrep-1.5,2.{5,7},4.{7,8} */ + rangeRepPtr->storePtr = srcRepPtr->storePtr; + rangeRepPtr->spanPtr = ListSpanNew(spanStart, rangeLen); + } + /* + * We have potentially created a new internal representation that + * references the same storage as srcRep but not yet incremented its + * reference count. So do NOT call freezombies if preserveSrcRep + * is mandated. + */ + if (!preserveSrcRep) { + /* T:listrep-1.{5.1,5.2,5.4},2.{5,7},3.{16,18},4.{7,8} */ + ListRepFreeUnreferenced(rangeRepPtr); + } + } else if (preserveSrcRep || ListRepIsShared(srcRepPtr)) { + /* Option 3 - span or modification in place not allowed/desired */ + /* T:listrep-2.{4,6} */ + ListRepElements(srcRepPtr, numSrcElems, srcElems); + /* TODO - allocate extra space? */ + ListRepInit(rangeLen, + &srcElems[rangeStart], + LISTREP_PANIC_ON_FAIL, + rangeRepPtr); + } else { + /* + * Option 4 - modify in place. Note that because of the invariant + * that spanless list stores must start at 0, we have to move + * everything to the front. + * TODO - perhaps if a span already exists, no need to move to front? + * or maybe no need to move all the way to the front? + * TODO - if range is small relative to allocation, allocate new? + */ - TclInvalidateStringRep(listPtr); - - /* - * Delete elements that should not be included. 
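The clamping applied here, a negative start raised to 0, an end index capped at the last element, and an empty result when the range is inverted, has a direct caller-level analogue. A small sketch using only the long-standing public list calls with their classic int-based signatures; the helper name SubList is invented for illustration:

    #include <tcl.h>

    /* Return a new sublist holding elements fromIdx..toIdx of listObj,
     * clamping the indices the same way the range code above does. */
    static Tcl_Obj *
    SubList(Tcl_Interp *interp, Tcl_Obj *listObj, int fromIdx, int toIdx)
    {
        int objc;
        Tcl_Obj **objv;

        if (Tcl_ListObjGetElements(interp, listObj, &objc, &objv) != TCL_OK) {
            return NULL;                     /* not a list, not convertible */
        }
        if (fromIdx < 0) {
            fromIdx = 0;                     /* clamp to the first element */
        }
        if (toIdx >= objc) {
            toIdx = objc - 1;                /* clamp to the last element */
        }
        if (fromIdx > toIdx) {
            return Tcl_NewListObj(0, NULL);  /* inverted range -> empty list */
        }
        /* Tcl_NewListObj takes its own references to the elements. */
        return Tcl_NewListObj(toIdx - fromIdx + 1, &objv[fromIdx]);
    }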
- */ + /* Asserts follow from call to ListRepFreeUnreferenced earlier */ + LIST_ASSERT(!preserveSrcRep); + LIST_ASSERT(!ListRepIsShared(srcRepPtr)); + LIST_ASSERT(ListRepStart(srcRepPtr) == srcRepPtr->storePtr->firstUsed); + LIST_ASSERT(ListRepLength(srcRepPtr) == srcRepPtr->storePtr->numUsed); - for (i = 0; i < fromIdx; i++) { - TclDecrRefCount(elemPtrs[i]); - } - for (i = toIdx + 1; i < listLen; i++) { - TclDecrRefCount(elemPtrs[i]); - } + ListRepElements(srcRepPtr, numSrcElems, srcElems); - if (fromIdx > 0) { - memmove(elemPtrs, &elemPtrs[fromIdx], - (size_t) newLen * sizeof(Tcl_Obj*)); + /* Free leading elements outside range */ + if (rangeStart != 0) { + /* T:listrep-1.4,3.15 */ + ObjArrayDecrRefs(srcElems, 0, rangeStart); + } + /* Ditto for trailing */ + numAfterRangeEnd = numSrcElems - (rangeEnd + 1); + /* Assert: Because numSrcElems > rangeEnd earlier */ + LIST_ASSERT(numAfterRangeEnd >= 0); + if (numAfterRangeEnd != 0) { + /* T:listrep-3.17 */ + ObjArrayDecrRefs(srcElems, rangeEnd + 1, numAfterRangeEnd); + } + memmove(&srcRepPtr->storePtr->slots[0], + &srcRepPtr->storePtr + ->slots[srcRepPtr->storePtr->firstUsed + rangeStart], + rangeLen * sizeof(Tcl_Obj *)); + srcRepPtr->storePtr->firstUsed = 0; + srcRepPtr->storePtr->numUsed = rangeLen; + srcRepPtr->storePtr->flags = 0; + if (srcRepPtr->spanPtr) { + /* In case the source has a span, update it for consistency */ + /* T:listrep-3.{15,17} */ + srcRepPtr->spanPtr->spanStart = srcRepPtr->storePtr->firstUsed; + srcRepPtr->spanPtr->spanLength = srcRepPtr->storePtr->numUsed; + } + rangeRepPtr->storePtr = srcRepPtr->storePtr; + rangeRepPtr->spanPtr = NULL; } - listRepPtr = ListRepPtr(listPtr); - listRepPtr->elemCount = newLen; + /* TODO - call freezombies here if !preserveSrcRep? */ - return listPtr; + /* Note ref counts intentionally not incremented */ + LISTREP_CHECK(rangeRepPtr); + return; } /* *---------------------------------------------------------------------- * - * Tcl_ListObjGetElements -- - * - * Retreive the elements in a list 'Tcl_Obj'. + * TclListObjRange -- * - * Value + * Makes a slice of a list value. + * *listObj must be known to be a valid list. * - * TCL_OK + * Results: + * Returns a pointer to the sliced list. + * This may be a new object or the same object if not shared. + * Returns NULL if passed listObj was not a list and could not be + * converted to one. * - * A count of list elements is stored, 'objcPtr', And a pointer to the - * array of elements in the list is stored in 'objvPtr'. + * Side effects: + * The possible conversion of the object referenced by listPtr + * to a list object. * - * The elements accessible via 'objvPtr' should be treated as readonly - * and the refCount for each object is _not_ incremented; the caller - * must do that if it holds on to a reference. Furthermore, the - * pointer and length returned by this function may change as soon as - * any function is called on the list object. Be careful about - * retaining the pointer in a local data structure. + *---------------------------------------------------------------------- + */ + +Tcl_Obj * +TclListObjRange( + Tcl_Obj *listObj, /* List object to take a range from. */ + ListSizeT rangeStart, /* Index of first element to include. */ + ListSizeT rangeEnd) /* Index of last element to include. 
*/ +{ + ListRep listRep; + ListRep resultRep; + + int isShared; + if (TclListObjGetRep(NULL, listObj, &listRep) != TCL_OK) + return NULL; + + isShared = Tcl_IsShared(listObj); + + ListRepRange(&listRep, rangeStart, rangeEnd, isShared, &resultRep); + + if (isShared) { + /* T:listrep-1.10.1,2.{4.2,4.3,5.2,5.3,6.2,7.2,8.1} */ + TclNewObj(listObj); + } /* T:listrep-1.{4.3,5.1,5.2} */ + ListObjReplaceRepAndInvalidate(listObj, &resultRep); + return listObj; +} + +/* + *---------------------------------------------------------------------- * - * TCL_ERROR + * Tcl_ListObjGetElements -- * - * 'listPtr' is not a valid list. An error message is left in the - * interpreter's result if 'interp' is not NULL. + * This function returns an (objc,objv) array of the elements in a list + * object. * - * Effect + * Results: + * The return value is normally TCL_OK; in this case *objcPtr is set to + * the count of list elements and *objvPtr is set to a pointer to an + * array of (*objcPtr) pointers to each list element. If listPtr does not + * refer to a list object and the object can not be converted to one, + * TCL_ERROR is returned and an error message will be left in the + * interpreter's result if interp is not NULL. + * + * The objects referenced by the returned array should be treated as + * readonly and their ref counts are _not_ incremented; the caller must + * do that if it holds on to a reference. Furthermore, the pointer and + * length returned by this function may change as soon as any function is + * called on the list object; be careful about retaining the pointer in a + * local data structure. * - * 'listPtr' is converted to a list object if it isn't one already. + * Side effects: + * The possible conversion of the object referenced by listPtr + * to a list object. * *---------------------------------------------------------------------- */ @@ -525,35 +1654,18 @@ TclListObjRange( int Tcl_ListObjGetElements( Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr, /* List object for which an element array is + Tcl_Obj *objPtr, /* List object for which an element array is * to be returned. */ - int *objcPtr, /* Where to store the count of objects + ListSizeT *objcPtr, /* Where to store the count of objects * referenced by objv. */ Tcl_Obj ***objvPtr) /* Where to store the pointer to an array of * pointers to the list's objects. */ { - List *listRepPtr; + ListRep listRep; - ListGetInternalRep(listPtr, listRepPtr); - - if (listRepPtr == NULL) { - int result; - int length; - - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - *objcPtr = 0; - *objvPtr = NULL; - return TCL_OK; - } - result = SetListFromAny(interp, listPtr); - if (result != TCL_OK) { - return result; - } - ListGetInternalRep(listPtr, listRepPtr); - } - *objcPtr = listRepPtr->elemCount; - *objvPtr = listRepPtr->elements; + if (TclListObjGetRep(interp, objPtr, &listRep) != TCL_OK) + return TCL_ERROR; + ListRepElements(&listRep, *objcPtr, *objvPtr); return TCL_OK; } @@ -562,49 +1674,37 @@ Tcl_ListObjGetElements( * * Tcl_ListObjAppendList -- * - * Appends the elements of elemListPtr to those of listPtr. - * - * Value - * - * TCL_OK + * This function appends the elements in the list fromObj + * to toObj. toObj must not be shared else the function will panic. * - * Success. - * - * TCL_ERROR - * - * 'listPtr' or 'elemListPtr' are not valid lists. An error - * message is left in the interpreter's result if 'interp' is not NULL. - * - * Effect + * Results: + * The return value is normally TCL_OK. 
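The read-only, borrowed nature of the (objc, objv) pair described above is worth showing in caller form: the array is not owned by the caller, no element reference counts are incremented, and the pointer must not be cached across operations on the list. An illustrative sketch, with SumElementLengths being a made-up name:

    #include <tcl.h>

    static int
    SumElementLengths(Tcl_Interp *interp, Tcl_Obj *listObj, int *sumPtr)
    {
        int objc, i, total = 0;
        Tcl_Obj **objv;

        if (Tcl_ListObjGetElements(interp, listObj, &objc, &objv) != TCL_OK) {
            return TCL_ERROR;
        }
        /* objv is borrowed: do not free it, do not keep it across any call
         * that could modify or convert listObj, and Tcl_IncrRefCount any
         * element that must outlive this function. */
        for (i = 0; i < objc; i++) {
            total += Tcl_GetCharLength(objv[i]);
        }
        *sumPtr = total;
        return TCL_OK;
    }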
If fromObj or toObj do not + * refer to list values, TCL_ERROR is returned and an error message is + * left in the interpreter's result if interp is not NULL. * - * The reference count of each element of 'elemListPtr' as it is added to - * 'listPtr'. 'listPtr' and 'elemListPtr' are converted to 'tclListType' - * if they are not already. Appending the new elements may cause the - * array of element pointers in 'listObj' to grow. If any objects are - * appended to 'listPtr'. Any preexisting string representation of - * 'listPtr' is invalidated. + * Side effects: + * The reference counts of the elements in fromObj are incremented + * since the list now refers to them. toObj and fromObj are + * converted, if necessary, to list objects. Also, appending the new + * elements may cause toObj's array of element pointers to grow. + * toObj's old string representation, if any, is invalidated. * *---------------------------------------------------------------------- */ - int Tcl_ListObjAppendList( Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr, /* List object to append elements to. */ - Tcl_Obj *elemListPtr) /* List obj with elements to append. */ + Tcl_Obj *toObj, /* List object to append elements to. */ + Tcl_Obj *fromObj) /* List obj with elements to append. */ { - int objc; + ListSizeT objc; Tcl_Obj **objv; - if (Tcl_IsShared(listPtr)) { + if (Tcl_IsShared(toObj)) { Tcl_Panic("%s called with shared object", "Tcl_ListObjAppendList"); } - /* - * Pull the elements to append from elemListPtr. - */ - - if (TCL_OK != TclListObjGetElementsM(interp, elemListPtr, &objc, &objv)) { + if (TclListObjGetElementsM(interp, fromObj, &objc, &objv) != TCL_OK) { return TCL_ERROR; } @@ -613,249 +1713,246 @@ Tcl_ListObjAppendList( * Delete zero existing elements. */ - return Tcl_ListObjReplace(interp, listPtr, LIST_MAX, 0, objc, objv); + return TclListObjAppendElements(interp, toObj, objc, objv); } /* - *---------------------------------------------------------------------- - * - * Tcl_ListObjAppendElement -- - * - * Like 'Tcl_ListObjAppendList', but Appends a single value to a list. - * - * Value - * - * TCL_OK + *------------------------------------------------------------------------ * - * 'objPtr' is appended to the elements of 'listPtr'. + * TclListObjAppendElements -- * - * TCL_ERROR + * Appends multiple elements to a Tcl_Obj list object. If + * the passed Tcl_Obj is not a list object, it will be converted to one + * and an error raised if the conversion fails. * - * listPtr does not refer to a list object and the object can not be - * converted to one. An error message will be left in the - * interpreter's result if interp is not NULL. + * The Tcl_Obj must not be shared though the internal representation + * may be. * - * Effect + * Results: + * On success, TCL_OK is returned with the specified elements appended. + * On failure, TCL_ERROR is returned with an error message in the + * interpreter if not NULL. * - * If 'listPtr' is not already of type 'tclListType', it is converted. - * The 'refCount' of 'objPtr' is incremented as it is added to 'listPtr'. - * Appending the new element may cause the the array of element pointers - * in 'listObj' to grow. Any preexisting string representation of - * 'listPtr' is invalidated. + * Side effects: + * None. 
* - *---------------------------------------------------------------------- + *------------------------------------------------------------------------ */ - -int -Tcl_ListObjAppendElement( + int TclListObjAppendElements ( Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr, /* List object to append objPtr to. */ - Tcl_Obj *objPtr) /* Object to append to listPtr's list. */ + Tcl_Obj *toObj, /* List object to append */ + ListSizeT elemCount, /* Number of elements in elemObjs[] */ + Tcl_Obj * const elemObjv[]) /* Objects to append to toObj's list. */ { - List *listRepPtr, *newPtr = NULL; - int numElems, numRequired; - int needGrow, isShared, attempt; + ListRep listRep; + Tcl_Obj **toObjv; + ListSizeT toLen; + ListSizeT finalLen; - if (Tcl_IsShared(listPtr)) { - Tcl_Panic("%s called with shared object", "Tcl_ListObjAppendElement"); + if (Tcl_IsShared(toObj)) { + Tcl_Panic("%s called with shared object", "TclListObjAppendElements"); } - ListGetInternalRep(listPtr, listRepPtr); - if (listRepPtr == NULL) { - int result; - int length; + if (TclListObjGetRep(interp, toObj, &listRep) != TCL_OK) + return TCL_ERROR; /* Cannot be converted to a list */ - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - Tcl_SetListObj(listPtr, 1, &objPtr); - return TCL_OK; - } - result = SetListFromAny(interp, listPtr); - if (result != TCL_OK) { - return result; - } - ListGetInternalRep(listPtr, listRepPtr); - } - - numElems = listRepPtr->elemCount; - numRequired = numElems + 1 ; - needGrow = (numRequired > listRepPtr->maxElemCount); - isShared = (listRepPtr->refCount > 1); + if (elemCount == 0) + return TCL_OK; /* Nothing to do. Note AFTER check for list above */ - if (numRequired > LIST_MAX) { - if (interp != NULL) { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "max length of a Tcl list (%d elements) exceeded", - LIST_MAX)); - Tcl_SetErrorCode(interp, "TCL", "MEMORY", NULL); - } - return TCL_ERROR; + ListRepElements(&listRep, toLen, toObjv); + if (elemCount > LIST_MAX || toLen > (LIST_MAX - elemCount)) { + return ListLimitExceededError(interp); } - if (needGrow && !isShared) { + finalLen = toLen + elemCount; + if (!ListRepIsShared(&listRep)) { /* - * Need to grow + unshared internalrep => try to realloc + * Reuse storage if possible. Even if too small, realloc-ing instead + * of creating a new ListStore will save us on manipulating Tcl_Obj + * reference counts on the elements which is a substantial cost + * if the list is not small. */ + ListSizeT numTailFree; - attempt = 2 * numRequired; - if (attempt <= LIST_MAX) { - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); - } - if (newPtr == NULL) { - attempt = numRequired + 1 + TCL_MIN_ELEMENT_GROWTH; - if (attempt > LIST_MAX) { - attempt = LIST_MAX; - } - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); - } - if (newPtr == NULL) { - attempt = numRequired; - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); - } - if (newPtr) { - listRepPtr = newPtr; - listRepPtr->maxElemCount = attempt; - needGrow = 0; - } - } - if (isShared || needGrow) { - Tcl_Obj **dst, **src = listRepPtr->elements; - - /* - * Either we have a shared internalrep and we must copy to write, or we - * need to grow and realloc attempts failed. Attempt internalrep copy. 
- */ - - attempt = 2 * numRequired; - newPtr = AttemptNewList(NULL, attempt, NULL); - if (newPtr == NULL) { - attempt = numRequired + 1 + TCL_MIN_ELEMENT_GROWTH; - if (attempt > LIST_MAX) { - attempt = LIST_MAX; - } - newPtr = AttemptNewList(NULL, attempt, NULL); - } - if (newPtr == NULL) { - attempt = numRequired; - newPtr = AttemptNewList(interp, attempt, NULL); - } - if (newPtr == NULL) { - /* - * All growth attempts failed; throw the error. - */ - - return TCL_ERROR; - } + ListRepFreeUnreferenced(&listRep); /* Collect garbage before checking room */ - dst = newPtr->elements; - newPtr->refCount++; - newPtr->canonicalFlag = listRepPtr->canonicalFlag; - newPtr->elemCount = listRepPtr->elemCount; + LIST_ASSERT(ListRepStart(&listRep) == listRep.storePtr->firstUsed); + LIST_ASSERT(ListRepLength(&listRep) == listRep.storePtr->numUsed); + LIST_ASSERT(toLen == listRep.storePtr->numUsed); - if (isShared) { - /* - * The original internalrep must remain undisturbed. Copy into the new - * one and bump refcounts - */ - while (numElems--) { - *dst = *src++; - Tcl_IncrRefCount(*dst++); + if (finalLen > listRep.storePtr->numAllocated) { + /* T:listrep-1.{2,11},3.6 */ + ListStore *newStorePtr; + newStorePtr = ListStoreReallocate(listRep.storePtr, finalLen); + if (newStorePtr == NULL) { + return MemoryAllocationError(interp, LIST_SIZE(finalLen)); } - listRepPtr->refCount--; - } else { + LIST_ASSERT(newStorePtr->numAllocated >= finalLen); + listRep.storePtr = newStorePtr; /* - * Old internalrep to be freed, re-use refCounts. + * WARNING: at this point the Tcl_Obj internal rep potentially + * points to freed storage if the reallocation returned a + * different location. Overwrite it to bring it back in sync. */ - - memcpy(dst, src, numElems * sizeof(Tcl_Obj *)); - ckfree(listRepPtr); - } - listRepPtr = newPtr; + ListObjStompRep(toObj, &listRep); + } /* else T:listrep-3.{4,5} */ + LIST_ASSERT(listRep.storePtr->numAllocated >= finalLen); + /* Current store big enough */ + numTailFree = ListRepNumFreeTail(&listRep); + LIST_ASSERT((numTailFree + listRep.storePtr->firstUsed) + >= elemCount); /* Total free */ + if (numTailFree < elemCount) { + /* Not enough room at back. Move some to front */ + /* T:listrep-3.5 */ + ListSizeT shiftCount = elemCount - numTailFree; + /* Divide remaining space between front and back */ + shiftCount += (listRep.storePtr->numAllocated - finalLen) / 2; + LIST_ASSERT(shiftCount <= listRep.storePtr->firstUsed); + if (shiftCount) { + /* T:listrep-3.5 */ + ListRepUnsharedShiftDown(&listRep, shiftCount); + } + } /* else T:listrep-3.{4,6} */ + ObjArrayCopy(&listRep.storePtr->slots[ListRepStart(&listRep) + + ListRepLength(&listRep)], + elemCount, + elemObjv); + listRep.storePtr->numUsed = finalLen; + if (listRep.spanPtr) { + /* T:listrep-3.{4,5,6} */ + LIST_ASSERT(listRep.spanPtr->spanStart + == listRep.storePtr->firstUsed); + listRep.spanPtr->spanLength = finalLen; + } /* else T:listrep-3.6.3 */ + LIST_ASSERT(ListRepStart(&listRep) == listRep.storePtr->firstUsed); + LIST_ASSERT(ListRepLength(&listRep) == finalLen); + LISTREP_CHECK(&listRep); + + ListObjReplaceRepAndInvalidate(toObj, &listRep); + return TCL_OK; } - ListResetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount++; - TclFreeInternalRep(listPtr); - ListSetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount--; - - /* - * Add objPtr to the end of listPtr's array of element pointers. Increment - * the ref count for the (now shared) objPtr. 
- */ - - listRepPtr->elements[listRepPtr->elemCount] = objPtr; - Tcl_IncrRefCount(objPtr); - listRepPtr->elemCount++; /* - * Invalidate any old string representation since the list's internal - * representation has changed. + * Have to make a new list rep, either shared or no room in old one. + * If the old list did not have a span (all elements at front), do + * not leave space in the front either, assuming all appends and no + * prepends. */ + if (ListRepInit(finalLen, + NULL, + listRep.spanPtr ? LISTREP_SPACE_FAVOR_BACK + : LISTREP_SPACE_ONLY_BACK, + &listRep) + != TCL_OK) { + return TCL_ERROR; + } + LIST_ASSERT(listRep.storePtr->numAllocated >= finalLen); - TclInvalidateStringRep(listPtr); + if (toLen) { + /* T:listrep-2.{2,9},4.5 */ + ObjArrayCopy(ListRepSlotPtr(&listRep, 0), toLen, toObjv); + } + ObjArrayCopy(ListRepSlotPtr(&listRep, toLen), elemCount, elemObjv); + listRep.storePtr->numUsed = finalLen; + if (listRep.spanPtr) { + /* T:listrep-4.5 */ + LIST_ASSERT(listRep.spanPtr->spanStart == listRep.storePtr->firstUsed); + listRep.spanPtr->spanLength = finalLen; + } + LISTREP_CHECK(&listRep); + ListObjReplaceRepAndInvalidate(toObj, &listRep); return TCL_OK; } /* *---------------------------------------------------------------------- * - * Tcl_ListObjIndex -- + * Tcl_ListObjAppendElement -- * - * Retrieve a pointer to the element of 'listPtr' at 'index'. The index - * of the first element is 0. + * This function is a special purpose version of Tcl_ListObjAppendList: + * it appends a single object referenced by elemObj to the list object + * referenced by toObj. If toObj is not already a list object, an + * attempt will be made to convert it to one. * - * Value + * Results: + * The return value is normally TCL_OK; in this case elemObj is added to + * the end of toObj's list. If toObj does not refer to a list object + * and the object can not be converted to one, TCL_ERROR is returned and + * an error message will be left in the interpreter's result if interp is + * not NULL. * - * TCL_OK + * Side effects: + * The ref count of elemObj is incremented since the list now refers to + * it. toObj will be converted, if necessary, to a list object. Also, + * appending the new element may cause listObj's array of element + * pointers to grow. toObj's old string representation, if any, is + * invalidated. * - * A pointer to the element at 'index' is stored in 'objPtrPtr'. If - * 'index' is out of range, NULL is stored in 'objPtrPtr'. This - * object should be treated as readonly and its 'refCount' is _not_ - * incremented. The caller must do that if it holds on to the - * reference. + *---------------------------------------------------------------------- + */ +int +Tcl_ListObjAppendElement( + Tcl_Interp *interp, /* Used to report errors if not NULL. */ + Tcl_Obj *toObj, /* List object to append elemObj to. */ + Tcl_Obj *elemObj) /* Object to append to toObj's list. */ +{ + /* + * TODO - compare perf with 8.6 to see if worth optimizing single + * element case + */ + return TclListObjAppendElements(interp, toObj, 1, &elemObj); +} + +/* + *---------------------------------------------------------------------- * - * TCL_ERROR + * Tcl_ListObjIndex -- * - * 'listPtr' is not a valid list. An an error message is left in the - * interpreter's result if 'interp' is not NULL. + * This function returns a pointer to the index'th object from the list + * referenced by listPtr. The first element has index 0. 
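With the single-element append now delegating to the bulk append, the caller-side reference-count pattern stays the same: a freshly created element may be handed over with refCount 0 and the list takes the reference it needs. A sketch for illustration only; MakeColorList and its contents are invented:

    #include <tcl.h>

    static Tcl_Obj *
    MakeColorList(Tcl_Interp *interp)
    {
        static const char *const colors[] = { "red", "green", "blue" };
        Tcl_Obj *listObj = Tcl_NewListObj(0, NULL);  /* fresh, unshared, refCount 0 */
        int i;

        for (i = 0; i < 3; i++) {
            /* Each element starts at refCount 0; the append takes a reference.
             * Appending to this freshly made list cannot fail to convert, so
             * the return values are not checked in this sketch. */
            Tcl_ListObjAppendElement(interp, listObj,
                    Tcl_NewStringObj(colors[i], -1));
        }
        return listObj;  /* caller decides whether to Tcl_IncrRefCount it */
    }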
If index is + * negative or greater than or equal to the number of elements in the + * list, a NULL is returned. If listPtr is not a list object, an attempt + * will be made to convert it to a list. * - * Effect + * Results: + * The return value is normally TCL_OK; in this case objPtrPtr is set to + * the Tcl_Obj pointer for the index'th list element or NULL if index is + * out of range. This object should be treated as readonly and its ref + * count is _not_ incremented; the caller must do that if it holds on to + * the reference. If listPtr does not refer to a list and can't be + * converted to one, TCL_ERROR is returned and an error message is left + * in the interpreter's result if interp is not NULL. * - * If 'listPtr' is not already of type 'tclListType', it is converted. + * Side effects: + * listPtr will be converted, if necessary, to a list object. * *---------------------------------------------------------------------- */ - int Tcl_ListObjIndex( - Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr, /* List object to index into. */ - int index, /* Index of element to return. */ - Tcl_Obj **objPtrPtr) /* The resulting Tcl_Obj* is stored here. */ + Tcl_Interp *interp, /* Used to report errors if not NULL. */ + Tcl_Obj *listObj, /* List object to index into. */ + ListSizeT index, /* Index of element to return. */ + Tcl_Obj **objPtrPtr) /* The resulting Tcl_Obj* is stored here. */ { - List *listRepPtr; - - ListGetInternalRep(listPtr, listRepPtr); - if (listRepPtr == NULL) { - int result; - int length; + Tcl_Obj **elemObjs; + ListSizeT numElems; - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - *objPtrPtr = NULL; - return TCL_OK; - } - result = SetListFromAny(interp, listPtr); - if (result != TCL_OK) { - return result; - } - ListGetInternalRep(listPtr, listRepPtr); + /* + * TODO + * Unlike the original list code, this does not optimize for lindex'ing + * an empty string when the internal rep is not already a list. On the + * other hand, this code will be faster for the case where the object + * is currently a dict. Benchmark the two cases. + */ + if (TclListObjGetElementsM(interp, listObj, &numElems, &elemObjs) + != TCL_OK) { + return TCL_ERROR; } - - if ((index < 0) || (index >= listRepPtr->elemCount)) { + if ((index < 0) || (index >= numElems)) { *objPtrPtr = NULL; } else { - *objPtrPtr = listRepPtr->elements[index]; + *objPtrPtr = elemObjs[index]; } return TCL_OK; @@ -866,20 +1963,19 @@ Tcl_ListObjIndex( * * Tcl_ListObjLength -- * - * Retrieve the number of elements in a list. - * - * Value + * This function returns the number of elements in a list object. If the + * object is not already a list object, an attempt will be made to + * convert it to one. * - * TCL_OK - * - * A count of list elements is stored at the address provided by - * 'intPtr'. If 'listPtr' is not already of type 'tclListPtr', it is - * converted. - * - * TCL_ERROR + * Results: + * The return value is normally TCL_OK; in this case *intPtr will be set + * to the integer count of list elements. If listPtr does not refer to a + * list object and the object can not be converted to one, TCL_ERROR is + * returned and an error message will be left in the interpreter's result + * if interp is not NULL. * - * 'listPtr' is not a valid list. An error message will be left in - * the interpreter's result if 'interp' is not NULL. + * Side effects: + * The possible conversion of the argument object to a list object. 
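One detail of the contract above that call sites often get wrong: an out-of-range index is not an error, it yields TCL_OK with a NULL element pointer. A minimal illustrative wrapper (NthOrNull is not a real Tcl routine):

    #include <tcl.h>

    static Tcl_Obj *
    NthOrNull(Tcl_Interp *interp, Tcl_Obj *listObj, int n)
    {
        Tcl_Obj *elemObj = NULL;

        if (Tcl_ListObjIndex(interp, listObj, n, &elemObj) != TCL_OK) {
            return NULL;    /* not a list and could not be converted */
        }
        /* Out-of-range n is not an error: TCL_OK with elemObj == NULL. The
         * returned pointer is borrowed; Tcl_IncrRefCount it to keep it. */
        return elemObj;
    }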
* *---------------------------------------------------------------------- */ @@ -887,30 +1983,23 @@ Tcl_ListObjIndex( #undef Tcl_ListObjLength int Tcl_ListObjLength( - Tcl_Interp *interp, /* Used to report errors if not NULL. */ - Tcl_Obj *listPtr, /* List object whose #elements to return. */ - int *intPtr) /* The resulting length is stored here. */ + Tcl_Interp *interp, /* Used to report errors if not NULL. */ + Tcl_Obj *listObj, /* List object whose #elements to return. */ + ListSizeT *lenPtr) /* The resulting int is stored here. */ { - List *listRepPtr; - - ListGetInternalRep(listPtr, listRepPtr); - if (listRepPtr == NULL) { - int result; - int length; + ListRep listRep; - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - *intPtr = 0; - return TCL_OK; - } - result = SetListFromAny(interp, listPtr); - if (result != TCL_OK) { - return result; - } - ListGetInternalRep(listPtr, listRepPtr); + /* + * TODO + * Unlike the original list code, this does not optimize for lindex'ing + * an empty string when the internal rep is not already a list. On the + * other hand, this code will be faster for the case where the object + * is currently a dict. Benchmark the two cases. + */ + if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK) { + return TCL_ERROR; } - - *intPtr = listRepPtr->elemCount; + *lenPtr = ListRepLength(&listRep); return TCL_OK; } @@ -919,296 +2008,498 @@ Tcl_ListObjLength( * * Tcl_ListObjReplace -- * - * Replace values in a list. - * - * If 'first' is zero or TCL_INDEX_NONE, it refers to the first element. If - * 'first' outside the range of elements in the list, no elements are - * deleted. + * This function replaces zero or more elements of the list referenced by + * listObj with the objects from an (objc,objv) array. The objc elements + * of the array referenced by objv replace the count elements in listPtr + * starting at first. * - * If 'count' is zero or TCL_INDEX_NONE no elements are deleted, and any new - * elements are inserted at the beginning of the list. + * If the argument first is zero or negative, it refers to the first + * element. If first is greater than or equal to the number of elements + * in the list, then no elements are deleted; the new elements are + * appended to the list. Count gives the number of elements to replace. + * If count is zero or negative then no elements are deleted; the new + * elements are simply inserted before first. * - * Value + * The argument objv refers to an array of objc pointers to the new + * elements to be added to listPtr in place of those that were deleted. + * If objv is NULL, no new elements are added. If listPtr is not a list + * object, an attempt will be made to convert it to one. * - * TCL_OK - * - * The first 'objc' values of 'objv' replaced 'count' elements in 'listPtr' - * starting at 'first'. If 'objc' 0, no new elements are added. - * - * TCL_ERROR - * - * 'listPtr' is not a valid list. An error message is left in the - * interpreter's result if 'interp' is not NULL. - * - * Effect - * - * If 'listPtr' is not of type 'tclListType', it is converted if possible. - * - * The 'refCount' of each element appended to the list is incremented. - * Similarly, the 'refCount' for each replaced element is decremented. + * Results: + * The return value is normally TCL_OK. If listPtr does not refer to a + * list object and can not be converted to one, TCL_ERROR is returned and + * an error message will be left in the interpreter's result if interp is + * not NULL. 
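Given the clamping rules just described (a negative first meaning the first element, a first at or past the end meaning append, a non-positive count meaning pure insertion), an [lreplace]-style call reduces to a single Tcl_ListObjReplace. A hedged sketch; ReplaceWithPair is an invented helper and the list must be unshared, as the contract requires:

    #include <tcl.h>

    /* C-level counterpart of [lreplace $list $first $last x y]: delete
     * 'count' elements starting at 'first' and insert two values there. */
    static int
    ReplaceWithPair(Tcl_Interp *interp, Tcl_Obj *listObj, int first, int count,
            Tcl_Obj *xObj, Tcl_Obj *yObj)
    {
        Tcl_Obj *insertObjs[2];

        insertObjs[0] = xObj;
        insertObjs[1] = yObj;
        /* Out-of-range first/count are clamped; the inserted values gain new
         * references and the deleted ones are released. */
        return Tcl_ListObjReplace(interp, listObj, first, count, 2, insertObjs);
    }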
* - * If 'listPtr' is modified, any previous string representation is - * invalidated. + * Side effects: + * The ref counts of the objc elements in objv are incremented since the + * resulting list now refers to them. Similarly, the ref counts for + * replaced objects are decremented. listObj is converted, if necessary, + * to a list object. listObj's old string representation, if any, is + * freed. * *---------------------------------------------------------------------- */ - int Tcl_ListObjReplace( Tcl_Interp *interp, /* Used for error reporting if not NULL. */ - Tcl_Obj *listPtr, /* List object whose elements to replace. */ - int first, /* Index of first element to replace. */ - int count, /* Number of elements to replace. */ - int objc, /* Number of objects to insert. */ - Tcl_Obj *const objv[]) /* An array of objc pointers to Tcl objects to - * insert. */ + Tcl_Obj *listObj, /* List object whose elements to replace. */ + ListSizeT first, /* Index of first element to replace. */ + ListSizeT numToDelete, /* Number of elements to replace. */ + ListSizeT numToInsert, /* Number of objects to insert. */ + Tcl_Obj *const insertObjs[])/* Tcl objects to insert */ { - List *listRepPtr; - Tcl_Obj **elemPtrs; - int numElems, numRequired, numAfterLast, start, i, j; - int needGrow, isShared; - - if (Tcl_IsShared(listPtr)) { + ListRep listRep; + ListSizeT origListLen; + int lenChange; + int leadSegmentLen; + int tailSegmentLen; + ListSizeT numFreeSlots; + int leadShift; + int tailShift; + Tcl_Obj **listObjs; + int favor; + + if (Tcl_IsShared(listObj)) { Tcl_Panic("%s called with shared object", "Tcl_ListObjReplace"); } - ListGetInternalRep(listPtr, listRepPtr); - if (listRepPtr == NULL) { - int length; + if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK) + return TCL_ERROR; /* Cannot be converted to a list */ - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - if (objc == 0) { - return TCL_OK; - } - Tcl_SetListObj(listPtr, objc, NULL); - } else { - int result = SetListFromAny(interp, listPtr); + /* TODO - will need modification if Tcl9 sticks to unsigned indices */ - if (result != TCL_OK) { - return result; - } - } - ListGetInternalRep(listPtr, listRepPtr); + /* Make limits sane */ + origListLen = ListRepLength(&listRep); + if (first < 0) { + first = 0; + } + if (first > origListLen) { + first = origListLen; /* So we'll insert after last element. */ + } + if (numToDelete < 0) { + numToDelete = 0; + } else if (first > ListSizeT_MAX - numToDelete /* Handle integer overflow */ + || origListLen < first + numToDelete) { + numToDelete = origListLen - first; + } + + if (numToInsert > ListSizeT_MAX - (origListLen - numToDelete)) { + return ListLimitExceededError(interp); + } + + if ((first+numToDelete) >= origListLen) { + /* Operating at back of list. Favor leaving space at back */ + favor = LISTREP_SPACE_FAVOR_BACK; + } else if (first == 0) { + /* Operating on front of list. Favor leaving space in front */ + favor = LISTREP_SPACE_FAVOR_FRONT; + } else { + /* Operating on middle of list. */ + favor = LISTREP_SPACE_FAVOR_NONE; } /* - * Note that when count == 0 and objc == 0, this routine is logically a - * no-op, removing and adding no elements to the list. However, by flowing - * through this routine anyway, we get the important side effect that the - * resulting listPtr is a list in canoncial form. This is important. - * Resist any temptation to optimize this case. + * There are a number of special cases to consider from an optimization + * point of view. 
+ * (1) Pure deletes (numToInsert==0) from the front or back can be treated + * as a range op irrespective of whether the ListStore is shared or not + * (2) Pure inserts (numToDelete == 0) + * (2a) Pure inserts at the back can be treated as appends + * (2b) Pure inserts from the *front* can be optimized under certain + * conditions by inserting before first ListStore slot in use if there + * is room, again irrespective of sharing + * (3) If the ListStore is shared OR there is insufficient free space + * OR existing allocation is too large compared to new size, create + * a new ListStore + * (4) Unshared ListStore with sufficient free space. Delete, shift and + * insert within the ListStore. */ - elemPtrs = listRepPtr->elements; - numElems = listRepPtr->elemCount; + /* Note: do not do TclInvalidateStringRep as yet in case there are errors */ - if (first < 0) { - first = 0; - } - if (first >= numElems) { - first = numElems; /* So we'll insert after last element. */ + /* Check Case (1) - Treat pure deletes from front or back as range ops */ + if (numToInsert == 0) { + if (numToDelete == 0) { + /* + * Should force canonical even for no-op. Remember Tcl_Obj unshared + * so OK to invalidate string rep + */ + /* T:listrep-1.10,2.8 */ + TclInvalidateStringRep(listObj); + return TCL_OK; + } + if (first == 0) { + /* Delete from front, so return tail. */ + /* T:listrep-1.{4,5},2.{4,5},3.{15,16},4.7 */ + ListRep tailRep; + ListRepRange(&listRep, numToDelete, origListLen-1, 0, &tailRep); + ListObjReplaceRepAndInvalidate(listObj, &tailRep); + return TCL_OK; + } else if ((first+numToDelete) >= origListLen) { + /* Delete from tail, so return head */ + /* T:listrep-1.{8,9},2.{6,7},3.{17,18},4.8 */ + ListRep headRep; + ListRepRange(&listRep, 0, first-1, 0, &headRep); + ListObjReplaceRepAndInvalidate(listObj, &headRep); + return TCL_OK; + } + /* Deletion from middle. Fall through to general case */ } - if (count < 0) { - count = 0; - } else if (count > LIST_MAX /* Handle integer overflow */ - || numElems < first+count) { - count = numElems - first; - } + /* Garbage collect before checking the pure insert optimization */ + ListRepFreeUnreferenced(&listRep); - if (objc > LIST_MAX - (numElems - count)) { - if (interp != NULL) { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "max length of a Tcl list (%d elements) exceeded", - LIST_MAX)); + /* + * Check Case (2) - pure inserts under certain conditions: + */ + if (numToDelete == 0) { + /* Case (2a) - Append to list. */ + if (first == origListLen) { + /* T:listrep-1.11,2.9,3.{5,6},2.2.1 */ + return TclListObjAppendElements( + interp, listObj, numToInsert, insertObjs); + } + + /* + * Case (2b) - pure inserts at front under some circumstances + * (i) Insertion must be at head of list + * (ii) The list's span must be at head of the in-use slots in the store + * (iii) There must be unused room at front of the store + * NOTE THIS IS TRUE EVEN IF THE ListStore IS SHARED as it will not + * affect the other Tcl_Obj's referencing this ListStore. 
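Seen from the public API, the case (2b) fast path is simply a pure insert at index 0; whether the store can absorb it in front is an internal detail. A one-call illustration (PrependElement is an invented name; the list must be unshared):

    #include <tcl.h>

    static int
    PrependElement(Tcl_Interp *interp, Tcl_Obj *listObj, Tcl_Obj *elemObj)
    {
        /* first == 0 and count == 0: delete nothing, insert before element 0. */
        return Tcl_ListObjReplace(interp, listObj, 0, 0, 1, &elemObj);
    }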
+ */ + if (first == 0 && /* (i) */ + ListRepStart(&listRep) == listRep.storePtr->firstUsed && /* (ii) */ + numToInsert <= listRep.storePtr->firstUsed /* (iii) */ + ) { + ListSizeT newLen; + LIST_ASSERT(numToInsert); /* Else would have returned above */ + listRep.storePtr->firstUsed -= numToInsert; + ObjArrayCopy(&listRep.storePtr->slots[listRep.storePtr->firstUsed], + numToInsert, + insertObjs); + listRep.storePtr->numUsed += numToInsert; + newLen = listRep.spanPtr->spanLength + numToInsert; + if (listRep.spanPtr && listRep.spanPtr->refCount <= 1) { + /* An unshared span record, re-use it */ + /* T:listrep-3.1 */ + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = newLen; + } else { + /* Need a new span record */ + if (listRep.storePtr->firstUsed == 0) { + listRep.spanPtr = NULL; + } else { + /* T:listrep-4.3 */ + listRep.spanPtr = + ListSpanNew(listRep.storePtr->firstUsed, newLen); + } + } + ListObjReplaceRepAndInvalidate(listObj, &listRep); + return TCL_OK; } - return TCL_ERROR; } - isShared = (listRepPtr->refCount > 1); - numRequired = numElems - count + objc; /* Known <= LIST_MAX */ - needGrow = numRequired > listRepPtr->maxElemCount; - for (i = 0; i < objc; i++) { - Tcl_IncrRefCount(objv[i]); + /* Just for readability of the code */ + lenChange = numToInsert - numToDelete; + leadSegmentLen = first; + tailSegmentLen = origListLen - (first + numToDelete); + numFreeSlots = listRep.storePtr->numAllocated - listRep.storePtr->numUsed; + + /* + * Before further processing, if unshared, try and reallocate to avoid + * new allocation below. This avoids expensive ref count manipulation + * later by not having to go through the ListRepInit and + * ListObjReplaceAndInvalidate below. + * TODO - we could be smarter about the reallocate. Use of realloc + * means all new free space is at the back. Instead, the realloc could + * be an explicit alloc and memmove which would let us redistribute + * free space. + */ + if (numFreeSlots < lenChange && !ListRepIsShared(&listRep)) { + /* T:listrep-1.{1,3,14,18,21},3.{3,10,11,14,27,32,41} */ + ListStore *newStorePtr = + ListStoreReallocate(listRep.storePtr, origListLen + lenChange); + if (newStorePtr == NULL) { + return MemoryAllocationError(interp, + LIST_SIZE(origListLen + lenChange)); + } + listRep.storePtr = newStorePtr; + numFreeSlots = + listRep.storePtr->numAllocated - listRep.storePtr->numUsed; + /* + * WARNING: at this point the Tcl_Obj internal rep potentially + * points to freed storage if the reallocation returned a + * different location. Overwrite it to bring it back in sync. 
+ */ + ListObjStompRep(listObj, &listRep); } - if (needGrow && !isShared) { - /* Try to use realloc */ - List *newPtr = NULL; - int attempt = 2 * numRequired; - if (attempt <= LIST_MAX) { - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); + /* + * Case (3) a new ListStore is required + * (a) The passed-in ListStore is shared + * (b) There is not enough free space in the unshared passed-in ListStore + * (c) The new unshared size is much "smaller" (TODO) than the allocated space + * TODO - for unshared case ONLY, consider a "move" based implementation + */ + if (ListRepIsShared(&listRep) || /* 3a */ + numFreeSlots < lenChange || /* 3b */ + (origListLen + lenChange) < (listRep.storePtr->numAllocated / 4) /* 3c */ + ) { + ListRep newRep; + Tcl_Obj **toObjs; + listObjs = &listRep.storePtr->slots[ListRepStart(&listRep)]; + ListRepInit(origListLen + lenChange, + NULL, + LISTREP_PANIC_ON_FAIL | favor, + &newRep); + toObjs = ListRepSlotPtr(&newRep, 0); + if (leadSegmentLen > 0) { + /* T:listrep-2.{2,3,13:18},4.{6,9,13:18} */ + ObjArrayCopy(toObjs, leadSegmentLen, listObjs); } - if (newPtr == NULL) { - attempt = numRequired + 1 + TCL_MIN_ELEMENT_GROWTH; - if (attempt > LIST_MAX) { - attempt = LIST_MAX; - } - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); + if (numToInsert > 0) { + /* T:listrep-2.{1,2,3,10:18},4.{1,2,4,6,10:18} */ + ObjArrayCopy(&toObjs[leadSegmentLen], + numToInsert, + insertObjs); } - if (newPtr == NULL) { - attempt = numRequired; - newPtr = (List *)attemptckrealloc(listRepPtr, LIST_SIZE(attempt)); + if (tailSegmentLen > 0) { + /* T:listrep-2.{1,2,3,10:15},4.{1,2,4,6,9:12,16:18} */ + ObjArrayCopy(&toObjs[leadSegmentLen + numToInsert], + tailSegmentLen, + &listObjs[leadSegmentLen+numToDelete]); } - if (newPtr) { - listRepPtr = newPtr; - ListResetInternalRep(listPtr, listRepPtr); - elemPtrs = listRepPtr->elements; - listRepPtr->maxElemCount = attempt; - needGrow = numRequired > listRepPtr->maxElemCount; + newRep.storePtr->numUsed = origListLen + lenChange; + if (newRep.spanPtr) { + /* T:listrep-2.{1,2,3,10:18},4.{1,2,4,6,9:18} */ + newRep.spanPtr->spanLength = newRep.storePtr->numUsed; } + LISTREP_CHECK(&newRep); + ListObjReplaceRepAndInvalidate(listObj, &newRep); + return TCL_OK; } - if (!needGrow && !isShared) { - int shift; - /* - * Can use the current List struct. First "delete" count elements - * starting at first. - */ + /* + * Case (4) - unshared ListStore with sufficient room. + * After deleting elements, there will be a corresponding gap. If this + * gap does not match number of insertions, either the lead segment, + * or the tail segment, or both will have to be moved. + * The general strategy is to move the fewest number of elements. If + * + * TODO - what about appends to unshared ? Is below sufficiently optimal? + */ - for (j = first; j < first + count; j++) { - Tcl_Obj *victimPtr = elemPtrs[j]; + /* Following must hold for unshared listreps after ListRepFreeUnreferenced above */ + LIST_ASSERT(origListLen == listRep.storePtr->numUsed); + LIST_ASSERT(origListLen == ListRepLength(&listRep)); + LIST_ASSERT(ListRepStart(&listRep) == listRep.storePtr->firstUsed); - TclDecrRefCount(victimPtr); - } + LIST_ASSERT((numToDelete + numToInsert) > 0); - /* - * Shift the elements after the last one removed to their new - * locations. - */ + /* Base of slot array holding the list elements */ + listObjs = &listRep.storePtr->slots[ListRepStart(&listRep)]; + + /* + * Free up elements to be deleted. 
Before that, increment the ref counts + * for objects to be inserted in case there is overlap. T:listobj-11.1 + */ + if (numToInsert) { + /* T:listrep-1.{1,3,12:21},3.{2,3,7:14,23:41} */ + ObjArrayIncrRefs(insertObjs, 0, numToInsert); + } + if (numToDelete) { + /* T:listrep-1.{6,7,12:21},3.{19:41} */ + ObjArrayDecrRefs(listObjs, first, numToDelete); + } - start = first + count; - numAfterLast = numElems - start; - shift = objc - count; /* numNewElems - numDeleted */ - if ((numAfterLast > 0) && (shift != 0)) { - Tcl_Obj **src = elemPtrs + start; + /* + * TODO - below the moves are optimized but this may result in needing a + * span allocation. Perhaps for small lists, it may be more efficient to + * just move everything up front and save on allocating a span. + */ - memmove(src+shift, src, numAfterLast * sizeof(Tcl_Obj*)); - } - } else { + /* + * Calculate shifts if necessary to accomodate insertions. + * NOTE: all indices are relative to listObjs which is not necessarily the + * start of the ListStore storage area. + * + * leadShift - how much to shift the lead segment + * tailShift - how much to shift the tail segment + * insertTarget - index where to insert. + */ + + if (lenChange == 0) { + /* T:listrep-1.{12,15,19},3.{23,28,33}. Exact fit */ + leadShift = 0; + tailShift = 0; + } else if (lenChange < 0) { /* - * Cannot use the current List struct; it is shared, too small, or - * both. Allocate a new struct and insert elements into it. + * More deletions than insertions. The gap after deletions is large + * enough for insertions. Move a segment depending on size. */ - - List *oldListRepPtr = listRepPtr; - Tcl_Obj **oldPtrs = elemPtrs; - int newMax; - - if (needGrow) { - newMax = 2 * numRequired; + if (leadSegmentLen > tailSegmentLen) { + /* Tail segment smaller. Insert after lead, move tail down */ + /* T:listrep-1.{7,17,20},3.{21,2229,35} */ + leadShift = 0; + tailShift = lenChange; } else { - newMax = listRepPtr->maxElemCount; + /* Lead segment smaller. Insert before tail, move lead up */ + /* T:listrep-1.{6,13,16},3.{19,20,24,34} */ + leadShift = -lenChange; + tailShift = 0; } + } else { + LIST_ASSERT(lenChange > 0); /* Reminder */ - listRepPtr = AttemptNewList(NULL, newMax, NULL); - if (listRepPtr == NULL) { - unsigned int limit = LIST_MAX - numRequired; - unsigned int extra = numRequired - numElems - + TCL_MIN_ELEMENT_GROWTH; - int growth = (int) ((extra > limit) ? limit : extra); - - listRepPtr = AttemptNewList(NULL, numRequired + growth, NULL); - if (listRepPtr == NULL) { - listRepPtr = AttemptNewList(interp, numRequired, NULL); - if (listRepPtr == NULL) { - for (i = 0; i < objc; i++) { - /* See bug 3598580 */ -#if TCL_MAJOR_VERSION > 8 - Tcl_DecrRefCount(objv[i]); -#else - objv[i]->refCount--; -#endif - } - return TCL_ERROR; + /* + * We need to make room for the insertions. Again we have multiple + * possibilities. We may be able to get by just shifting one segment + * or need to shift both. In the former case, favor shifting the + * smaller segment. 
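The overlap hazard mentioned above (T:listobj-11.1) can be reproduced through the public API alone, since the values being inserted may be the very objects being deleted. An illustrative sketch; ReinsertFirst is a made-up helper and the list must be unshared:

    #include <tcl.h>

    static int
    ReinsertFirst(Tcl_Interp *interp, Tcl_Obj *listObj)
    {
        Tcl_Obj *firstObj = NULL;

        if (Tcl_ListObjIndex(interp, listObj, 0, &firstObj) != TCL_OK
                || firstObj == NULL) {
            return TCL_ERROR;   /* not a list, or an empty one */
        }
        /* Delete elements 0 and 1 and insert the old element 0 back, i.e.
         * [lreplace $l 0 1 [lindex $l 0]]. firstObj is among the deleted
         * elements, so it survives only because the replace code takes its
         * new reference before releasing the old ones. */
        return Tcl_ListObjReplace(interp, listObj, 0, 2, 1, &firstObj);
    }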
+ */ + int leadSpace = ListRepNumFreeHead(&listRep); + int tailSpace = ListRepNumFreeTail(&listRep); + int finalFreeSpace = leadSpace + tailSpace - lenChange; + + LIST_ASSERT((leadSpace + tailSpace) >= lenChange); + if (leadSpace >= lenChange + && (leadSegmentLen < tailSegmentLen || tailSpace < lenChange)) { + /* Move only lead to the front to make more room */ + /* T:listrep-3.25,36,38, */ + leadShift = -lenChange; + tailShift = 0; + /* + * Redistribute the remaining free space between the front and + * back if either there is no tail space left or if the + * entire list is the head anyways. This is an important + * optimization for further operations like further asymmetric + * insertions. + */ + if (finalFreeSpace > 1 && (tailSpace == 0 || tailSegmentLen == 0)) { + int postShiftLeadSpace = leadSpace - lenChange; + if (postShiftLeadSpace > (finalFreeSpace/2)) { + ListSizeT extraShift = postShiftLeadSpace - (finalFreeSpace / 2); + leadShift -= extraShift; + tailShift = -extraShift; /* Move tail to the front as well */ } - } - } - - ListResetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount++; - - elemPtrs = listRepPtr->elements; - - if (isShared) { + } /* else T:listrep-3.{7,12,25,38} */ + LIST_ASSERT(leadShift >= 0 || leadSpace >= -leadShift); + } else if (tailSpace >= lenChange) { + /* Move only tail segment to the back to make more room. */ + /* T:listrep-3.{8,10,11,14,26,27,30,32,37,39,41} */ + leadShift = 0; + tailShift = lenChange; /* - * The old struct will remain in place; need new refCounts for the - * new List struct references. Copy over only the surviving - * elements. + * See comments above. This is analogous. */ - - for (i=0; i < first; i++) { - elemPtrs[i] = oldPtrs[i]; - Tcl_IncrRefCount(elemPtrs[i]); - } - for (i = first + count, j = first + objc; - j < numRequired; i++, j++) { - elemPtrs[j] = oldPtrs[i]; - Tcl_IncrRefCount(elemPtrs[j]); + if (finalFreeSpace > 1 && (leadSpace == 0 || leadSegmentLen == 0)) { + int postShiftTailSpace = tailSpace - lenChange; + if (postShiftTailSpace > (finalFreeSpace/2)) { + /* T:listrep-1.{1,3,14,18,21},3.{2,3,26,27} */ + ListSizeT extraShift = postShiftTailSpace - (finalFreeSpace / 2); + tailShift += extraShift; + leadShift = extraShift; /* Move head to the back as well */ + } } - - oldListRepPtr->refCount--; + LIST_ASSERT(tailShift <= tailSpace); } else { /* - * The old struct will be removed; use its inherited refCounts. + * Both lead and tail need to be shifted to make room. + * Divide remaining free space equally between front and back. */ - - if (first > 0) { - memcpy(elemPtrs, oldPtrs, first * sizeof(Tcl_Obj *)); - } + /* T:listrep-3.{9,13,31,40} */ + LIST_ASSERT(leadSpace < lenChange); + LIST_ASSERT(tailSpace < lenChange); /* - * "Delete" count elements starting at first. + * leadShift = leadSpace - (finalFreeSpace/2) + * Thus leadShift <= leadSpace + * Also, + * = leadSpace - (leadSpace + tailSpace - lenChange)/2 + * = leadSpace/2 - tailSpace/2 + lenChange/2 + * >= 0 because lenChange > tailSpace */ - - for (j = first; j < first + count; j++) { - Tcl_Obj *victimPtr = oldPtrs[j]; - - TclDecrRefCount(victimPtr); + leadShift = leadSpace - (finalFreeSpace / 2); + tailShift = lenChange - leadShift; + if (tailShift > tailSpace) { + /* Account for integer division errors */ + leadShift += 1; + tailShift -= 1; } - /* - * Copy the elements after the last one removed, shifted to their - * new locations. + * Following must be true because otherwise one of the previous + * if clauses would have been taken. 
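As a concrete check of that derivation, with numbers chosen purely for illustration: leadSpace = 2, tailSpace = 3 and lenChange = 4 give finalFreeSpace = 2 + 3 - 4 = 1, hence leadShift = 2 - (1/2) = 2 under integer division and tailShift = 4 - 2 = 2; tailShift stays within tailSpace, both shifts lie strictly between 0 and lenChange, and leadShift is then negated because the lead segment actually moves downward.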
*/ - - start = first + count; - numAfterLast = numElems - start; - if (numAfterLast > 0) { - memcpy(elemPtrs + first + objc, oldPtrs + start, - (size_t) numAfterLast * sizeof(Tcl_Obj *)); - } - - ckfree(oldListRepPtr); + LIST_ASSERT(leadShift > 0 && leadShift < lenChange); + LIST_ASSERT(tailShift > 0 && tailShift < lenChange); + leadShift = -leadShift; /* Lead is actually shifted downward */ } } - /* - * Insert the new elements into elemPtrs before "first". - */ - - for (i=0,j=first ; i<objc ; i++,j++) { - elemPtrs[j] = objv[i]; + /* Careful about order of moves! */ + if (leadShift > 0) { + /* Will happen when we have to make room at bottom */ + if (tailShift != 0 && tailSegmentLen != 0) { + /* T:listrep-1.{1,3,14,18},3.{2,3,26,27} */ + ListSizeT tailStart = leadSegmentLen + numToDelete; + memmove(&listObjs[tailStart + tailShift], + &listObjs[tailStart], + tailSegmentLen * sizeof(Tcl_Obj *)); + } + if (leadSegmentLen != 0) { + /* T:listrep-1.{3,6,16,18,21},3.{19,20,34} */ + memmove(&listObjs[leadShift], + &listObjs[0], + leadSegmentLen * sizeof(Tcl_Obj *)); + } + } else { + if (leadShift != 0 && leadSegmentLen != 0) { + /* T:listrep-3.{7,9,12,13,31,36,38,40} */ + memmove(&listObjs[leadShift], + &listObjs[0], + leadSegmentLen * sizeof(Tcl_Obj *)); + } + if (tailShift != 0 && tailSegmentLen != 0) { + /* T:listrep-1.{7,17},3.{8:11,13,14,21,22,35,37,39:41} */ + ListSizeT tailStart = leadSegmentLen + numToDelete; + memmove(&listObjs[tailStart + tailShift], + &listObjs[tailStart], + tailSegmentLen * sizeof(Tcl_Obj *)); + } + } + if (numToInsert) { + /* Do NOT use ObjArrayCopy here since we have already incr'ed ref counts */ + /* T:listrep-1.{1,3,12:21},3.{2,3,7:14,23:41} */ + memmove(&listObjs[leadSegmentLen + leadShift], + insertObjs, + numToInsert * sizeof(Tcl_Obj *)); } - /* - * Update the count of elements. - */ - - listRepPtr->elemCount = numRequired; - - /* - * Invalidate and free any old representations that may not agree - * with the revised list's internal representation. - */ + listRep.storePtr->firstUsed += leadShift; + listRep.storePtr->numUsed = origListLen + lenChange; + listRep.storePtr->flags = 0; - listRepPtr->refCount++; - TclFreeInternalRep(listPtr); - ListSetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount--; + if (listRep.spanPtr && listRep.spanPtr->refCount <= 1) { + /* An unshared span record, re-use it, even if not required */ + /* T:listrep-3.{2,3,7:14},3.{19:41} */ + listRep.spanPtr->spanStart = listRep.storePtr->firstUsed; + listRep.spanPtr->spanLength = listRep.storePtr->numUsed; + } else { + /* Need a new span record */ + if (listRep.storePtr->firstUsed == 0) { + /* T:listrep-1.{7,12,15,17,19,20} */ + listRep.spanPtr = NULL; + } else { + /* T:listrep-1.{1,3,6.1,13,14,16,18,21} */ + listRep.spanPtr = ListSpanNew(listRep.storePtr->firstUsed, + listRep.storePtr->numUsed); + } + } - TclInvalidateStringRep(listPtr); + LISTREP_CHECK(&listRep); + ListObjReplaceRepAndInvalidate(listObj, &listRep); return TCL_OK; } @@ -1217,48 +2508,49 @@ Tcl_ListObjReplace( * * TclLindexList -- * - * Implements the 'lindex' command when objc==3. - * - * Implemented entirely as a wrapper around 'TclLindexFlat'. Reconfigures - * the argument format into required form while taking care to manage - * shimmering so as to tend to keep the most useful internalreps - * and/or avoid the most expensive conversions. + * This procedure handles the 'lindex' command when objc==3. * - * Value + * Results: + * Returns a pointer to the object extracted, or NULL if an error + * occurred. 
The returned object already includes one reference count for + * the pointer returned. * - * A pointer to the specified element, with its 'refCount' incremented, or - * NULL if an error occurred. + * Side effects: + * None. * - * Notes + * Notes: + * This procedure is implemented entirely as a wrapper around + * TclLindexFlat. All it does is reconfigure the argument format into the + * form required by TclLindexFlat, while taking care to manage shimmering + * in such a way that we tend to keep the most useful internalreps and/or + * avoid the most expensive conversions. * *---------------------------------------------------------------------- */ - Tcl_Obj * TclLindexList( Tcl_Interp *interp, /* Tcl interpreter. */ - Tcl_Obj *listPtr, /* List being unpacked. */ - Tcl_Obj *argPtr) /* Index or index list. */ + Tcl_Obj *listObj, /* List being unpacked. */ + Tcl_Obj *argObj) /* Index or index list. */ { - - int index; /* Index into the list. */ + ListSizeT index; /* Index into the list. */ Tcl_Obj *indexListCopy; - List *listRepPtr; + Tcl_Obj **indexObjs; + ListSizeT numIndexObjs; /* * Determine whether argPtr designates a list or a single index. We have * to be careful about the order of the checks to avoid repeated - * shimmering; see TIP#22 and TIP#33 for the details. + * shimmering; if internal rep is already a list do not shimmer it. + * see TIP#22 and TIP#33 for the details. */ - - ListGetInternalRep(argPtr, listRepPtr); - if ((listRepPtr == NULL) - && TclGetIntForIndexM(NULL , argPtr, INT_MAX - 1, &index) == TCL_OK) { + if (!TclHasInternalRep(argObj, &tclListType) + && TclGetIntForIndexM(NULL, argObj, ListSizeT_MAX - 1, &index) + == TCL_OK) { /* * argPtr designates a single index. */ - - return TclLindexFlat(interp, listPtr, 1, &argPtr); + return TclLindexFlat(interp, listObj, 1, &argObj); } /* @@ -1273,61 +2565,61 @@ TclLindexList( * implementation does not. */ - indexListCopy = TclListObjCopy(NULL, argPtr); + indexListCopy = TclListObjCopy(NULL, argObj); if (indexListCopy == NULL) { /* - * argPtr designates something that is neither an index nor a - * well-formed list. Report the error via TclLindexFlat. + * The argument is neither an index nor a well-formed list. + * Report the error via TclLindexFlat. + * TODO - This is as original. why not directly return an error? */ - - return TclLindexFlat(interp, listPtr, 1, &argPtr); + return TclLindexFlat(interp, listObj, 1, &argObj); } - ListGetInternalRep(indexListCopy, listRepPtr); - - assert(listRepPtr != NULL); - - listPtr = TclLindexFlat(interp, listPtr, listRepPtr->elemCount, - listRepPtr->elements); + ListObjGetElements(indexListCopy, numIndexObjs, indexObjs); + listObj = TclLindexFlat(interp, listObj, numIndexObjs, indexObjs); Tcl_DecrRefCount(indexListCopy); - return listPtr; + return listObj; } /* *---------------------------------------------------------------------- * - * TclLindexFlat -- - * - * The core of the 'lindex' command, with all index - * arguments presented as a flat list. + * TclLindexFlat -- * - * Value + * This procedure is the core of the 'lindex' command, with all index + * arguments presented as a flat list. * - * A pointer to the object extracted, with its 'refCount' incremented, or - * NULL if an error occurred. Thus, the calling code will usually do - * something like: + * Results: + * Returns a pointer to the object extracted, or NULL if an error + * occurred. The returned object already includes one reference count for + * the pointer returned. 
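When only a fixed number of index levels is needed, the reference-count discipline described here can be followed with public calls alone, at the cost of the shimmering protection that TclLindexFlat itself provides. An illustrative sketch (GetNestedElem is not a real routine):

    #include <tcl.h>

    /* Roughly [lindex $listObj $i $j]: returns an element carrying one extra
     * reference that the caller must eventually release, or NULL on error or
     * when either index is out of range. */
    static Tcl_Obj *
    GetNestedElem(Tcl_Interp *interp, Tcl_Obj *listObj, int i, int j)
    {
        Tcl_Obj *subObj = NULL, *elemObj = NULL;

        if (Tcl_ListObjIndex(interp, listObj, i, &subObj) != TCL_OK
                || subObj == NULL) {
            return NULL;
        }
        Tcl_IncrRefCount(subObj);       /* keep the sublist alive while indexing it */
        if (Tcl_ListObjIndex(interp, subObj, j, &elemObj) == TCL_OK
                && elemObj != NULL) {
            Tcl_IncrRefCount(elemObj);  /* the reference handed back to the caller */
        } else {
            elemObj = NULL;
        }
        Tcl_DecrRefCount(subObj);
        return elemObj;
    }

The caller would then typically finish with the Tcl_SetObjResult / Tcl_DecrRefCount pair shown in the Notes that follow.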
* - * Tcl_SetObjResult(interp, result); - * Tcl_DecrRefCount(result); + * Side effects: + * None. * + * Notes: + * The reference count of the returned object includes one reference + * corresponding to the pointer returned. Thus, the calling code will + * usually do something like: + * Tcl_SetObjResult(interp, result); + * Tcl_DecrRefCount(result); * *---------------------------------------------------------------------- */ - Tcl_Obj * TclLindexFlat( Tcl_Interp *interp, /* Tcl interpreter. */ - Tcl_Obj *listPtr, /* Tcl object representing the list. */ - int indexCount, /* Count of indices. */ + Tcl_Obj *listObj, /* Tcl object representing the list. */ + ListSizeT indexCount, /* Count of indices. */ Tcl_Obj *const indexArray[])/* Array of pointers to Tcl objects that * represent the indices in the list. */ { - int i; + ListSizeT i; - Tcl_IncrRefCount(listPtr); + Tcl_IncrRefCount(listObj); - for (i=0 ; i<indexCount && listPtr ; i++) { - int index, listLen = 0; + for (i=0 ; i<indexCount && listObj ; i++) { + ListSizeT index, listLen = 0; Tcl_Obj **elemPtrs = NULL, *sublistCopy; /* @@ -1336,18 +2628,16 @@ TclLindexFlat( * while we are still using it. See test lindex-8.4. */ - sublistCopy = TclListObjCopy(interp, listPtr); - Tcl_DecrRefCount(listPtr); - listPtr = NULL; + sublistCopy = TclListObjCopy(interp, listObj); + Tcl_DecrRefCount(listObj); + listObj = NULL; if (sublistCopy == NULL) { - /* - * The sublist is not a list at all => error. - */ - + /* The sublist is not a list at all => error. */ break; } - TclListObjGetElementsM(NULL, sublistCopy, &listLen, &elemPtrs); + LIST_ASSERT_TYPE(sublistCopy); + ListObjGetElements(sublistCopy, listLen, elemPtrs); if (TclGetIntForIndexM(interp, indexArray[i], /*endValue*/ listLen-1, &index) == TCL_OK) { @@ -1358,26 +2648,24 @@ TclLindexFlat( */ while (++i < indexCount) { - if (TclGetIntForIndexM(interp, indexArray[i], INT_MAX - 1, &index) + if (TclGetIntForIndexM( + interp, indexArray[i], ListSizeT_MAX - 1, &index) != TCL_OK) { Tcl_DecrRefCount(sublistCopy); return NULL; } } - TclNewObj(listPtr); + TclNewObj(listObj); } else { - /* - * Extract the pointer to the appropriate element. - */ - - listPtr = elemPtrs[index]; + /* Extract the pointer to the appropriate element. */ + listObj = elemPtrs[index]; } - Tcl_IncrRefCount(listPtr); + Tcl_IncrRefCount(listObj); } Tcl_DecrRefCount(sublistCopy); } - return listPtr; + return listObj; } /* @@ -1385,34 +2673,39 @@ TclLindexFlat( * * TclLsetList -- * - * The core of [lset] when objc == 4. Objv[2] may be either a + * Core of the 'lset' command when objc == 4. Objv[2] may be either a * scalar index or a list of indices. * It also handles 'lpop' when given a NULL value. * - * Implemented entirely as a wrapper around 'TclLindexFlat', as described - * for 'TclLindexList'. + * Results: + * Returns the new value of the list variable, or NULL if there was an + * error. The returned object includes one reference count for the + * pointer returned. * - * Value + * Side effects: + * None. * - * The new list, with the 'refCount' of 'valuPtr' incremented, or NULL if - * there was an error. + * Notes: + * This procedure is implemented entirely as a wrapper around + * TclLsetFlat. All it does is reconfigure the argument format into the + * form required by TclLsetFlat, while taking care to manage shimmering + * in such a way that we tend to keep the most useful internalreps and/or + * avoid the most expensive conversions. 
* *---------------------------------------------------------------------- */ - Tcl_Obj * TclLsetList( Tcl_Interp *interp, /* Tcl interpreter. */ - Tcl_Obj *listPtr, /* Pointer to the list being modified. */ - Tcl_Obj *indexArgPtr, /* Index or index-list arg to 'lset'. */ - Tcl_Obj *valuePtr) /* Value arg to 'lset' or NULL to 'lpop'. */ + Tcl_Obj *listObj, /* Pointer to the list being modified. */ + Tcl_Obj *indexArgObj, /* Index or index-list arg to 'lset'. */ + Tcl_Obj *valueObj) /* Value arg to 'lset' or NULL to 'lpop'. */ { - int indexCount = 0; /* Number of indices in the index list. */ + ListSizeT indexCount = 0; /* Number of indices in the index list. */ Tcl_Obj **indices = NULL; /* Vector of indices in the index list. */ - Tcl_Obj *retValuePtr; /* Pointer to the list to be returned. */ - int index; /* Current index in the list - discarded. */ + Tcl_Obj *retValueObj; /* Pointer to the list to be returned. */ + ListSizeT index; /* Current index in the list - discarded. */ Tcl_Obj *indexListCopy; - List *listRepPtr; /* * Determine whether the index arg designates a list or a single index. @@ -1420,36 +2713,33 @@ TclLsetList( * shimmering; see TIP #22 and #23 for details. */ - ListGetInternalRep(indexArgPtr, listRepPtr); - if (listRepPtr == NULL - && TclGetIntForIndexM(NULL, indexArgPtr, INT_MAX - 1, &index) == TCL_OK) { - /* - * indexArgPtr designates a single index. - */ - - return TclLsetFlat(interp, listPtr, 1, &indexArgPtr, valuePtr); - + if (!TclHasInternalRep(indexArgObj, &tclListType) + && TclGetIntForIndexM(NULL, indexArgObj, ListSizeT_MAX - 1, &index) + == TCL_OK) { + /* indexArgPtr designates a single index. */ + /* T:listrep-1.{2.1,12.1,15.1,19.1},2.{2.3,9.3,10.1,13.1,16.1}, 3.{4,5,6}.3 */ + return TclLsetFlat(interp, listObj, 1, &indexArgObj, valueObj); } - indexListCopy = TclListObjCopy(NULL, indexArgPtr); + indexListCopy = TclListObjCopy(NULL, indexArgObj); if (indexListCopy == NULL) { /* * indexArgPtr designates something that is neither an index nor a * well formed list. Report the error via TclLsetFlat. */ - - return TclLsetFlat(interp, listPtr, 1, &indexArgPtr, valuePtr); + return TclLsetFlat(interp, listObj, 1, &indexArgObj, valueObj); } - TclListObjGetElementsM(NULL, indexArgPtr, &indexCount, &indices); + LIST_ASSERT_TYPE(indexListCopy); + ListObjGetElements(indexListCopy, indexCount, indices); /* * Let TclLsetFlat handle the actual lset'ting. */ - retValuePtr = TclLsetFlat(interp, listPtr, indexCount, indices, valuePtr); + retValueObj = TclLsetFlat(interp, listObj, indexCount, indices, valueObj); Tcl_DecrRefCount(indexListCopy); - return retValuePtr; + return retValueObj; } /* @@ -1460,109 +2750,106 @@ TclLsetList( * Core engine of the 'lset' command. * It also handles 'lpop' when given a NULL value. * - * Value - * - * The resulting list - * - * The 'refCount' of 'valuePtr' is incremented. If 'listPtr' was not - * duplicated, its 'refCount' is incremented. The reference count of - * an unduplicated object is therefore 2 (one for the returned pointer - * and one for the variable that holds it). The reference count of a - * duplicate object is 1, reflecting that result is the only active - * reference. The caller is expected to store the result in the - * variable and decrement its reference count. (INST_STORE_* does - * exactly this.) - * - * NULL - * - * An error occurred. If 'listPtr' was duplicated, the reference - * count on the duplicate is decremented so that it is 0, causing any - * memory allocated by this function to be freed. 
- * - * - * Effect - * - * On entry, the reference count of 'listPtr' does not reflect any - * references held on the stack. The first action of this function is to - * determine whether 'listPtr' is shared and to create a duplicate - * unshared copy if it is. The reference count of the duplicate is - * incremented. At this point, the reference count is 1 in either case so - * that the object is considered unshared. + * Results: + * Returns the new value of the list variable, or NULL if an error + * occurred. The returned object includes one reference count for the + * pointer returned. * - * The unshared list is altered directly to produce the result. - * 'TclLsetFlat' maintains a linked list of 'Tcl_Obj' values whose string - * representations must be spoilt by threading via 'ptr2' of the - * two-pointer internal representation. On entry to 'TclLsetFlat', the - * values of 'ptr2' are immaterial; on exit, the 'ptr2' field of any - * Tcl_Obj that has been modified is set to NULL. + * Side effects: + * On entry, the reference count of the variable value does not reflect + * any references held on the stack. The first action of this function is + * to determine whether the object is shared, and to duplicate it if it + * is. The reference count of the duplicate is incremented. At this + * point, the reference count will be 1 for either case, so that the + * object will appear to be unshared. + * + * If an error occurs, and the object has been duplicated, the reference + * count on the duplicate is decremented so that it is now 0: this + * dismisses any memory that was allocated by this function. + * + * If no error occurs, the reference count of the original object is + * incremented if the object has not been duplicated, and nothing is done + * to a reference count of the duplicate. Now the reference count of an + * unduplicated object is 2 (the returned pointer, plus the one stored in + * the variable). The reference count of a duplicate object is 1, + * reflecting that the returned pointer is the only active reference. The + * caller is expected to store the returned value back in the variable + * and decrement its reference count. (INST_STORE_* does exactly this.) * *---------------------------------------------------------------------- */ - Tcl_Obj * TclLsetFlat( Tcl_Interp *interp, /* Tcl interpreter. */ - Tcl_Obj *listPtr, /* Pointer to the list being modified. */ - int indexCount, /* Number of index args. */ + Tcl_Obj *listObj, /* Pointer to the list being modified. */ + ListSizeT indexCount, /* Number of index args. */ Tcl_Obj *const indexArray[], /* Index args. */ - Tcl_Obj *valuePtr) /* Value arg to 'lset' or NULL to 'lpop'. */ + Tcl_Obj *valueObj) /* Value arg to 'lset' or NULL to 'lpop'. */ { - int index, len; + ListSizeT index, len; int result; - Tcl_Obj *subListPtr, *retValuePtr, *chainPtr; - Tcl_ObjInternalRep *irPtr; + Tcl_Obj *subListObj, *retValueObj; + Tcl_Obj *pendingInvalidates[10]; + Tcl_Obj **pendingInvalidatesPtr = pendingInvalidates; + ListSizeT numPendingInvalidates = 0; /* * If there are no indices, simply return the new value. (Without * indices, [lset] is a synonym for [set]. - * [lpop] does not use this but protect for NULL valuePtr just in case. + * [lpop] does not use this but protect for NULL valueObj just in case. 
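The zero-index case noted above behaves like a plain [set] at script level; a brief hedged illustration:

    set x {a b c}
    lset x {} {p q}           ;# empty index list: the whole value is replaced
    puts $x                   ;# p q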
*/ if (indexCount == 0) { - if (valuePtr != NULL) { - Tcl_IncrRefCount(valuePtr); + if (valueObj != NULL) { + Tcl_IncrRefCount(valueObj); } - return valuePtr; + return valueObj; } /* * If the list is shared, make a copy we can modify (copy-on-write). We * use Tcl_DuplicateObj() instead of TclListObjCopy() for a few reasons: - * 1) we have not yet confirmed listPtr is actually a list; 2) We make a + * 1) we have not yet confirmed listObj is actually a list; 2) We make a * verbatim copy of any existing string rep, and when we combine that with * the delayed invalidation of string reps of modified Tcl_Obj's * implemented below, the outcome is that any error condition that causes - * this routine to return NULL, will leave the string rep of listPtr and + * this routine to return NULL, will leave the string rep of listObj and * all elements to be unchanged. */ - subListPtr = Tcl_IsShared(listPtr) ? Tcl_DuplicateObj(listPtr) : listPtr; + subListObj = Tcl_IsShared(listObj) ? Tcl_DuplicateObj(listObj) : listObj; /* * Anchor the linked list of Tcl_Obj's whose string reps must be * invalidated if the operation succeeds. */ - retValuePtr = subListPtr; - chainPtr = NULL; + retValueObj = subListObj; result = TCL_OK; + /* Allocate if static array for pending invalidations is too small */ + if (indexCount + > (int) (sizeof(pendingInvalidates) / sizeof(pendingInvalidates[0]))) { + pendingInvalidatesPtr = + (Tcl_Obj **) ckalloc(indexCount * sizeof(*pendingInvalidatesPtr)); + } + /* * Loop through all the index arguments, and for each one dive into the * appropriate sublist. */ do { - int elemCount; + ListSizeT elemCount; Tcl_Obj *parentList, **elemPtrs; /* * Check for the possible error conditions... */ - if (TclListObjGetElementsM(interp, subListPtr, &elemCount, &elemPtrs) - != TCL_OK) { + if (TclListObjGetElementsM(interp, subListObj, &elemCount, &elemPtrs) + != TCL_OK) { /* ...the sublist we're indexing into isn't a list at all. */ result = TCL_ERROR; break; @@ -1574,22 +2861,27 @@ TclLsetFlat( */ if (TclGetIntForIndexM(interp, *indexArray, elemCount - 1, &index) - != TCL_OK) { + != TCL_OK) { /* ...the index we're trying to use isn't an index at all. */ result = TCL_ERROR; - indexArray++; + indexArray++; /* Why bother with this increment? TBD */ break; } indexArray++; if (index < 0 || index > elemCount - || (valuePtr == NULL && index >= elemCount)) { + || (valueObj == NULL && index >= elemCount)) { /* ...the index points outside the sublist. */ if (interp != NULL) { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "index \"%s\" out of range", Tcl_GetString(indexArray[-1]))); - Tcl_SetErrorCode(interp, "TCL", "VALUE", "INDEX" - "OUTOFRANGE", NULL); + Tcl_SetObjResult(interp, + Tcl_ObjPrintf("index \"%s\" out of range", + Tcl_GetString(indexArray[-1]))); + Tcl_SetErrorCode(interp, + "TCL", + "VALUE", + "INDEX" + "OUTOFRANGE", + NULL); } result = TCL_ERROR; break; @@ -1597,128 +2889,129 @@ TclLsetFlat( /* * No error conditions. As long as we're not yet on the last index, - * determine the next sublist for the next pass through the loop, and - * take steps to make sure it is an unshared copy, as we intend to - * modify it. + * determine the next sublist for the next pass through the loop, + * and take steps to make sure it is an unshared copy, as we intend + * to modify it. 
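The range check above is what surfaces as the script-level error when an index lies beyond the last permissible position; a sketch (the exact wording follows the Tcl_ObjPrintf format shown above):

    set x {a b c}
    catch {lset x 5 Z} msg
    puts $msg                 ;# index "5" out of range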
*/ if (--indexCount) { - parentList = subListPtr; + parentList = subListObj; if (index == elemCount) { - TclNewObj(subListPtr); + TclNewObj(subListObj); } else { - subListPtr = elemPtrs[index]; + subListObj = elemPtrs[index]; } - if (Tcl_IsShared(subListPtr)) { - subListPtr = Tcl_DuplicateObj(subListPtr); + if (Tcl_IsShared(subListObj)) { + subListObj = Tcl_DuplicateObj(subListObj); } /* * Replace the original elemPtr[index] in parentList with a copy * we know to be unshared. This call will also deal with the * situation where parentList shares its internalrep with other - * Tcl_Obj's. Dealing with the shared internalrep case can cause - * subListPtr to become shared again, so detect that case and make - * and store another copy. + * Tcl_Obj's. Dealing with the shared internalrep case can + * cause subListObj to become shared again, so detect that case + * and make and store another copy. */ if (index == elemCount) { - Tcl_ListObjAppendElement(NULL, parentList, subListPtr); + Tcl_ListObjAppendElement(NULL, parentList, subListObj); } else { - TclListObjSetElement(NULL, parentList, index, subListPtr); + TclListObjSetElement(NULL, parentList, index, subListObj); } - if (Tcl_IsShared(subListPtr)) { - subListPtr = Tcl_DuplicateObj(subListPtr); - TclListObjSetElement(NULL, parentList, index, subListPtr); + if (Tcl_IsShared(subListObj)) { + subListObj = Tcl_DuplicateObj(subListObj); + TclListObjSetElement(NULL, parentList, index, subListObj); } /* - * The TclListObjSetElement() calls do not spoil the string rep of - * parentList, and that's fine for now, since all we've done so - * far is replace a list element with an unshared copy. The list - * value remains the same, so the string rep. is still valid, and - * unchanged, which is good because if this whole routine returns - * NULL, we'd like to leave no change to the value of the lset - * variable. Later on, when we set valuePtr in its proper place, - * then all containing lists will have their values changed, and - * will need their string reps spoiled. We maintain a list of all - * those Tcl_Obj's (via a little internalrep surgery) so we can spoil - * them at that time. + * The TclListObjSetElement() calls do not spoil the string rep + * of parentList, and that's fine for now, since all we've done + * so far is replace a list element with an unshared copy. The + * list value remains the same, so the string rep. is still + * valid, and unchanged, which is good because if this whole + * routine returns NULL, we'd like to leave no change to the + * value of the lset variable. Later on, when we set valueObj + * in its proper place, then all containing lists will have + * their values changed, and will need their string reps + * spoiled. We maintain a list of all those Tcl_Obj's (via a + * little internalrep surgery) so we can spoil them at that + * time. */ - irPtr = TclFetchInternalRep(parentList, &tclListType); - irPtr->twoPtrValue.ptr2 = chainPtr; - chainPtr = parentList; + pendingInvalidatesPtr[numPendingInvalidates] = parentList; + ++numPendingInvalidates; } } while (indexCount > 0); /* * Either we've detected and error condition, and exited the loop with * result == TCL_ERROR, or we've successfully reached the last index, and - * we're ready to store valuePtr. In either case, we need to clean up our - * string spoiling list of Tcl_Obj's. + * we're ready to store valueObj. On success, we need to invalidate + * the string representations of intermediate lists whose contained + * list element would have changed. 
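A script-level view of the copy-on-write behaviour provided by the duplication logic above; a value shared between variables is never modified in place (illustrative only):

    set a {{1 2} {3 4}}
    set b $a                  ;# b shares a's internal representation
    lset a 0 1 99             ;# a is duplicated before being modified
    puts $a                   ;# {1 99} {3 4}
    puts $b                   ;# {1 2} {3 4}  (the shared original is untouched)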
*/ + if (result == TCL_OK) { + while (numPendingInvalidates > 0) { + Tcl_Obj *objPtr; - while (chainPtr) { - Tcl_Obj *objPtr = chainPtr; - List *listRepPtr; - - /* - * Clear away our internalrep surgery mess. - */ - - irPtr = TclFetchInternalRep(objPtr, &tclListType); - listRepPtr = (List *)irPtr->twoPtrValue.ptr1; - chainPtr = (Tcl_Obj *)irPtr->twoPtrValue.ptr2; - - if (result == TCL_OK) { - - /* - * We're going to store valuePtr, so spoil string reps of all - * containing lists. - */ + --numPendingInvalidates; + objPtr = pendingInvalidatesPtr[numPendingInvalidates]; - listRepPtr->refCount++; - TclFreeInternalRep(objPtr); - ListSetInternalRep(objPtr, listRepPtr); - listRepPtr->refCount--; - - TclInvalidateStringRep(objPtr); - } else { - irPtr->twoPtrValue.ptr2 = NULL; + if (result == TCL_OK) { + /* + * We're going to store valueObj, so spoil string reps of all + * containing lists. + * TODO - historically, the storing of the internal rep was done + * because the ptr2 field of the internal rep was used to chain + * objects whose string rep needed to be invalidated. Now this + * is no longer the case, so replacing of the internal rep + * should not be needed. The TclInvalidateStringRep should + * suffice. Formulate a test case before changing. + */ + ListRep objInternalRep; + TclListObjGetRep(NULL, objPtr, &objInternalRep); + ListObjReplaceRepAndInvalidate(objPtr, &objInternalRep); + } } } + if (pendingInvalidatesPtr != pendingInvalidates) + ckfree(pendingInvalidatesPtr); + if (result != TCL_OK) { /* * Error return; message is already in interp. Clean up any excess * memory. */ - if (retValuePtr != listPtr) { - Tcl_DecrRefCount(retValuePtr); + if (retValueObj != listObj) { + Tcl_DecrRefCount(retValueObj); } return NULL; } /* - * Store valuePtr in proper sublist and return. The TCL_INDEX_NONE is - * to avoid a compiler warning (not a problem because we checked that - * we have a proper list - or something convertible to one - above). + * Store valueObj in proper sublist and return. The -1 is to avoid a + * compiler warning (not a problem because we checked that we have a + * proper list - or something convertible to one - above). */ - len = TCL_INDEX_NONE; - TclListObjLengthM(NULL, subListPtr, &len); - if (valuePtr == NULL) { - Tcl_ListObjReplace(NULL, subListPtr, index, 1, 0, NULL); + len = -1; + TclListObjLengthM(NULL, subListObj, &len); + if (valueObj == NULL) { + /* T:listrep-1.{4.2,5.4,6.1,7.1,8.3},2.{4,5}.4 */ + Tcl_ListObjReplace(NULL, subListObj, index, 1, 0, NULL); } else if (index == len) { - Tcl_ListObjAppendElement(NULL, subListPtr, valuePtr); + /* T:listrep-1.2.1,2.{2.3,9.3},3.{4,5,6}.3 */ + Tcl_ListObjAppendElement(NULL, subListObj, valueObj); } else { - TclListObjSetElement(NULL, subListPtr, index, valuePtr); - TclInvalidateStringRep(subListPtr); + /* T:listrep-1.{12.1,15.1,19.1},2.{10,13,16}.1 */ + TclListObjSetElement(NULL, subListObj, index, valueObj); + TclInvalidateStringRep(subListObj); } - Tcl_IncrRefCount(retValuePtr); - return retValuePtr; + Tcl_IncrRefCount(retValueObj); + return retValueObj; } /* @@ -1726,93 +3019,53 @@ TclLsetFlat( * * TclListObjSetElement -- * - * Set a single element of a list to a specified value. - * - * It is the caller's responsibility to invalidate the string - * representation of the 'listPtr'. + * Set a single element of a list to a specified value * - * Value - * - * TCL_OK - * - * Success. - * - * TCL_ERROR - * - * 'listPtr' does not refer to a list object and cannot be converted - * to one. 
An error message will be left in the interpreter result if - * interp is not NULL. - * - * TCL_ERROR - * - * An index designates an element outside the range [0..listLength-1], - * where 'listLength' is the count of elements in the list object - * designated by 'listPtr'. An error message is left in the - * interpreter result. - * - * Effect - * - * If 'listPtr' designates a shared object, 'Tcl_Panic' is called. If - * 'listPtr' is not already of type 'tclListType', it is converted and the - * internal representation is unshared. The 'refCount' of the element at - * 'index' is decremented and replaced in the list with the 'valuePtr', - * whose 'refCount' in turn is incremented. + * Results: + * The return value is normally TCL_OK. If listObj does not refer to a + * list object and cannot be converted to one, TCL_ERROR is returned and + * an error message will be left in the interpreter result if interp is + * not NULL. Similarly, if index designates an element outside the range + * [0..listLength-1], where listLength is the count of elements in the + * list object designated by listObj, TCL_ERROR is returned and an error + * message is left in the interpreter result. * + * Side effects: + * Tcl_Panic if listObj designates a shared object. Otherwise, attempts + * to convert it to a list with a non-shared internal rep. Decrements the + * ref count of the object at the specified index within the list, + * replaces with the object designated by valueObj, and increments the + * ref count of the replacement object. * *---------------------------------------------------------------------- */ - int TclListObjSetElement( Tcl_Interp *interp, /* Tcl interpreter; used for error reporting * if not NULL. */ - Tcl_Obj *listPtr, /* List object in which element should be + Tcl_Obj *listObj, /* List object in which element should be * stored. */ - int index, /* Index of element to store. */ - Tcl_Obj *valuePtr) /* Tcl object to store in the designated list + ListSizeT index, /* Index of element to store. */ + Tcl_Obj *valueObj) /* Tcl object to store in the designated list * element. */ { - List *listRepPtr; /* Internal representation of the list being - * modified. */ - Tcl_Obj **elemPtrs; /* Pointers to elements of the list. */ - int elemCount; /* Number of elements in the list. */ + ListRep listRep; + Tcl_Obj **elemPtrs; /* Pointers to elements of the list. */ + ListSizeT elemCount; /* Number of elements in the list. */ - /* - * Ensure that the listPtr parameter designates an unshared list. - */ + /* Ensure that the listObj parameter designates an unshared list. */ - if (Tcl_IsShared(listPtr)) { + if (Tcl_IsShared(listObj)) { Tcl_Panic("%s called with shared object", "TclListObjSetElement"); } - ListGetInternalRep(listPtr, listRepPtr); - if (listRepPtr == NULL) { - int result; - int length; - - (void) Tcl_GetStringFromObj(listPtr, &length); - if (length == 0) { - if (interp != NULL) { - Tcl_SetObjResult(interp, Tcl_ObjPrintf( - "index \"%d\" out of range", index)); - Tcl_SetErrorCode(interp, "TCL", "VALUE", "INDEX", - "OUTOFRANGE", NULL); - } - return TCL_ERROR; - } - result = SetListFromAny(interp, listPtr); - if (result != TCL_OK) { - return result; - } - ListGetInternalRep(listPtr, listRepPtr); + if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK) { + return TCL_ERROR; } - elemCount = listRepPtr->elemCount; - - /* - * Ensure that the index is in bounds. - */ + elemCount = ListRepLength(&listRep); + /* Ensure that the index is in bounds. 
*/ if (index<0 || index>=elemCount) { if (interp != NULL) { Tcl_SetObjResult(interp, Tcl_ObjPrintf( @@ -1824,65 +3077,33 @@ TclListObjSetElement( } /* - * If the internal rep is shared, replace it with an unshared copy. + * Note - garbage collect this only AFTER checking indices above. + * Do not want to modify listrep and then not store it back in listObj. */ + ListRepFreeUnreferenced(&listRep); - if (listRepPtr->refCount > 1) { - Tcl_Obj **dst, **src = listRepPtr->elements; - List *newPtr = AttemptNewList(NULL, listRepPtr->maxElemCount, NULL); + /* Replace a shared internal rep with an unshared copy */ + if (listRep.storePtr->refCount > 1) { + ListRep newInternalRep; + /* T:listrep-2.{10,13,16}.1 */ + /* TODO - leave extra space? */ + ListRepClone(&listRep, &newInternalRep, LISTREP_PANIC_ON_FAIL); + listRep = newInternalRep; + } /* else T:listrep-1.{12.1,15.1,19.1} */ - if (newPtr == NULL) { - newPtr = AttemptNewList(interp, elemCount, NULL); - if (newPtr == NULL) { - return TCL_ERROR; - } - } - newPtr->refCount++; - newPtr->elemCount = elemCount; - newPtr->canonicalFlag = listRepPtr->canonicalFlag; - - dst = newPtr->elements; - while (elemCount--) { - *dst = *src++; - Tcl_IncrRefCount(*dst++); - } - - listRepPtr->refCount--; - - listRepPtr = newPtr; - ListResetInternalRep(listPtr, listRepPtr); - } - elemPtrs = listRepPtr->elements; - - /* - * Add a reference to the new list element. - */ - - Tcl_IncrRefCount(valuePtr); + /* Retrieve element array AFTER potential cloning above */ + ListRepElements(&listRep, elemCount, elemPtrs); /* - * Remove a reference from the old list element. + * Add a reference to the new list element and remove from old before + * replacing it. Order is important! */ - + Tcl_IncrRefCount(valueObj); Tcl_DecrRefCount(elemPtrs[index]); + elemPtrs[index] = valueObj; - /* - * Stash the new object in the list. - */ - - elemPtrs[index] = valuePtr; - - /* - * Invalidate outdated internalreps. - */ - - ListGetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount++; - TclFreeInternalRep(listPtr); - ListSetInternalRep(listPtr, listRepPtr); - listRepPtr->refCount--; - - TclInvalidateStringRep(listPtr); + /* Internal rep may be cloned so replace */ + ListObjReplaceRepAndInvalidate(listObj, &listRep); return TCL_OK; } @@ -1892,34 +3113,33 @@ TclListObjSetElement( * * FreeListInternalRep -- * - * Deallocate the storage associated with the internal representation of a - * a list object. + * Deallocate the storage associated with a list object's internal + * representation. * - * Effect + * Results: + * None. * + * Side effects: * Frees listPtr's List* internal representation, if no longer shared. * May decrement the ref counts of element objects, which may free them. * *---------------------------------------------------------------------- */ - static void FreeListInternalRep( - Tcl_Obj *listPtr) /* List object with internal rep to free. */ + Tcl_Obj *listObj) /* List object with internal rep to free. 
*/ { - List *listRepPtr; - - ListGetInternalRep(listPtr, listRepPtr); - assert(listRepPtr != NULL); - - if (listRepPtr->refCount-- <= 1) { - Tcl_Obj **elemPtrs = listRepPtr->elements; - int i, numElems = listRepPtr->elemCount; - - for (i = 0; i < numElems; i++) { - Tcl_DecrRefCount(elemPtrs[i]); - } - ckfree(listRepPtr); + ListRep listRep; + + ListObjGetRep(listObj, &listRep); + if (listRep.storePtr->refCount-- <= 1) { + ObjArrayDecrRefs( + listRep.storePtr->slots, + listRep.storePtr->firstUsed, listRep.storePtr->numUsed); + ckfree(listRep.storePtr); + } + if (listRep.spanPtr) { + ListSpanDecrRefs(listRep.spanPtr); } } @@ -1928,26 +3148,25 @@ FreeListInternalRep( * * DupListInternalRep -- * - * Initialize the internal representation of a list 'Tcl_Obj' to share the + * Initialize the internal representation of a list Tcl_Obj to share the * internal representation of an existing list object. * - * Effect + * Results: + * None. * - * The 'refCount' of the List internal rep is incremented. + * Side effects: + * The reference count of the List internal rep is incremented. * *---------------------------------------------------------------------- */ - static void DupListInternalRep( - Tcl_Obj *srcPtr, /* Object with internal rep to copy. */ - Tcl_Obj *copyPtr) /* Object with internal rep to set. */ + Tcl_Obj *srcObj, /* Object with internal rep to copy. */ + Tcl_Obj *copyObj) /* Object with internal rep to set. */ { - List *listRepPtr; - - ListGetInternalRep(srcPtr, listRepPtr); - assert(listRepPtr != NULL); - ListSetInternalRep(copyPtr, listRepPtr); + ListRep listRep; + ListObjGetRep(srcObj, &listRep); + ListObjOverwriteRep(copyObj, &listRep); } /* @@ -1955,31 +3174,26 @@ DupListInternalRep( * * SetListFromAny -- * - * Convert any object to a list. - * - * Value + * Attempt to generate a list internal form for the Tcl object "objPtr". * - * TCL_OK - * - * Success. The internal representation of 'objPtr' is set, and the type - * of 'objPtr' is 'tclListType'. - * - * TCL_ERROR - * - * An error occured during conversion. An error message is left in the - * interpreter's result if 'interp' is not NULL. + * Results: + * The return value is TCL_OK or TCL_ERROR. If an error occurs during + * conversion, an error message is left in the interpreter's result + * unless "interp" is NULL. * + * Side effects: + * If no error occurs, a list is stored as "objPtr"s internal + * representation. * *---------------------------------------------------------------------- */ - static int SetListFromAny( Tcl_Interp *interp, /* Used for error reporting if not NULL. */ Tcl_Obj *objPtr) /* The object to convert. */ { - List *listRepPtr; Tcl_Obj **elemPtrs; + ListRep listRep; /* * Dictionaries are a special case; they have a string representation such @@ -1993,7 +3207,7 @@ SetListFromAny( Tcl_Obj *keyPtr, *valuePtr; Tcl_DictSearch search; int done; - int size; + ListSizeT size; /* * Create the new list representation. Note that we do not need to do @@ -2005,17 +3219,22 @@ SetListFromAny( */ Tcl_DictObjSize(NULL, objPtr, &size); - listRepPtr = AttemptNewList(interp, size > 0 ? 2*size : 1, NULL); - if (!listRepPtr) { + /* TODO - leave space in front and/or back? */ + if (ListRepInitAttempt( + interp, size > 0 ? 2 * size : 1, NULL, &listRep) + != TCL_OK) { return TCL_ERROR; } - listRepPtr->elemCount = 2 * size; - /* - * Populate the list representation. 
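The dictionary fast path above turns the key/value pairs directly into list elements without reparsing the string representation; the effect is visible at script level (assuming stock [dict] behaviour):

    set d [dict create x 1 y 2]
    llength $d                ;# 4, the dict value becomes the list {x 1 y 2}
    lindex $d 2               ;# y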
- */ + LIST_ASSERT(listRep.spanPtr == NULL); /* Guard against future changes */ + LIST_ASSERT(listRep.storePtr->firstUsed == 0); + LIST_ASSERT((listRep.storePtr->flags & LISTSTORE_CANONICAL) == 0); - elemPtrs = listRepPtr->elements; + listRep.storePtr->numUsed = 2 * size; + + /* Populate the list representation. */ + + elemPtrs = listRep.storePtr->slots; Tcl_DictObjFirst(NULL, objPtr, &search, &keyPtr, &valuePtr, &done); while (!done) { *elemPtrs++ = keyPtr; @@ -2025,7 +3244,7 @@ SetListFromAny( Tcl_DictObjNext(&search, &keyPtr, &valuePtr, &done); } } else { - int estCount, length; + ListSizeT estCount, length; const char *limit, *nextElem = TclGetStringFromObj(objPtr, &length); /* @@ -2036,29 +3255,32 @@ SetListFromAny( estCount = TclMaxListLength(nextElem, length, &limit); estCount += (estCount == 0); /* Smallest list struct holds 1 * element. */ - listRepPtr = AttemptNewList(interp, estCount, NULL); - if (listRepPtr == NULL) { + /* TODO - allocate additional space? */ + if (ListRepInitAttempt(interp, estCount, NULL, &listRep) + != TCL_OK) { return TCL_ERROR; } - elemPtrs = listRepPtr->elements; - /* - * Each iteration, parse and store a list element. - */ + LIST_ASSERT(listRep.spanPtr == NULL); /* Guard against future changes */ + LIST_ASSERT(listRep.storePtr->firstUsed == 0); + + elemPtrs = listRep.storePtr->slots; + + /* Each iteration, parse and store a list element. */ while (nextElem < limit) { const char *elemStart; char *check; - int elemSize; + ListSizeT elemSize; int literal; if (TCL_OK != TclFindElement(interp, nextElem, limit - nextElem, &elemStart, &nextElem, &elemSize, &literal)) { - fail: - while (--elemPtrs >= listRepPtr->elements) { +fail: + while (--elemPtrs >= listRep.storePtr->slots) { Tcl_DecrRefCount(*elemPtrs); } - ckfree(listRepPtr); + ckfree(listRep.storePtr); return TCL_ERROR; } if (elemStart == limit) { @@ -2070,11 +3292,7 @@ SetListFromAny( check = Tcl_InitStringRep(*elemPtrs, literal ? elemStart : NULL, elemSize); if (elemSize && check == NULL) { - if (interp) { - Tcl_SetObjResult(interp, Tcl_NewStringObj( - "cannot construct list, out of memory", -1)); - Tcl_SetErrorCode(interp, "TCL", "MEMORY", NULL); - } + MemoryAllocationError(interp, elemSize); goto fail; } if (!literal) { @@ -2085,16 +3303,29 @@ SetListFromAny( Tcl_IncrRefCount(*elemPtrs++);/* Since list now holds ref to it. */ } - listRepPtr->elemCount = elemPtrs - listRepPtr->elements; + listRep.storePtr->numUsed = + elemPtrs - listRep.storePtr->slots; } + LISTREP_CHECK(&listRep); + /* * Store the new internalRep. We do this as late * as possible to allow the conversion code, in particular * Tcl_GetStringFromObj, to use the old internalRep. */ - ListSetInternalRep(objPtr, listRepPtr); + /* + * Note old string representation NOT to be invalidated. + * So do NOT use ListObjReplaceRepAndInvalidate. InternalRep to be freed AFTER + * IncrRefs so do not use ListObjOverwriteRep + */ + ListRepIncrRefs(&listRep); + TclFreeInternalRep(objPtr); + objPtr->internalRep.twoPtrValue.ptr1 = listRep.storePtr; + objPtr->internalRep.twoPtrValue.ptr2 = listRep.spanPtr; + objPtr->typePtr = &tclListType; + return TCL_OK; } @@ -2103,69 +3334,71 @@ SetListFromAny( * * UpdateStringOfList -- * - * Update the string representation for a list object. + * Update the string representation for a list object. Note: This + * function does not invalidate an existing old string rep so storage + * will be lost if this has not already been done. 
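The string generated here quotes each element so that the result parses back to the same list; a small hedged example (exact quoting of individual elements may differ):

    set l [list {a b} {[boom]}]
    puts $l                   ;# {a b} {[boom]}
    lindex $l 1               ;# [boom]  (round-trips despite the brackets)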
* - * Any previously-exising string representation is not invalidated, so - * storage is lost if this has not been taken care of. - * - * Effect + * Results: + * None. * - * The string representation of 'listPtr' is set to the resulting string. - * This string will be empty if the list has no elements. It is assumed - * that the list internal representation is not NULL. + * Side effects: + * The object's string is set to a valid string that results from the + * list-to-string conversion. This string will be empty if the list has + * no elements. The list internal representation should not be NULL and + * we assume it is not NULL. * *---------------------------------------------------------------------- */ - static void UpdateStringOfList( - Tcl_Obj *listPtr) /* List object with string rep to update. */ + Tcl_Obj *listObj) /* List object with string rep to update. */ { # define LOCAL_SIZE 64 char localFlags[LOCAL_SIZE], *flagPtr = NULL; - int numElems, i, length, bytesNeeded = 0; + ListSizeT numElems, i, length, bytesNeeded = 0; const char *elem, *start; char *dst; Tcl_Obj **elemPtrs; - List *listRepPtr; - - ListGetInternalRep(listPtr, listRepPtr); + ListRep listRep; - assert(listRepPtr != NULL); + ListObjGetRep(listObj, &listRep); + LISTREP_CHECK(&listRep); - numElems = listRepPtr->elemCount; + ListRepElements(&listRep, numElems, elemPtrs); /* * Mark the list as being canonical; although it will now have a string * rep, it is one we derived through proper "canonical" quoting and so * it's known to be free from nasties relating to [concat] and [eval]. + * However, we only do this if this is not a spanned list. Marking the + * storage canonical for a spanned list make ALL lists using the storage + * canonical which is not right. (Consider a list generated from a + * string and then this function called for a spanned list generated + * from it). On the other hand, a spanned list is always canonical + * (never generated from a string) so it does not have to be explicitly + * marked as such. The ListObjIsCanonical macro takes this into account. + * See the comments there. */ + if (listRep.spanPtr == NULL) { + LIST_ASSERT(listRep.storePtr->firstUsed == 0);/* Invariant */ + listRep.storePtr->flags |= LISTSTORE_CANONICAL; + } - listRepPtr->canonicalFlag = 1; - - /* - * Handle empty list case first, so rest of the routine is simpler. - */ + /* Handle empty list case first, so rest of the routine is simpler. */ if (numElems == 0) { - Tcl_InitStringRep(listPtr, NULL, 0); + Tcl_InitStringRep(listObj, NULL, 0); return; } - /* - * Pass 1: estimate space, gather flags. - */ + /* Pass 1: estimate space, gather flags. */ if (numElems <= LOCAL_SIZE) { flagPtr = localFlags; } else { - /* - * We know numElems <= LIST_MAX, so this is safe. - */ - + /* We know numElems <= LIST_MAX, so this is safe. */ flagPtr = (char *)ckalloc(numElems); } - elemPtrs = listRepPtr->elements; for (i = 0; i < numElems; i++) { flagPtr[i] = (i ? TCL_DONT_QUOTE_HASH : 0); elem = TclGetStringFromObj(elemPtrs[i], &length); @@ -2183,7 +3416,7 @@ UpdateStringOfList( * Pass 2: copy into string rep buffer. */ - start = dst = Tcl_InitStringRep(listPtr, NULL, bytesNeeded); + start = dst = Tcl_InitStringRep(listObj, NULL, bytesNeeded); TclOOM(dst, bytesNeeded); for (i = 0; i < numElems; i++) { flagPtr[i] |= (i ? 
TCL_DONT_QUOTE_HASH : 0); @@ -2193,7 +3426,7 @@ UpdateStringOfList( } /* Set the string length to what was actually written, the safe choice */ - (void) Tcl_InitStringRep(listPtr, NULL, dst - 1 - start); + (void) Tcl_InitStringRep(listObj, NULL, dst - 1 - start); if (flagPtr != localFlags) { ckfree(flagPtr); @@ -2201,6 +3434,61 @@ UpdateStringOfList( } /* + *------------------------------------------------------------------------ + * + * TclListTestObj -- + * + * Returns a list object with a specific internal rep and content. + * Used specifically for testing so span can be controlled explicitly. + * + * Results: + * Pointer to the Tcl_Obj containing the list. + * + * Side effects: + * None. + * + *------------------------------------------------------------------------ + */ +Tcl_Obj * +TclListTestObj (int length, int leadingSpace, int endSpace) +{ + if (length < 0) + length = 0; + if (leadingSpace < 0) + leadingSpace = 0; + if (endSpace < 0) + endSpace = 0; + + ListRep listRep; + ListSizeT capacity; + Tcl_Obj *listObj; + + TclNewObj(listObj); + + /* Only a test object so ignoring overflow checks */ + capacity = length + leadingSpace + endSpace; + if (capacity == 0) { + return listObj; + } + + ListRepInit(capacity, NULL, 0, &listRep); + + ListStore *storePtr = listRep.storePtr; + int i; + for (i = 0; i < length; ++i) { + storePtr->slots[i + leadingSpace] = Tcl_NewIntObj(i); + Tcl_IncrRefCount(storePtr->slots[i + leadingSpace]); + } + storePtr->firstUsed = leadingSpace; + storePtr->numUsed = length; + if (leadingSpace != 0) { + listRep.spanPtr = ListSpanNew(leadingSpace, length); + } + ListObjReplaceRepAndInvalidate(listObj, &listRep); + return listObj; +} + +/* * Local Variables: * mode: c * c-basic-offset: 4 diff --git a/generic/tclStubInit.c b/generic/tclStubInit.c index 4941348..87c9d0a 100644 --- a/generic/tclStubInit.c +++ b/generic/tclStubInit.c @@ -1118,7 +1118,9 @@ static const TclIntStubs tclIntStubs = { TclPtrUnsetVar, /* 256 */ TclStaticLibrary, /* 257 */ TclpCreateTemporaryDirectory, /* 258 */ - TclUnusedStubEntry, /* 259 */ + 0, /* 259 */ + TclListTestObj, /* 260 */ + TclListObjValidate, /* 261 */ }; static const TclIntPlatStubs tclIntPlatStubs = { diff --git a/generic/tclTest.c b/generic/tclTest.c index 9284969..d13b7ce 100644 --- a/generic/tclTest.c +++ b/generic/tclTest.c @@ -273,6 +273,7 @@ static Tcl_ObjCmdProc TestgetvarfullnameCmd; static Tcl_CmdProc TestinterpdeleteCmd; static Tcl_CmdProc TestlinkCmd; static Tcl_ObjCmdProc TestlinkarrayCmd; +static Tcl_ObjCmdProc TestlistrepCmd; static Tcl_ObjCmdProc TestlocaleCmd; static Tcl_CmdProc TestmainthreadCmd; static Tcl_CmdProc TestsetmainloopCmd; @@ -559,6 +560,12 @@ Tcltest_Init( } if (Tcl_GetCommandInfo(interp, "::tcl::build-info", &info)) { +#if TCL_MAJOR_VERSION > 8 + if (info.isNativeObjectProc == 2) { + Tcl_CreateObjCommand2(interp, "::tcl::test::build-info", + info.objProc2, (void *)version, NULL); + } else +#endif Tcl_CreateObjCommand(interp, "::tcl::test::build-info", info.objProc, (void *)version, NULL); } @@ -656,6 +663,7 @@ Tcltest_Init( NULL, NULL); Tcl_CreateCommand(interp, "testlink", TestlinkCmd, NULL, NULL); Tcl_CreateObjCommand(interp, "testlinkarray", TestlinkarrayCmd, NULL, NULL); + Tcl_CreateObjCommand(interp, "testlistrep", TestlistrepCmd, NULL, NULL); Tcl_CreateObjCommand(interp, "testlocale", TestlocaleCmd, NULL, NULL); Tcl_CreateCommand(interp, "testpanic", TestpanicCmd, NULL, NULL); @@ -770,7 +778,7 @@ Tcltest_Init( return TCL_ERROR; } case 3: - if (objc-1) { + if (objc > 1) { Tcl_SetVar2Ex(interp, 
"tcl_rcFileName", NULL, objv[1], TCL_GLOBAL_ONLY); } @@ -815,6 +823,12 @@ Tcltest_SafeInit( return TCL_ERROR; } if (Tcl_GetCommandInfo(interp, "::tcl::build-info", &info)) { +#if TCL_MAJOR_VERSION > 8 + if (info.isNativeObjectProc == 2) { + Tcl_CreateObjCommand2(interp, "::tcl::test::build-info", + info.objProc2, (void *)version, NULL); + } else +#endif Tcl_CreateObjCommand(interp, "::tcl::test::build-info", info.objProc, (void *)version, NULL); } @@ -925,7 +939,7 @@ TestasyncCmd( break; } } - Tcl_SetObjResult(interp, Tcl_NewStringObj(argv[3], -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(argv[3], TCL_INDEX_NONE)); Tcl_MutexUnlock(&asyncTestMutex); return code; } else if (strcmp(argv[1], "marklater") == 0) { @@ -993,7 +1007,7 @@ AsyncHandlerProc( listArgv[3] = NULL; cmd = Tcl_Merge(3, listArgv); if (interp != NULL) { - code = Tcl_EvalEx(interp, cmd, -1, 0); + code = Tcl_EvalEx(interp, cmd, TCL_INDEX_NONE, 0); } else { /* * this should not happen, but by definition of how async handlers are @@ -1174,8 +1188,8 @@ CmdDelProc1( void *clientData) /* String to save. */ { Tcl_DStringInit(&delString); - Tcl_DStringAppend(&delString, "CmdDelProc1 ", -1); - Tcl_DStringAppend(&delString, (char *) clientData, -1); + Tcl_DStringAppend(&delString, "CmdDelProc1 ", TCL_INDEX_NONE); + Tcl_DStringAppend(&delString, (char *) clientData, TCL_INDEX_NONE); } static void @@ -1183,8 +1197,8 @@ CmdDelProc2( void *clientData) /* String to save. */ { Tcl_DStringInit(&delString); - Tcl_DStringAppend(&delString, "CmdDelProc2 ", -1); - Tcl_DStringAppend(&delString, (char *) clientData, -1); + Tcl_DStringAppend(&delString, "CmdDelProc2 ", TCL_INDEX_NONE); + Tcl_DStringAppend(&delString, (char *) clientData, TCL_INDEX_NONE); } /* @@ -1304,7 +1318,7 @@ TestcmdtraceCmd( if (strcmp(argv[1], "tracetest") == 0) { Tcl_DStringInit(&buffer); cmdTrace = Tcl_CreateTrace(interp, 50000, CmdTraceProc, &buffer); - result = Tcl_EvalEx(interp, argv[2], -1, 0); + result = Tcl_EvalEx(interp, argv[2], TCL_INDEX_NONE, 0); if (result == TCL_OK) { Tcl_ResetResult(interp); Tcl_AppendResult(interp, Tcl_DStringValue(&buffer), NULL); @@ -1320,13 +1334,13 @@ TestcmdtraceCmd( */ cmdTrace = Tcl_CreateTrace(interp, 50000, CmdTraceDeleteProc, NULL); - Tcl_EvalEx(interp, argv[2], -1, 0); + Tcl_EvalEx(interp, argv[2], TCL_INDEX_NONE, 0); } else if (strcmp(argv[1], "leveltest") == 0) { Interp *iPtr = (Interp *) interp; Tcl_DStringInit(&buffer); cmdTrace = Tcl_CreateTrace(interp, iPtr->numLevels + 4, CmdTraceProc, &buffer); - result = Tcl_EvalEx(interp, argv[2], -1, 0); + result = Tcl_EvalEx(interp, argv[2], TCL_INDEX_NONE, 0); if (result == TCL_OK) { Tcl_ResetResult(interp); Tcl_AppendResult(interp, Tcl_DStringValue(&buffer), NULL); @@ -1344,7 +1358,7 @@ TestcmdtraceCmd( cmdTrace = Tcl_CreateObjTrace(interp, 50000, TCL_ALLOW_INLINE_COMPILATION, ObjTraceProc, &deleteCalled, ObjTraceDeleteProc); - result = Tcl_EvalEx(interp, argv[2], -1, 0); + result = Tcl_EvalEx(interp, argv[2], TCL_INDEX_NONE, 0); Tcl_DeleteTrace(interp, cmdTrace); if (!deleteCalled) { Tcl_AppendResult(interp, "Delete wasn't called", NULL); @@ -1358,7 +1372,7 @@ TestcmdtraceCmd( Tcl_DStringInit(&buffer); t1 = Tcl_CreateTrace(interp, 1, CmdTraceProc, &buffer); t2 = Tcl_CreateTrace(interp, 50000, CmdTraceProc, &buffer); - result = Tcl_EvalEx(interp, argv[2], -1, 0); + result = Tcl_EvalEx(interp, argv[2], TCL_INDEX_NONE, 0); if (result == TCL_OK) { Tcl_ResetResult(interp); Tcl_AppendResult(interp, Tcl_DStringValue(&buffer), NULL); @@ -1433,7 +1447,7 @@ ObjTraceProc( const char *word = 
Tcl_GetString(objv[0]); if (!strcmp(word, "Error")) { - Tcl_SetObjResult(interp, Tcl_NewStringObj(command, -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(command, TCL_INDEX_NONE)); return TCL_ERROR; } else if (!strcmp(word, "Break")) { return TCL_BREAK; @@ -1682,7 +1696,7 @@ DelDeleteProc( { DelCmd *dPtr = (DelCmd *)clientData; - Tcl_EvalEx(dPtr->interp, dPtr->deleteCmd, -1, 0); + Tcl_EvalEx(dPtr->interp, dPtr->deleteCmd, TCL_INDEX_NONE, 0); Tcl_ResetResult(dPtr->interp); ckfree(dPtr->deleteCmd); ckfree(dPtr); @@ -1797,7 +1811,7 @@ TestdoubledigitsObjCmd( type = types[type]; if (objc > 4) { if (strcmp(Tcl_GetString(objv[4]), "shorten")) { - Tcl_SetObjResult(interp, Tcl_NewStringObj("bad flag", -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj("bad flag", TCL_INDEX_NONE)); return TCL_ERROR; } type |= TCL_DD_SHORTEST; @@ -2041,7 +2055,7 @@ EncodingToUtfProc( TclEncoding *encodingPtr; encodingPtr = (TclEncoding *) clientData; - Tcl_EvalEx(encodingPtr->interp, encodingPtr->toUtfCmd, -1, TCL_EVAL_GLOBAL); + Tcl_EvalEx(encodingPtr->interp, encodingPtr->toUtfCmd, TCL_INDEX_NONE, TCL_EVAL_GLOBAL); len = strlen(Tcl_GetStringResult(encodingPtr->interp)); if (len > dstLen) { @@ -2073,7 +2087,7 @@ EncodingFromUtfProc( TclEncoding *encodingPtr; encodingPtr = (TclEncoding *) clientData; - Tcl_EvalEx(encodingPtr->interp, encodingPtr->fromUtfCmd, -1, TCL_EVAL_GLOBAL); + Tcl_EvalEx(encodingPtr->interp, encodingPtr->fromUtfCmd, TCL_INDEX_NONE, TCL_EVAL_GLOBAL); len = strlen(Tcl_GetStringResult(encodingPtr->interp)); if (len > dstLen) { @@ -3112,7 +3126,7 @@ TestlinkCmd( } } if (argv[6][0] != 0) { - tmp = Tcl_NewStringObj(argv[6], -1); + tmp = Tcl_NewStringObj(argv[6], TCL_INDEX_NONE); if (Tcl_GetWideIntFromObj(interp, tmp, &wideVar) != TCL_OK) { Tcl_DecrRefCount(tmp); return TCL_ERROR; @@ -3170,7 +3184,7 @@ TestlinkCmd( } if (argv[15][0]) { Tcl_WideInt w; - tmp = Tcl_NewStringObj(argv[15], -1); + tmp = Tcl_NewStringObj(argv[15], TCL_INDEX_NONE); if (Tcl_GetWideIntFromObj(interp, tmp, &w) != TCL_OK) { Tcl_DecrRefCount(tmp); return TCL_ERROR; @@ -3220,7 +3234,7 @@ TestlinkCmd( Tcl_UpdateLinkedVar(interp, "string"); } if (argv[6][0] != 0) { - tmp = Tcl_NewStringObj(argv[6], -1); + tmp = Tcl_NewStringObj(argv[6], TCL_INDEX_NONE); if (Tcl_GetWideIntFromObj(interp, tmp, &wideVar) != TCL_OK) { Tcl_DecrRefCount(tmp); return TCL_ERROR; @@ -3287,7 +3301,7 @@ TestlinkCmd( } if (argv[15][0]) { Tcl_WideInt w; - tmp = Tcl_NewStringObj(argv[15], -1); + tmp = Tcl_NewStringObj(argv[15], TCL_INDEX_NONE); if (Tcl_GetWideIntFromObj(interp, tmp, &w) != TCL_OK) { Tcl_DecrRefCount(tmp); return TCL_ERROR; @@ -3394,7 +3408,7 @@ TestlinkarrayCmd( return TCL_ERROR; } if (Tcl_GetIntFromObj(interp, objv[i++], &size) == TCL_ERROR) { - Tcl_SetObjResult(interp, Tcl_NewStringObj("wrong size value", -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj("wrong size value", TCL_INDEX_NONE)); return TCL_ERROR; } name = Tcl_GetString(objv[i++]); @@ -3406,7 +3420,7 @@ TestlinkarrayCmd( if (i < objc) { if (Tcl_GetWideIntFromObj(interp, objv[i], &addr) == TCL_ERROR) { Tcl_SetObjResult(interp, Tcl_NewStringObj( - "wrong address value", -1)); + "wrong address value", TCL_INDEX_NONE)); return TCL_ERROR; } } else { @@ -3425,6 +3439,158 @@ TestlinkarrayCmd( /* *---------------------------------------------------------------------- * + * TestlistrepCmd -- + * + * This function is invoked to generate a list object with a specific + * internal representation. + * + * Results: + * A standard Tcl result. + * + * Side effects: + * None. 
+ * + *---------------------------------------------------------------------- + */ + +static int +TestlistrepCmd( + TCL_UNUSED(void *), + Tcl_Interp *interp, /* Current interpreter. */ + int objc, /* Number of arguments. */ + Tcl_Obj *const objv[]) /* Argument objects. */ +{ + /* Subcommands supported by this command */ + const char* subcommands[] = { + "new", + "describe", + "config", + "validate", + NULL + }; + enum { + LISTREP_NEW, + LISTREP_DESCRIBE, + LISTREP_CONFIG, + LISTREP_VALIDATE + } cmdIndex; + Tcl_Obj *resultObj = NULL; + + if (objc < 2) { + Tcl_WrongNumArgs(interp, 1, objv, "command ?arg ...?"); + return TCL_ERROR; + } + if (Tcl_GetIndexFromObj( + interp, objv[1], subcommands, "command", 0, &cmdIndex) + != TCL_OK) { + return TCL_ERROR; + } + switch (cmdIndex) { + case LISTREP_NEW: + if (objc < 3 || objc > 5) { + Tcl_WrongNumArgs(interp, 2, objv, "length ?leadSpace endSpace?"); + return TCL_ERROR; + } else { + int length; + int leadSpace = 0; + int endSpace = 0; + if (Tcl_GetIntFromObj(interp, objv[2], &length) != TCL_OK) { + return TCL_ERROR; + } + if (objc > 3) { + if (Tcl_GetIntFromObj(interp, objv[3], &leadSpace) != TCL_OK) { + return TCL_ERROR; + } + if (objc > 4) { + if (Tcl_GetIntFromObj(interp, objv[4], &endSpace) + != TCL_OK) { + return TCL_ERROR; + } + } + } + resultObj = TclListTestObj(length, leadSpace, endSpace); + } + break; + + case LISTREP_DESCRIBE: +#define APPEND_FIELD(targetObj_, structPtr_, fld_) \ + do { \ + Tcl_ListObjAppendElement( \ + interp, (targetObj_), Tcl_NewStringObj(#fld_, TCL_INDEX_NONE)); \ + Tcl_ListObjAppendElement( \ + interp, (targetObj_), Tcl_NewWideIntObj((structPtr_)->fld_)); \ + } while (0) + if (objc != 3) { + Tcl_WrongNumArgs(interp, 2, objv, "object"); + return TCL_ERROR; + } else { + Tcl_Obj **objs; + ListSizeT nobjs; + ListRep listRep; + Tcl_Obj *listRepObjs[4]; + + /* Force list representation */ + if (Tcl_ListObjGetElements(interp, objv[2], &nobjs, &objs) != TCL_OK) { + return TCL_ERROR; + } + ListObjGetRep(objv[2], &listRep); + listRepObjs[0] = Tcl_NewStringObj("store", TCL_INDEX_NONE); + listRepObjs[1] = Tcl_NewListObj(12, NULL); + Tcl_ListObjAppendElement( + interp, listRepObjs[1], Tcl_NewStringObj("memoryAddress", TCL_INDEX_NONE)); + Tcl_ListObjAppendElement( + interp, listRepObjs[1], Tcl_ObjPrintf("%p", listRep.storePtr)); + APPEND_FIELD(listRepObjs[1], listRep.storePtr, firstUsed); + APPEND_FIELD(listRepObjs[1], listRep.storePtr, numUsed); + APPEND_FIELD(listRepObjs[1], listRep.storePtr, numAllocated); + APPEND_FIELD(listRepObjs[1], listRep.storePtr, refCount); + APPEND_FIELD(listRepObjs[1], listRep.storePtr, flags); + if (listRep.spanPtr) { + listRepObjs[2] = Tcl_NewStringObj("span", TCL_INDEX_NONE); + listRepObjs[3] = Tcl_NewListObj(8, NULL); + Tcl_ListObjAppendElement(interp, + listRepObjs[3], + Tcl_NewStringObj("memoryAddress", TCL_INDEX_NONE)); + Tcl_ListObjAppendElement( + interp, listRepObjs[3], Tcl_ObjPrintf("%p", listRep.spanPtr)); + APPEND_FIELD(listRepObjs[3], listRep.spanPtr, spanStart); + APPEND_FIELD( + listRepObjs[3], listRep.spanPtr, spanLength); + APPEND_FIELD(listRepObjs[3], listRep.spanPtr, refCount); + } + resultObj = Tcl_NewListObj(listRep.spanPtr ? 
4 : 2, listRepObjs); + } +#undef APPEND_FIELD + break; + + case LISTREP_CONFIG: + if (objc != 2) { + Tcl_WrongNumArgs(interp, 2, objv, "object"); + return TCL_ERROR; + } + resultObj = Tcl_NewListObj(2, NULL); + Tcl_ListObjAppendElement( + NULL, resultObj, Tcl_NewStringObj("LIST_SPAN_THRESHOLD", TCL_INDEX_NONE)); + Tcl_ListObjAppendElement( + NULL, resultObj, Tcl_NewWideIntObj(LIST_SPAN_THRESHOLD)); + break; + + case LISTREP_VALIDATE: + if (objc != 3) { + Tcl_WrongNumArgs(interp, 2, objv, "object"); + return TCL_ERROR; + } + TclListObjValidate(interp, objv[2]); /* Panics if invalid */ + resultObj = Tcl_NewObj(); + break; + } + Tcl_SetObjResult(interp, resultObj); + return TCL_OK; +} + +/* + *---------------------------------------------------------------------- + * * TestlocaleCmd -- * * This procedure implements the "testlocale" command. It is used @@ -3478,7 +3644,7 @@ TestlocaleCmd( } locale = setlocale(lcTypes[index], locale); if (locale) { - Tcl_SetStringObj(Tcl_GetObjResult(interp), locale, -1); + Tcl_SetStringObj(Tcl_GetObjResult(interp), locale, TCL_INDEX_NONE); } return TCL_OK; } @@ -3700,7 +3866,7 @@ PrintParse( break; } Tcl_ListObjAppendElement(NULL, objPtr, - Tcl_NewStringObj(typeString, -1)); + Tcl_NewStringObj(typeString, TCL_INDEX_NONE)); Tcl_ListObjAppendElement(NULL, objPtr, Tcl_NewStringObj(tokenPtr->start, tokenPtr->size)); Tcl_ListObjAppendElement(NULL, objPtr, @@ -3709,7 +3875,7 @@ PrintParse( Tcl_ListObjAppendElement(NULL, objPtr, parsePtr->commandStart ? Tcl_NewStringObj(parsePtr->commandStart + parsePtr->commandSize, - -1) : Tcl_NewObj()); + TCL_INDEX_NONE) : Tcl_NewObj()); } /* @@ -4028,7 +4194,7 @@ TestregexpObjCmd( char resinfo[TCL_INTEGER_SPACE * 2]; varName = Tcl_GetString(objv[2]); - TclRegExpRangeUniChar(regExpr, -1, &start, &end); + TclRegExpRangeUniChar(regExpr, TCL_INDEX_NONE, &start, &end); sprintf(resinfo, "%d %d", start, end-1); value = Tcl_SetVar2(interp, varName, NULL, resinfo, 0); if (value == NULL) { @@ -4068,15 +4234,15 @@ TestregexpObjCmd( Tcl_Obj *newPtr, *varPtr, *valuePtr; varPtr = objv[i]; - ii = ((cflags&REG_EXPECT) && i == objc-1) ? -1 : i; + ii = ((cflags&REG_EXPECT) && i == objc-1) ? 
TCL_INDEX_NONE : i; if (indices) { Tcl_Obj *objs[2]; - if (ii == -1) { + if (ii == TCL_INDEX_NONE) { TclRegExpRangeUniChar(regExpr, ii, &start, &end); } else if (ii > info.nsubs) { - start = -1; - end = -1; + start = TCL_INDEX_NONE; + end = TCL_INDEX_NONE; } else { start = info.matches[ii].start; end = info.matches[ii].end; @@ -4096,7 +4262,7 @@ TestregexpObjCmd( newPtr = Tcl_NewListObj(2, objs); } else { - if (ii == -1) { + if (ii == TCL_INDEX_NONE) { TclRegExpRangeUniChar(regExpr, ii, &start, &end); newPtr = Tcl_GetRange(objPtr, start, end); } else if (ii > info.nsubs || info.matches[ii].end <= 0) { @@ -4144,7 +4310,8 @@ TestregexpXflags( int *cflagsPtr, /* compile flags word */ int *eflagsPtr) /* exec flags word */ { - int i, cflags, eflags; + int i; + int cflags, eflags; cflags = *cflagsPtr; eflags = *eflagsPtr; @@ -4602,7 +4769,7 @@ TestfeventCmd( return TCL_ERROR; } if (interp2 != NULL) { - code = Tcl_EvalEx(interp2, argv[2], -1, TCL_EVAL_GLOBAL); + code = Tcl_EvalEx(interp2, argv[2], TCL_INDEX_NONE, TCL_EVAL_GLOBAL); Tcl_SetObjResult(interp, Tcl_GetObjResult(interp2)); return code; } else { @@ -4896,15 +5063,15 @@ GetTimesObjCmd( ckfree(objv); /* TclGetString 100000 times */ - fprintf(stderr, "TclGetStringFromObj of \"12345\" 100000 times\n"); - objPtr = Tcl_NewStringObj("12345", -1); + fprintf(stderr, "Tcl_GetStringFromObj of \"12345\" 100000 times\n"); + objPtr = Tcl_NewStringObj("12345", TCL_INDEX_NONE); Tcl_GetTime(&start); for (i = 0; i < 100000; i++) { (void) TclGetString(objPtr); } Tcl_GetTime(&stop); timePer = (stop.sec - start.sec)*1000000 + (stop.usec - start.usec); - fprintf(stderr, " %.3f usec per TclGetStringFromObj of \"12345\"\n", + fprintf(stderr, " %.3f usec per Tcl_GetStringFromObj of \"12345\"\n", timePer/100000); /* Tcl_GetIntFromObj 100000 times */ @@ -5244,7 +5411,7 @@ Testutf16stringObjCmd( } p = Tcl_GetUnicode(objv[1]); - Tcl_SetObjResult(interp, Tcl_NewUnicodeObj(p, -1)); + Tcl_SetObjResult(interp, Tcl_NewUnicodeObj(p, TCL_INDEX_NONE)); return TCL_OK; } @@ -5382,7 +5549,7 @@ TestsaveresultCmd( } freeCount = 0; - objPtr = NULL; /* Lint. 
*/ + objPtr = NULL; switch ((enum options) index) { case RESULT_SMALL: Tcl_AppendResult(interp, "small result", NULL); @@ -5401,7 +5568,7 @@ TestsaveresultCmd( Tcl_SetResult(interp, (char *)"dynamic result", TestsaveresultFree); break; case RESULT_OBJECT: - objPtr = Tcl_NewStringObj("object result", -1); + objPtr = Tcl_NewStringObj("object result", TCL_INDEX_NONE); Tcl_SetObjResult(interp, objPtr); break; } @@ -5411,7 +5578,7 @@ TestsaveresultCmd( if (((enum options) index) == RESULT_OBJECT) { result = Tcl_EvalObjEx(interp, objv[2], 0); } else { - result = Tcl_EvalEx(interp, Tcl_GetString(objv[2]), -1, 0); + result = Tcl_EvalEx(interp, Tcl_GetString(objv[2]), TCL_INDEX_NONE, 0); } if (discard) { @@ -5663,7 +5830,7 @@ TestChannelCmd( if ((cmdName[0] == 's') && (strncmp(cmdName, "setchannelerror", len) == 0)) { - Tcl_Obj *msg = Tcl_NewStringObj(argv[3],-1); + Tcl_Obj *msg = Tcl_NewStringObj(argv[3], TCL_INDEX_NONE); Tcl_IncrRefCount(msg); Tcl_SetChannelError(chan, msg); @@ -5676,7 +5843,7 @@ TestChannelCmd( } if ((cmdName[0] == 's') && (strncmp(cmdName, "setchannelerrorinterp", len) == 0)) { - Tcl_Obj *msg = Tcl_NewStringObj(argv[3],-1); + Tcl_Obj *msg = Tcl_NewStringObj(argv[3], TCL_INDEX_NONE); Tcl_IncrRefCount(msg); Tcl_SetChannelErrorInterp(interp, msg); @@ -6024,7 +6191,7 @@ TestChannelCmd( } return TclChannelTransform(interp, chan, - Tcl_NewStringObj(argv[4], -1)); + Tcl_NewStringObj(argv[4], TCL_INDEX_NONE)); } if ((cmdName[0] == 'u') && (strncmp(cmdName, "unstack", len) == 0)) { @@ -6115,7 +6282,7 @@ TestChannelEventCmd( esPtr->chanPtr = chanPtr; esPtr->interp = interp; esPtr->mask = mask; - esPtr->scriptPtr = Tcl_NewStringObj(argv[4], -1); + esPtr->scriptPtr = Tcl_NewStringObj(argv[4], TCL_INDEX_NONE); Tcl_IncrRefCount(esPtr->scriptPtr); Tcl_CreateChannelHandler((Tcl_Channel) chanPtr, mask, @@ -6182,10 +6349,10 @@ TestChannelEventCmd( esPtr = esPtr->nextPtr) { if (esPtr->mask) { Tcl_ListObjAppendElement(interp, resultListPtr, Tcl_NewStringObj( - (esPtr->mask == TCL_READABLE) ? "readable" : "writable", -1)); + (esPtr->mask == TCL_READABLE) ? "readable" : "writable", TCL_INDEX_NONE)); } else { Tcl_ListObjAppendElement(interp, resultListPtr, - Tcl_NewStringObj("none", -1)); + Tcl_NewStringObj("none", TCL_INDEX_NONE)); } Tcl_ListObjAppendElement(interp, resultListPtr, esPtr->scriptPtr); } @@ -6397,16 +6564,12 @@ TestWrongNumArgsObjCmd( int objc, /* Number of arguments. */ Tcl_Obj *const objv[]) /* Argument objects. */ { - int i, length; + int i; + int length; const char *msg; if (objc < 3) { - /* - * Don't use Tcl_WrongNumArgs here, as that is the function - * we want to test! - */ - Tcl_AppendResult(interp, "insufficient arguments", NULL); - return TCL_ERROR; + goto insufArgs; } if (Tcl_GetIntFromObj(interp, objv[1], &i) != TCL_OK) { @@ -6422,6 +6585,7 @@ TestWrongNumArgsObjCmd( /* * Asked for more arguments than were given. */ + insufArgs: Tcl_AppendResult(interp, "insufficient arguments", NULL); return TCL_ERROR; } @@ -6532,7 +6696,7 @@ TestFilesystemObjCmd( res = Tcl_FSUnregister(&testReportingFilesystem); msg = (res == TCL_OK) ? 
"unregistered" : "failed"; } - Tcl_SetObjResult(interp, Tcl_NewStringObj(msg , -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(msg , TCL_INDEX_NONE)); return res; } @@ -6614,7 +6778,7 @@ TestReport( Tcl_DString ds; Tcl_DStringInit(&ds); - Tcl_DStringAppend(&ds, "lappend filesystemReport ", -1); + Tcl_DStringAppend(&ds, "lappend filesystemReport ", TCL_INDEX_NONE); Tcl_DStringStartSublist(&ds); Tcl_DStringAppendElement(&ds, cmd); if (path != NULL) { @@ -6627,7 +6791,7 @@ TestReport( savedResult = Tcl_GetObjResult(interp); Tcl_IncrRefCount(savedResult); Tcl_SetObjResult(interp, Tcl_NewObj()); - Tcl_EvalEx(interp, Tcl_DStringValue(&ds), -1, 0); + Tcl_EvalEx(interp, Tcl_DStringValue(&ds), TCL_INDEX_NONE, 0); Tcl_DStringFree(&ds); Tcl_ResetResult(interp); Tcl_SetObjResult(interp, savedResult); @@ -6903,7 +7067,7 @@ TestSimpleFilesystemObjCmd( res = Tcl_FSUnregister(&simpleFilesystem); msg = (res == TCL_OK) ? "unregistered" : "failed"; } - Tcl_SetObjResult(interp, Tcl_NewStringObj(msg , -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(msg , TCL_INDEX_NONE)); return res; } @@ -6930,7 +7094,7 @@ SimpleRedirect( Tcl_IncrRefCount(pathPtr); return pathPtr; } - origPtr = Tcl_NewStringObj(str+10,-1); + origPtr = Tcl_NewStringObj(str+10, TCL_INDEX_NONE); Tcl_IncrRefCount(origPtr); return origPtr; } @@ -7030,7 +7194,7 @@ SimpleListVolumes(void) /* Add one new volume */ Tcl_Obj *retVal; - retVal = Tcl_NewStringObj("simplefs:/", -1); + retVal = Tcl_NewStringObj("simplefs:/", TCL_INDEX_NONE); Tcl_IncrRefCount(retVal); return retVal; } @@ -7150,7 +7314,7 @@ TestNumUtfCharsCmd( Tcl_Obj *const objv[]) { if (objc > 1) { - int numBytes, len, limit = -1; + int numBytes, len, limit = TCL_INDEX_NONE; const char *bytes = Tcl_GetStringFromObj(objv[1], &numBytes); if (objc > 2) { @@ -7184,7 +7348,7 @@ TestFindFirstCmd( if (objc > 2) { (void) Tcl_GetIntFromObj(interp, objv[2], &len); } - Tcl_SetObjResult(interp, Tcl_NewStringObj(Tcl_UtfFindFirst(Tcl_GetString(objv[1]), len), -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(Tcl_UtfFindFirst(Tcl_GetString(objv[1]), len), TCL_INDEX_NONE)); } return TCL_OK; } @@ -7206,7 +7370,7 @@ TestFindLastCmd( if (objc > 2) { (void) Tcl_GetIntFromObj(interp, objv[2], &len); } - Tcl_SetObjResult(interp, Tcl_NewStringObj(Tcl_UtfFindLast(Tcl_GetString(objv[1]), len), -1)); + Tcl_SetObjResult(interp, Tcl_NewStringObj(Tcl_UtfFindLast(Tcl_GetString(objv[1]), len), TCL_INDEX_NONE)); } return TCL_OK; } @@ -7283,7 +7447,7 @@ TestcpuidCmd( status = TclWinCPUID(index, regs); if (status != TCL_OK) { Tcl_SetObjResult(interp, - Tcl_NewStringObj("operation not available", -1)); + Tcl_NewStringObj("operation not available", TCL_INDEX_NONE)); return status; } for (i=0 ; i<4 ; ++i) { @@ -7329,7 +7493,7 @@ TestHashSystemHashCmd( hPtr = Tcl_CreateHashEntry(&hash, INT2PTR(i), &isNew); if (!isNew) { Tcl_SetObjResult(interp, Tcl_NewWideIntObj(i)); - Tcl_AppendToObj(Tcl_GetObjResult(interp)," creation problem",-1); + Tcl_AppendToObj(Tcl_GetObjResult(interp)," creation problem", TCL_INDEX_NONE); Tcl_DeleteHashTable(&hash); return TCL_ERROR; } @@ -7346,13 +7510,13 @@ TestHashSystemHashCmd( hPtr = Tcl_FindHashEntry(&hash, (char *) INT2PTR(i)); if (hPtr == NULL) { Tcl_SetObjResult(interp, Tcl_NewWideIntObj(i)); - Tcl_AppendToObj(Tcl_GetObjResult(interp)," lookup problem",-1); + Tcl_AppendToObj(Tcl_GetObjResult(interp)," lookup problem", TCL_INDEX_NONE); Tcl_DeleteHashTable(&hash); return TCL_ERROR; } if (PTR2INT(Tcl_GetHashValue(hPtr)) != i+42) { Tcl_SetObjResult(interp, Tcl_NewWideIntObj(i)); - 
Tcl_AppendToObj(Tcl_GetObjResult(interp)," value problem",-1); + Tcl_AppendToObj(Tcl_GetObjResult(interp)," value problem", TCL_INDEX_NONE); Tcl_DeleteHashTable(&hash); return TCL_ERROR; } @@ -7435,9 +7599,9 @@ NREUnwind_callback( &none, NULL); } else { Tcl_Obj *idata[3]; - idata[0] = Tcl_NewWideIntObj((int) ((char *) data[1] - (char *) data[0])); - idata[1] = Tcl_NewWideIntObj((int) ((char *) data[2] - (char *) data[0])); - idata[2] = Tcl_NewWideIntObj((int) ((char *) &none - (char *) data[0])); + idata[0] = Tcl_NewWideIntObj(((char *) data[1] - (char *) data[0])); + idata[1] = Tcl_NewWideIntObj(((char *) data[2] - (char *) data[0])); + idata[2] = Tcl_NewWideIntObj(((char *) &none - (char *) data[0])); Tcl_SetObjResult(interp, Tcl_NewListObj(3, idata)); } return TCL_OK; @@ -7535,15 +7699,15 @@ TestconcatobjCmd( */ Tcl_SetObjResult(interp, - Tcl_NewStringObj("Tcl_ConcatObj is unsafe:", -1)); + Tcl_NewStringObj("Tcl_ConcatObj is unsafe:", TCL_INDEX_NONE)); emptyPtr = Tcl_NewObj(); - list1Ptr = Tcl_NewStringObj("foo bar sum", -1); + list1Ptr = Tcl_NewStringObj("foo bar sum", TCL_INDEX_NONE); Tcl_ListObjLength(NULL, list1Ptr, &len); Tcl_InvalidateStringRep(list1Ptr); - list2Ptr = Tcl_NewStringObj("eeny meeny", -1); + list2Ptr = Tcl_NewStringObj("eeny meeny", TCL_INDEX_NONE); Tcl_ListObjLength(NULL, list2Ptr, &len); Tcl_InvalidateStringRep(list2Ptr); @@ -8106,7 +8270,7 @@ InterpCompiledVarResolver( resVarInfo->vInfo.fetchProc = MyCompiledVarFetch; resVarInfo->vInfo.deleteProc = MyCompiledVarFree; resVarInfo->var = NULL; - resVarInfo->nameObj = Tcl_NewStringObj(name, -1); + resVarInfo->nameObj = Tcl_NewStringObj(name, TCL_INDEX_NONE); Tcl_IncrRefCount(resVarInfo->nameObj); *rPtr = &resVarInfo->vInfo; return TCL_OK; @@ -8190,12 +8354,12 @@ int TestApplyLambdaObjCmd ( /* Create a lambda {{} {set a 42}} */ lambdaObjs[0] = Tcl_NewObj(); /* No parameters */ - lambdaObjs[1] = Tcl_NewStringObj("set a 42", -1); /* Body */ + lambdaObjs[1] = Tcl_NewStringObj("set a 42", TCL_INDEX_NONE); /* Body */ lambdaObj = Tcl_NewListObj(2, lambdaObjs); Tcl_IncrRefCount(lambdaObj); /* Create the command "apply {{} {set a 42}" */ - evalObjs[0] = Tcl_NewStringObj("apply", -1); + evalObjs[0] = Tcl_NewStringObj("apply", TCL_INDEX_NONE); Tcl_IncrRefCount(evalObjs[0]); /* * NOTE: IMPORTANT TO EXHIBIT THE BUG. We duplicate the lambda because diff --git a/tests-perf/comparePerf.tcl b/tests-perf/comparePerf.tcl new file mode 100644 index 0000000..f35da21 --- /dev/null +++ b/tests-perf/comparePerf.tcl @@ -0,0 +1,371 @@ +#!/usr/bin/tclsh +# ------------------------------------------------------------------------ +# +# comparePerf.tcl -- +# +# Script to compare performance data from multiple runs. +# +# ------------------------------------------------------------------------ +# +# See the file "license.terms" for information on usage and redistribution +# of this file. +# +# Usage: +# tclsh comparePerf.tcl [--regexp RE] [--ratio time|rate] [--combine] [--base BASELABEL] PERFFILE ... +# +# The test data from each input file is tabulated so as to compare the results +# of test runs. If a PERFFILE does not exist, it is retried by adding the +# .perf extension. If the --regexp is specified, only test results whose +# id matches RE are examined. +# +# If the --combine option is specified, results of test sets with the same +# label are combined and averaged in the output. +# +# If the --base option is specified, the BASELABEL is used as the label to use +# the base timing. Otherwise, the label of the first data file is used. 
+# +# If --ratio option is "time" the ratio of test timing vs base test timing +# is shown. If "rate" (default) the inverse is shown. +# +# If --no-header is specified, the header describing test configuration is +# not output. +# +# The format of input files is as follows: +# +# Each line must begin with one of the characters below followed by a space +# followed by a string whose semantics depend on the initial character. +# E - Full path to the Tcl executable that was used to generate the file +# V - The Tcl patchlevel of the implementation +# D - A description for the test run for human consumption +# L - A label used to identify run environment. The --combine option will +# average all measuremets that have the same label. An input file without +# a label is treated as having a unique label and not combined with any other. +# P - A test measurement (see below) +# R - The number of runs made for the each test +# # - A comment, may be an arbitrary string. Usually included in performance +# data to describe the test. This is silently ignored +# +# Any lines not matching one of the above are ignored with a warning to stderr. +# +# A line beginning with the "P" marker is a test measurement. The first word +# following is a floating point number representing the test runtime. +# The remaining line (after trimming of whitespace) is the id of the test. +# Test generators are encouraged to make the id a well-defined machine-parseable +# as well human readable description of the test. The id must not appear more +# than once. An example test measurement line: +# P 2.32280 linsert in unshared L[10000] 1 elems 10000 times at 0 (var) +# Note here the iteration count is not present. +# + +namespace eval perf::compare { + # List of dictionaries, one per input file + variable PerfData +} + +proc perf::compare::warn {message} { + puts stderr "Warning: $message" +} +proc perf::compare::print {text} { + puts stdout $text +} +proc perf::compare::slurp {testrun_path} { + variable PerfData + + set runtimes [dict create] + + set path [file normalize $testrun_path] + set fd [open $path] + array set header {} + while {[gets $fd line] >= 0} { + set line [regsub -all {\s+} [string trim $line] " "] + switch -glob -- $line { + "#*" { + # Skip comments + } + "R *" - + "L *" - + "D *" - + "V *" - + "T *" - + "E *" { + set marker [lindex $line 0] + if {[info exists header($marker)]} { + warn "Ignoring $marker record (duplicate): \"$line\"" + } + set header($marker) [string range $line 2 end] + } + "P *" { + if {[scan $line "P %f %n" runtime id_start] == 2} { + set id [string range $line $id_start end] + if {[dict exists $runtimes $id]} { + warn "Ignoring duplicate test id \"$id\"" + } else { + dict set runtimes $id $runtime + } + } else { + warn "Invalid test result line format: \"$line\"" + } + } + default { + puts stderr "Warning: ignoring unrecognized line \"$line\"" + } + } + } + close $fd + + set result [dict create Input $path Runtimes $runtimes] + foreach {c k} { + L Label + V Version + E Executable + D Description + } { + if {[info exists header($c)]} { + dict set result $k $header($c) + } + } + + return $result +} + +proc perf::compare::burp {test_sets} { + variable Options + + # Print the key for each test run + set header " " + set separator " " + foreach test_set $test_sets { + set test_set_key "\[[incr test_set_num]\]" + if {! 
$Options(--no-header)} { + print "$test_set_key" + foreach k {Label Executable Version Input Description} { + if {[dict exists $test_set $k]} { + print "$k: [dict get $test_set $k]" + } + } + } + append header $test_set_key $separator + set separator " "; # Expand because later columns have ratio + } + set header [string trimright $header] + + if {! $Options(--no-header)} { + print "" + if {$Options(--ratio) eq "rate"} { + set ratio_description "ratio of baseline to the measurement (higher is faster)." + } else { + set ratio_description "ratio of measurement to the baseline (lower is faster)." + } + print "The first column \[1\] is the baseline measurement." + print "Subsequent columns are pairs of the additional measurement and " + print $ratio_description + print "" + } + + # Print the actual test run data + + print $header + set test_sets [lassign $test_sets base_set] + set fmt {%#10.5f} + set fmt_ratio {%-6.2f} + foreach {id base_runtime} [dict get $base_set Runtimes] { + if {[info exists Options(--regexp)]} { + if {![regexp $Options(--regexp) $id]} { + continue + } + } + if {$Options(--print-test-number)} { + set line "[format %-4s [incr counter].]" + } else { + set line "" + } + append line [format $fmt $base_runtime] + foreach test_set $test_sets { + if {[dict exists $test_set Runtimes $id]} { + set runtime [dict get $test_set Runtimes $id] + if {$Options(--ratio) eq "time"} { + if {$base_runtime != 0} { + set ratio [format $fmt_ratio [expr {$runtime/$base_runtime}]] + } else { + if {$runtime == 0} { + set ratio "NaN " + } else { + set ratio "Inf " + } + } + } else { + if {$runtime != 0} { + set ratio [format $fmt_ratio [expr {$base_runtime/$runtime}]] + } else { + if {$base_runtime == 0} { + set ratio "NaN " + } else { + set ratio "Inf " + } + } + } + append line "|" [format $fmt $runtime] "|" $ratio + } else { + append line [string repeat { } 11] + } + } + append line "|" $id + print $line + } +} + +proc perf::compare::chew {test_sets} { + variable Options + + # Combine test sets that have the same label, averaging the values + set unlabeled_sets {} + array set labeled_sets {} + + foreach test_set $test_sets { + # If there is no label, treat as independent set + if {![dict exists $test_set Label]} { + lappend unlabeled_sets $test_set + } else { + lappend labeled_sets([dict get $test_set Label]) $test_set + } + } + + foreach label [array names labeled_sets] { + set combined_set [lindex $labeled_sets($label) 0] + set runtimes [dict get $combined_set Runtimes] + foreach test_set [lrange $labeled_sets($label) 1 end] { + dict for {id timing} [dict get $test_set Runtimes] { + dict lappend runtimes $id $timing + } + } + dict for {id timings} $runtimes { + set total [tcl::mathop::+ {*}$timings] + dict set runtimes $id [expr {$total/[llength $timings]}] + } + dict set combined_set Runtimes $runtimes + set labeled_sets($label) $combined_set + } + + # Choose the "base" test set + if {![info exists Options(--base)]} { + set first_set [lindex $test_sets 0] + if {[dict exists $first_set Label]} { + # Use label of first as the base + set Options(--base) [dict get $first_set Label] + } + } + + if {[info exists Options(--base)] && $Options(--base) ne ""} { + lappend combined_sets $labeled_sets($Options(--base));# Will error if no such + unset labeled_sets($Options(--base)) + } else { + lappend combined_sets [lindex $unlabeled_sets 0] + set unlabeled_sets [lrange $unlabeled_sets 1 end] + } + foreach label [array names labeled_sets] { + lappend combined_sets $labeled_sets($label) + } + lappend 
combined_sets {*}$unlabeled_sets + + return $combined_sets +} + +proc perf::compare::setup {argv} { + variable Options + + array set Options { + --ratio rate + --combine 0 + --print-test-number 0 + --no-header 0 + } + while {[llength $argv]} { + set argv [lassign $argv arg] + switch -glob -- $arg { + -r - + --regexp { + if {[llength $argv] == 0} { + error "Missing value for option $arg" + } + set argv [lassign $argv val] + set Options(--regexp) $val + } + --ratio { + if {[llength $argv] == 0} { + error "Missing value for option $arg" + } + set argv [lassign $argv val] + if {$val ni {time rate}} { + error "Value for option $arg must be either \"time\" or \"rate\"" + } + set Options(--ratio) $val + } + --print-test-number - + --combine - + --no-header { + set Options($arg) 1 + } + --base { + if {[llength $argv] == 0} { + error "Missing value for option $arg" + } + set argv [lassign $argv val] + set Options($arg) $val + } + -- { + # Remaining will be passed back to the caller + break + } + --* { + error "Unknown option $arg" + } + -* { + error "Unknown option -[lindex $arg 0]" + } + default { + # Remaining will be passed back to the caller + set argv [linsert $argv 0 $arg] + break; + } + } + } + + set paths {} + foreach path $argv { + set path [file join $path]; # Convert from native else glob fails + if {[file isfile $path]} { + lappend paths $path + continue + } + if {[file isfile $path.perf]} { + lappend paths $path.perf + continue + } + lappend paths {*}[glob -nocomplain $path] + } + return $paths +} +proc perf::compare::main {} { + variable Options + + set paths [setup $::argv] + if {[llength $paths] == 0} { + error "No test data files specified." + } + set test_data [list ] + set seen [dict create] + foreach path $paths { + if {![dict exists $seen $path]} { + lappend test_data [slurp $path] + dict set seen $path "" + } + } + + if {$Options(--combine)} { + set test_data [chew $test_data] + } + + burp $test_data +} + +perf::compare::main diff --git a/tests-perf/listPerf.tcl b/tests-perf/listPerf.tcl new file mode 100644 index 0000000..17f22e9 --- /dev/null +++ b/tests-perf/listPerf.tcl @@ -0,0 +1,1290 @@ +#!/usr/bin/tclsh +# ------------------------------------------------------------------------ +# +# listPerf.tcl -- +# +# This file provides performance tests for list operations. +# +# ------------------------------------------------------------------------ +# +# See the file "license.terms" for information on usage and redistribution +# of this file. +# +# Note: this file does not use the test-performance.tcl framework as we want +# more direct control over timerate options. 
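[Editor's note — usage sketch, not part of the commit. The option names below are the ones documented in the headers of comparePerf.tcl and listPerf.tcl; the output file names, the shell redirection, and running the scripts under two different builds are illustrative assumptions.] The intended workflow is to run listPerf.tcl once per Tcl build of interest, giving each run a label, and then feed the resulting .perf files to comparePerf.tcl, e.g.:

    # under the baseline build
    tclsh tests-perf/listPerf.tcl --label baseline > baseline.perf
    # under the build being evaluated
    tclsh tests-perf/listPerf.tcl --label patched > patched.perf
    # tabulate, using the baseline label for the reference column
    tclsh tests-perf/comparePerf.tcl --ratio rate --base baseline baseline.perf patched.perf

Per the comparePerf.tcl header, several runs carrying the same --label can be averaged together by adding --combine.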
+ +catch {package require twapi} + +namespace eval perf::list { + variable perfScript [file normalize [info script]] + + # Test for each of these lengths + variable Lengths {10 100 1000 10000} + + variable RunTimes + set RunTimes(command) 0.0 + set RunTimes(total) 0.0 + + variable Options + array set Options { + --print-comments 0 + --print-iterations 0 + } + + # Procs used for calibrating overhead + proc proc2args {a b} {} + proc proc3args {a b c} {} + + proc print {s} { + puts $s + } + proc print_usage {} { + puts stderr "Usage: [file tail [info nameofexecutable]] $::argv0 \[options\] \[command ...\]" + puts stderr "\t--description DESC\tHuman readable description of test run" + puts stderr "\t--label LABEL\tA label used to identify test environment" + puts stderr "\t--print-comments\tPrint comment for each test" + puts stderr "\t--print-iterations\tPrint number of iterations run for each test" + } + + proc setup {argv} { + variable Options + variable Lengths + + while {[llength $argv]} { + set argv [lassign $argv arg] + switch -glob -- $arg { + --print-comments - + --print-iterations { + set Options($arg) 1 + } + --label - + --description { + if {[llength $argv] == 0} { + error "Missing value for option $arg" + } + set argv [lassign $argv val] + set Options($arg) $val + } + --lengths { + if {[llength $argv] == 0} { + error "Missing value for option $arg" + } + set argv [lassign $argv val] + set Lengths $val + } + -- { + # Remaining will be passed back to the caller + break + } + --* { + error "Unknown option $arg" + } + default { + # Remaining will be passed back to the caller + set argv [linsert $argv 0 $arg] + break; + } + } + } + + return $argv + } + proc format_timings {us iters} { + variable Options + if {!$Options(--print-iterations)} { + return "[format {%#10.4f} $us]" + } + return "[format {%#10.4f} $us] [format {%8d} $iters]" + } + proc measure {id script args} { + variable NullOverhead + variable RunTimes + variable Options + + set opts(-overhead) "" + set opts(-runs) 5 + while {[llength $args]} { + set args [lassign $args opt] + if {[llength $args] == 0} { + error "No argument supplied for $opt option. Test: $id" + } + set args [lassign $args val] + switch $opt { + -setup - + -cleanup - + -overhead - + -time - + -runs - + -reps { + set opts($opt) $val + } + default { + error "Unknown option $opt. Test: $id" + } + } + } + + set timerate_args {} + if {[info exists opts(-time)]} { + lappend timerate_args $opts(-time) + } + if {[info exists opts(-reps)]} { + if {[info exists opts(-time)]} { + set timerate_args [list $opts(-time) $opts(-reps)] + } else { + # Force the default for first time option + set timerate_args [list 1000 $opts(-reps)] + } + } elseif {[info exists opts(-time)]} { + set timerate_args [list $opts(-time)] + } + if {[info exists opts(-setup)]} { + uplevel 1 $opts(-setup) + } + # Cache the empty overhead to prevent unnecessary delays. Note if you modify + # to cache other scripts, the cache key must be AFTER substituting the + # overhead script in the caller's context. 
+ if {$opts(-overhead) eq ""} { + if {![info exists NullOverhead]} { + set NullOverhead [lindex [timerate {}] 0] + } + set overhead_us $NullOverhead + } else { + # The overhead measurements might use setup so we need to setup + # first and then cleanup in preparation for setting up again for + # the script to be measured + if {[info exists opts(-setup)]} { + uplevel 1 $opts(-setup) + } + set overhead_us [lindex [uplevel 1 [list timerate $opts(-overhead)]] 0] + if {[info exists opts(-cleanup)]} { + uplevel 1 $opts(-cleanup) + } + } + set timings {} + for {set i 0} {$i < $opts(-runs)} {incr i} { + if {[info exists opts(-setup)]} { + uplevel 1 $opts(-setup) + } + lappend timings [uplevel 1 [list timerate -overhead $overhead_us $script {*}$timerate_args]] + if {[info exists opts(-cleanup)]} { + uplevel 1 $opts(-cleanup) + } + } + set timings [lsort -real -index 0 $timings] + if {$opts(-runs) > 15} { + set ignore [expr {$opts(-runs)/8}] + } elseif {$opts(-runs) >= 5} { + set ignore 2 + } else { + set ignore 0 + } + # Ignore highest and lowest + set timings [lrange $timings 0 end-$ignore] + # Average it out + set us 0 + set iters 0 + foreach timing $timings { + set us [expr {$us + [lindex $timing 0]}] + set iters [expr {$iters + [lindex $timing 2]}] + } + set us [expr {$us/[llength $timings]}] + set iters [expr {$iters/[llength $timings]}] + + set RunTimes(command) [expr {$RunTimes(command) + $us}] + print "P [format_timings $us $iters] $id" + } + proc comment {args} { + variable Options + if {$Options(--print-comments)} { + print "# [join $args { }]" + } + } + proc spanned_list {len} { + # Note - for small len, this will not create a spanned list + set delta [expr {$len/8}] + return [lrange [lrepeat [expr {$len+(2*$delta)}] a] $delta [expr {$delta+$len-1}]] + } + proc print_separator {command} { + comment [string repeat = 80] + comment Command: $command + } + + oo::class create ListPerf { + constructor {args} { + my variable Opts + # Note default Opts can be overridden in construct as well as in measure + set Opts [dict merge { + -setup { + set L [lrepeat $len a] + set Lspan [perf::list::spanned_list $len] + } -cleanup { + unset -nocomplain L + unset -nocomplain Lspan + unset -nocomplain L2 + } + } $args] + } + method measure {comment script locals args} { + my variable Opts + dict with locals {} + ::perf::list::measure $comment $script {*}[dict merge $Opts $args] + } + method option {opt val} { + my variable Opts + dict set Opts $opt $val + } + method option_unset {opt} { + my variable Opts + unset -nocomplain Opts($opt) + } + } + + proc linsert_describe {share_mode len at num iters} { + return "linsert L\[$len\] $share_mode $num elems $iters times at $at" + } + proc linsert_perf {} { + variable Lengths + + print_separator linsert + + ListPerf create perf -overhead {set L {}} -time 1000 + + # Note: Const indices take different path through bytecode than variable + # indices hence separate cases below + + + # Var case + foreach share_mode {shared unshared} { + set idx 0 + if {$share_mode eq "shared"} { + comment == Insert into empty lists + comment Insert one element into empty list + measure [linsert_describe shared 0 "0 (var)" 1 1] {linsert $L $idx ""} -setup {set idx 0; set L {}} + } else { + comment == Insert into empty lists + comment Insert one element into empty list + measure [linsert_describe unshared 0 "0 (var)" 1 1] {linsert {} $idx ""} -setup {set idx 0} + } + foreach idx_str [list 0 1 mid end-1 end] { + foreach len $Lengths { + if {$idx_str eq "mid"} { + set idx [expr {$len/2}] + } 
else { + set idx $idx_str + } + # perf option -reps $reps + set reps 1000 + if {$share_mode eq "shared"} { + comment Insert once to shared list with variable index + perf measure [linsert_describe shared $len "$idx (var)" 1 1] \ + {linsert $L $idx x} [list len $len idx $idx] -overhead {} -reps 100000 + + comment Insert multiple times to shared list with variable index + perf measure [linsert_describe shared $len "$idx (var)" 1 $reps] { + set L [linsert $L $idx X] + } [list len $len idx $idx] -reps $reps + + comment Insert multiple items multiple times to shared list with variable index + perf measure [linsert_describe shared $len "$idx (var)" 5 $reps] { + set L [linsert $L $idx X X X X X] + } [list len $len idx $idx] -reps $reps + } else { + # NOTE : the Insert once case is left out for unshared lists + # because it requires re-init on every iteration resulting + # in a lot of measurement noise + comment Insert multiple times to unshared list with variable index + perf measure [linsert_describe unshared $len "$idx (var)" 1 $reps] { + set L [linsert $L[set L {}] $idx X] + } [list len $len idx $idx] -reps $reps + comment Insert multiple items multiple times to unshared list with variable index + perf measure [linsert_describe unshared $len "$idx (var)" 5 $reps] { + set L [linsert $L[set L {}] $idx X X X X X] + } [list len $len idx $idx] -reps $reps + } + } + } + } + + # Const index + foreach share_mode {shared unshared} { + if {$share_mode eq "shared"} { + comment == Insert into empty lists + comment Insert one element into empty list + measure [linsert_describe shared 0 "0 (const)" 1 1] {linsert $L 0 ""} -setup {set L {}} + } else { + comment == Insert into empty lists + comment Insert one element into empty list + measure [linsert_describe unshared 0 "0 (const)" 1 1] {linsert {} 0 ""} + } + foreach idx_str [list 0 1 mid end end-1] { + foreach len $Lengths { + # Note end, end-1 explicitly calculated as otherwise they + # are not treated as const + if {$idx_str eq "mid"} { + set idx [expr {$len/2}] + } elseif {$idx_str eq "end"} { + set idx [expr {$len-1}] + } elseif {$idx_str eq "end-1"} { + set idx [expr {$len-2}] + } else { + set idx $idx_str + } + #perf option -reps $reps + set reps 100 + if {$share_mode eq "shared"} { + comment Insert once to shared list with const index + perf measure [linsert_describe shared $len "$idx (const)" 1 1] \ + "linsert \$L $idx x" [list len $len] -overhead {} -reps 10000 + + comment Insert multiple times to shared list with const index + perf measure [linsert_describe shared $len "$idx (const)" 1 $reps] \ + "set L \[linsert \$L $idx X\]" [list len $len] -reps $reps + + comment Insert multiple items multiple times to shared list with const index + perf measure [linsert_describe shared $len "$idx (const)" 5 $reps] \ + "set L \[linsert \$L $idx X X X X X\]" [list len $len] -reps $reps + } else { + comment Insert multiple times to unshared list with const index + perf measure [linsert_describe unshared $len "$idx (const)" 1 $reps] \ + "set L \[linsert \$L\[set L {}\] $idx X]" [list len $len] -reps $reps + + comment Insert multiple items multiple times to unshared list with const index + perf measure [linsert_describe unshared $len "$idx (const)" 5 $reps] \ + "set L \[linsert \$L\[set L {}\] $idx X X X X X]" [list len $len] -reps $reps + } + } + } + } + + # Note: no span tests because the inserts above will themselves create + # spanned lists + + perf destroy + } + + proc list_describe {len text} { + return "list L\[$len\] $text" + } + proc list_perf {} { + 
variable Lengths + + print_separator list + + ListPerf create perf + foreach len $Lengths { + set s [join [lrepeat $len x]] + comment Create a list from a string + perf measure [list_describe $len "from a string"] {list $s} [list s $s len $len] + } + foreach len $Lengths { + comment Create a list from expansion - single list (special optimal case) + perf measure [list_describe $len "from a {*}list"] {list {*}$L} [list len $len] + comment Create a list from two lists - real test of expansion speed + perf measure [list_describe $len "from a {*}list {*}list"] {list {*}$L {*}$L} [list len [expr {$len/2}]] + } + } + + proc lappend_describe {share_mode len num iters} { + return "lappend L\[$len\] $share_mode $num elems $iters times" + } + proc lappend_perf {} { + variable Lengths + + print_separator lappend + + ListPerf create perf -setup {set L [lrepeat [expr {$len/4}] x]} + + # Shared + foreach len $Lengths { + comment Append to a shared list variable multiple times + perf measure [lappend_describe shared [expr {$len/2}] 1 $len] { + set L2 $L; # Make shared + lappend L x + } [list len $len] -reps $len -overhead {set L2 $L} + } + + # Unshared + foreach len $Lengths { + comment Append to a unshared list variable multiple times + perf measure [lappend_describe unshared [expr {$len/2}] 1 $len] { + lappend L x + } [list len $len] -reps $len + } + + # Span + foreach len $Lengths { + comment Append to a unshared-span list variable multiple times + perf measure [lappend_describe unshared-span [expr {$len/2}] 1 $len] { + lappend Lspan x + } [list len $len] -reps $len + } + + perf destroy + } + + proc lpop_describe {share_mode len at reps} { + return "lpop L\[$len\] $share_mode at $at $reps times" + } + proc lpop_perf {} { + variable Lengths + + print_separator lpop + + ListPerf create perf + + # Shared + perf option -overhead {set L2 $L} + foreach len $Lengths { + set reps [expr {($len >= 1000 ? ($len/2) : $len) - 2}] + foreach idx {0 1 end-1 end} { + comment Pop element at position $idx from a shared list variable + perf measure [lpop_describe shared $len $idx $reps] { + set L2 $L + lpop L $idx + } [list len $len idx $idx] -reps $reps + } + } + + # Unshared + perf option -overhead {} + foreach len $Lengths { + set reps [expr {($len >= 1000 ? ($len/2) : $len) - 2}] + foreach idx {0 1 end-1 end} { + comment Pop element at position $idx from an unshared list variable + perf measure [lpop_describe unshared $len $idx $reps] { + lpop L $idx + } [list len $len idx $idx] -reps $reps + } + } + + perf destroy + + # Nested + ListPerf create perf -setup { + set L [lrepeat $len [list a b]] + } + + # Shared, nested index + perf option -overhead {set L2 $L; set L L2} + foreach len $Lengths { + set reps [expr {($len >= 1000 ? ($len/2) : $len) - 2}] + foreach idx {0 1 end-1 end} { + perf measure [lpop_describe shared $len "{$idx 0}" $reps] { + set L2 $L + lpop L $idx 0 + set L $L2 + } [list len $len idx $idx] -reps $reps + } + } + + # TODO - Nested Unshared + # Not sure how to measure performance. 
When unshared there is no copy + # so deleting a nested index repeatedly is not feasible + + perf destroy + } + + proc lassign_describe {share_mode len num reps} { + return "lassign L\[$len\] $share_mode $num elems $reps times" + } + proc lassign_perf {} { + variable Lengths + + print_separator lassign + + ListPerf create perf + + foreach share_mode {shared unshared} { + foreach len $Lengths { + if {$share_mode eq "shared"} { + set reps 1000 + comment Reflexive lassign - shared + perf measure [lassign_describe shared $len 1 $reps] { + set L2 $L + set L2 [lassign $L2 v] + } [list len $len] -overhead {set L2 $L} -reps $reps + + comment Reflexive lassign - shared, multiple + perf measure [lassign_describe shared $len 5 $reps] { + set L2 $L + set L2 [lassign $L2 a b c d e] + } [list len $len] -overhead {set L2 $L} -reps $reps + } else { + set reps [expr {($len >= 1000 ? ($len/2) : $len) - 2}] + comment Reflexive lassign - unshared + perf measure [lassign_describe unshared $len 1 $reps] { + set L [lassign $L v] + } [list len $len] -reps $reps + } + } + } + perf destroy + } + + proc lrepeat_describe {len num} { + return "lrepeat L\[$len\] $num elems at a time" + } + proc lrepeat_perf {} { + variable Lengths + + print_separator lrepeat + + ListPerf create perf -reps 100000 + foreach len $Lengths { + comment Generate a list from a single repeated element + perf measure [lrepeat_describe $len 1] { + lrepeat $len a + } [list len $len] + + comment Generate a list from multiple repeated elements + perf measure [lrepeat_describe $len 5] { + lrepeat $len a b c d e + } [list len $len] + } + + perf destroy + } + + proc lreverse_describe {share_mode len} { + return "lreverse L\[$len\] $share_mode" + } + proc lreverse_perf {} { + variable Lengths + + print_separator lreverse + + ListPerf create perf -reps 10000 + + foreach share_mode {shared unshared} { + foreach len $Lengths { + if {$share_mode eq "shared"} { + comment Reverse a shared list + perf measure [lreverse_describe shared $len] { + lreverse $L + } [list len $len] + + if {$len > 100} { + comment Reverse a shared-span list + perf measure [lreverse_describe shared-span $len] { + lreverse $Lspan + } [list len $len] + } + } else { + comment Reverse a unshared list + perf measure [lreverse_describe unshared $len] { + set L [lreverse $L[set L {}]] + } [list len $len] -overhead {set L $L; set L {}} + + if {$len >= 100} { + comment Reverse a unshared-span list + perf measure [lreverse_describe unshared-span $len] { + set Lspan [lreverse $Lspan[set Lspan {}]] + } [list len $len] -overhead {set Lspan $Lspan; set Lspan {}} + } + } + } + } + + perf destroy + } + + proc llength_describe {share_mode len} { + return "llength L\[$len\] $share_mode" + } + proc llength_perf {} { + variable Lengths + + print_separator llength + + ListPerf create perf -reps 100000 + + foreach len $Lengths { + comment Length of a list + perf measure [llength_describe shared $len] { + llength $L + } [list len $len] + + if {$len >= 100} { + comment Length of a span list + perf measure [llength_describe shared-span $len] { + llength $Lspan + } [list len $len] + } + } + + perf destroy + } + + proc lindex_describe {share_mode len at} { + return "lindex L\[$len\] $share_mode at $at" + } + proc lindex_perf {} { + variable Lengths + + print_separator lindex + + ListPerf create perf -reps 100000 + + foreach len $Lengths { + comment Index into a list + set idx [expr {$len/2}] + perf measure [lindex_describe shared $len $idx] { + lindex $L $idx + } [list len $len idx $idx] + + if {$len >= 100} { 
+ comment Index into a span list + perf measure [lindex_describe shared-span $len $idx] { + lindex $Lspan $idx + } [list len $len idx $idx] + } + } + + perf destroy + } + + proc lrange_describe {share_mode len range} { + return "lrange L\[$len\] $share_mode range $range" + } + + proc lrange_perf {} { + variable Lengths + + print_separator lrange + + ListPerf create perf -time 1000 -reps 100000 + + foreach share_mode {shared unshared} { + foreach len $Lengths { + set eighth [expr {$len/8}] + set ranges [list \ + [list 0 0] [list 0 end-1] \ + [list $eighth [expr {3*$eighth}]] \ + [list $eighth [expr {7*$eighth}]] \ + [list 1 end] [list end-1 end] \ + ] + foreach range $ranges { + comment Range $range in $share_mode list of length $len + if {$share_mode eq "shared"} { + perf measure [lrange_describe shared $len $range] \ + "lrange \$L $range" [list len $len range $range] + } else { + perf measure [lrange_describe unshared $len $range] \ + "lrange \[lrepeat \$len\ a] $range" \ + [list len $len range $range] -overhead {lrepeat $len a} + } + } + + if {$len >= 100} { + foreach range $ranges { + comment Range $range in ${share_mode}-span list of length $len + if {$share_mode eq "shared"} { + perf measure [lrange_describe shared-span $len $range] \ + "lrange \$Lspan {*}$range" [list len $len range $range] + } else { + perf measure [lrange_describe unshared-span $len $range] \ + "lrange \[perf::list::spanned_list \$len\] $range" \ + [list len $len range $range] -overhead {perf::list::spanned_list $len} + } + } + } + } + } + + perf destroy + } + + proc lset_describe {share_mode len at} { + return "lset L\[$len\] $share_mode at $at" + } + proc lset_perf {} { + variable Lengths + + print_separator lset + + ListPerf create perf -reps 10000 + + # Shared + foreach share_mode {shared unshared} { + foreach len $Lengths { + foreach idx {0 1 end-1 end end+1} { + comment lset at position $idx in a $share_mode list variable + if {$share_mode eq "shared"} { + perf measure [lset_describe shared $len $idx] { + set L2 $L + lset L $idx X + } [list len $len idx $idx] -overhead {set L2 $L} + } else { + perf measure [lset_describe unshared $len $idx] { + lset L $idx X + } [list len $len idx $idx] + } + } + } + } + + perf destroy + + # Nested + ListPerf create perf -setup { + set L [lrepeat $len [list a b]] + } + + foreach share_mode {shared unshared} { + foreach len $Lengths { + foreach idx {0 1 end-1 end} { + comment lset at position $idx in a $share_mode list variable + if {$share_mode eq "shared"} { + perf measure [lset_describe shared $len "{$idx 0}"] { + set L2 $L + lset L $idx 0 X + } [list len $len idx $idx] -overhead {set L2 $L} + } else { + perf measure [lset_describe unshared $len "{$idx 0}"] { + lset L $idx 0 {X Y} + } [list len $len idx $idx] + } + } + } + } + + perf destroy + } + + proc lremove_describe {share_mode len at nremoved} { + return "lremove L\[$len\] $share_mode $nremoved elements at $at" + } + proc lremove_perf {} { + variable Lengths + + print_separator lremove + + ListPerf create perf -reps 10000 + + foreach share_mode {shared unshared} { + foreach len $Lengths { + foreach idx [list 0 1 [expr {$len/2}] end-1 end] { + if {$share_mode eq "shared"} { + comment Remove one element from shared list + perf measure [lremove_describe shared $len $idx 1] \ + {lremove $L $idx} [list len $len idx $idx] + + } else { + comment Remove one element from unshared list + set reps [expr {$len >= 1000 ? 
($len/8) : ($len-2)}] + perf measure [lremove_describe unshared $len $idx 1] \ + {set L [lremove $L[set L {}] $idx]} [list len $len idx $idx] \ + -overhead {set L $L; set L {}} -reps $reps + } + } + if {$share_mode eq "shared"} { + comment Remove multiple elements from shared list + perf measure [lremove_describe shared $len [list 0 1 [expr {$len/2}] end-1 end] 5] { + lremove $L 0 1 [expr {$len/2}] end-1 end + } [list len $len] + } + } + # Span + foreach len $Lengths { + foreach idx [list 0 1 [expr {$len/2}] end-1 end] { + if {$share_mode eq "shared"} { + comment Remove one element from shared-span list + perf measure [lremove_describe shared-span $len $idx 1] \ + {lremove $Lspan $idx} [list len $len idx $idx] + } else { + comment Remove one element from unshared-span list + set reps [expr {$len >= 1000 ? ($len/8) : ($len-2)}] + perf measure [lremove_describe unshared-span $len $idx 1] \ + {set Lspan [lremove $Lspan[set Lspan {}] $idx]} [list len $len idx $idx] \ + -overhead {set Lspan $Lspan; set Lspan {}} -reps $reps + } + } + if {$share_mode eq "shared"} { + comment Remove multiple elements from shared-span list + perf measure [lremove_describe shared-span $len [list 0 1 [expr {$len/2}] end-1 end] 5] { + lremove $Lspan 0 1 [expr {$len/2}] end-1 end + } [list len $len] + } + } + } + + perf destroy + } + + proc lreplace_describe {share_mode len first last ninsert {times 1}} { + if {$last < $first} { + return "lreplace L\[$len\] $share_mode 0 ($first:$last) elems at $first with $ninsert elems $times times." + } + return "lreplace L\[$len\] $share_mode $first:$last with $ninsert elems $times times." + } + proc lreplace_perf {} { + variable Lengths + + print_separator lreplace + + set default_reps 10000 + ListPerf create perf -reps $default_reps + + foreach share_mode {shared unshared} { + # Insert only + foreach len $Lengths { + set reps [expr {$len <= 100 ? ($len-2) : ($len/8)}] + foreach first [list 0 1 [expr {$len/2}] end-1 end] { + if {$share_mode eq "shared"} { + comment Insert one to shared list + perf measure [lreplace_describe shared $len $first -1 1] { + lreplace $L $first -1 x + } [list len $len first $first] + + comment Insert multiple to shared list + perf measure [lreplace_describe shared $len $first -1 10] { + lreplace $L $first -1 X X X X X X X X X X + } [list len $len first $first] + + comment Insert one to shared list repeatedly + perf measure [lreplace_describe shared $len $first -1 1 $reps] { + set L [lreplace $L $first -1 x] + } [list len $len first $first] -reps $reps + + comment Insert multiple to shared list repeatedly + perf measure [lreplace_describe shared $len $first -1 10 $reps] { + set L [lreplace $L $first -1 X X X X X X X X X X] + } [list len $len first $first] -reps $reps + + } else { + comment Insert one to unshared list + perf measure [lreplace_describe unshared $len $first -1 1] { + set L [lreplace $L[set L {}] $first -1 x] + } [list len $len first $first] -overhead { + set L $L; set L {} + } -reps $reps + + comment Insert multiple to unshared list + perf measure [lreplace_describe unshared $len $first -1 10] { + set L [lreplace $L[set L {}] $first -1 X X X X X X X X X X] + } [list len $len first $first] -overhead { + set L $L; set L {} + } -reps $reps + } + } + } + + # Delete only + foreach len $Lengths { + set reps [expr {$len <= 100 ? 
($len-2) : ($len/8)}] + foreach first [list 0 1 [expr {$len/2}] end-1 end] { + if {$share_mode eq "shared"} { + comment Delete one from shared list + perf measure [lreplace_describe shared $len $first $first 0] { + lreplace $L $first $first + } [list len $len first $first] + } else { + comment Delete one from unshared list + perf measure [lreplace_describe unshared $len $first $first 0] { + set L [lreplace $L[set L {}] $first $first x] + } [list len $len first $first] -overhead { + set L $L; set L {} + } -reps $reps + } + } + } + + # Insert + delete + foreach len $Lengths { + set reps [expr {$len <= 100 ? ($len-2) : ($len/8)}] + foreach range [list {0 1} {1 2} {end-2 end-1} {end-1 end}] { + lassign $range first last + if {$share_mode eq "shared"} { + comment Insertions more than deletions from shared list + perf measure [lreplace_describe shared $len $first $last 3] { + lreplace $L $first $last X Y Z + } [list len $len first $first last $last] + + comment Insertions same as deletions from shared list + perf measure [lreplace_describe shared $len $first $last 2] { + lreplace $L $first $last X Y + } [list len $len first $first last $last] + + comment Insertions fewer than deletions from shared list + perf measure [lreplace_describe shared $len $first $last 1] { + lreplace $L $first $last X + } [list len $len first $first last $last] + } else { + comment Insertions more than deletions from unshared list + perf measure [lreplace_describe unshared $len $first $last 3] { + set L [lreplace $L[set L {}] $first $last X Y Z] + } [list len $len first $first last $last] -overhead { + set L $L; set L {} + } -reps $reps + + comment Insertions same as deletions from unshared list + perf measure [lreplace_describe unshared $len $first $last 2] { + set L [lreplace $L[set L {}] $first $last X Y ] + } [list len $len first $first last $last] -overhead { + set L $L; set L {} + } -reps $reps + + comment Insertions fewer than deletions from unshared list + perf measure [lreplace_describe unshared $len $first $last 1] { + set L [lreplace $L[set L {}] $first $last X] + } [list len $len first $first last $last] -overhead { + set L $L; set L {} + } -reps $reps + } + } + } + # Spanned Insert + delete + foreach len $Lengths { + set reps [expr {$len <= 100 ? 
($len-2) : ($len/8)}] + foreach range [list {0 1} {1 2} {end-2 end-1} {end-1 end}] { + lassign $range first last + if {$share_mode eq "shared"} { + comment Insertions more than deletions from shared-span list + perf measure [lreplace_describe shared-span $len $first $last 3] { + lreplace $Lspan $first $last X Y Z + } [list len $len first $first last $last] + + comment Insertions same as deletions from shared-span list + perf measure [lreplace_describe shared-span $len $first $last 2] { + lreplace $Lspan $first $last X Y + } [list len $len first $first last $last] + + comment Insertions fewer than deletions from shared-span list + perf measure [lreplace_describe shared-span $len $first $last 1] { + lreplace $Lspan $first $last X + } [list len $len first $first last $last] + } else { + comment Insertions more than deletions from unshared-span list + perf measure [lreplace_describe unshared-span $len $first $last 3] { + set Lspan [lreplace $Lspan[set Lspan {}] $first $last X Y Z] + } [list len $len first $first last $last] -overhead { + set Lspan $Lspan; set Lspan {} + } -reps $reps + + comment Insertions same as deletions from unshared-span list + perf measure [lreplace_describe unshared-span $len $first $last 2] { + set Lspan [lreplace $Lspan[set Lspan {}] $first $last X Y ] + } [list len $len first $first last $last] -overhead { + set Lspan $Lspan; set Lspan {} + } -reps $reps + + comment Insertions fewer than deletions from unshared-span list + perf measure [lreplace_describe unshared-span $len $first $last 1] { + set Lspan [lreplace $Lspan[set Lspan {}] $first $last X] + } [list len $len first $first last $last] -overhead { + set Lspan $Lspan; set Lspan {} + } -reps $reps + } + } + } + } + + perf destroy + } + + proc split_describe {len} { + return "split L\[$len\]" + } + proc split_perf {} { + variable Lengths + print_separator split + + ListPerf create perf -setup {set S [string repeat "x " $len]} + foreach len $Lengths { + comment Split a string + perf measure [split_describe $len] { + split $S " " + } [list len $len] + } + } + + proc join_describe {share_mode len} { + return "join L\[$len\] $share_mode" + } + proc join_perf {} { + variable Lengths + + print_separator join + + ListPerf create perf -reps 10000 + foreach len $Lengths { + comment Join a list + perf measure [join_describe shared $len] { + join $L + } [list len $len] + } + foreach len $Lengths { + comment Join a spanned list + perf measure [join_describe shared-span $len] { + join $Lspan + } [list len $len] + } + perf destroy + } + + proc lsearch_describe {share_mode len} { + return "lsearch L\[$len\] $share_mode" + } + proc lsearch_perf {} { + variable Lengths + + print_separator lsearch + + ListPerf create perf -reps 100000 + foreach len $Lengths { + comment Search a list + perf measure [lsearch_describe shared $len] { + lsearch $L needle + } [list len $len] + } + foreach len $Lengths { + comment Search a spanned list + perf measure [lsearch_describe shared-span $len] { + lsearch $Lspan needle + } [list len $len] + } + perf destroy + } + + proc foreach_describe {share_mode len} { + return "foreach L\[$len\] $share_mode" + } + proc foreach_perf {} { + variable Lengths + + print_separator foreach + + ListPerf create perf -reps 10000 + foreach len $Lengths { + comment Iterate through a list + perf measure [foreach_describe shared $len] { + foreach e $L {} + } [list len $len] + } + foreach len $Lengths { + comment Iterate a spanned list + perf measure [foreach_describe shared-span $len] { + foreach e $Lspan {} + } [list len 
$len] + } + perf destroy + } + + proc lmap_describe {share_mode len} { + return "lmap L\[$len\] $share_mode" + } + proc lmap_perf {} { + variable Lengths + + print_separator lmap + + ListPerf create perf -reps 10000 + foreach len $Lengths { + comment Iterate through a list + perf measure [lmap_describe shared $len] { + lmap e $L {} + } [list len $len] + } + foreach len $Lengths { + comment Iterate a spanned list + perf measure [lmap_describe shared-span $len] { + lmap e $Lspan {} + } [list len $len] + } + perf destroy + } + + proc get_sort_sample {{spanned 0}} { + variable perfScript + variable sortSampleText + + if {![info exists sortSampleText]} { + set fd [open $perfScript] + set sortSampleText [split [read $fd] ""] + close $fd + } + set sortSampleText [string range $sortSampleText 0 9999] + + # NOTE: do NOT cache list result in a variable as we need it unshared + if {$spanned} { + return [lrange [split $sortSampleText ""] 1 end-1] + } else { + return [split $sortSampleText ""] + } + } + proc lsort_describe {share_mode len} { + return "lsort L\[$len] $share_mode" + } + proc lsort_perf {} { + print_separator lsort + + ListPerf create perf -setup {} + + comment Sort a shared list + perf measure [lsort_describe shared [llength [perf::list::get_sort_sample]]] { + lsort $L + } {} -setup {set L [perf::list::get_sort_sample]} + + comment Sort a shared-span list + perf measure [lsort_describe shared-span [llength [perf::list::get_sort_sample 1]]] { + lsort $L + } {} -setup {set L [perf::list::get_sort_sample 1]} + + comment Sort an unshared list + perf measure [lsort_describe unshared [llength [perf::list::get_sort_sample]]] { + lsort [perf::list::get_sort_sample] + } {} -overhead {perf::list::get_sort_sample} + + comment Sort an unshared-span list + perf measure [lsort_describe unshared-span [llength [perf::list::get_sort_sample 1]]] { + lsort [perf::list::get_sort_sample 1] + } {} -overhead {perf::list::get_sort_sample 1} + + perf destroy + } + + proc concat_describe {canonicality len elemlen} { + return "concat L\[$len\] $canonicality with elements of length $elemlen" + } + proc concat_perf {} { + variable Lengths + + print_separator concat + + ListPerf create perf -reps 100000 + + foreach len $Lengths { + foreach elemlen {1 100} { + comment Pure lists (no string representation) + perf measure [concat_describe "pure lists" $len $elemlen] { + concat $L $L + } [list len $len elemlen $elemlen] -setup { + set L [lrepeat $len [string repeat a $elemlen]] + } + + comment Canonical lists (with string representation) + perf measure [concat_describe "canonical lists" $len $elemlen] { + concat $L $L + } [list len $len elemlen $elemlen] -setup { + set L [lrepeat $len [string repeat a $elemlen]] + append x x $L; # Generate string while keeping internal rep list + unset x + } + + comment Non-canonical lists + perf measure [concat_describe "non-canonical lists" $len $elemlen] { + concat $L $L + } [list len $len elemlen $elemlen] -setup { + set L [string repeat "[string repeat a $elemlen] " $len] + llength $L + } + } + } + + # Span version + foreach len $Lengths { + foreach elemlen {1 100} { + comment Pure span lists (no string representation) + perf measure [concat_describe "pure spanned lists" $len $elemlen] { + concat $L $L + } [list len $len elemlen $elemlen] -setup { + set L [lrange [lrepeat [expr {$len+2}] [string repeat a $elemlen]] 1 end-1] + } + + comment Canonical span lists (with string representation) + perf measure [concat_describe "canonical spanned lists" $len $elemlen] { + concat $L $L + } 
[list len $len elemlen $elemlen] -setup { + set L [lrange [lrepeat [expr {$len+2}] [string repeat a $elemlen]] 1 end-1] + append x x $L; # Generate string while keeping internal rep list + unset x + } + } + } + + perf destroy + } + + proc test {} { + variable RunTimes + variable Options + + set selections [perf::list::setup $::argv] + if {[llength $selections] == 0} { + set commands [info commands ::perf::list::*_perf] + } else { + set commands [lmap sel $selections { + if {$sel eq "help"} { + print_usage + continue + } + set cmd ::perf::list::${sel}_perf + if {$cmd ni [info commands ::perf::list::*_perf]} { + puts stderr "Error: command $sel is not known or supported. Skipping." + continue + } + set cmd + }] + } + comment Setting up + timerate -calibrate {} + if {[info exists Options(--label)]} { + print "L $Options(--label)" + } + print "V [info patchlevel]" + print "E [info nameofexecutable]" + if {[info exists Options(--description)]} { + print "D $Options(--description)" + } + set twapi_keys {-privatebytes -workingset -workingsetpeak} + if {[info commands ::twapi::get_process_memory_info] ne ""} { + set twapi_vm_pre [::twapi::get_process_memory_info] + } + foreach cmd [lsort -dictionary $commands] { + set RunTimes(command) 0.0 + $cmd + set RunTimes(total) [expr {$RunTimes(total)+$RunTimes(command)}] + print "P [format_timings $RunTimes(command) 1] [string range $cmd 14 end-5] total run time" + } + # Print total runtime in same format as timerate output + print "P [format_timings $RunTimes(total) 1] Total run time" + + if {[info exists twapi_vm_pre]} { + set twapi_vm_post [::twapi::get_process_memory_info] + set MB 1048576.0 + foreach key $twapi_keys { + set pre [expr {[dict get $twapi_vm_pre $key]/$MB}] + set post [expr {[dict get $twapi_vm_post $key]/$MB}] + print "P [format_timings $pre 1] Memory (MB) $key pre-test" + print "P [format_timings $post 1] Memory (MB) $key post-test" + print "P [format_timings [expr {$post-$pre}] 1] Memory (MB) delta $key" + } + } + if {[info commands memory] ne ""} { + foreach line [split [memory info] \n] { + if {$line eq ""} continue + set line [split $line] + set val [expr {[lindex $line end]/1000.0}] + set line [string trim [join [lrange $line 0 end-1]]] + print "P [format_timings $val 1] memdbg $line (in thousands)" + } + print "# Allocations not freed on exit written to the lost-memory.tmp file." + print "# These will have to be manually compared." + # env TCL_FINALIZE_ON_EXIT must be set to 1 for this. 
+ # DO NOT SET HERE - set ::env(TCL_FINALIZE_ON_EXIT) 1 + # Must be set in environment before starting tclsh else bogus results + if {[info exists Options(--label)]} { + set dump_file list-memory-$Options(--label).memdmp + } else { + set dump_file list-memory-[pid].memdmp + } + memory onexit $dump_file + } + } +} + + +if {[info exists ::argv0] && [file tail $::argv0] eq [file tail [info script]]} { + ::perf::list::test +} diff --git a/tests/apply.test b/tests/apply.test index a5f1f8f..24b27cc 100644 --- a/tests/apply.test +++ b/tests/apply.test @@ -261,7 +261,7 @@ test apply-9.1 {leaking internal rep} -setup { lindex $lines 3 3 } set lam [list {} {set a 1}] -} -constraints memory -body { +} -constraints {memory} -body { set end [getbytes] for {set i 0} {$i < 5} {incr i} { ::apply [lrange $lam 0 end] diff --git a/tests/env.test b/tests/env.test index 9eacd5d..89e2d04 100644 --- a/tests/env.test +++ b/tests/env.test @@ -107,6 +107,7 @@ variable keep { CommonProgramFiles CommonProgramFiles(x86) ProgramFiles ProgramFiles(x86) CommonProgramW6432 ProgramW6432 WINECONFIGDIR WINEDATADIR WINEDLLDIR0 WINEHOMEDIR PROCESSOR_ARCHITECTURE + USERPROFILE } variable printenvScript [makeFile [string map [list @keep@ [list $keep]] { @@ -411,7 +412,7 @@ test env-7.3 { return [info exists ::env(test7_3)] }} } -cleanup cleanup1 -result 1 - + test env-8.0 { memory usage - valgrind does not report reachable memory } -body { @@ -421,6 +422,43 @@ test env-8.0 { } -result {i'm with dummy} +test env-9.0 { + Initialization of HOME from HOMEDRIVE and HOMEPATH +} -constraints win -setup { + setup1 + unset -nocomplain ::env(HOME) + set ::env(HOMEDRIVE) X: + set ::env(HOMEPATH) \\home\\path +} -cleanup { + cleanup1 +} -body { + set pipe [open |[list [interpreter]] r+] + puts $pipe {puts $::env(HOME); flush stdout; exit} + flush $pipe + set result [gets $pipe] + close $pipe + set result +} -result {X:\home\path} + +test env-9.1 { + Initialization of HOME from USERPROFILE +} -constraints win -setup { + setup1 + unset -nocomplain ::env(HOME) + unset -nocomplain ::env(HOMEDRIVE) + unset -nocomplain ::env(HOMEPATH) +} -cleanup { + cleanup1 +} -body { + set pipe [open |[list [interpreter]] r+] + puts $pipe {puts $::env(HOME); flush stdout; exit} + flush $pipe + set result [gets $pipe] + close $pipe + set result +} -result $::env(USERPROFILE) + + # cleanup rename getenv {} diff --git a/tests/listRep.test b/tests/listRep.test new file mode 100644 index 0000000..7883a21 --- /dev/null +++ b/tests/listRep.test @@ -0,0 +1,2538 @@ +# This file contains tests that specifically exercise the internal representation +# of a list. +# +# Copyright © 2022 Ashok P. Nadkarni +# +# See the file "license.terms" for information on usage and redistribution +# of this file, and for a DISCLAIMER OF ALL WARRANTIES. + +# Unlike the other files related to list commands which for the most part do +# black box testing focusing on functionality, this file does more of white box +# testing to exercise code paths that implement different list representations +# (with spans, leading free space etc., shared/unshared etc.) In addition to +# functional correctness, the tests also check for the expected internal +# representation as that pertains to performance heuristics. 
Generally speaking, +# combinations of the following need to be tested, +# - free space in front, back, neither, both of list representation +# - shared Tcl_Objs +# - shared internal reps (independent of shared Tcl_Objs) +# - byte-compiled vs non-compiled +# +# Being white box tests, they are sensitive to changes to further optimizations +# and changes in heuristics. That cannot be helped. + +if {"::tcltest" ni [namespace children]} { + package require tcltest 2.5 + namespace import -force ::tcltest::* +} + +::tcltest::loadTestedCommands +catch [list package require -exact tcl::test [info patchlevel]] + +testConstraint testlistrep [llength [info commands testlistrep]] + +proc describe {l args} {dict get [testlistrep describe $l] {*}$args} + +proc irange {first last} { + set l {} + while {$first <= $last} { + lappend l $first + incr first + } + return $l +} +proc leadSpace {l} { + # Returns the leading space in a list store + return [dict get [describe $l] store firstUsed] +} +proc tailSpace {l} { + # Returns the trailing space in a list store + array set rep [describe $l] + dict with rep(store) { + return [expr {$numAllocated - ($firstUsed + $numUsed)}] + } +} +proc allocated {l} { + # Returns the allocated space in a list store + return [dict get [describe $l] store numAllocated] +} +proc repStoreRefCount {l} { + # Returns the ref count for the list store + return [dict get [describe $l] store refCount] +} +proc validate {l} { + # Panics if internal listrep structures are not valid + testlistrep validate $l +} +proc leadSpaceMore {l} { + set leadSpace [leadSpace $l] + expr {$leadSpace > 0 && $leadSpace >= 2*[tailSpace $l]} +} +proc tailSpaceMore {l} { + set tailSpace [tailSpace $l] + expr {$tailSpace > 0 && $tailSpace >= 2*[leadSpace $l]} +} +proc spaceEqual {l} { + # 1 if lead and tail space shared (diff of 1 at most) and more than 0 + set leadSpace [leadSpace $l] + set tailSpace [tailSpace $l] + if {$leadSpace == 0 && $tailSpace == 0} { + # At least one must be positive + return 0 + } + set diff [expr {$leadSpace - $tailSpace}] + return [expr {$diff >= -1 && $diff <= 1}] +} +proc storeAddress {l} { + return [describe $l store memoryAddress] +} +proc sameStore {l1 l2} { + expr {[storeAddress $l1] == [storeAddress $l2]} +} +proc hasSpan {l args} { + # Returns 1 if list has a span. If args are specified, they are checked with + # span values (start and length) + array set rep [describe $l] + if {![info exists rep(span)]} { + return 0 + } + if {[llength $args] == 0} { + return 1; # No need to check values + } + lassign $args start len + if {[dict get $rep(span) spanStart] == $start && + [dict get $rep(span) spanLength] == $len} { + return 1 + } + return 0 +} +proc checkListrep {l listLen numAllocated leadSpace tailSpace {refCount 0}} { + # Checks if the internal representation of $l match + # passed arguments. Return "" if yes, else error messages. 
+ array set rep [testlistrep describe $l] + + set rep(leadSpace) [dict get $rep(store) firstUsed] + set rep(numAllocated) [dict get $rep(store) numAllocated] + set rep(tailSpace) [expr { + $rep(numAllocated) - ($rep(leadSpace) + [dict get $rep(store) numUsed]) + }] + set rep(refCount) [dict get $rep(store) refCount] + + if {[info exists rep(span)]} { + set rep(listLen) [dict get $rep(span) spanLength] + } else { + set rep(listLen) [dict get $rep(store) numUsed] + } + + set errors [list] + foreach arg {listLen numAllocated leadSpace tailSpace} { + if {$rep($arg) != [set $arg]} { + lappend errors "$arg in list representation ($rep($arg)) is not expected value ([set $arg])." + } + } + # Check refCount only if caller has specified it as non-0 + if {$refCount && $refCount != $rep(refCount)} { + lappend errors "refCount in list representation ($rep(refCount)) is not expected value ($refCount)." + } + return $errors +} + +proc assertListrep {l listLen numAllocated leadSpace tailSpace {refCount 0}} { + # Like check_listrep but raises error + set errors [checkListrep $l $listLen $numAllocated $leadSpace $tailSpace $refCount] + if {[llength $errors]} { + error [join $errors \n] + } + return +} + +# The default length should be large enough that doubling the allocation will +# clearly distinguish free space allocation difference between front and back. +# (difference in the two should at least be 2 else we cannot tell if front +# or back was favored appropriately) +proc freeSpaceNone {{len 8}} {return [testlistrep new $len 0 0]} +proc freeSpaceLead {{len 8} {lead 3}} {return [testlistrep new $len $lead 0]} +proc freeSpaceTail {{len 8} {tail 3}} {return [testlistrep new $len 0 $tail]} +proc freeSpaceBoth {{len 8} {lead 3} {tail 3}} { + return [testlistrep new $len $lead $tail] +} +proc zombieSample {{len 1000} {leadzombies 100} {tailzombies 100}} { + # returns an unshared listrep with zombies in front and back + + # don't combine freespacenone and lrange else zombies are freed + set l [freeSpaceNone [expr {$len+$leadzombies+$tailzombies}]] + return [lrange $l $leadzombies [expr {$leadzombies+$len-1}]] +} + +# Just ensure above stubs return what's expected +if {[testConstraint testlistrep]} { + assertListrep [freeSpaceNone] 8 8 0 0 1 + assertListrep [freeSpaceLead] 8 11 3 0 1 + assertListrep [freeSpaceTail] 8 11 0 3 1 + assertListrep [freeSpaceBoth] 8 14 3 3 1 + assertListrep [zombieSample] 1000 1200 0 0 1 + if {![hasSpan [zombieSample]] || [dict get [testlistrep describe [zombieSample]] span spanStart] == 0} { + error "zombieSample span missing or span start is at 0." 
+ } +} + +# Define some variables for some indices because the Tcl compiler will do some +# operations completely in byte code if indices are literals +set zero 0 +set one 1 +set two 2 +set four 4 +set end end + +# +# Test sets: +# 1.* - unshared internal rep, no spans, with no free space +# 2.* - shared internal rep, no spans, with no free space +# 3.* - unshared internal rep, spanned +# 4.* - shared internal rep, spanned +# 5.* - shared Tcl_Obj +# 6.* - lists with zombie Tcl_Obj's + +# +# listrep-1.* tests all operate on unshared listreps with no free space + +test listrep-1.1 { + Inserts in front of unshared list with no free space should reallocate with + equal free space at front and back -- linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceNone] $zero 99] + validate $l + list $l [spaceEqual $l] +} -result [list {99 0 1 2 3 4 5 6 7} 1] + +test listrep-1.1.1 { + Inserts in front of unshared list with no free space should reallocate with + equal free space at front and back -- lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone] $zero -1 99] + validate $l + list $l [spaceEqual $l] +} -result [list {99 0 1 2 3 4 5 6 7} 1] + +test listrep-1.2 { + Inserts at back of unshared list with no free space should allocate all + space at back -- linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceNone] $end 99] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 6 7 99} 0 9] + +test listrep-1.2.1 { + Inserts at back of unshared list with no free space should allocate all + space at back -- lset version +} -constraints testlistrep -body { + set l [freeSpaceNone] + lset l $end+1 99 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 6 7 99} 0 9] + +test listrep-1.2.2 { + Inserts at back of unshared list with no free space should allocate all + space at back -- lappend version +} -constraints testlistrep -body { + set l [freeSpaceNone] + lappend l 99 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 6 7 99} 0 9] + +test listrep-1.3 { + Inserts in middle of unshared list with no free space should reallocate with + equal free space at front and back - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceNone] $four 99] + validate $l + list $l [spaceEqual $l] +} -result [list {0 1 2 3 99 4 5 6 7} 1] + +test listrep-1.3.1 { + Inserts in middle of unshared list with no free space should reallocate with + equal free space at front and back - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone] $four $four-1 99] + validate $l + list $l [spaceEqual $l] +} -result [list {0 1 2 3 99 4 5 6 7} 1] + +test listrep-1.4 { + Deletes from front of small unshared list with no free space should + just shift up leaving room at back - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone] $zero $zero] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {1 2 3 4 5 6 7} 0 1] + +test listrep-1.4.1 { + Deletes from front of small unshared list with no free space should + just shift up leaving room at back - lassign version +} -constraints testlistrep -body { + set l [lassign [freeSpaceNone] e] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] +} -result [list 0 {1 2 3 4 5 6 7} 0 1] + +test listrep-1.4.2 { + Deletes from front of small unshared list with no free space should + just shift up leaving room at back - lpop version 
+} -constraints testlistrep -body { + set l [freeSpaceNone] + set e [lpop l $zero] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] +} -result [list 0 {1 2 3 4 5 6 7} 0 1] + +test listrep-1.4.3 { + Deletes from front of small unshared list with no free space should + just shift up leaving room at back - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceNone] $one $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {1 2 3 4 5 6 7} 0 1] + +test listrep-1.4.4 { + Deletes from front of small unshared list with no free space should + just shift up leaving room at back - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceNone] $zero] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {1 2 3 4 5 6 7} 0 1] + +test listrep-1.5 { + Deletes from front of large unshared list with no free space should + create a span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone 1000] $zero $one] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 2 998] +} -result [list [irange 2 999] 2 0 1] + +test listrep-1.5.1 { + Deletes from front of large unshared list with no free space should + create a span - lassign version +} -constraints testlistrep -body { + set l [lassign [freeSpaceNone 1000] e] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 1 999] +} -result [list 0 [irange 1 999] 1 0 1] + +test listrep-1.5.2 { + Deletes from front of large unshared list with no free space should + create a span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceNone 1000] $two end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 2 998] +} -result [list [irange 2 999] 2 0 1] + +test listrep-1.5.3 { + Deletes from front of large unshared list with no free space should + create a span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceNone 1000] $zero] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 1 999] +} -result [list [irange 1 999] 1 0 1] + +test listrep-1.5.4 { + Deletes from front of large unshared list with no free space should + create a span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone 1000] + set e [lpop l 0] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 1 999] +} -result [list 0 [irange 1 999] 1 0 1] + +test listrep-1.6 { + Deletes closer to front of large list should move (smaller) front segment + -- lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone 1000] $four $four] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 1 999] +} -result [list [concat [irange 0 3] [irange 5 999]] 1 0 1] + +test listrep-1.6.1 { + Deletes closer to front of large list should move (smaller) front segment + -- lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone 1000] + set e [lpop l $four] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 1 999] +} -result [list 4 [concat [irange 0 3] [irange 5 999]] 1 0 1] + +test listrep-1.7 { + Deletes closer to back of large list should move (smaller) back segment + and will not need a span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone 1000] end-$four end-$four] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list [concat [irange 0 994] [irange 996 999]] 0 1 0] + +test listrep-1.7.1 { + Deletes closer to back of large list 
should move (smaller) back segment + and will not need a span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone 1000] + set e [lpop l $end-4] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list 995 [concat [irange 0 994] [irange 996 999]] 0 1 0] + +test listrep-1.8 { + Deletes at back of small unshared list should not need a span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone] end-$one end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5} 0 2 0] + +test listrep-1.8.1 { + Deletes at back of small unshared list should not need a span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceNone] $zero end-$two] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5} 0 2 0] + +test listrep-1.8.2 { + Deletes at back of small unshared list should not need a span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceNone] $end-1 $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5} 0 2 0] + +test listrep-1.8.3 { + Deletes at back of small unshared list should not need a span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone] + set e [lpop l $end] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list 7 {0 1 2 3 4 5 6} 0 1 0] + +test listrep-1.9 { + Deletes at back of large unshared list should not need a span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceNone 1000] end-$four end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list [irange 0 994] 0 5 0] + +test listrep-1.9.1 { + Deletes at back of large unshared list should not need a span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceNone 1000] 0 $end-5] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list [irange 0 994] 0 5 0] + +test listrep-1.9.2 { + Deletes at back of large unshared list should not need a span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceNone 1000] end-$four $end-3 end-$two $end-1 $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list [irange 0 994] 0 5 0] + +test listrep-1.9.3 { + Deletes at back of large unshared list should not need a span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone 1000] + set e [lpop l $end] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list 999 [irange 0 998] 0 1 0] + +test listrep-1.10 { + no-op on unshared list should force a canonical list string - lreplace version +} -body { + lreplace { 1 2 3 4 } $zero -1 +} -result {1 2 3 4} + +test listrep-1.10.1 { + no-op on unshared list should force a canonical list string - lrange version +} -body { + lrange { 1 2 3 4 } $zero $end +} -result {1 2 3 4} + +test listrep-1.11 { + Append elements to large unshared list is optimized as lappend + so no free space in front - lreplace version +} -body { + # Note $end, not end else byte code compiler short-cuts + set l [lreplace [freeSpaceNone 1000] $end+1 $end+1 1000] + validate $l + list $l [leadSpace $l] [expr {[tailSpace $l] > 0}] [hasSpan $l] +} -result [list [irange 0 1000] 0 1 0] + +test listrep-1.11.1 { + Append elements to large unshared list is optimized as lappend + so no free space in front - linsert 
version +} -body { + # Note $end, not end else byte code compiler short-cuts + set l [linsert [freeSpaceNone 1000] $end+1 1000] + validate $l + list $l [leadSpace $l] [expr {[tailSpace $l] > 0}] [hasSpan $l] +} -result [list [irange 0 1000] 0 1 0] + +test listrep-1.11.2 { + Append elements to large unshared list leaves no free space in front + - lappend version +} -body { + # Note $end, not end else byte code compiler short-cuts + set l [freeSpaceNone 1000] + lappend l 1000 1001 + validate $l + list $l [leadSpace $l] [expr {[tailSpace $l] > 0}] [hasSpan $l] +} -result [list [irange 0 1001] 0 1 0] + + +test listrep-1.12 { + Replacement of elements at front with same number elements in unshared list + is in-place - lreplace version +} -body { + set l [lreplace [freeSpaceNone] $zero $one 10 11] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 11 2 3 4 5 6 7} 0 0] + +test listrep-1.12.1 { + Replacement of elements at front with same number elements in unshared list + is in-place - lset version +} -body { + set l [freeSpaceNone] + lset l 0 -1 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {-1 1 2 3 4 5 6 7} 0 0] + +test listrep-1.13 { + Replacement of elements at front with fewer elements in unshared list + results in a spanned list with space only in front +} -body { + set l [lreplace [freeSpaceNone] $zero $four 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 5 6 7} 4 0] + +test listrep-1.14 { + Replacement of elements at front with more elements in unshared list + results in a reallocated spanned list with space at front and back +} -body { + set l [lreplace [freeSpaceNone] $zero $one 10 11 12] + validate $l + list $l [spaceEqual $l] +} -result [list {10 11 12 2 3 4 5 6 7} 1] + +test listrep-1.15 { + Replacement of elements in middle with same number elements in unshared list + is in-place - lreplace version +} -body { + set l [lreplace [freeSpaceNone] $one $two 10 11] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 10 11 3 4 5 6 7} 0 0] + +test listrep-1.15.1 { + Replacement of elements in middle with same number elements in unshared list + is in-place - lset version +} -body { + set l [freeSpaceNone] + lset l $two -1 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 -1 3 4 5 6 7} 0 0] + +test listrep-1.16 { + Replacement of elements in front half with fewer elements in unshared list + results in a spanned list with space only in front since smaller segment moved +} -body { + set l [lreplace [freeSpaceNone] $one $four 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 10 5 6 7} 3 0] + +test listrep-1.17 { + Replacement of elements in back half with fewer elements in unshared list + results in a spanned list with space only at back +} -body { + set l [lreplace [freeSpaceNone] end-$four end-$one 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 10 7} 0 3] + +test listrep-1.18 { + Replacement of elements in middle more elements in unshared list + results in a reallocated spanned list with space at front and back +} -body { + set l [lreplace [freeSpaceNone] $one $two 10 11 12] + validate $l + list $l [spaceEqual $l] +} -result [list {0 10 11 12 3 4 5 6 7} 1] + +test listrep-1.19 { + Replacement of elements at back with same number elements in unshared list + is in-place - lreplace version +} -body { + set l [lreplace [freeSpaceNone] $end-1 $end 10 11] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} 
-result [list {0 1 2 3 4 5 10 11} 0 0] + +test listrep-1.19.1 { + Replacement of elements at back with the same number of elements in unshared list + is in-place - lset version +} -body { + set l [freeSpaceNone] + lset l $end 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 6 10} 0 0] + +test listrep-1.20 { + Replacement of elements at back with fewer elements in unshared list + is in-place with space only at the back +} -body { + set l [lreplace [freeSpaceNone] $end-2 $end 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 10} 0 2] + +test listrep-1.21 { + Replacement of elements at back with more elements in unshared list + allocates new representation with equal space at front and back +} -body { + set l [lreplace [freeSpaceNone] $end-1 $end 10 11 12] + validate $l + list $l [spaceEqual $l] +} -result [list {0 1 2 3 4 5 10 11 12} 1] + +# +# listrep-2.* tests all operate on shared list reps with no free space. Note the +# *list internal rep* must be shared, not only the Tcl_Obj, so just assigning to +# another variable does not suffice. The lrange construct on a variable's value +# creates the required sharing. + +test listrep-2.1 { + Inserts in front of shared list with no free space should reallocate with + more free space in front - linsert version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [linsert $b $zero 99] + validate $l + list [repStoreRefCount $b] $l [leadSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {99 0 1 2 3 4 5 6 7} 1 1] + +test listrep-2.1.1 { + Inserts in front of shared list with no free space should reallocate with + more free space in front - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero -1 99] + validate $l + list [repStoreRefCount $b] $l [leadSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {99 0 1 2 3 4 5 6 7} 1 1] + +test listrep-2.2 { + Inserts at back of shared list with no free space should reallocate with + more free space in back - linsert version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [linsert $b $end 99] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5 6 7 99} 1 1] + +test listrep-2.2.1 { + Inserts at back of shared list with no free space should reallocate with + more free space in back - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $end+1 end+$one 99] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5 6 7 99} 1 1] + +test listrep-2.2.2 { + Inserts at back of shared list with no free space should reallocate with + more free space in back - lappend version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lappend b 99] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 1 {0 1 2 3 4 5 6 7 99} 1 1] + +test listrep-2.2.3 { + Inserts at back of shared list with no free space should reallocate with + more free space in back - lset version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure
shared listrep + set l [lset b $end+1 99] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 1 {0 1 2 3 4 5 6 7 99} 1 1] + +test listrep-2.3 { + Inserts in middle of shared list with no free space should reallocate with + equal spacing - linsert version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [linsert $b $four 99] + validate $l + list [repStoreRefCount $b] $l [spaceEqual $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 99 4 5 6 7} 1 1] + +test listrep-2.3.1 { + Inserts in middle of shared list with no free space should reallocate with + equal spacing - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $four $four-1 99] + validate $l + list [repStoreRefCount $b] $l [spaceEqual $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 99 4 5 6 7} 1 1] + +test listrep-2.4 { + Deletes from front of small shared list with no free space should + allocate new list of exact size - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero $zero] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {1 2 3 4 5 6 7} 0 0 1] + +test listrep-2.4.1 { + Deletes from front of small shared list with no free space should + allocate new list of exact size - lremove version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lremove $b $zero $one] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {2 3 4 5 6 7} 0 0 1] + +test listrep-2.4.2 { + Deletes from front of small shared list with no free space should + allocate new list of exact size - lrange version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lrange $b $one $end] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {1 2 3 4 5 6 7} 0 0 1] + +test listrep-2.4.3 { + Deletes from front of small shared list with no free space should + allocate new list of exact size - lassign version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lassign $b e] + validate $l + list $e [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 0 2 {1 2 3 4 5 6 7} 0 0 1] + +test listrep-2.4.4 { + Deletes from front of small shared list with no free space should + allocate new list of exact size - lpop version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set l [lrange $a $zero end]; # Ensure shared listrep + set e [lpop l $zero] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 0 {1 2 3 4 5 6 7} 0 0 1] + +test listrep-2.5 { + Deletes from front of large shared list with no free space should + create span - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero $zero] + validate $l + # The listrep store should be shared among a, b, l (3 refs) + list [sameStore $b $l] [repStoreRefCount $b] $l [hasSpan $l] [leadSpace $l] 
[tailSpace $l] [repStoreRefCount $l] +} -result [list 1 3 [irange 1 999] 1 0 0 3] + +test listrep-2.5.1 { + Deletes from front of large shared list with no free space should + create span - lremove version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lremove $b $zero $one] + validate $l + # The listrep store should be shared among a, b, l (3 refs) + list [sameStore $b $l] [repStoreRefCount $b] $l [hasSpan $l] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 1 3 [irange 2 999] 1 0 0 3] + +test listrep-2.5.2 { + Deletes from front of large shared list with no free space should + create span - lrange version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lrange $b $two $end] + validate $l + # The listrep store should be shared among a, b, l (3 refs) + list [sameStore $b $l] [repStoreRefCount $b] $l [hasSpan $l] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 1 3 [irange 2 999] 1 0 0 3] + +test listrep-2.5.3 { + Deletes from front of large shared list with no free space should + create span - lassign version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lassign $b e] + validate $l + # The listrep store should be shared among a, b, l (3 refs) + list $e [sameStore $b $l] [repStoreRefCount $b] $l [hasSpan $l] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 0 1 3 [irange 1 999] 1 0 0 3] + +test listrep-2.5.4 { + Deletes from front of large shared list with no free space should + create span - lpop version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set l [lrange $a $zero end]; # Ensure shared listrep + set e [lpop l $zero] + validate $l + # The listrep store should be shared among a, b, l (3 refs) + list $e $l [hasSpan $l] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 0 [irange 1 999] 1 0 0 2] + +test listrep-2.6 { + Deletes from back of small shared list with no free space should + allocate new list of exact size - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $end $end] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5 6} 0 0 1] + +test listrep-2.6.1 { + Deletes from back of small shared list with no free space should + allocate new list of exact size - lremove version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lremove $b $end $end-1] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5} 0 0 1] + +test listrep-2.6.2 { + Deletes from back of small shared list with no free space should + allocate new list of exact size - lrange version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lrange $b $zero $end-1] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5 6} 0 0 1] + +test listrep-2.6.3 { + Deletes from back of small shared list with no free space should + allocate new list of exact size - lpop version +} -constraints testlistrep -body { + set a 
[freeSpaceNone] + set l [lrange $a $zero end]; # Ensure shared listrep + set e [lpop l] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 7 {0 1 2 3 4 5 6} 0 0 1] + +test listrep-2.7 { + Deletes from back of large shared list with no free space should + use a span - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $end $end] + validate $l + # Note lead and tail space is 0 because original list store in a,b is used + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 3 [irange 0 998] 0 0 3] + +test listrep-2.7.1 { + Deletes from back of large shared list with no free space should + use a span - lremove version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lremove $b $end-1 $end] + validate $l + # Note lead and tail space is 0 because original list store in a,b is used + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 3 [irange 0 997] 0 0 3] + +test listrep-2.7.2 { + Deletes from back of large shared list with no free space should + use a span - lrange version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lrange $b $zero $end-1] + validate $l + # Note lead and tail space is 0 because original list store in a,b is used + list [repStoreRefCount $b] $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 3 [irange 0 998] 0 0 3] + +test listrep-2.7.3 { + Deletes from back of large shared list with no free space should + use a span - lpop version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set l [lrange $a $zero end]; # Ensure shared listrep + set e [lpop l] + validate $l + # Note lead and tail space is 0 because original list store in a,b is used + list $e $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 999 [irange 0 998] 0 0 2] + +test listrep-2.8 { + no-op on shared list should force a canonical list representation + with original unchanged - lreplace version +} -body { + set l { 1 2 3 4 } + list [lreplace $l $zero -1] $l +} -result [list {1 2 3 4} { 1 2 3 4 }] + +test listrep-2.8.1 { + no-op on shared list should force a canonical list representation + with original unchanged - lrange version +} -body { + set l { 1 2 3 4 } + list [lrange $l $zero end] $l +} -result [list {1 2 3 4} { 1 2 3 4 }] + +test listrep-2.9 { + Appends to back of large shared list with no free space allocates new + list with space only at the back - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $end+1 $end+1 1000] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [expr {[tailSpace $l]>0}] [repStoreRefCount $l] +} -result [list 2 [irange 0 1000] 0 1 1] + +test listrep-2.9.1 { + Appends to back of large shared list with no free space allocates new + list with space only at the back - linsert version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [linsert $b $end+1 1000 1001] + validate $l + list [repStoreRefCount $b] $l [leadSpace $l] [expr {[tailSpace $l]>0}] [repStoreRefCount $l] +} -result [list 2 [irange 0 1001] 0 1 1] + +test listrep-2.9.2 { + 
Appends to back of large shared list with no free space allocates new + list with space only at the back - lappend version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set l [lrange $a $zero end]; # Ensure shared listrep + lappend l 1000 + validate $l + list $l [leadSpace $l] [expr {[tailSpace $l]>0}] [repStoreRefCount $l] +} -result [list [irange 0 1000] 0 1 1] + +test listrep-2.9.3 { + Appends to back of large shared list with no free space allocates new + list with space only at the back - lset version +} -constraints testlistrep -body { + set a [freeSpaceNone 1000] + set l [lrange $a $zero end]; # Ensure shared listrep + lset l $end+1 1000 + validate $l + list $l [leadSpace $l] [expr {[tailSpace $l]>0}] [repStoreRefCount $l] +} -result [list [irange 0 1000] 0 1 1] + +test listrep-2.10 { + Replacement of elements at front with same number in shared list results + in a new list store with more space in front than back - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero $one 10 11] + validate $l + list [repStoreRefCount $b] $l [leadSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {10 11 2 3 4 5 6 7} 1 1] + +test listrep-2.10.1 { + Replacement of elements at front with same number in shared list results + in a new list store with no extra space - lset version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set l [lrange $a $zero end]; # Ensure shared listrep + lset l $zero 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {10 1 2 3 4 5 6 7} 0 0 1] + +test listrep-2.11 { + Replacement of elements at front with fewer elements in shared list + results in a new list store with more space in front than back +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero $four 10] + validate $l + list [repStoreRefCount $b] $l [leadSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {10 5 6 7} 1 1] + +test listrep-2.12 { + Replacement of elements at front with more elements in shared list + results in a new spanned list with more space in front +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $zero $one 10 11 12] + validate $l + list [repStoreRefCount $b] $l [leadSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {10 11 12 2 3 4 5 6 7} 1 1] + +test listrep-2.13 { + Replacement of elements in middle with same number in shared list results + in a new list store with equal space in front and back - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $one $two 10 11] + validate $l + list [repStoreRefCount $b] $l [spaceEqual $l] [repStoreRefCount $l] +} -result [list 2 {0 10 11 3 4 5 6 7} 1 1] + +test listrep-2.13.1 { + Replacement of elements in middle with same number in shared list results + in a new list store with exact allocation - lset version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set l [lrange $a $zero end]; # Ensure shared listrep + lset l $one 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 10 2 3 4 5 6 7} 0 0 1] + +test listrep-2.14 { + Replacement of elements in middle with fewer elements in shared list + results in a new list store with equal space +} 
-constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $one 5 10] + validate $l + list [repStoreRefCount $b] $l [spaceEqual $l] [repStoreRefCount $l] +} -result [list 2 {0 10 6 7} 1 1] + +test listrep-2.15 { + Replacement of elements in middle with more elements in shared list + results in a new spanned list with space in front and back +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b $one $two 10 11 12] + validate $l + list [repStoreRefCount $b] $l [spaceEqual $l] [repStoreRefCount $l] +} -result [list 2 {0 10 11 12 3 4 5 6 7} 1 1] + +test listrep-2.16 { + Replacement of elements at back with same number in shared list results + in a new list store with more space in back than front - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b end-$one $end 10 11] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 3 4 5 10 11} 1 1] + +test listrep-2.16.1 { + Replacement of elements at back with same number in shared list results + in a new list store with no extra - lreplace version +} -constraints testlistrep -body { + set a [freeSpaceNone] + set l [lrange $a $zero end]; # Ensure shared listrep + lset l $end 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 10} 0 0 1] + +test listrep-2.17 { + Replacement of elements at back with fewer elements in shared list + results in a new list store with more space in back than front +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b end-$four $end 10] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 10} 1 1] + +test listrep-2.18 { + Replacement of elements at back with more elements in shared list + results in a new list store with more space in back than front +} -constraints testlistrep -body { + set a [freeSpaceNone] + set b [lrange $a $zero end]; # Ensure shared listrep + set l [lreplace $b end-$four $end 10] + validate $l + list [repStoreRefCount $b] $l [tailSpaceMore $l] [repStoreRefCount $l] +} -result [list 2 {0 1 2 10} 1 1] + +# +# listrep-3.* - tests on unshared spanned listreps + +test listrep-3.1 { + Inserts in front of unshared spanned list with room in front should just + shrink the lead space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth] $zero -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -2 7] 1 3 1] + +test listrep-3.1.1 { + Inserts in front of unshared spanned list with room in front should just + shrink the lead space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $zero -1 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -2 7] 1 3 1] + +test listrep-3.2 { + Inserts in front of unshared spanned list with insufficient room in front + but enough total freespace should redistribute free space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 10] $zero -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -2 7] 5 4 1] + 
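# (Editorial sketch, not part of the original commit: the recurring pattern in
#  the listrep-3.* tests is that an unshared spanned list consumes its existing
#  free space before anything is reallocated, e.g. per listrep-3.1 above:
#      set l [linsert [freeSpaceBoth] $zero -2 -1]
#      leadSpace $l    ;# 3 -> 1, the new elements ate into the lead space
#      tailSpace $l    ;# still 3, untouched
#  A new store is allocated only when the total free space cannot absorb the
#  edit, as in listrep-3.3 below.)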
+test listrep-3.2.1 { + Inserts in front of unshared spanned list with insufficient room in front + but enough total freespace should redistribute free space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 10] $zero -1 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -2 7] 5 4 1] + +test listrep-3.3 { + Inserts in front of unshared spanned list with insufficient total freespace + should reallocate with equal free space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 1] $zero -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -3 7] 6 5 1] + +test listrep-3.3.1 { + Inserts in front of unshared spanned list with insufficient total freespace + should reallocate with equal free space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 1] $zero -1 -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange -3 7] 6 5 1] + +test listrep-3.4 { + Inserts at back of unshared spanned list with room at back should not + reallocate - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth] $end 8] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 8] 3 2 1] + +test listrep-3.4.1 { + Inserts at back of unshared spanned list with room at back should not + reallocate - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $end+1 $end+1 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 9] 3 1 1] + +test listrep-3.4.2 { + Inserts at back of unshared spanned list with room at back should not + reallocate - lappend version +} -constraints testlistrep -body { + set l [freeSpaceBoth] + lappend l 8 9 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 10] 3 0 1] + +test listrep-3.4.3 { + Inserts at back of unshared spanned list with room at back should not + reallocate - lset version +} -constraints testlistrep -body { + set l [freeSpaceBoth] + lset l $end+1 8 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 8] 3 2 1] + +test listrep-3.5 { + Inserts at back of unshared spanned list with insufficient room in back + but enough total freespace should redistribute free space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 10 1] $end 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 9] 5 4 1] + +test listrep-3.5.1 { + Inserts at back of unshared spanned list with insufficient room in back + but enough total freespace should redistribute free space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 10 1] $end+1 $end+1 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 9] 5 4 1] + +test listrep-3.5.2 { + Inserts at back of unshared spanned list with insufficient room in back + but enough total freespace should redistribute free space - lappend version +} -constraints testlistrep -body { + set l [freeSpaceBoth 8 10 1] + lappend l 8 9 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 9] 5 4 1] + +test listrep-3.5.3 
{ + Inserts at back of unshared spanned list with insufficient room in back + but enough total freespace should redistribute free space - lset version +} -constraints testlistrep -body { + set l [freeSpaceBoth 8 10 0] + lset l $end+1 8 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 8] 5 4 1] + +test listrep-3.6 { + Inserts in back of unshared spanned list with insufficient total freespace + should reallocate with all *additional* space at back. Note this differs + from the insert in front case because here we realloc(). - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 1] $end 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 10] 1 10 1] + +test listrep-3.6.1 { + Inserts in back of unshared spanned list with insufficient total freespace + should reallocate with all *additional* space at back. Note this differs + from the insert in front case because here we realloc() - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 1] $end+1 $end+1 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 10] 1 10 1] + +test listrep-3.6.2 { + Inserts in back of unshared spanned list with insufficient total freespace + should reallocate with all *additional* space at back. Note this differs + from the insert in front case because here we realloc() - lappend version +} -constraints testlistrep -body { + set l [freeSpaceBoth 8 1 1] + lappend l 8 9 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 10] 1 10 1] + +test listrep-3.6.3 { + Inserts in back of unshared spanned list with insufficient total freespace + should reallocate with all *additional* space at back. 
Note this differs + from the insert in front case because here we realloc() - lset version +} -constraints testlistrep -body { + set l [freeSpaceNone] + lset l $end+1 8 + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 0 8] 0 9 1] + +test listrep-3.7 { + Inserts in front half of unshared spanned list with room in front should not + reallocate and should move front segment +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth] $one -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -2 -1 1 2 3 4 5 6 7} 1 3 1] + +test listrep-3.8 { + Inserts in front half of unshared spanned list with insufficient leading + space but with enough tail space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 5] $one -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -2 -1 1 2 3 4 5 6 7} 1 3 1] + +test listrep-3.8.1 { + Inserts in front half of unshared spanned list with insufficient leading + space but with enough tail space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 5] $one -1 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -2 -1 1 2 3 4 5 6 7} 1 3 1] + +test listrep-3.9 { + Inserts in front half of unshared spanned list with sufficient total + free space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 2 2] $one -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -3 -2 -1 1 2 3 4 5 6 7} 0 1 1] + +test listrep-3.9.1 { + Inserts in front half of unshared spanned list with sufficient total + free space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 2 2] $one -1 -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -3 -2 -1 1 2 3 4 5 6 7} 0 1 1] + +test listrep-3.10 { + Inserts in front half of unshared spanned list with insufficient total space. + Note use of realloc() means new space will be at the back - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 1] $one -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -3 -2 -1 1 2 3 4 5 6 7} 1 10 1] + +test listrep-3.10.1 { + Inserts in front half of unshared spanned list with insufficient total space. 
+ Note use of realloc() means new space will be at the back - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 1] $one -1 -3 -2 -1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 -3 -2 -1 1 2 3 4 5 6 7} 1 10 1] + +test listrep-3.11 { + Inserts in back half of unshared spanned list with room in back should not + reallocate and should move back segment - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth] $end-$one 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 7} 3 1 1] + +test listrep-3.11.1 { + Inserts in back half of unshared spanned list with room in back should not + reallocate and should move back segment - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $end -1 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 7} 3 1 1] + +test listrep-3.12 { + Inserts in back half of unshared spanned list with insufficient tail + space but with enough leading space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 5 1] $end-$one 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 7} 3 1 1] + +test listrep-3.12.1 { + Inserts in back half of unshared spanned list with insufficient tail + space but with enough leading space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 5 1] $end -1 8 9] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 7} 3 1 1] + +test listrep-3.13 { + Inserts in back half of unshared spanned list with sufficient total + free space - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 2 2] $end-$one 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 10 7} 0 1 1] + +test listrep-3.13.1 { + Inserts in back half of unshared spanned list with sufficient total + free space - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 2 2] $end -1 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 10 7} 0 1 1] + +test listrep-3.14 { + Inserts in back half of unshared spanned list with insufficient + total space. Note use of realloc() means new space will be at the + back - linsert version +} -constraints testlistrep -body { + set l [linsert [freeSpaceBoth 8 1 1] $end-$one 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 10 7} 1 10 1] + +test listrep-3.14.1 { + Inserts in back half of unshared spanned list with insufficient + total space. 
Note use of realloc() means new space will be at the + back - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 1] $end -1 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {0 1 2 3 4 5 6 8 9 10 7} 1 10 1] + +test listrep-3.15 { + Deletes from front of small unshared span list results in elements + moved up front and span removal - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $zero $zero] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {1 2 3 4 5 6 7} 0 7 0] + +test listrep-3.15.1 { + Deletes from front of small unshared span list results in elements + moved up front and span removal - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth] $zero $one] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {2 3 4 5 6 7} 0 8 0] + +test listrep-3.15.2 { + Deletes from front of small unshared span list results in elements + moved up front and span removal - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceBoth] $one $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {1 2 3 4 5 6 7} 0 7 0] + +test listrep-3.15.3 { + Deletes from front of small unshared span list results in elements + moved up front and span removal - lassign version +} -constraints testlistrep -body { + set l [lassign [freeSpaceBoth] e] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list 0 {1 2 3 4 5 6 7} 0 7 0] + +test listrep-3.15.4 { + Deletes from front of small unshared span list results in elements + moved up front and span removal - lpop version +} -constraints testlistrep -body { + set l [freeSpaceBoth] + set e [lpop l $zero] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {1 2 3 4 5 6 7} 0 7 0] + +test listrep-3.16 { + Deletes from front of large unshared span list results in another + span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 1000 10 10] $zero $one] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 12 998] +} -result [list [irange 2 999] 12 10 1] + +test listrep-3.16.1 { + Deletes from front of large unshared span list results in another + span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth 1000 10 10] $zero $one] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 12 998] +} -result [list [irange 2 999] 12 10 1] + +test listrep-3.16.2 { + Deletes from front of large unshared span list results in another + span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceBoth 1000 10 10] $two $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 12 998] +} -result [list [irange 2 999] 12 10 1] + +test listrep-3.16.3 { + Deletes from front of large unshared span list results in another + span - lassign version +} -constraints testlistrep -body { + set l [lassign [freeSpaceBoth 1000 10 10] e] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 11 999] +} -result [list 0 [irange 1 999] 11 10 1] + +test listrep-3.16.4 { + Deletes from front of large unshared span list results in another + span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceBoth 1000 10 10] + set e [lpop l $zero] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 11 999] +} -result [list
0 [irange 1 999] 11 10 1] + +test listrep-3.17 { + Deletes from back of small unshared span list results in new store + without span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $end $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5 6} 0 7 0] + +test listrep-3.17.1 { + Deletes from back of small unshared span list results in new store + without span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth] $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5 6} 0 7 0] + +test listrep-3.17.2 { + Deletes from back of small unshared span list results in new store + without span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceBoth] $zero $end-1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list {0 1 2 3 4 5 6} 0 7 0] + +test listrep-3.17.3 { + Deletes from back of small unshared span list results in new store + without span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceBoth] + set e [lpop l] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l] +} -result [list 7 {0 1 2 3 4 5 6} 0 7 0] + +test listrep-3.18 { + Deletes from back of large unshared span list results in another + span - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 1000 10 10] $end-1 $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 998] +} -result [list [irange 0 997] 10 12 1] + +test listrep-3.18.1 { + Deletes from back of large unshared span list results in another + span - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth 1000 10 10] $end-1 $end] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 998] +} -result [list [irange 0 997] 10 12 1] + +test listrep-3.18.2 { + Deletes from back of large unshared span list results in another + span - lrange version +} -constraints testlistrep -body { + set l [lrange [freeSpaceBoth 1000 10 10] $zero $end-2] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 998] +} -result [list [irange 0 997] 10 12 1] + +test listrep-3.18.3 { + Deletes from back of large unshared span list results in another + span - lpop version +} -constraints testlistrep -body { + set l [freeSpaceBoth 1000 10 10] + set e [lpop l] + validate $l + list $e $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 999] +} -result [list 999 [irange 0 998] 10 11 1] + +test listrep-3.19 { + Deletes from front half of small unshared span list results in + movement of smaller front segment - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $one $two] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 5 6] +} -result [list {0 3 4 5 6 7} 5 3 1] + +test listrep-3.19.1 { + Deletes from front half of small unshared span list results in + movement of smaller front segment - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth] $one $two] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 5 6] +} -result [list {0 3 4 5 6 7} 5 3 1] + +test listrep-3.20 { + Deletes from front half of large unshared span list results in + movement of smaller front segment - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 1000 10 10] $one $two] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 12 998] +} 
-result [list [list 0 {*}[irange 3 999]] 12 10 1] + +test listrep-3.20.1 { + Deletes from front half of large unshared span list results in + movement of smaller front segment - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth 1000 10 10] $one $two] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 12 998] +} -result [list [list 0 {*}[irange 3 999]] 12 10 1] + +test listrep-3.21 { + Deletes from back half of small unshared span list results in + movement of smaller back segment - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth] $end-2 $end-1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 3 6] +} -result [list {0 1 2 3 4 7} 3 5 1] + +test listrep-3.21.1 { + Deletes from back half of small unshared span list results in + movement of smaller back segment - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth] $end-2 $end-1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 3 6] +} -result [list {0 1 2 3 4 7} 3 5 1] + +test listrep-3.22 { + Deletes from back half of large unshared span list results in + movement of smaller back segment - lreplace version +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 1000 10 10] $end-2 $end-1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 998] +} -result [list [list {*}[irange 0 996] 999] 10 12 1] + +test listrep-3.22.1 { + Deletes from back half of large unshared span list results in + movement of smaller back segment - lremove version +} -constraints testlistrep -body { + set l [lremove [freeSpaceBoth 1000 10 10] $end-2 $end-1] + validate $l + list $l [leadSpace $l] [tailSpace $l] [hasSpan $l 10 998] +} -result [list [list {*}[irange 0 996] 999] 10 12 1] + +test listrep-3.23 { + Replacement of elements at front with same number elements in unshared + spanned list is in-place - lreplace version +} -body { + set l [lreplace [freeSpaceBoth] $zero $one 10 11] + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 11 2 3 4 5 6 7} 3 3] + +test listrep-3.23.1 { + Replacement of elements at front with same number elements in unshared + spanned list is in-place - lset version +} -body { + set l [freeSpaceBoth] + lset l $zero 10 + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 1 2 3 4 5 6 7} 3 3] + +test listrep-3.24 { + Replacement of elements at front with fewer elements in unshared + spanned list expands leading space - lreplace version +} -body { + set l [lreplace [freeSpaceBoth] $zero $four 10] + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 5 6 7} 7 3] + +test listrep-3.25 { + Replacement of elements at front with more elements in unshared + spanned list with sufficient leading space shrinks leading space +} -body { + set l [lreplace [freeSpaceBoth] $zero $one 10 11 12] + list $l [leadSpace $l] [tailSpace $l] +} -result [list {10 11 12 2 3 4 5 6 7} 2 3] + +test listrep-3.26 { + Replacement of elements at front with more elements in unshared + spanned list with insufficient leading space but sufficient total + free space +} -constraints testlistrep -body { + set l [lreplace [freeSpaceBoth 8 1 10] $zero $one 10 11 12 13] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {10 11 12 13 2 3 4 5 6 7} 5 4 1] + +test listrep-3.27 { + Replacement of elements at front in unshared spanned list with insufficient + total freespace should reallocate with equal free space +} -constraints testlistrep -body { 
+ set l [lreplace [freeSpaceBoth 8 1 1] $zero $one 10 11 12 13 14] + validate $l + list $l [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list {10 11 12 13 14 2 3 4 5 6 7} 6 5 1] + +test listrep-3.28 { + Replacement of elements at back with same number of elements in unshared + spanned list is in-place - lreplace version +} -body { + set l [lreplace [freeSpaceBoth] $end-1 $end 10 11] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 10 11} 3 3] + +test listrep-3.28.1 { + Replacement of elements at back with same number of elements in unshared + spanned list is in-place - lset version +} -body { + set l [freeSpaceBoth] + lset l $end 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 6 10} 3 3] + +test listrep-3.29 { + Replacement of elements at back with fewer elements in unshared + spanned list expands tail space +} -body { + set l [lreplace [freeSpaceBoth] $end-2 $end 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 10} 3 5] + +test listrep-3.30 { + Replacement of elements at back with more elements in unshared + spanned list with sufficient tail space shrinks tailspace +} -body { + set l [lreplace [freeSpaceBoth] $end-1 $end 10 11 12] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 10 11 12} 3 2] + +test listrep-3.31 { + Replacement of elements at back with more elements in unshared spanned list + with insufficient tail space but enough total free space moves up the span +} -body { + set l [lreplace [freeSpaceBoth 8 2 2] $end-1 $end 10 11 12 13 14] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 10 11 12 13 14} 0 1] + +test listrep-3.32 { + Replacement of elements at back with more elements in unshared spanned list + with insufficient total space reallocates with more room in the tail because + of realloc() +} -body { + set l [lreplace [freeSpaceBoth 8 1 1] $end-1 $end 10 11 12 13 14] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 10 11 12 13 14} 1 10] + +test listrep-3.33 { + Replacement of elements in the middle in an unshared spanned list with + the same number of elements - lreplace version +} -body { + set l [lreplace [freeSpaceBoth] $two $four 10 11 12] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 10 11 12 5 6 7} 3 3] + +test listrep-3.33.1 { + Replacement of elements in the middle in an unshared spanned list with + the same number of elements - lset version +} -body { + set l [freeSpaceBoth] + lset l $two 10 + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 10 3 4 5 6 7} 3 3] + +test listrep-3.34 { + Replacement of elements in an unshared spanned list with fewer elements + in the front half moves the front (smaller) segment +} -body { + set l [lreplace [freeSpaceBoth] $two $four 10 11] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 10 11 5 6 7} 4 3] + +test listrep-3.35 { + Replacement of elements in an unshared spanned list with fewer elements + in the back half moves the tail (smaller) segment +} -body { + set l [lreplace [freeSpaceBoth] $end-2 $end-1 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 10 7} 3 4] + +test listrep-3.36 { + Replacement of elements in an unshared spanned list with more elements + when both front and back have room should move the smaller segment + (front case) +} -body { + set l [lreplace 
[freeSpaceBoth] $one $two 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 8 9 10 3 4 5 6 7} 2 3] + +test listrep-3.37 { + Replacement of elements in an unshared spanned list with more elements + when both front and back have room should move the smaller segment + (back case) +} -body { + set l [lreplace [freeSpaceBoth] $end-2 $end-1 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 8 9 10 7} 3 2] + +test listrep-3.38 { + Replacement of elements in an unshared spanned list with more elements + when only front has room +} -body { + set l [lreplace [freeSpaceBoth 8 3 1] $end-1 $end-1 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 1 2 3 4 5 8 9 10 7} 1 1] + +test listrep-3.39 { + Replacement of elements in an unshared spanned list with more elements + when only back has room +} -body { + set l [lreplace [freeSpaceBoth 8 1 3] $one $one 8 9 10] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 8 9 10 2 3 4 5 6 7} 1 1] + +test listrep-3.40 { + Replacement of elements in an unshared spanned list with more elements + when neither end has enough room by itself +} -body { + set l [lreplace [freeSpaceBoth] $one $one 8 9 10 11 12] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 8 9 10 11 12 2 3 4 5 6 7} 1 1] + +test listrep-3.41 { + Replacement of elements in an unshared spanned list with more elements + when there is not enough free space results in new allocation. The back + end has more space because of realloc() +} -body { + set l [lreplace [freeSpaceBoth 8 1 1] $one $one 8 9 10 11 12] + validate $l + list $l [leadSpace $l] [tailSpace $l] +} -result [list {0 8 9 10 11 12 2 3 4 5 6 7} 1 11] + +# +# 4.* - tests on shared spanned lists + +test listrep-4.1 { + Inserts in front of shared spanned list with used elements in lead space + creates new list rep with more lead than tail space - linsert version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [linsert $spanl $zero -1] + validate $l + list $master $spanl $l [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $master] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 999] [irange 2 997] [list -1 {*}[irange 2 997]] 1 1 2 2 1] + +test listrep-4.1.1 { + Inserts in front of shared spanned list with used elements in lead space + creates new list rep with more lead than tail space - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $zero -1 -2] + validate $l + list $master $spanl $l [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $master] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 999] [irange 2 997] [list -2 {*}[irange 2 997]] 1 1 2 2 1] + +test listrep-4.2 { + Inserts in front of shared spanned list with orphaned leading elements + allocate a new list rep with more lead than tail space - linsert version + TODO - ideally this should garbage collect the orphans and reuse the lead space + but that needs a "lprepend" command else the listrep operand is shared and hence + orphans cannot be freed +} -constraints testlistrep -body { + set master [freeSpaceLead 1000 100] + set spanl [lrange $master $two $end-2] + unset master; # So elements at 0, 1 are not used + set l [linsert $spanl $zero -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan 
$l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [list -1 {*}[irange 2 997]] 0 1 1 1 1] + +test listrep-4.2.1 { + Inserts in front of shared spanned list with orphaned leading elements + allocate a new list rep with more lead than tail space - lreplace version + TODO - ideally this should garbage collect the orphans and reuse the lead space + but that needs a "lprepend" command else the listrep operand is shared and hence + orphans cannot be freed +} -constraints testlistrep -body { + set master [freeSpaceLead 1000 100] + set spanl [lrange $master $two $end-2] + unset master; # So elements at 0, 1 are not used + set l [lreplace $spanl $zero -1 -2] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [list -2 {*}[irange 2 997]] 0 1 1 1 1] + +test listrep-4.3 { + Inserts in front of shared spanned list where span is at front of used + space reuses the same list store - linsert version +} -constraints testlistrep -body { + set master [freeSpaceLead 1000 100] + set spanl [lrange $master $zero $end-2] + set l [linsert $spanl $zero -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpace $l] [tailSpace $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 997] [irange -1 997] 1 99 0 1 3 3] + +test listrep-4.3.1 { + Inserts in front of shared spanned list where span is at front of used + space reuses the same list store - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceLead 1000 100] + set spanl [lrange $master $zero $end-2] + set l [lreplace $spanl $zero -1 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpace $l] [tailSpace $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 997] [irange -1 997] 1 99 0 1 3 3] + +test listrep-4.4 { + Inserts in front of shared spanned list where span is at front of used + space allocates new listrep if lead space insufficient even if total free space + is sufficient. New listrep should have more lead space than tail space. + - linsert version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $zero $end-2] + set l [linsert $spanl $zero -3 -2 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 997] [irange -3 997] 0 1 1 2 1] + +test listrep-4.4.1 { + Inserts in front of shared spanned list where span is at front of used + space allocates new listrep if lead space insufficient even if total free space + is sufficient. New listrep should have more lead space than tail space. 
+ - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $zero $end-2] + set l [lreplace $spanl $zero -1 -3 -2 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 0 997] [irange -3 997] 0 1 1 2 1] + +test listrep-4.5 { + Inserts in back of shared spanned list where span is at end of used space + still allocates a new listrep and trailing space is more than leading space + - linsert version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $two $end] + set l [linsert $spanl $end 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 999] [irange 2 1000] 0 1 1 2 1] + +test listrep-4.5.1 { + Inserts in back of shared spanned list where span is at end of used space + still allocates a new listrep and trailing space is more than leading space + - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $two $end] + set l [lreplace $spanl $end+1 $end+1 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 999] [irange 2 1000] 0 1 1 2 1] + +test listrep-4.5.2 { + Inserts in back of shared spanned list where span is at end of used space + still allocates a new listrep and trailing space is more than leading space + - lappend version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set l [lrange $master $two $end] + lappend l 1000 + validate $l + list $l [sameStore $master $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list [irange 2 1000] 0 1 1 1] + +test listrep-4.5.3 { + Inserts in back of shared spanned list where span is at end of used space + still allocates a new listrep and trailing space is more than leading space + - lset version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set l [lrange $master $two $end] + lset l $end+1 1000 + validate $l + list $l [sameStore $master $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list [irange 2 1000] 0 1 1 1] + + +test listrep-4.6 { + Inserts in middle of shared spanned list allocates a new listrep with equal + lead and tail space - linsert version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $two $end-2] + set i 200 + set l [linsert $spanl $i 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [spaceEqual $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 201] 1000 [irange 202 997]] 0 1 1 2 1] + +test listrep-4.6.1 { + Inserts in middle of shared spanned list allocates a new listrep with equal + lead and tail space - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceBoth 1000 2] + set spanl [lrange $master $two $end-2] + set i 200 + set l [lreplace $spanl $i -1 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [spaceEqual $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 201] 1000 [irange 202 997]] 0 1 1 2 1] + +test listrep-4.7 { + Deletes from front of shared spanned list do not create a new allocation + - lreplace version +} 
-constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $zero $one] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 4 997] 1 1 3 3] + +test listrep-4.7.1 { + Deletes from front of shared spanned list do not create a new allocation + - lremove version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lremove $spanl $zero $one] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 4 997] 1 1 3 3] + +test listrep-4.7.2 { + Deletes from front of shared spanned list do not create a new allocation + - lrange version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lrange $spanl $two $end] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 4 997] 1 1 3 3] + +test listrep-4.7.3 { + Deletes from front of shared spanned list do not create a new allocation + - lassign version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lassign $spanl e] + validate $l + list $e $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list 2 [irange 2 997] [irange 3 997] 1 1 3 3] + +test listrep-4.7.4 { + Deletes from front of shared spanned list do not create a new allocation + - lpop version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + set e [lpop l $zero] + validate $l + list $e $l [sameStore $master $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list 2 [irange 3 997] 1 1 2] + +test listrep-4.8 { + Deletes from end of shared spanned list do not create a new allocation + - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-1 $end] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 2 995] 1 1 3 3] + +test listrep-4.8.1 { + Deletes from end of shared spanned list do not create a new allocation + - lremove version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lremove $spanl $end-1 $end] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 2 995] 1 1 3 3] + +test listrep-4.8.2 { + Deletes from end of shared spanned list do not create a new allocation + - lrange version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lrange $spanl 0 $end-2] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [irange 2 995] 1 1 3 3] + +test listrep-4.8.3 { + Deletes from end of shared spanned list do not create a new allocation + - lpop version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + set e [lpop l] + validate $l + list $e $l 
[sameStore $master $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list 997 [irange 2 996] 1 1 2] + +test listrep-4.9 { + Deletes from middle of shared spanned list creates a new allocation with + equal free space at front and back - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set i 500 + set l [lreplace $spanl $i $i] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [spaceEqual $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 501] [irange 503 997]] 0 1 1 2 1] + +test listrep-4.9.1 { + Deletes from middle of shared spanned list creates a new allocation with + equal free space at front and back - lremove version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set i 500 + set l [lremove $spanl $i $i] + validate $l + list $spanl $l [sameStore $spanl $l] [hasSpan $l] [spaceEqual $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 501] [irange 503 997]] 0 1 1 2 1] + +test listrep-4.9.2 { + Deletes from middle of shared spanned list creates a new allocation with + equal free space at front and back - lpop version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + set i 500 + set e [lpop l $i] + validate $l + list $e $l [sameStore $master $l] [hasSpan $l] [spaceEqual $l] [repStoreRefCount $l] +} -result [list 502 [concat [irange 2 501] [irange 503 997]] 0 1 1 1] + +test listrep-4.10 { + Replacements with same number of elements at front of shared spanned list + create a new allocation with more space in front - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $zero $one -2 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat {-2 -1} [irange 4 997]] 0 1 1 2 1] + +test listrep-4.10.1 { + Replacements with same number of elements at front of shared spanned list + create a new allocation with exact size +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + lset l $zero -1 + validate $l + list $l [sameStore $master $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list [concat {-1} [irange 3 997]] 0 0 1] + +test listrep-4.11 { + Replacements with fewer elements at front of shared spanned list + create a new allocation with more space in front +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $zero $one -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat {-1} [irange 4 997]] 0 1 1 2 1] + +test listrep-4.12 { + Replacements with more elements at front of shared spanned list + create a new allocation with more space in front +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $zero $one -3 -2 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [leadSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat {-3 -2 -1} [irange 4 997]] 0 1 1 2 1] + +test 
listrep-4.13 { + Replacements with same number of elements at back of shared spanned list + create a new allocation with more space in back - lreplace version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-1 $end 1000 1001] + validate $l + list $spanl $l [sameStore $spanl $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 995] {1000 1001}] 0 1 1 2 1] + +test listrep-4.13.1 { + Replacements with same number of elements at back of shared spanned list + create a new exact allocation with no span - lset version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + lset l $end 1000 + validate $l + list $l [sameStore $master $l] [tailSpace $l] [hasSpan $l] [repStoreRefCount $l] +} -result [list [concat [irange 2 996] {1000}] 0 0 0 1] + +test listrep-4.14 { + Replacements with fewer elements at back of shared spanned list + create a new allocation with more space in back +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-1 $end 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 995] {1000}] 0 1 1 2 1] + +test listrep-4.15 { + Replacements with more elements at back of shared spanned list + create a new allocation with more space in back +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-1 $end 1000 1001 1002] + validate $l + list $spanl $l [sameStore $spanl $l] [tailSpaceMore $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 995] {1000 1001 1002}] 0 1 1 2 1] + +test listrep-4.16 { + Replacements with same number of elements in middle of shared spanned list + create a new allocation with equal lead and tail space +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $one $two -2 -1] + validate $l + list $spanl $l [sameStore $spanl $l] [spaceEqual $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat {2 -2 -1} [irange 5 997]] 0 1 1 2 1] + +test listrep-4.16.1 { + Replacements with same number of elements in middle of shared spanned list + create a new exact allocation - lset version +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set l [lrange $master $two $end-2] + lset l $one -2 + validate $l + list $l [sameStore $master $l] [hasSpan $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [concat {2 -2} [irange 4 997]] 0 0 0 1] + +test listrep-4.17 { + Replacements with fewer elements in middle of shared spanned list + create a new allocation with equal lead and tail space +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-2 $end-1 1000] + validate $l + list $spanl $l [sameStore $spanl $l] [spaceEqual $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 994] {1000 997}] 0 1 1 2 1] + +test listrep-4.18 { + Replacements with more elements in middle of shared spanned list + create a new 
allocation with equal lead and tail space +} -constraints testlistrep -body { + set master [freeSpaceNone 1000] + set spanl [lrange $master $two $end-2] + set l [lreplace $spanl $end-2 $end-1 1000 1001 1002] + validate $l + list $spanl $l [sameStore $spanl $l] [spaceEqual $l] [hasSpan $l] [repStoreRefCount $spanl] [repStoreRefCount $l] +} -result [list [irange 2 997] [concat [irange 2 994] {1000 1001 1002 997}] 0 1 1 2 1] + +# 5.* - tests on shared Tcl_Obj +# Tests when Tcl_Obj is shared but listrep is not. This is to ensure that +# checks for shared values check the Tcl_Obj reference counts in addition to +# the list internal representation reference counts. Probably some or all +# cases are already covered elsewhere but easier to just test than look. +test listrep-5.1 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanless + list representation only modifies the target object - lappend version +} -constraints testlistrep -body { + set l [freeSpaceNone] + set l2 $l + set same [sameStore $l $l2] + lappend l 8 + list $same $l $l2 [sameStore $l $l2] +} -result [list 1 [irange 0 8] [irange 0 7] 0] + +test listrep-5.1.1 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanless + list representation only modifies the target object - lset version +} -constraints testlistrep -body { + set l [freeSpaceNone] + set l2 $l + set same [sameStore $l $l2] + lset l $end+1 8 + list $same $l $l2 [sameStore $l $l2] +} -result [list 1 [irange 0 8] [irange 0 7] 0] + +test listrep-5.1.2 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanless + list representation only modifies the target object - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone] + set l2 $l + set same [sameStore $l $l2] + lpop l + list $same $l $l2 [sameStore $l $l2] [hasSpan $l] +} -result [list 1 [irange 0 6] [irange 0 7] 0 0] + +test listrep-5.2 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanned + list representation only modifies the target object - lappend version +} -constraints testlistrep -body { + set l [freeSpaceBoth 1000 10 10] + set l2 $l + set same [sameStore $l $l2] + lappend l 1000 + list $same $l $l2 [sameStore $l $l2] [hasSpan $l] [hasSpan $l2] +} -result [list 1 [irange 0 1000] [irange 0 999] 0 1 1] + +test listrep-5.2.1 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanned + list representation only modifies the target object - lset version +} -constraints testlistrep -body { + set l [freeSpaceBoth 1000 10 10] + set l2 $l + set same [sameStore $l $l2] + lset l $end+1 1000 + list $same $l $l2 [sameStore $l $l2] [hasSpan $l] [hasSpan $l2] +} -result [list 1 [irange 0 1000] [irange 0 999] 0 1 1] + +test listrep-5.2.2 { + Verify that operation on a shared Tcl_Obj with a single-ref, spanned + list representation only modifies the target object - lpop version +} -constraints testlistrep -body { + set l [freeSpaceNone 1000] + set l2 $l + set same [sameStore $l $l2] + lpop l + list $same $l $l2 [sameStore $l $l2] [hasSpan $l] [hasSpan $l2] +} -result [list 1 [irange 0 998] [irange 0 999] 1 1 0] + +# +# 6.* - tests when lists contain zombies. +# The list implementation does lazy freeing in some cases so the list store +# may contain Tcl_Obj's that are not actually referenced by any list (zombies). +# These are to be freed next time the list store is modified by a list +# operation as long as it is no longer shared. 
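# A minimal sketch (editorial illustration, not part of the committed test
# file) of the idiom the 6.* tests below rely on: "$l[set l {}]" passes the
# current list value to the command while resetting the variable, so the
# value's reference count drops to 1 and the now-unshared list store can be
# modified in place, freeing its zombie elements. zombieSample and
# storeAddress are the testlistrep helpers already used above.
set l [zombieSample 200 10 10]        ;# store with zombie slots at both ends
set addr [storeAddress $l]
set l [lreplace $l[set l {}] 0 0 -1]  ;# refcount drops to 1; store reused in place
expr {$addr == [storeAddress $l]}     ;# expected to be 1 (same store, zombies freed)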
+test listrep-6.1 { + Verify that zombies are freed up - linsert at front +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [linsert $l[set l {}] $zero -1] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [list -1 {*}[irange 10 209]] 1 9 10 1] + +test listrep-6.1.1 { + Verify that zombies are freed up - linsert in middle +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [linsert $l[set l {}] $one -1] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [list 10 -1 {*}[irange 11 209]] 1 9 10 1] + +test listrep-6.1.2 { + Verify that zombies are freed up - linsert at end +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [linsert $l[set l {}] $end 210] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 10 210] 1 10 9 1] + +test listrep-6.2 { + Verify that zombies are freed up - lrange version (whole) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lrange $l[set l {}] $zero $end] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 10 209] 1 10 10 1] + +test listrep-6.2.1 { + Verify that zombies are freed up - lrange version (subrange) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lrange $l[set l {}] $one $end-1] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 11 208] 1 11 11 1] + +test listrep-6.3 { + Verify that zombies are freed up - lassign version +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lassign $l[set l {}] e] + list $e $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 10 [irange 11 209] 1 11 10 1] + +test listrep-6.4 { + Verify that zombies are freed up - lremove version (front) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lremove $l[set l {}] $zero] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 11 209] 1 11 10 1] + +test listrep-6.4.1 { + Verify that zombies are freed up - lremove version (back) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lremove $l[set l {}] $end] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 10 208] 1 10 11 1] + +test listrep-6.5 { + Verify that zombies are freed up - lreplace at front +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lreplace $l[set l 
{}] $zero $one -3 -2 -1] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [list -3 -2 -1 {*}[irange 12 209]] 1 9 10 1] + +test listrep-6.5.1 { + Verify that zombies are freed up - lreplace at back +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + # set l {} is for reference counts to drop to 1 + set l [lreplace $l[set l {}] $end-1 $end -1 -2 -3] + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [list {*}[irange 10 207] -1 -2 -3] 1 10 9 1] + +test listrep-6.6 { + Verify that zombies are freed up - lappend +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + lappend l 210 + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 10 210] 1 10 9 1] + +test listrep-6.7 { + Verify that zombies are freed up - lpop version (front) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + set e [lpop l $zero] + list $e $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 10 [irange 11 209] 1 11 10 1] + +test listrep-6.7.1 { + Verify that zombies are freed up - lpop version (back) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + set e [lpop l] + list $e $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list 209 [irange 10 208] 1 10 11 1] + +test listrep-6.8 { + Verify that zombies are freed up - lset version +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + lset l $zero -1 + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [list -1 {*}[irange 11 209]] 1 10 10 1] + +test listrep-6.8.1 { + Verify that zombies are freed up - lset version (back) +} -constraints testlistrep -body { + set l [zombieSample 200 10 10] + set addr [storeAddress $l] + lset l $end+1 210 + list $l [expr {$addr == [storeAddress $l]}] [leadSpace $l] [tailSpace $l] [repStoreRefCount $l] +} -result [list [irange 10 210] 1 10 9 1] + + +# All done +::tcltest::cleanupTests + +return diff --git a/unix/tclUnixSock.c b/unix/tclUnixSock.c index d2068c3..b1dbdbc 100644 --- a/unix/tclUnixSock.c +++ b/unix/tclUnixSock.c @@ -434,7 +434,7 @@ TcpBlockModeProc( * * Side effects: * Processes socket events off the system queue. May process - * asynchroneous connects. + * asynchronous connects. * *---------------------------------------------------------------------- */ @@ -1351,7 +1351,7 @@ TcpConnect( } /* - * We need to forward the writable event that brought us here, bcasue + * We need to forward the writable event that brought us here, because * upon reading of getsockopt(SO_ERROR), at least some OSes clear the * writable state from the socket, and so a subsequent select() on * behalf of a script level [fileevent] would not fire. It doesn't diff --git a/win/tclWinInit.c b/win/tclWinInit.c index fdeb0aa..8fa176b 100644 --- a/win/tclWinInit.c +++ b/win/tclWinInit.c @@ -527,7 +527,14 @@ TclpSetVariables( Tcl_SetVar2(interp, "env", "HOME", Tcl_DStringValue(&ds), TCL_GLOBAL_ONLY); } else { - Tcl_SetVar2(interp, "env", "HOME", "c:\\", TCL_GLOBAL_ONLY); + /* None of HOME, HOMEDRIVE, HOMEPATH exists. 
Try USERPROFILE */ + ptr = Tcl_GetVar2(interp, "env", "USERPROFILE", TCL_GLOBAL_ONLY); + if (ptr != NULL && ptr[0]) { + Tcl_SetVar2(interp, "env", "HOME", ptr, TCL_GLOBAL_ONLY); + } else { + /* Last resort */ + Tcl_SetVar2(interp, "env", "HOME", "c:\\", TCL_GLOBAL_ONLY); + } } } diff --git a/win/tclWinSock.c b/win/tclWinSock.c index e806423..2213fe2 100644 --- a/win/tclWinSock.c +++ b/win/tclWinSock.c @@ -588,8 +588,8 @@ TcpBlockModeProc( * an error. * * Side effects: - * Processes socket events off the system queue. May process - * asynchroneous connect. + * Processes socket events off the system queue. + * May process asynchronous connect. * *---------------------------------------------------------------------- */ @@ -1810,7 +1810,7 @@ TcpConnect( } /* - * For asynchroneous connect set the socket in nonblocking mode + * For asynchronous connect set the socket in nonblocking mode * and activate connect notification */ @@ -1925,7 +1925,7 @@ TcpConnect( /* * Clear the tsd socket list pointer if we did not wait for - * the FD_CONNECT asynchroneously + * the FD_CONNECT asynchronously */ tsdPtr->pendingTcpState = NULL; |
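For reference, the win/tclWinInit.c hunk above changes how env(HOME) is seeded on Windows when no home directory is found: USERPROFILE is now consulted before the final c:\ fallback. A rough script-level sketch of the resulting lookup order follows; the HOME and HOMEDRIVE/HOMEPATH branches are not shown in the hunk and are assumed from its comment, and the proc name is illustrative only.

proc guessWindowsHome {} {
    global env
    if {[info exists env(HOME)] && $env(HOME) ne ""} {
        return $env(HOME)
    }
    if {[info exists env(HOMEDRIVE)] && [info exists env(HOMEPATH)]} {
        return $env(HOMEDRIVE)$env(HOMEPATH)
    }
    if {[info exists env(USERPROFILE)] && $env(USERPROFILE) ne ""} {
        return $env(USERPROFILE)    ;# new fallback added by this change
    }
    return c:\\                     ;# last resort, unchanged
}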