Diffstat (limited to 'library')
-rw-r--r-- | library/http/cookiejar.tcl              |  745
-rw-r--r-- | library/http/effective_tld_names.txt.gz |  bin 0 -> 39188 bytes
-rw-r--r-- | library/http/http.tcl                   |  121
-rw-r--r-- | library/http/idna.tcl                   |  292
-rw-r--r-- | library/http/pkgIndex.tcl               |    2
-rw-r--r-- | library/init.tcl                        |    2
-rw-r--r-- | library/tcltest/pkgIndex.tcl            |    2
-rw-r--r-- | library/tcltest/tcltest.tcl             |   47
-rw-r--r-- | library/tzdata/Africa/Casablanca        |  280
-rw-r--r-- | library/tzdata/Africa/El_Aaiun          |  256
-rw-r--r-- | library/tzdata/Pacific/Honolulu         |    5
11 files changed, 1296 insertions, 456 deletions
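
Before the diff body, a minimal usage sketch of how the new cookie store is meant to be wired into the http package. It assumes a Tcl 8.6+ interpreter with the sqlite3 Tcl extension available; the object name ::myjar, the file name cookies.sqlite3, and the URL are arbitrary illustrations, not part of the commit.

package require http 2.9
package require cookiejar

# Create a jar backed by an SQLite file; omit the path to keep it in memory.
::http::cookiejar create ::myjar cookies.sqlite3

# Hand it to the http package; the option value is used as a command prefix
# for the getCookies/storeCookie calls made during geturl.
::http::config -cookiejar ::myjar

set tok [::http::geturl http://example.com/]
::http::cleanup $tok

# Inspect what was stored: all domains, then the cookie keys for one domain.
puts [::myjar lookup]
puts [::myjar lookup example.com]
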
diff --git a/library/http/cookiejar.tcl b/library/http/cookiejar.tcl new file mode 100644 index 0000000..309ca7a --- /dev/null +++ b/library/http/cookiejar.tcl @@ -0,0 +1,745 @@ +# cookiejar.tcl -- +# +# Implementation of an HTTP cookie storage engine using SQLite. The +# implementation is done as a TclOO class, and includes a punycode +# encoder and decoder (though only the encoder is currently used). +# +# See the file "license.terms" for information on usage and redistribution of +# this file, and for a DISCLAIMER OF ALL WARRANTIES. + +# Dependencies +package require Tcl 8.6 +package require http 2.8.4 +package require sqlite3 +package require tcl::idna 1.0 + +# +# Configuration for the cookiejar package, plus basic support procedures. +# + +# This is the class that we are creating +if {![llength [info commands ::http::cookiejar]]} { + ::oo::class create ::http::cookiejar +} + +namespace eval [info object namespace ::http::cookiejar] { + proc setInt {*var val} { + upvar 1 ${*var} var + if {[catch {incr dummy $val} msg]} { + return -code error $msg + } + set var $val + } + proc setInterval {trigger *var val} { + upvar 1 ${*var} var + if {![string is integer -strict $val] || $val < 1} { + return -code error "expected positive integer but got \"$val\"" + } + set var $val + {*}$trigger + } + proc setBool {*var val} { + upvar 1 ${*var} var + if {[catch {if {$val} {}} msg]} { + return -code error $msg + } + set var [expr {!!$val}] + } + + proc setLog {*var val} { + upvar 1 ${*var} var + set var [::tcl::prefix match -message "log level" \ + {debug info warn error} $val] + } + + # Keep this in sync with pkgIndex.tcl and with the install directories in + # Makefiles + variable version 0.1 + + variable domainlist \ + http://publicsuffix.org/list/effective_tld_names.dat + variable domainfile \ + [file join [file dirname [info script]] effective_tld_names.txt.gz] + # The list is directed to from http://publicsuffix.org/list/ + variable loglevel info + variable vacuumtrigger 200 + variable retainlimit 100 + variable offline false + variable purgeinterval 60000 + variable refreshinterval 10000000 + variable domaincache {} + + # Some support procedures, none particularly useful in general + namespace eval support { + # Set up a logger if the http package isn't actually loaded yet. + if {![llength [info commands ::http::Log]]} { + proc ::http::Log args { + # Do nothing by default... 
+ } + } + + namespace export * + proc locn {secure domain path {key ""}} { + if {$key eq ""} { + format "%s://%s%s" [expr {$secure?"https":"http"}] \ + [::tcl::idna encode $domain] $path + } else { + format "%s://%s%s?%s" \ + [expr {$secure?"https":"http"}] [::tcl::idna encode $domain] \ + $path $key + } + } + proc splitDomain domain { + set pieces [split $domain "."] + for {set i [llength $pieces]} {[incr i -1] >= 0} {} { + lappend result [join [lrange $pieces $i end] "."] + } + return $result + } + proc splitPath path { + set pieces [split [string trimleft $path "/"] "/"] + for {set j -1} {$j < [llength $pieces]} {incr j} { + lappend result /[join [lrange $pieces 0 $j] "/"] + } + return $result + } + proc isoNow {} { + set ms [clock milliseconds] + set ts [expr {$ms / 1000}] + set ms [format %03d [expr {$ms % 1000}]] + clock format $ts -format "%Y%m%dT%H%M%S.${ms}Z" -gmt 1 + } + proc log {level msg args} { + namespace upvar [info object namespace ::http::cookiejar] \ + loglevel loglevel + set who [uplevel 1 self class] + set mth [uplevel 1 self method] + set map {debug 0 info 1 warn 2 error 3} + if {[string map $map $level] >= [string map $map $loglevel]} { + set msg [format $msg {*}$args] + set LVL [string toupper $level] + ::http::Log "[isoNow] $LVL $who $mth - $msg" + } + } + } +} + +# Now we have enough information to provide the package. +package provide cookiejar \ + [set [info object namespace ::http::cookiejar]::version] + +# The implementation of the cookiejar package +::oo::define ::http::cookiejar { + self { + method configure {{optionName "\u0000\u0000"} {optionValue "\u0000\u0000"}} { + set tbl { + -domainfile {domainfile set} + -domainlist {domainlist set} + -domainrefresh {refreshinterval setInterval} + -loglevel {loglevel setLog} + -offline {offline setBool} + -purgeold {purgeinterval setInterval} + -retain {retainlimit setInt} + -vacuumtrigger {vacuumtrigger setInt} + } + dict lappend tbl -domainrefresh [namespace code { + my IntervalTrigger PostponeRefresh + }] + dict lappend tbl -purgeold [namespace code { + my IntervalTrigger PostponePurge + }] + if {$optionName eq "\u0000\u0000"} { + return [dict keys $tbl] + } + set opt [::tcl::prefix match -message "option" \ + [dict keys $tbl] $optionName] + set setter [lassign [dict get $tbl $opt] varname] + namespace upvar [namespace current] $varname var + if {$optionValue ne "\u0000\u0000"} { + {*}$setter var $optionValue + } + return $var + } + + method IntervalTrigger {method} { + # TODO: handle subclassing + foreach obj [info class instances [self]] { + [info object namespace $obj]::my $method + } + } + } + + variable purgeTimer deletions refreshTimer + constructor {{path ""}} { + namespace import [info object namespace [self class]]::support::* + + if {$path eq ""} { + sqlite3 [namespace current]::db :memory: + set storeorigin "constructed cookie store in memory" + } else { + sqlite3 [namespace current]::db $path + db timeout 500 + set storeorigin "loaded cookie store from $path" + } + + set deletions 0 + db transaction { + db eval { + --;# Store the persistent cookies in this table. + --;# Deletion policy: once they expire, or if explicitly + --;# killed. 
+ CREATE TABLE IF NOT EXISTS persistentCookies ( + id INTEGER PRIMARY KEY, + secure INTEGER NOT NULL, + domain TEXT NOT NULL COLLATE NOCASE, + path TEXT NOT NULL, + key TEXT NOT NULL, + value TEXT NOT NULL, + originonly INTEGER NOT NULL, + expiry INTEGER NOT NULL, + lastuse INTEGER NOT NULL, + creation INTEGER NOT NULL); + CREATE UNIQUE INDEX IF NOT EXISTS persistentUnique + ON persistentCookies (domain, path, key); + CREATE INDEX IF NOT EXISTS persistentLookup + ON persistentCookies (domain, path); + + --;# Store the session cookies in this table. + --;# Deletion policy: at cookiejar instance deletion, if + --;# explicitly killed, or if the number of session cookies is + --;# too large and the cookie has not been used recently. + CREATE TEMP TABLE sessionCookies ( + id INTEGER PRIMARY KEY, + secure INTEGER NOT NULL, + domain TEXT NOT NULL COLLATE NOCASE, + path TEXT NOT NULL, + key TEXT NOT NULL, + originonly INTEGER NOT NULL, + value TEXT NOT NULL, + lastuse INTEGER NOT NULL, + creation INTEGER NOT NULL); + CREATE UNIQUE INDEX sessionUnique + ON sessionCookies (domain, path, key); + CREATE INDEX sessionLookup ON sessionCookies (domain, path); + + --;# View to allow for simple looking up of a cookie. + --;# Deletion policy: NOT SUPPORTED via this view. + CREATE TEMP VIEW cookies AS + SELECT id, domain, ( + CASE originonly WHEN 1 THEN path ELSE '.' || path END + ) AS path, key, value, secure, 1 AS persistent + FROM persistentCookies + UNION + SELECT id, domain, ( + CASE originonly WHEN 1 THEN path ELSE '.' || path END + ) AS path, key, value, secure, 0 AS persistent + FROM sessionCookies; + + --;# Encoded domain permission policy; if forbidden is 1, no + --;# cookie may be ever set for the domain, and if forbidden + --;# is 0, cookies *may* be created for the domain (overriding + --;# the forbiddenSuper table). + --;# Deletion policy: normally not modified. + CREATE TABLE IF NOT EXISTS domains ( + domain TEXT PRIMARY KEY NOT NULL, + forbidden INTEGER NOT NULL); + + --;# Domains that may not have a cookie defined for direct + --;# child domains of them. + --;# Deletion policy: normally not modified. + CREATE TABLE IF NOT EXISTS forbiddenSuper ( + domain TEXT PRIMARY KEY); + + --;# When we last retrieved the domain list. 
+ CREATE TABLE IF NOT EXISTS domainCacheMetadata ( + id INTEGER PRIMARY KEY, + retrievalDate INTEGER, + installDate INTEGER); + } + + set cookieCount "no" + db eval { + SELECT COUNT(*) AS cookieCount FROM persistentCookies + } + log info "%s with %s entries" $storeorigin $cookieCount + + my PostponePurge + + if {$path ne ""} { + if {[db exists {SELECT 1 FROM domains}]} { + my RefreshDomains + } else { + my InitDomainList + my PostponeRefresh + } + } else { + set data [my GetDomainListOffline metadata] + my InstallDomainData $data $metadata + my PostponeRefresh + } + } + } + + method PostponePurge {} { + namespace upvar [info object namespace [self class]] \ + purgeinterval interval + catch {after cancel $purgeTimer} + set purgeTimer [after $interval [namespace code {my PurgeCookies}]] + } + + method PostponeRefresh {} { + namespace upvar [info object namespace [self class]] \ + refreshinterval interval + catch {after cancel $refreshTimer} + set refreshTimer [after $interval [namespace code {my RefreshDomains}]] + } + + method RefreshDomains {} { + # TODO: domain list refresh policy + my PostponeRefresh + } + + method HttpGet {url {timeout 0} {maxRedirects 5}} { + for {set r 0} {$r < $maxRedirects} {incr r} { + set tok [::http::geturl $url -timeout $timeout] + try { + if {[::http::status $tok] eq "timeout"} { + return -code error "connection timed out" + } elseif {[::http::ncode $tok] == 200} { + return [::http::data $tok] + } elseif {[::http::ncode $tok] >= 400} { + return -code error [::http::error $tok] + } elseif {[dict exists [::http::meta $tok] Location]} { + set url [dict get [::http::meta $tok] Location] + continue + } + return -code error \ + "unexpected state: [::http::code $tok]" + } finally { + ::http::cleanup $tok + } + } + return -code error "too many redirects" + } + method GetDomainListOnline {metaVar} { + upvar 1 $metaVar meta + namespace upvar [info object namespace [self class]] \ + domainlist url domaincache cache + lassign $cache when data + if {$when > [clock seconds] - 3600} { + log debug "using cached value created at %s" \ + [clock format $when -format {%Y%m%dT%H%M%SZ} -gmt 1] + dict set meta retrievalDate $when + return $data + } + log debug "loading domain list from %s" $url + try { + set when [clock seconds] + set data [my HttpGet $url] + set cache [list $when $data] + # TODO: Should we use the Last-Modified header instead? 
+ dict set meta retrievalDate $when + return $data + } on error msg { + log error "failed to fetch list of forbidden cookie domains from %s: %s" \ + $url $msg + return {} + } + } + method GetDomainListOffline {metaVar} { + upvar 1 $metaVar meta + namespace upvar [info object namespace [self class]] \ + domainfile filename + log debug "loading domain list from %s" $filename + try { + set f [open $filename] + try { + if {[string match *.gz $filename]} { + zlib push gunzip $f + } + fconfigure $f -encoding utf-8 + dict set meta retrievalDate [file mtime $filename] + return [read $f] + } finally { + close $f + } + } on error {msg opt} { + log error "failed to read list of forbidden cookie domains from %s: %s" \ + $filename $msg + return -options $opt $msg + } + } + method InitDomainList {} { + namespace upvar [info object namespace [self class]] \ + offline offline + if {!$offline} { + try { + set data [my GetDomainListOnline metadata] + if {[string length $data]} { + my InstallDomainData $data $metadata + return + } + } on error {} { + log warn "attempting to fall back to built in version" + } + } + set data [my GetDomainListOffline metadata] + my InstallDomainData $data $metadata + } + + method InstallDomainData {data meta} { + set n [db total_changes] + db transaction { + foreach line [split $data "\n"] { + if {[string trim $line] eq ""} { + continue + } elseif {[string match //* $line]} { + continue + } elseif {[string match !* $line]} { + set line [string range $line 1 end] + set idna [string tolower [::tcl::idna encode $line]] + set utf [::tcl::idna decode [string tolower $line]] + db eval { + INSERT OR REPLACE INTO domains (domain, forbidden) + VALUES ($utf, 0); + } + if {$idna ne $utf} { + db eval { + INSERT OR REPLACE INTO domains (domain, forbidden) + VALUES ($idna, 0); + } + } + } else { + if {[string match {\*.*} $line]} { + set line [string range $line 2 end] + set idna [string tolower [::tcl::idna encode $line]] + set utf [::tcl::idna decode [string tolower $line]] + db eval { + INSERT OR REPLACE INTO forbiddenSuper (domain) + VALUES ($utf); + } + if {$idna ne $utf} { + db eval { + INSERT OR REPLACE INTO forbiddenSuper (domain) + VALUES ($idna); + } + } + } else { + set idna [string tolower [::tcl::idna encode $line]] + set utf [::tcl::idna decode [string tolower $line]] + } + db eval { + INSERT OR REPLACE INTO domains (domain, forbidden) + VALUES ($utf, 1); + } + if {$idna ne $utf} { + db eval { + INSERT OR REPLACE INTO domains (domain, forbidden) + VALUES ($idna, 1); + } + } + } + if {$utf ne [::tcl::idna decode [string tolower $idna]]} { + log warn "mismatch in IDNA handling for %s (%d, %s, %s)" \ + $idna $line $utf [::tcl::idna decode $idna] + } + } + + dict with meta { + set installDate [clock seconds] + db eval { + INSERT OR REPLACE INTO domainCacheMetadata + (id, retrievalDate, installDate) + VALUES (1, $retrievalDate, $installDate); + } + } + } + set n [expr {[db total_changes] - $n}] + log info "constructed domain info with %d entries" $n + } + + # This forces the rebuild of the domain data, loading it from + method forceLoadDomainData {} { + db transaction { + db eval { + DELETE FROM domains; + DELETE FROM forbiddenSuper; + INSERT OR REPLACE INTO domainCacheMetadata + (id, retrievalDate, installDate) + VALUES (1, -1, -1); + } + my InitDomainList + } + } + + destructor { + catch { + after cancel $purgeTimer + } + catch { + after cancel $refreshTimer + } + catch { + db close + } + return + } + + method GetCookiesForHostAndPath {listVar secure host path fullhost} { + upvar 
1 $listVar result + log debug "check for cookies for %s" [locn $secure $host $path] + set exact [expr {$host eq $fullhost}] + db eval { + SELECT key, value FROM persistentCookies + WHERE domain = $host AND path = $path AND secure <= $secure + AND (NOT originonly OR domain = $fullhost) + AND originonly = $exact + } { + lappend result $key $value + db eval { + UPDATE persistentCookies SET lastuse = $now WHERE id = $id + } + } + set now [clock seconds] + db eval { + SELECT id, key, value FROM sessionCookies + WHERE domain = $host AND path = $path AND secure <= $secure + AND (NOT originonly OR domain = $fullhost) + AND originonly = $exact + } { + lappend result $key $value + db eval { + UPDATE sessionCookies SET lastuse = $now WHERE id = $id + } + } + } + + method getCookies {proto host path} { + set result {} + set paths [splitPath $path] + if {[regexp {[^0-9.]} $host]} { + set domains [splitDomain [string tolower [::tcl::idna encode $host]]] + } else { + # Ugh, it's a numeric domain! Restrict it to just itself... + set domains [list $host] + } + set secure [string equal -nocase $proto "https"] + # Open question: how to move these manipulations into the database + # engine (if that's where they *should* be). + # + # Suggestion from kbk: + #LENGTH(theColumn) <= LENGTH($queryStr) AND + #SUBSTR(theColumn, LENGTH($queryStr) LENGTH(theColumn)+1) = $queryStr + # + # However, we instead do most of the work in Tcl because that lets us + # do the splitting exactly right, and it's far easier to work with + # strings in Tcl than in SQL. + db transaction { + foreach domain $domains { + foreach p $paths { + my GetCookiesForHostAndPath result $secure $domain $p $host + } + } + return $result + } + } + + method BadDomain options { + if {![dict exists $options domain]} { + log error "no domain present in options" + return 0 + } + dict with options {} + if {$domain ne $origin} { + log debug "cookie domain varies from origin (%s, %s)" \ + $domain $origin + if {[string match .* $domain]} { + set dotd $domain + } else { + set dotd .$domain + } + if {![string equal -length [string length $dotd] \ + [string reverse $dotd] [string reverse $origin]]} { + log warn "bad cookie: domain not suffix of origin" + return 1 + } + } + if {![regexp {[^0-9.]} $domain]} { + if {$domain eq $origin} { + # May set for itself + return 0 + } + log warn "bad cookie: for a numeric address" + return 1 + } + db eval { + SELECT forbidden FROM domains WHERE domain = $domain + } { + if {$forbidden} { + log warn "bad cookie: for a forbidden address" + } + return $forbidden + } + if {[regexp {^[^.]+\.(.+)$} $domain -> super] && [db exists { + SELECT 1 FROM forbiddenSuper WHERE domain = $super + }]} then { + log warn "bad cookie: for a forbidden address" + return 1 + } + return 0 + } + + # A defined extension point to allow users to easily impose extra policies + # on whether to accept cookies from a particular domain and path. 
+ method policyAllow {operation domain path} { + return true + } + + method storeCookie {options} { + db transaction { + if {[my BadDomain $options]} { + return + } + set now [clock seconds] + set persistent [dict exists $options expires] + dict with options {} + if {!$persistent} { + if {![my policyAllow session $domain $path]} { + log warn "bad cookie: $domain prohibited by user policy" + return + } + db eval { + INSERT OR REPLACE INTO sessionCookies ( + secure, domain, path, key, value, originonly, creation, + lastuse) + VALUES ($secure, $domain, $path, $key, $value, $hostonly, + $now, $now); + DELETE FROM persistentCookies + WHERE domain = $domain AND path = $path AND key = $key + AND secure <= $secure AND originonly = $hostonly + } + incr deletions [db changes] + log debug "defined session cookie for %s" \ + [locn $secure $domain $path $key] + } elseif {$expires < $now} { + if {![my policyAllow delete $domain $path]} { + log warn "bad cookie: $domain prohibited by user policy" + return + } + db eval { + DELETE FROM persistentCookies + WHERE domain = $domain AND path = $path AND key = $key + AND secure <= $secure AND originonly = $hostonly + } + set del [db changes] + db eval { + DELETE FROM sessionCookies + WHERE domain = $domain AND path = $path AND key = $key + AND secure <= $secure AND originonly = $hostonly + } + incr deletions [incr del [db changes]] + log debug "deleted %d cookies for %s" \ + $del [locn $secure $domain $path $key] + } else { + if {![my policyAllow set $domain $path]} { + log warn "bad cookie: $domain prohibited by user policy" + return + } + db eval { + INSERT OR REPLACE INTO persistentCookies ( + secure, domain, path, key, value, originonly, expiry, + creation, lastuse) + VALUES ($secure, $domain, $path, $key, $value, $hostonly, + $expires, $now, $now); + DELETE FROM sessionCookies + WHERE domain = $domain AND path = $path AND key = $key + AND secure <= $secure AND originonly = $hostonly + } + incr deletions [db changes] + log debug "defined persistent cookie for %s, expires at %s" \ + [locn $secure $domain $path $key] \ + [clock format $expires] + } + } + } + + method PurgeCookies {} { + namespace upvar [info object namespace [self class]] \ + vacuumtrigger trigger retainlimit retain + my PostponePurge + set now [clock seconds] + log debug "purging cookies that expired before %s" [clock format $now] + db transaction { + db eval { + DELETE FROM persistentCookies WHERE expiry < $now + } + incr deletions [db changes] + db eval { + DELETE FROM persistentCookies WHERE id IN ( + SELECT id FROM persistentCookies ORDER BY lastuse ASC + LIMIT -1 OFFSET $retain) + } + incr deletions [db changes] + db eval { + DELETE FROM sessionCookies WHERE id IN ( + SELECT id FROM sessionCookies ORDER BY lastuse + LIMIT -1 OFFSET $retain) + } + incr deletions [db changes] + } + + # Once we've deleted a fair bit, vacuum the database. Must be done + # outside a transaction. 
+ if {$deletions > $trigger} { + set deletions 0 + log debug "vacuuming cookie database" + catch { + db eval { + VACUUM + } + } + } + } + + forward Database db + + method lookup {{host ""} {key ""}} { + set host [string tolower [::tcl::idna encode $host]] + db transaction { + if {$host eq ""} { + set result {} + db eval { + SELECT DISTINCT domain FROM cookies + ORDER BY domain + } { + lappend result [::tcl::idna decode [string tolower $domain]] + } + return $result + } elseif {$key eq ""} { + set result {} + db eval { + SELECT DISTINCT key FROM cookies + WHERE domain = $host + ORDER BY key + } { + lappend result $key + } + return $result + } else { + db eval { + SELECT value FROM cookies + WHERE domain = $host AND key = $key + LIMIT 1 + } { + return $value + } + return -code error "no such key for that host" + } + } + } +} + +# Local variables: +# mode: tcl +# fill-column: 78 +# End: diff --git a/library/http/effective_tld_names.txt.gz b/library/http/effective_tld_names.txt.gz Binary files differnew file mode 100644 index 0000000..9ce2b69 --- /dev/null +++ b/library/http/effective_tld_names.txt.gz diff --git a/library/http/http.tcl b/library/http/http.tcl index f82bced..7236bae 100644 --- a/library/http/http.tcl +++ b/library/http/http.tcl @@ -20,6 +20,7 @@ namespace eval http { if {![info exists http]} { array set http { -accept */* + -cookiejar {} -pipeline 1 -postfresh 0 -proxyhost {} @@ -127,6 +128,18 @@ namespace eval http { set defaultKeepalive 0 } + # Regular expression used to parse cookies + variable CookieRE {(?x) # EXPANDED SYNTAX + \s* # Ignore leading spaces + ([^][\u0000- ()<>@,;:\\""/?={}\u007f-\uffff]+) # Match the name + = # LITERAL: Equal sign + ([!\u0023-+\u002D-:<-\u005B\u005D-~]*) # Match the value + (?: + \s* ; \s* # LITERAL: semicolon + ([^\u0000]+) # Match the options + )? + } + namespace export geturl config reset wait formatQuery quoteString namespace export register unregister registerError # - Useful, but not exported: data, size, status, code, cleanup, error, @@ -892,8 +905,12 @@ proc http::geturl {url args} { } return -code error "Illegal characters in URL path" } + if {![regexp {^[^?#]+} $srvurl state(path)]} { + set state(path) / + } } else { set srvurl / + set state(path) / } if {$proto eq ""} { set proto http @@ -1354,12 +1371,16 @@ proc http::Connected {token proto phost srvurl} { puts $sock "$how $srvurl HTTP/$state(-protocol)" if {[dict exists $state(-headers) Host]} { # Allow Host spoofing. [Bug 928154] - puts $sock "Host: [dict get $state(-headers) Host]" + set hostHdr [dict get $state(-headers) Host] + regexp {^[^:]+} $hostHdr state(host) + puts $sock "Host: $hostHdr" } elseif {$port == $defport} { # Don't add port in this case, to handle broken servers. [Bug # #504508] + set state(host) $host puts $sock "Host: $host" } else { + set state(host) $host puts $sock "Host: $host:$port" } puts $sock "User-Agent: $http(-useragent)" @@ -1421,6 +1442,22 @@ proc http::Connected {token proto phost srvurl} { seek $state(-querychannel) $start } + # Note that we don't do Cookie2; that's much nastier and not normally + # observed in practice either. It also doesn't fix the multitude of + # bugs in the basic cookie spec. 
+ if {$http(-cookiejar) ne ""} { + set cookies "" + set separator "" + foreach {key value} [{*}$http(-cookiejar) \ + getCookies $proto $host $state(path)] { + append cookies $separator $key = $value + set separator "; " + } + if {$cookies ne ""} { + puts $sock "Cookie: $cookies" + } + } + # Flush the request header and set up the fileevent that will either # push the POST data or read the response. # @@ -2693,6 +2730,11 @@ proc http::Event {sock token} { set state(connection) \ [string trim [string tolower $value]] } + set-cookie { + if {$http(-cookiejar) ne ""} { + ParseCookie $token [string trim $value] + } + } } lappend state(meta) $key [string trim $value] } @@ -2990,6 +3032,83 @@ proc http::IsBinaryContentType {type} { return true } +proc http::ParseCookie {token value} { + variable http + variable CookieRE + variable $token + upvar 0 $token state + + if {![regexp $CookieRE $value -> cookiename cookieval opts]} { + # Bad cookie! No biscuit! + return + } + + # Convert the options into a list before feeding into the cookie store; + # ugly, but quite easy. + set realopts {hostonly 1 path / secure 0 httponly 0} + dict set realopts origin $state(host) + dict set realopts domain $state(host) + foreach option [split [regsub -all {;\s+} $opts \u0000] \u0000] { + regexp {^(.*?)(?:=(.*))?$} $option -> optname optval + switch -exact -- [string tolower $optname] { + expires { + if {[catch { + #Sun, 06 Nov 1994 08:49:37 GMT + dict set realopts expires \ + [clock scan $optval -format "%a, %d %b %Y %T %Z"] + }] && [catch { + # Google does this one + #Mon, 01-Jan-1990 00:00:00 GMT + dict set realopts expires \ + [clock scan $optval -format "%a, %d-%b-%Y %T %Z"] + }] && [catch { + # This is in the RFC, but it is also in the original + # Netscape cookie spec, now online at: + # <URL:http://curl.haxx.se/rfc/cookie_spec.html> + #Sunday, 06-Nov-94 08:49:37 GMT + dict set realopts expires \ + [clock scan $optval -format "%A, %d-%b-%y %T %Z"] + }]} {catch { + #Sun Nov 6 08:49:37 1994 + dict set realopts expires \ + [clock scan $optval -gmt 1 -format "%a %b %d %T %Y"] + }} + } + max-age { + # Normalize + if {[string is integer -strict $optval]} { + dict set realopts expires [expr {[clock seconds] + $optval}] + } + } + domain { + # From the domain-matches definition [RFC 2109, section 2]: + # Host A's name domain-matches host B's if [...] + # A is a FQDN string and has the form NB, where N is a + # non-empty name string, B has the form .B', and B' is a + # FQDN string. (So, x.y.com domain-matches .y.com but + # not y.com.) + if {$optval ne "" && ![string match *. $optval]} { + dict set realopts domain [string trimleft $optval "."] + dict set realopts hostonly [expr { + ! [string match .* $optval] + }] + } + } + path { + if {[string match /* $optval]} { + dict set realopts path $optval + } + } + secure - httponly { + dict set realopts [string tolower $optname] 1 + } + } + } + dict set realopts key $cookiename + dict set realopts value $cookieval + {*}$http(-cookiejar) storeCookie $realopts +} + # http::getTextLine -- # # Get one line with the stream in crlf mode. diff --git a/library/http/idna.tcl b/library/http/idna.tcl new file mode 100644 index 0000000..2a7d289 --- /dev/null +++ b/library/http/idna.tcl @@ -0,0 +1,292 @@ +# cookiejar.tcl -- +# +# Implementation of IDNA (Internationalized Domain Names for +# Applications) encoding/decoding system, built on a punycode engine +# developed directly from the code in RFC 3492, Appendix C (with +# substantial modifications). 
+# +# This implementation includes code from that RFC, translated to Tcl; the +# other parts are: +# Copyright (c) 2014 Donal K. Fellows +# +# See the file "license.terms" for information on usage and redistribution of +# this file, and for a DISCLAIMER OF ALL WARRANTIES. + +namespace eval ::tcl::idna { + namespace ensemble create -command puny -map { + encode punyencode + decode punydecode + } + namespace ensemble create -command ::tcl::idna -map { + encode IDNAencode + decode IDNAdecode + puny puny + version {::apply {{} {package present tcl::idna} ::}} + } + + proc IDNAencode hostname { + set parts {} + # Split term from RFC 3490, Sec 3.1 + foreach part [split $hostname "\u002E\u3002\uFF0E\uFF61"] { + if {[regexp {[^-A-Za-z0-9]} $part]} { + if {[regexp {[^-A-Za-z0-9\u00a1-\uffff]} $part ch]} { + scan $ch %c c + if {$ch < "!" || $ch > "~"} { + set ch [format "\\u%04x" $c] + } + throw [list IDNA INVALID_NAME_CHARACTER $ch] \ + "bad character \"$ch\" in DNS name" + } + set part xn--[punyencode $part] + # Length restriction from RFC 5890, Sec 2.3.1 + if {[string length $part] > 63} { + throw [list IDNA OVERLONG_PART $part] \ + "hostname part too long" + } + } + lappend parts $part + } + return [join $parts .] + } + proc IDNAdecode hostname { + set parts {} + # Split term from RFC 3490, Sec 3.1 + foreach part [split $hostname "\u002E\u3002\uFF0E\uFF61"] { + if {[string match -nocase "xn--*" $part]} { + set part [punydecode [string range $part 4 end]] + } + lappend parts $part + } + return [join $parts .] + } + + variable digits [split "abcdefghijklmnopqrstuvwxyz0123456789" ""] + # Bootstring parameters for Punycode + variable base 36 + variable tmin 1 + variable tmax 26 + variable skew 38 + variable damp 700 + variable initial_bias 72 + variable initial_n 0x80 + + variable max_codepoint 0x10FFFF + + proc adapt {delta first numchars} { + variable base + variable tmin + variable tmax + variable damp + variable skew + + set delta [expr {$delta / ($first ? $damp : 2)}] + incr delta [expr {$delta / $numchars}] + set k 0 + while {$delta > ($base - $tmin) * $tmax / 2} { + set delta [expr {$delta / ($base-$tmin)}] + incr k $base + } + return [expr {$k + ($base-$tmin+1) * $delta / ($delta+$skew)}] + } + + # Main punycode encoding function + proc punyencode {string {case ""}} { + variable digits + variable tmin + variable tmax + variable base + variable initial_n + variable initial_bias + + if {![string is boolean $case]} { + return -code error "\"$case\" must be boolean" + } + + set in {} + foreach char [set string [split $string ""]] { + scan $char "%c" ch + lappend in $ch + } + set output {} + + # Initialize the state: + set n $initial_n + set delta 0 + set bias $initial_bias + + # Handle the basic code points: + foreach ch $string { + if {$ch < "\u0080"} { + if {$case eq ""} { + append output $ch + } elseif {[string is true $case]} { + append output [string toupper $ch] + } elseif {[string is false $case]} { + append output [string tolower $ch] + } + } + } + + set b [string length $output] + + # h is the number of code points that have been handled, b is the + # number of basic code points. + + if {$b > 0} { + append output "-" + } + + # Main encoding loop: + + for {set h $b} {$h < [llength $in]} {incr delta; incr n} { + # All non-basic code points < n have been handled already. 
Find + # the next larger one: + + set m inf + foreach ch $in { + if {$ch >= $n && $ch < $m} { + set m $ch + } + } + + # Increase delta enough to advance the decoder's <n,i> state to + # <m,0>, but guard against overflow: + + if {$m-$n > (0xffffffff-$delta)/($h+1)} { + throw {PUNYCODE OVERFLOW} "overflow in delta computation" + } + incr delta [expr {($m-$n) * ($h+1)}] + set n $m + + foreach ch $in { + if {$ch < $n && ([incr delta] & 0xffffffff) == 0} { + throw {PUNYCODE OVERFLOW} "overflow in delta computation" + } + + if {$ch != $n} { + continue + } + + # Represent delta as a generalized variable-length integer: + + for {set q $delta; set k $base} true {incr k $base} { + set t [expr {min(max($k-$bias, $tmin), $tmax)}] + if {$q < $t} { + break + } + append output \ + [lindex $digits [expr {$t + ($q-$t)%($base-$t)}]] + set q [expr {($q-$t) / ($base-$t)}] + } + + append output [lindex $digits $q] + set bias [adapt $delta [expr {$h==$b}] [expr {$h+1}]] + set delta 0 + incr h + } + } + + return $output + } + + # Main punycode decode function + proc punydecode {string {case ""}} { + variable tmin + variable tmax + variable base + variable initial_n + variable initial_bias + variable max_codepoint + + if {![string is boolean $case]} { + return -code error "\"$case\" must be boolean" + } + + # Initialize the state: + + set n $initial_n + set i 0 + set first 1 + set bias $initial_bias + + # Split the string into the "real" ASCII characters and the ones to + # feed into the main decoder. Note that we don't need to check the + # result of [regexp] because that RE will technically match any string + # at all. + + regexp {^(?:(.*)-)?([^-]*)$} $string -> pre post + if {[string is true -strict $case]} { + set pre [string toupper $pre] + } elseif {[string is false -strict $case]} { + set pre [string tolower $pre] + } + set output [split $pre ""] + set out [llength $output] + + # Main decoding loop: + + for {set in 0} {$in < [string length $post]} {incr in} { + # Decode a generalized variable-length integer into delta, which + # gets added to i. The overflow checking is easier if we increase + # i as we go, then subtract off its starting value at the end to + # obtain delta. + + for {set oldi $i; set w 1; set k $base} 1 {incr in} { + if {[set ch [string index $post $in]] eq ""} { + throw {PUNYCODE BAD_INPUT LENGTH} "exceeded input data" + } + if {[string match -nocase {[a-z]} $ch]} { + scan [string toupper $ch] %c digit + incr digit -65 + } elseif {[string match {[0-9]} $ch]} { + set digit [expr {$ch + 26}] + } else { + throw {PUNYCODE BAD_INPUT CHAR} \ + "bad decode character \"$ch\"" + } + incr i [expr {$digit * $w}] + set t [expr {min(max($tmin, $k-$bias), $tmax)}] + if {$digit < $t} { + set bias [adapt [expr {$i-$oldi}] $first [incr out]] + set first 0 + break + } + if {[set w [expr {$w * ($base - $t)}]] > 0x7fffffff} { + throw {PUNYCODE OVERFLOW} \ + "excessively large integer computed in digit decode" + } + incr k $base + } + + # i was supposed to wrap around from out+1 to 0, incrementing n + # each time, so we'll fix that now: + + if {[incr n [expr {$i / $out}]] > 0x7fffffff} { + throw {PUNYCODE OVERFLOW} \ + "excessively large integer computed in character choice" + } elseif {$n > $max_codepoint} { + if {$n >= 0x00d800 && $n < 0x00e000} { + # Bare surrogate?! 
+ throw {PUNYCODE NON_BMP} \ + [format "unsupported character U+%06x" $n] + } + throw {PUNYCODE NON_UNICODE} "bad codepoint $n" + } + set i [expr {$i % $out}] + + # Insert n at position i of the output: + + set output [linsert $output $i [format "%c" $n]] + incr i + } + + return [join $output ""] + } +} + +package provide tcl::idna 1.0 + +# Local variables: +# mode: tcl +# fill-column: 78 +# End: diff --git a/library/http/pkgIndex.tcl b/library/http/pkgIndex.tcl index 4f74635..3bc111f 100644 --- a/library/http/pkgIndex.tcl +++ b/library/http/pkgIndex.tcl @@ -1,2 +1,4 @@ if {![package vsatisfies [package provide Tcl] 8.6-]} {return} package ifneeded http 2.9.0 [list tclPkgSetup $dir http 2.9.0 {{http.tcl source {::http::config ::http::formatQuery ::http::geturl ::http::reset ::http::wait ::http::register ::http::unregister ::http::mapReply}}}] +package ifneeded cookiejar 0.1 [list source [file join $dir cookiejar.tcl]] +package ifneeded tcl::idna 1.0 [list source [file join $dir idna.tcl]] diff --git a/library/init.tcl b/library/init.tcl index 51339d0..1221e61 100644 --- a/library/init.tcl +++ b/library/init.tcl @@ -810,7 +810,7 @@ foreach {safe package version file} { 1 opt 0.4.7 {opt optparse.tcl} 0 platform 1.0.14 {platform platform.tcl} 0 platform::shell 1.1.4 {platform shell.tcl} - 1 tcltest 2.4.1 {tcltest tcltest.tcl} + 1 tcltest 2.5.0 {tcltest tcltest.tcl} } { if {$isafe && !$safe} continue package ifneeded $package $version [list source [file join $dir {*}$file]] diff --git a/library/tcltest/pkgIndex.tcl b/library/tcltest/pkgIndex.tcl index eadb1bd..fde3ffe 100644 --- a/library/tcltest/pkgIndex.tcl +++ b/library/tcltest/pkgIndex.tcl @@ -9,4 +9,4 @@ # full path name of this file's directory. if {![package vsatisfies [package provide Tcl] 8.5-]} {return} -package ifneeded tcltest 2.4.1 [list source [file join $dir tcltest.tcl]] +package ifneeded tcltest 2.5.0 [list source [file join $dir tcltest.tcl]] diff --git a/library/tcltest/tcltest.tcl b/library/tcltest/tcltest.tcl index f1b6082..d67a900 100644 --- a/library/tcltest/tcltest.tcl +++ b/library/tcltest/tcltest.tcl @@ -22,7 +22,7 @@ namespace eval tcltest { # When the version number changes, be sure to update the pkgIndex.tcl file, # and the install directory in the Makefiles. When the minor version # changes (new feature) be sure to update the man page as well. - variable Version 2.4.1 + variable Version 2.5.0 # Compatibility support for dumb variables defined in tcltest 1 # Do not use these. Call [package provide Tcl] and [info patchlevel] @@ -1841,6 +1841,9 @@ proc tcltest::SubstArguments {argList} { # is optional; default is {}. # returnCodes - Expected return codes. This attribute is # optional; default is {0 2}. +# errorCode - Expected error code. This attribute is +# optional; default is {*}. It is a glob pattern. +# If given, returnCodes defaults to {1}. # setup - Code to run before $script (above). This # attribute is optional; default is {}. # cleanup - Code to run after $script (above). This @@ -1882,7 +1885,7 @@ proc tcltest::test {name description args} { # Pre-define everything to null except output and errorOutput. We # determine whether or not to trap output based on whether or not # these variables (output & errorOutput) are defined. 
- lassign {} constraints setup cleanup body result returnCodes match + lassign {} constraints setup cleanup body result returnCodes errorCode match # Set the default match mode set match exact @@ -1892,6 +1895,9 @@ proc tcltest::test {name description args} { # 'return' being used in the test script). set returnCodes [list 0 2] + # Set the default error code pattern + set errorCode "*" + # The old test format can't have a 3rd argument (constraints or # script) that starts with '-'. if {[string match -* [lindex $args 0]] || ([llength $args] <= 1)} { @@ -1901,7 +1907,7 @@ proc tcltest::test {name description args} { set testAttributes($element) $value } foreach item {constraints match setup body cleanup \ - result returnCodes output errorOutput} { + result returnCodes errorCode output errorOutput} { if {[info exists testAttributes(-$item)]} { set testAttributes(-$item) [uplevel 1 \ ::concat $testAttributes(-$item)] @@ -1912,7 +1918,7 @@ proc tcltest::test {name description args} { } set validFlags {-setup -cleanup -body -result -returnCodes \ - -match -output -errorOutput -constraints} + -errorCode -match -output -errorOutput -constraints} foreach flag [array names testAttributes] { if {$flag ni $validFlags} { @@ -1944,6 +1950,10 @@ proc tcltest::test {name description args} { foreach {strcode numcode} {ok 0 normal 0 error 1 return 2 break 3 continue 4} { set returnCodes [string map -nocase [list $strcode $numcode] $returnCodes] } + # errorCode without returnCode 1 is meaningless + if {$errorCode ne "*" && 1 ni $returnCodes} { + set returnCodes 1 + } } else { # This is parsing for the old test command format; it is here # for backward compatibility. @@ -1976,7 +1986,7 @@ proc tcltest::test {name description args} { set code [catch {uplevel 1 $setup} setupMsg] if {$code == 1} { set errorInfo(setup) $::errorInfo - set errorCode(setup) $::errorCode + set errorCodeRes(setup) $::errorCode } set setupFailure [expr {$code != 0}] @@ -2003,7 +2013,7 @@ proc tcltest::test {name description args} { lassign $testResult actualAnswer returnCode if {$returnCode == 1} { set errorInfo(body) $::errorInfo - set errorCode(body) $::errorCode + set errorCodeRes(body) $::errorCode } } @@ -2012,6 +2022,11 @@ proc tcltest::test {name description args} { if {!$setupFailure && ($returnCode ni $returnCodes)} { set codeFailure 1 } + set errorCodeFailure 0 + if {!$setupFailure && !$codeFailure && $returnCode == 1 && \ + ![string match $errorCode $errorCodeRes(body)]} { + set errorCodeFailure 1 + } # If expected output/error strings exist, we have to compare # them. If the comparison fails, then so did the test. 
@@ -2055,7 +2070,7 @@ proc tcltest::test {name description args} { set code [catch {uplevel 1 $cleanup} cleanupMsg] if {$code == 1} { set errorInfo(cleanup) $::errorInfo - set errorCode(cleanup) $::errorCode + set errorCodeRes(cleanup) $::errorCode } set cleanupFailure [expr {$code != 0}] @@ -2106,7 +2121,7 @@ proc tcltest::test {name description args} { variable numTests if {!($setupFailure || $cleanupFailure || $coreFailure || $outputFailure || $errorFailure || $codeFailure - || $scriptFailure)} { + || $errorCodeFailure || $scriptFailure)} { if {$testLevel == 1} { incr numTests(Passed) if {[IsVerbose pass]} { @@ -2159,7 +2174,7 @@ proc tcltest::test {name description args} { failed:\n$setupMsg" if {[info exists errorInfo(setup)]} { puts [outputChannel] "---- errorInfo(setup): $errorInfo(setup)" - puts [outputChannel] "---- errorCode(setup): $errorCode(setup)" + puts [outputChannel] "---- errorCode(setup): $errorCodeRes(setup)" } } if {$scriptFailure} { @@ -2171,6 +2186,10 @@ proc tcltest::test {name description args} { ($match matching):\n$result" } } + if {$errorCodeFailure} { + puts [outputChannel] "---- Error code was: '$errorCodeRes(body)'" + puts [outputChannel] "---- Error code should have been: '$errorCode'" + } if {$codeFailure} { switch -- $returnCode { 0 { set msg "Test completed normally" } @@ -2186,7 +2205,7 @@ proc tcltest::test {name description args} { if {[IsVerbose error]} { if {[info exists errorInfo(body)] && (1 ni $returnCodes)} { puts [outputChannel] "---- errorInfo: $errorInfo(body)" - puts [outputChannel] "---- errorCode: $errorCode(body)" + puts [outputChannel] "---- errorCode: $errorCodeRes(body)" } } } @@ -2212,7 +2231,7 @@ proc tcltest::test {name description args} { puts [outputChannel] "---- Test cleanup failed:\n$cleanupMsg" if {[info exists errorInfo(cleanup)]} { puts [outputChannel] "---- errorInfo(cleanup): $errorInfo(cleanup)" - puts [outputChannel] "---- errorCode(cleanup): $errorCode(cleanup)" + puts [outputChannel] "---- errorCode(cleanup): $errorCodeRes(cleanup)" } } if {$coreFailure} { @@ -2722,7 +2741,7 @@ proc tcltest::GetMatchingDirectories {rootdir} { # shell being tested # # Results: -# None. +# Whether there were any failures. # # Side effects: # None. 
@@ -2733,6 +2752,7 @@ proc tcltest::runAllTests { {shell ""} } { variable numTests variable failFiles variable DefaultValue + set failFilesAccum {} FillFilesExisted if {[llength [info level 0]] == 1} { @@ -2822,6 +2842,7 @@ proc tcltest::runAllTests { {shell ""} } { } if {$Failed > 0} { lappend failFiles $testFile + lappend failFilesAccum $testFile } } elseif {[regexp [join { {^Number of tests skipped } @@ -2868,7 +2889,7 @@ proc tcltest::runAllTests { {shell ""} } { puts [outputChannel] "" puts [outputChannel] [string repeat ~ 44] } - return + return [expr {[info exists testFileFailures] || [llength $failFilesAccum]}] } ##################################################################### diff --git a/library/tzdata/Africa/Casablanca b/library/tzdata/Africa/Casablanca index 33ad99b..3207e59 100644 --- a/library/tzdata/Africa/Casablanca +++ b/library/tzdata/Africa/Casablanca @@ -2,229 +2,59 @@ set TZData(:Africa/Casablanca) { {-9223372036854775808 -1820 0 LMT} - {-1773012580 0 0 WET} - {-956361600 3600 1 WEST} - {-950490000 0 0 WET} - {-942019200 3600 1 WEST} - {-761187600 0 0 WET} - {-617241600 3600 1 WEST} - {-605149200 0 0 WET} - {-81432000 3600 1 WEST} - {-71110800 0 0 WET} - {141264000 3600 1 WEST} - {147222000 0 0 WET} - {199756800 3600 1 WEST} - {207702000 0 0 WET} - {231292800 3600 1 WEST} - {244249200 0 0 WET} - {265507200 3600 1 WEST} - {271033200 0 0 WET} - {448243200 3600 0 CET} - {504918000 0 0 WET} - {1212278400 3600 1 WEST} - {1220223600 0 0 WET} - {1243814400 3600 1 WEST} - {1250809200 0 0 WET} - {1272758400 3600 1 WEST} - {1281222000 0 0 WET} - {1301788800 3600 1 WEST} - {1312066800 0 0 WET} - {1335664800 3600 1 WEST} - {1342749600 0 0 WET} - {1345428000 3600 1 WEST} - {1348970400 0 0 WET} - {1367114400 3600 1 WEST} - {1373162400 0 0 WET} - {1376100000 3600 1 WEST} - {1382839200 0 0 WET} - {1396144800 3600 1 WEST} - {1403920800 0 0 WET} - {1406944800 3600 1 WEST} - {1414288800 0 0 WET} - {1427594400 3600 1 WEST} - {1434247200 0 0 WET} - {1437271200 3600 1 WEST} - {1445738400 0 0 WET} - {1459044000 3600 1 WEST} - {1465092000 0 0 WET} - {1468116000 3600 1 WEST} - {1477792800 0 0 WET} - {1490493600 3600 1 WEST} - {1495332000 0 0 WET} - {1498960800 3600 1 WEST} - {1509242400 0 0 WET} - {1521943200 3600 1 WEST} - {1526176800 0 0 WET} - {1529200800 3600 1 WEST} - {1540692000 0 0 WET} - {1553997600 3600 1 WEST} - {1557021600 0 0 WET} - {1560045600 3600 1 WEST} - {1572141600 0 0 WET} - {1585447200 3600 1 WEST} - {1587261600 0 0 WET} - {1590285600 3600 1 WEST} - {1603591200 0 0 WET} - {1616896800 3600 1 WEST} - {1618106400 0 0 WET} - {1621130400 3600 1 WEST} - {1635645600 0 0 WET} - {1651975200 3600 1 WEST} - {1667095200 0 0 WET} - {1682215200 3600 1 WEST} - {1698544800 0 0 WET} - {1713060000 3600 1 WEST} - {1729994400 0 0 WET} - {1743904800 3600 1 WEST} - {1761444000 0 0 WET} - {1774749600 3600 1 WEST} - {1792893600 0 0 WET} - {1806199200 3600 1 WEST} - {1824948000 0 0 WET} - {1837648800 3600 1 WEST} - {1856397600 0 0 WET} - {1869098400 3600 1 WEST} - {1887847200 0 0 WET} - {1901152800 3600 1 WEST} - {1919296800 0 0 WET} - {1932602400 3600 1 WEST} - {1950746400 0 0 WET} - {1964052000 3600 1 WEST} - {1982800800 0 0 WET} - {1995501600 3600 1 WEST} - {2014250400 0 0 WET} - {2026951200 3600 1 WEST} - {2045700000 0 0 WET} - {2058400800 3600 1 WEST} - {2077149600 0 0 WET} - {2090455200 3600 1 WEST} - {2107994400 0 0 WET} - {2108602800 0 0 WET} - {2121904800 3600 1 WEST} - {2138234400 0 0 WET} - {2140052400 0 0 WET} - {2153354400 3600 1 WEST} - {2172103200 0 0 WET} - {2184804000 3600 1 
WEST} - {2203552800 0 0 WET} - {2216253600 3600 1 WEST} - {2235002400 0 0 WET} - {2248308000 3600 1 WEST} - {2266452000 0 0 WET} - {2279757600 3600 1 WEST} - {2297901600 0 0 WET} - {2311207200 3600 1 WEST} - {2329351200 0 0 WET} - {2342656800 3600 1 WEST} - {2361405600 0 0 WET} - {2374106400 3600 1 WEST} - {2392855200 0 0 WET} - {2405556000 3600 1 WEST} - {2424304800 0 0 WET} - {2437610400 3600 1 WEST} - {2455754400 0 0 WET} - {2469060000 3600 1 WEST} - {2487204000 0 0 WET} - {2500509600 3600 1 WEST} - {2519258400 0 0 WET} - {2531959200 3600 1 WEST} - {2550708000 0 0 WET} - {2563408800 3600 1 WEST} - {2582157600 0 0 WET} - {2595463200 3600 1 WEST} - {2613607200 0 0 WET} - {2626912800 3600 1 WEST} - {2645056800 0 0 WET} - {2658362400 3600 1 WEST} - {2676506400 0 0 WET} - {2689812000 3600 1 WEST} - {2708560800 0 0 WET} - {2721261600 3600 1 WEST} - {2740010400 0 0 WET} - {2752711200 3600 1 WEST} - {2771460000 0 0 WET} - {2784765600 3600 1 WEST} - {2802909600 0 0 WET} - {2816215200 3600 1 WEST} - {2834359200 0 0 WET} - {2847664800 3600 1 WEST} - {2866413600 0 0 WET} - {2879114400 3600 1 WEST} - {2897863200 0 0 WET} - {2910564000 3600 1 WEST} - {2929312800 0 0 WET} - {2942013600 3600 1 WEST} - {2960762400 0 0 WET} - {2974068000 3600 1 WEST} - {2992212000 0 0 WET} - {3005517600 3600 1 WEST} - {3023661600 0 0 WET} - {3036967200 3600 1 WEST} - {3055716000 0 0 WET} - {3068416800 3600 1 WEST} - {3087165600 0 0 WET} - {3099866400 3600 1 WEST} - {3118615200 0 0 WET} - {3131920800 3600 1 WEST} - {3150064800 0 0 WET} - {3163370400 3600 1 WEST} - {3181514400 0 0 WET} - {3194820000 3600 1 WEST} - {3212964000 0 0 WET} - {3226269600 3600 1 WEST} - {3245018400 0 0 WET} - {3257719200 3600 1 WEST} - {3276468000 0 0 WET} - {3289168800 3600 1 WEST} - {3307917600 0 0 WET} - {3321223200 3600 1 WEST} - {3339367200 0 0 WET} - {3352672800 3600 1 WEST} - {3370816800 0 0 WET} - {3384122400 3600 1 WEST} - {3402871200 0 0 WET} - {3415572000 3600 1 WEST} - {3434320800 0 0 WET} - {3447021600 3600 1 WEST} - {3465770400 0 0 WET} - {3479076000 3600 1 WEST} - {3497220000 0 0 WET} - {3510525600 3600 1 WEST} - {3528669600 0 0 WET} - {3541975200 3600 1 WEST} - {3560119200 0 0 WET} - {3573424800 3600 1 WEST} - {3592173600 0 0 WET} - {3604874400 3600 1 WEST} - {3623623200 0 0 WET} - {3636324000 3600 1 WEST} - {3655072800 0 0 WET} - {3668378400 3600 1 WEST} - {3686522400 0 0 WET} - {3699828000 3600 1 WEST} - {3717972000 0 0 WET} - {3731277600 3600 1 WEST} - {3750026400 0 0 WET} - {3762727200 3600 1 WEST} - {3781476000 0 0 WET} - {3794176800 3600 1 WEST} - {3812925600 0 0 WET} - {3825626400 3600 1 WEST} - {3844375200 0 0 WET} - {3857680800 3600 1 WEST} - {3875824800 0 0 WET} - {3889130400 3600 1 WEST} - {3907274400 0 0 WET} - {3920580000 3600 1 WEST} - {3939328800 0 0 WET} - {3952029600 3600 1 WEST} - {3970778400 0 0 WET} - {3983479200 3600 1 WEST} - {4002228000 0 0 WET} - {4015533600 3600 1 WEST} - {4033677600 0 0 WET} - {4046983200 3600 1 WEST} - {4065127200 0 0 WET} - {4078432800 3600 1 WEST} - {4096576800 0 0 WET} + {-1773012580 0 0 +00} + {-956361600 3600 1 +00} + {-950490000 0 0 +00} + {-942019200 3600 1 +00} + {-761187600 0 0 +00} + {-617241600 3600 1 +00} + {-605149200 0 0 +00} + {-81432000 3600 1 +00} + {-71110800 0 0 +00} + {141264000 3600 1 +00} + {147222000 0 0 +00} + {199756800 3600 1 +00} + {207702000 0 0 +00} + {231292800 3600 1 +00} + {244249200 0 0 +00} + {265507200 3600 1 +00} + {271033200 0 0 +00} + {448243200 3600 0 +01} + {504918000 0 0 +00} + {1212278400 3600 1 +00} + {1220223600 0 0 +00} + {1243814400 3600 1 
+00} + {1250809200 0 0 +00} + {1272758400 3600 1 +00} + {1281222000 0 0 +00} + {1301788800 3600 1 +00} + {1312066800 0 0 +00} + {1335664800 3600 1 +00} + {1342749600 0 0 +00} + {1345428000 3600 1 +00} + {1348970400 0 0 +00} + {1367114400 3600 1 +00} + {1373162400 0 0 +00} + {1376100000 3600 1 +00} + {1382839200 0 0 +00} + {1396144800 3600 1 +00} + {1403920800 0 0 +00} + {1406944800 3600 1 +00} + {1414288800 0 0 +00} + {1427594400 3600 1 +00} + {1434247200 0 0 +00} + {1437271200 3600 1 +00} + {1445738400 0 0 +00} + {1459044000 3600 1 +00} + {1465092000 0 0 +00} + {1468116000 3600 1 +00} + {1477792800 0 0 +00} + {1490493600 3600 1 +00} + {1495332000 0 0 +00} + {1498960800 3600 1 +00} + {1509242400 0 0 +00} + {1521943200 3600 1 +00} + {1526176800 0 0 +00} + {1529200800 3600 1 +00} + {1540598400 3600 0 +01} } diff --git a/library/tzdata/Africa/El_Aaiun b/library/tzdata/Africa/El_Aaiun index 7bdc496..e0f5e1c 100644 --- a/library/tzdata/Africa/El_Aaiun +++ b/library/tzdata/Africa/El_Aaiun @@ -3,217 +3,47 @@ set TZData(:Africa/El_Aaiun) { {-9223372036854775808 -3168 0 LMT} {-1136070432 -3600 0 -01} - {198291600 0 0 WET} - {199756800 3600 1 WEST} - {207702000 0 0 WET} - {231292800 3600 1 WEST} - {244249200 0 0 WET} - {265507200 3600 1 WEST} - {271033200 0 0 WET} - {1212278400 3600 1 WEST} - {1220223600 0 0 WET} - {1243814400 3600 1 WEST} - {1250809200 0 0 WET} - {1272758400 3600 1 WEST} - {1281222000 0 0 WET} - {1301788800 3600 1 WEST} - {1312066800 0 0 WET} - {1335664800 3600 1 WEST} - {1342749600 0 0 WET} - {1345428000 3600 1 WEST} - {1348970400 0 0 WET} - {1367114400 3600 1 WEST} - {1373162400 0 0 WET} - {1376100000 3600 1 WEST} - {1382839200 0 0 WET} - {1396144800 3600 1 WEST} - {1403920800 0 0 WET} - {1406944800 3600 1 WEST} - {1414288800 0 0 WET} - {1427594400 3600 1 WEST} - {1434247200 0 0 WET} - {1437271200 3600 1 WEST} - {1445738400 0 0 WET} - {1459044000 3600 1 WEST} - {1465092000 0 0 WET} - {1468116000 3600 1 WEST} - {1477792800 0 0 WET} - {1490493600 3600 1 WEST} - {1495332000 0 0 WET} - {1498960800 3600 1 WEST} - {1509242400 0 0 WET} - {1521943200 3600 1 WEST} - {1526176800 0 0 WET} - {1529200800 3600 1 WEST} - {1540692000 0 0 WET} - {1553997600 3600 1 WEST} - {1557021600 0 0 WET} - {1560045600 3600 1 WEST} - {1572141600 0 0 WET} - {1585447200 3600 1 WEST} - {1587261600 0 0 WET} - {1590285600 3600 1 WEST} - {1603591200 0 0 WET} - {1616896800 3600 1 WEST} - {1618106400 0 0 WET} - {1621130400 3600 1 WEST} - {1635645600 0 0 WET} - {1651975200 3600 1 WEST} - {1667095200 0 0 WET} - {1682215200 3600 1 WEST} - {1698544800 0 0 WET} - {1713060000 3600 1 WEST} - {1729994400 0 0 WET} - {1743904800 3600 1 WEST} - {1761444000 0 0 WET} - {1774749600 3600 1 WEST} - {1792893600 0 0 WET} - {1806199200 3600 1 WEST} - {1824948000 0 0 WET} - {1837648800 3600 1 WEST} - {1856397600 0 0 WET} - {1869098400 3600 1 WEST} - {1887847200 0 0 WET} - {1901152800 3600 1 WEST} - {1919296800 0 0 WET} - {1932602400 3600 1 WEST} - {1950746400 0 0 WET} - {1964052000 3600 1 WEST} - {1982800800 0 0 WET} - {1995501600 3600 1 WEST} - {2014250400 0 0 WET} - {2026951200 3600 1 WEST} - {2045700000 0 0 WET} - {2058400800 3600 1 WEST} - {2077149600 0 0 WET} - {2090455200 3600 1 WEST} - {2107994400 0 0 WET} - {2108602800 0 0 WET} - {2121904800 3600 1 WEST} - {2138234400 0 0 WET} - {2140052400 0 0 WET} - {2153354400 3600 1 WEST} - {2172103200 0 0 WET} - {2184804000 3600 1 WEST} - {2203552800 0 0 WET} - {2216253600 3600 1 WEST} - {2235002400 0 0 WET} - {2248308000 3600 1 WEST} - {2266452000 0 0 WET} - {2279757600 3600 1 WEST} - 
{2297901600 0 0 WET} - {2311207200 3600 1 WEST} - {2329351200 0 0 WET} - {2342656800 3600 1 WEST} - {2361405600 0 0 WET} - {2374106400 3600 1 WEST} - {2392855200 0 0 WET} - {2405556000 3600 1 WEST} - {2424304800 0 0 WET} - {2437610400 3600 1 WEST} - {2455754400 0 0 WET} - {2469060000 3600 1 WEST} - {2487204000 0 0 WET} - {2500509600 3600 1 WEST} - {2519258400 0 0 WET} - {2531959200 3600 1 WEST} - {2550708000 0 0 WET} - {2563408800 3600 1 WEST} - {2582157600 0 0 WET} - {2595463200 3600 1 WEST} - {2613607200 0 0 WET} - {2626912800 3600 1 WEST} - {2645056800 0 0 WET} - {2658362400 3600 1 WEST} - {2676506400 0 0 WET} - {2689812000 3600 1 WEST} - {2708560800 0 0 WET} - {2721261600 3600 1 WEST} - {2740010400 0 0 WET} - {2752711200 3600 1 WEST} - {2771460000 0 0 WET} - {2784765600 3600 1 WEST} - {2802909600 0 0 WET} - {2816215200 3600 1 WEST} - {2834359200 0 0 WET} - {2847664800 3600 1 WEST} - {2866413600 0 0 WET} - {2879114400 3600 1 WEST} - {2897863200 0 0 WET} - {2910564000 3600 1 WEST} - {2929312800 0 0 WET} - {2942013600 3600 1 WEST} - {2960762400 0 0 WET} - {2974068000 3600 1 WEST} - {2992212000 0 0 WET} - {3005517600 3600 1 WEST} - {3023661600 0 0 WET} - {3036967200 3600 1 WEST} - {3055716000 0 0 WET} - {3068416800 3600 1 WEST} - {3087165600 0 0 WET} - {3099866400 3600 1 WEST} - {3118615200 0 0 WET} - {3131920800 3600 1 WEST} - {3150064800 0 0 WET} - {3163370400 3600 1 WEST} - {3181514400 0 0 WET} - {3194820000 3600 1 WEST} - {3212964000 0 0 WET} - {3226269600 3600 1 WEST} - {3245018400 0 0 WET} - {3257719200 3600 1 WEST} - {3276468000 0 0 WET} - {3289168800 3600 1 WEST} - {3307917600 0 0 WET} - {3321223200 3600 1 WEST} - {3339367200 0 0 WET} - {3352672800 3600 1 WEST} - {3370816800 0 0 WET} - {3384122400 3600 1 WEST} - {3402871200 0 0 WET} - {3415572000 3600 1 WEST} - {3434320800 0 0 WET} - {3447021600 3600 1 WEST} - {3465770400 0 0 WET} - {3479076000 3600 1 WEST} - {3497220000 0 0 WET} - {3510525600 3600 1 WEST} - {3528669600 0 0 WET} - {3541975200 3600 1 WEST} - {3560119200 0 0 WET} - {3573424800 3600 1 WEST} - {3592173600 0 0 WET} - {3604874400 3600 1 WEST} - {3623623200 0 0 WET} - {3636324000 3600 1 WEST} - {3655072800 0 0 WET} - {3668378400 3600 1 WEST} - {3686522400 0 0 WET} - {3699828000 3600 1 WEST} - {3717972000 0 0 WET} - {3731277600 3600 1 WEST} - {3750026400 0 0 WET} - {3762727200 3600 1 WEST} - {3781476000 0 0 WET} - {3794176800 3600 1 WEST} - {3812925600 0 0 WET} - {3825626400 3600 1 WEST} - {3844375200 0 0 WET} - {3857680800 3600 1 WEST} - {3875824800 0 0 WET} - {3889130400 3600 1 WEST} - {3907274400 0 0 WET} - {3920580000 3600 1 WEST} - {3939328800 0 0 WET} - {3952029600 3600 1 WEST} - {3970778400 0 0 WET} - {3983479200 3600 1 WEST} - {4002228000 0 0 WET} - {4015533600 3600 1 WEST} - {4033677600 0 0 WET} - {4046983200 3600 1 WEST} - {4065127200 0 0 WET} - {4078432800 3600 1 WEST} - {4096576800 0 0 WET} + {198291600 0 0 +00} + {199756800 3600 1 +00} + {207702000 0 0 +00} + {231292800 3600 1 +00} + {244249200 0 0 +00} + {265507200 3600 1 +00} + {271033200 0 0 +00} + {1212278400 3600 1 +00} + {1220223600 0 0 +00} + {1243814400 3600 1 +00} + {1250809200 0 0 +00} + {1272758400 3600 1 +00} + {1281222000 0 0 +00} + {1301788800 3600 1 +00} + {1312066800 0 0 +00} + {1335664800 3600 1 +00} + {1342749600 0 0 +00} + {1345428000 3600 1 +00} + {1348970400 0 0 +00} + {1367114400 3600 1 +00} + {1373162400 0 0 +00} + {1376100000 3600 1 +00} + {1382839200 0 0 +00} + {1396144800 3600 1 +00} + {1403920800 0 0 +00} + {1406944800 3600 1 +00} + {1414288800 0 0 +00} + {1427594400 3600 1 +00} + 
{1434247200 0 0 +00} + {1437271200 3600 1 +00} + {1445738400 0 0 +00} + {1459044000 3600 1 +00} + {1465092000 0 0 +00} + {1468116000 3600 1 +00} + {1477792800 0 0 +00} + {1490493600 3600 1 +00} + {1495332000 0 0 +00} + {1498960800 3600 1 +00} + {1509242400 0 0 +00} + {1521943200 3600 1 +00} + {1526176800 0 0 +00} + {1529200800 3600 1 +00} + {1540598400 3600 0 +01} } diff --git a/library/tzdata/Pacific/Honolulu b/library/tzdata/Pacific/Honolulu index 5e70598..7d03b45 100644 --- a/library/tzdata/Pacific/Honolulu +++ b/library/tzdata/Pacific/Honolulu @@ -4,8 +4,9 @@ set TZData(:Pacific/Honolulu) { {-9223372036854775808 -37886 0 LMT} {-2334101314 -37800 0 HST} {-1157283000 -34200 1 HDT} - {-1155436200 -37800 0 HST} - {-880198200 -34200 1 HDT} + {-1155436200 -34200 0 HST} + {-880201800 -34200 1 HWT} + {-769395600 -34200 1 HPT} {-765376200 -37800 0 HST} {-712150200 -36000 0 HST} } |
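
Two further usage sketches for the other pieces added by this commit, with package names and versions taken from the pkgIndex changes above; everything else is illustrative.

The tcl::idna ensemble from idna.tcl, using a commonly cited IDNA sample name (the hostname itself is arbitrary):

package require tcl::idna

puts [tcl::idna encode bücher.example]        ;# -> xn--bcher-kva.example
puts [tcl::idna decode xn--bcher-kva.example] ;# -> bücher.example
puts [tcl::idna puny encode bücher]           ;# raw punycode: bcher-kva

The new -errorCode option in tcltest 2.5.0 glob-matches ::errorCode when the test body raises an error; when it is given, -returnCodes defaults to error. The DEMO error code below is deliberately synthetic:

package require tcltest 2.5

tcltest::test errorcode-1.1 {errorCode is glob-matched} -body {
    error "boom" {} {DEMO FAILURE 42}
} -errorCode {DEMO *} -result boom

tcltest::cleanupTests
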