Diffstat (limited to 'Lib/test/test_zoneinfo')
-rw-r--r--  Lib/test/test_zoneinfo/__init__.py                   1
-rw-r--r--  Lib/test/test_zoneinfo/__main__.py                   3
-rw-r--r--  Lib/test/test_zoneinfo/_support.py                  76
-rw-r--r--  Lib/test/test_zoneinfo/data/update_test_data.py    122
-rw-r--r--  Lib/test/test_zoneinfo/data/zoneinfo_data.json      190
-rw-r--r--  Lib/test/test_zoneinfo/test_zoneinfo.py            1994
6 files changed, 2386 insertions, 0 deletions
diff --git a/Lib/test/test_zoneinfo/__init__.py b/Lib/test/test_zoneinfo/__init__.py
new file mode 100644
index 0000000..98cc441
--- /dev/null
+++ b/Lib/test/test_zoneinfo/__init__.py
@@ -0,0 +1 @@
+from .test_zoneinfo import *
diff --git a/Lib/test/test_zoneinfo/__main__.py b/Lib/test/test_zoneinfo/__main__.py
new file mode 100644
index 0000000..5cc4e05
--- /dev/null
+++ b/Lib/test/test_zoneinfo/__main__.py
@@ -0,0 +1,3 @@
+import unittest
+
+unittest.main('test.test_zoneinfo')
diff --git a/Lib/test/test_zoneinfo/_support.py b/Lib/test/test_zoneinfo/_support.py
new file mode 100644
index 0000000..6bd8d8d
--- /dev/null
+++ b/Lib/test/test_zoneinfo/_support.py
@@ -0,0 +1,76 @@
+import contextlib
+import functools
+import sys
+import threading
+import unittest
+from test.support import import_fresh_module
+
+OS_ENV_LOCK = threading.Lock()
+TZPATH_LOCK = threading.Lock()
+TZPATH_TEST_LOCK = threading.Lock()
+
+
+def call_once(f):
+ """Decorator that ensures a function is only ever called once."""
+ lock = threading.Lock()
+ cached = functools.lru_cache(None)(f)
+
+ @functools.wraps(f)
+ def inner():
+ with lock:
+ return cached()
+
+ return inner
+
+
+@call_once
+def get_modules():
+ """Retrieve two copies of zoneinfo: pure Python and C accelerated.
+
+ Because this function manipulates the import system in a way that might
+ be fragile or do unexpected things if it is run many times, it uses a
+ `call_once` decorator to ensure that this is only ever called exactly
+ one time — in other words, when using this function you will only ever
+ get one copy of each module rather than a fresh import each time.
+ """
+ import zoneinfo as c_module
+
+ py_module = import_fresh_module("zoneinfo", blocked=["_zoneinfo"])
+
+ return py_module, c_module
+
+
+@contextlib.contextmanager
+def set_zoneinfo_module(module):
+ """Make sure sys.modules["zoneinfo"] refers to `module`.
+
+ This is necessary because `pickle` will refuse to serialize
+    a type calling itself `zoneinfo.ZoneInfo` unless `zoneinfo.ZoneInfo`
+ refers to the same object.
+ """
+
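+    # Typical use (shown for illustration; mirrors ZoneInfoPickleTest.setUp):
+    #
+    #     with set_zoneinfo_module(py_module):
+    #         pickle.dumps(py_module.ZoneInfo("UTC"))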
+ NOT_PRESENT = object()
+ old_zoneinfo = sys.modules.get("zoneinfo", NOT_PRESENT)
+ sys.modules["zoneinfo"] = module
+ yield
+ if old_zoneinfo is not NOT_PRESENT:
+ sys.modules["zoneinfo"] = old_zoneinfo
+ else: # pragma: nocover
+ sys.modules.pop("zoneinfo")
+
+
+class ZoneInfoTestBase(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.klass = cls.module.ZoneInfo
+ super().setUpClass()
+
+ @contextlib.contextmanager
+ def tzpath_context(self, tzpath, lock=TZPATH_LOCK):
+ with lock:
+ old_path = self.module.TZPATH
+ try:
+ self.module.reset_tzpath(tzpath)
+ yield
+ finally:
+ self.module.reset_tzpath(old_path)
diff --git a/Lib/test/test_zoneinfo/data/update_test_data.py b/Lib/test/test_zoneinfo/data/update_test_data.py
new file mode 100644
index 0000000..f531ab3
--- /dev/null
+++ b/Lib/test/test_zoneinfo/data/update_test_data.py
@@ -0,0 +1,122 @@
+"""
+Script to automatically generate a JSON file containing time zone information.
+
+This is done to allow "pinning" a small subset of the tzdata in the tests,
+since we are testing properties of a file that may be subject to change. For
+example, the behavior in the far future of any given zone is likely to change,
+but "does this give the right answer for this file in 2040" is still an
+important property to test.
+
+This must be run from a computer with zoneinfo data installed.
+"""
+from __future__ import annotations
+
+import base64
+import functools
+import json
+import lzma
+import pathlib
+import textwrap
+import typing
+
+import zoneinfo
+
+KEYS = [
+ "Africa/Abidjan",
+ "Africa/Casablanca",
+ "America/Los_Angeles",
+ "America/Santiago",
+ "Asia/Tokyo",
+ "Australia/Sydney",
+ "Europe/Dublin",
+ "Europe/Lisbon",
+ "Europe/London",
+ "Pacific/Kiritimati",
+ "UTC",
+]
+
+TEST_DATA_LOC = pathlib.Path(__file__).parent
+
+
+@functools.lru_cache(maxsize=None)
+def get_zoneinfo_path() -> pathlib.Path:
+ """Get the first zoneinfo directory on TZPATH containing the "UTC" zone."""
+ key = "UTC"
+ for path in map(pathlib.Path, zoneinfo.TZPATH):
+ if (path / key).exists():
+ return path
+ else:
+ raise OSError("Cannot find time zone data.")
+
+
+def get_zoneinfo_metadata() -> typing.Dict[str, str]:
+ path = get_zoneinfo_path()
+
+ tzdata_zi = path / "tzdata.zi"
+ if not tzdata_zi.exists():
+ # tzdata.zi is necessary to get the version information
+ raise OSError("Time zone data does not include tzdata.zi.")
+
+ with open(tzdata_zi, "r") as f:
+ version_line = next(f)
+
+ _, version = version_line.strip().rsplit(" ", 1)
+
+ if (
+ not version[0:4].isdigit()
+ or len(version) < 5
+ or not version[4:].isalpha()
+ ):
+ raise ValueError(
+ "Version string should be YYYYx, "
+ + "where YYYY is the year and x is a letter; "
+ + f"found: {version}"
+ )
+
+ return {"version": version}
+
+
+def get_zoneinfo(key: str) -> bytes:
+ path = get_zoneinfo_path()
+
+ with open(path / key, "rb") as f:
+ return f.read()
+
+
+def encode_compressed(data: bytes) -> typing.List[str]:
+ compressed_zone = lzma.compress(data)
+ raw = base64.b85encode(compressed_zone)
+
+ raw_data_str = raw.decode("utf-8")
+
+ data_str = textwrap.wrap(raw_data_str, width=70)
+ return data_str
+
+
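+# Hypothetical helper shown for illustration only (the updater itself never
+# decodes): reading the pinned data back means reversing the pipeline above,
+# i.e. join the wrapped lines, base85-decode, then LZMA-decompress.
+def decode_compressed(data_str: typing.List[str]) -> bytes:
+    raw_data_str = "".join(data_str)
+    compressed = base64.b85decode(raw_data_str)
+    return lzma.decompress(compressed)
+
+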
+def load_compressed_keys() -> typing.Dict[str, typing.List[str]]:
+ output = {key: encode_compressed(get_zoneinfo(key)) for key in KEYS}
+
+ return output
+
+
+def update_test_data(fname: str = "zoneinfo_data.json") -> None:
+ TEST_DATA_LOC.mkdir(exist_ok=True, parents=True)
+
+ # Annotation required: https://github.com/python/mypy/issues/8772
+ json_kwargs: typing.Dict[str, typing.Any] = dict(
+ indent=2, sort_keys=True,
+ )
+
+ compressed_keys = load_compressed_keys()
+ metadata = get_zoneinfo_metadata()
+ output = {
+ "metadata": metadata,
+ "data": compressed_keys,
+ }
+
+ with open(TEST_DATA_LOC / fname, "w") as f:
+ json.dump(output, f, **json_kwargs)
+
+
+if __name__ == "__main__":
+ update_test_data()
diff --git a/Lib/test/test_zoneinfo/data/zoneinfo_data.json b/Lib/test/test_zoneinfo/data/zoneinfo_data.json
new file mode 100644
index 0000000..ec4414a
--- /dev/null
+++ b/Lib/test/test_zoneinfo/data/zoneinfo_data.json
@@ -0,0 +1,190 @@
+{
+ "data": {
+ "Africa/Abidjan": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~f{VGF<>F7KxBg5R*{Ksocg8-YYVul=v7vZzaHN",
+ "uC=da5UI2rH18c!OnjV{y4u(+A!!VBKmY&$ORw>7UO^(500B;v0RR91bXh%WvBYQl0ssI2",
+ "00dcD"
+ ],
+ "Africa/Casablanca": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0b&Kz+C_;7KxBg5R*{N&yjMUR~;C-fDaSOU;q-~",
+ "FqW+4{YBjbcw}`a!dW>b)R2-0a+uwf`P3{_Y@HuCz}S$J$ZJ>R_V<~|Fk>sgX4=%0vUrh-",
+ "lt@YP^Wrus;j?`Th#xRPzf<<~Hp4DH^gZX>d{+WOp~HNu8!{uWu}&XphAd{j1;rB4|9?R!",
+ "pqruAFUMt8#*WcrVS{;kLlY(cJRV$w?d2car%R<ALOSO?^`4;ZZtI)%f^^G^>s>q9BgTU4",
+ "Ht-tQKZ7Z`9QqOb?R#b%z?rk>!CkH7jy3wja4NG2q)H}fNRKg8v{);Em;K3Cncf4C6&Oaj",
+ "V+DbX%o4+)CV3+e!Lm6dutu(0BQpH1T?W(~cQtKV*^_Pdx!LirjpTs?Bmt@vktjLq4;)O!",
+ "rrly=c*rwTwMJFd0I57`hgkc?=nyI4RZf9W$6DCWugmf&)wk^tWH17owj=#PGH7Xv-?9$j",
+ "njwDlkOE+BFNR9YXEmBpO;rqEw=e2IR-8^(W;8ma?M3JVd($2T>IW+0tk|Gm8>ftukRQ9J",
+ "8k3brzqMnVyjsLI-CKneFa)Lxvp_a<CkQEd#(pMA^rr}rBNElGA=*!M)puBdoErR9{kWL@",
+ "w=svMc6eZ^-(vQZrV<u^PY#nOIUDJ8%A&;BUVlY9=;@i2j2J1_`P>q40f}0J3VVoWL5rox",
+ "`Kptivcp}o5xA^@>qNI%?zo=Yj4AMV?kbAA)j(1%)+Pp)bSn+7Yk`M{oE}L-Z!G6<Dgq&*",
+ "(C-mFJfbEGDH5M^vBr65rcnsx*~|Em_GeU#B)(+T!|MG-nxj0@IPbp-nHejH3~>OMr5G+h",
+ "p)$3Lg{ono{4cN>Vr&>L4kXH;_VnBL5U!LgzqE%P7QQ*<E!guRW2SE@ayq@)G2nXqA2tGo",
+ "QIgc6>tue}O`3(TZ0`aKn&~8trOQ-rBXCp)f@P6RMO4l0+;b|5-pk9_ryNh}Zc*v%mvz_#",
+ "yd<xXt%~gT90dn4e{Ac<baL-)Y{L7&5G($I$>6fjB0g9{MmMnu8bG%#C~ugXK^S^k@?ab#",
+ "O|aE>dDTt4s4n69(~@t~!wniV%g<uWQat_i6>7khFx~I*4>Y|V$4j5%KPF*-FyKIi@!Ho&",
+ "x8QQsksYt8)D+W)Ni!=G`ogSu^vLL-l#7A7=iIAKL2SuZk9F}NfNk86VI)9WZE?%2wC-ya",
+ "F~z#Qsq)LH0|_D8^5fU8X%GeQ4TB>R-dlziA&tZe&1ada208!$nk`7bOFO2S00G<w{Sp8G",
+ "{cR_IvBYQl0ssI200dcD"
+ ],
+ "America/Los_Angeles": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0qH3OkDsf7KxBg5R*;z{h&-RlhRYu$%jt%!jv+I",
+ "JxhE=%W1?wYb!37Rb?(rgwFIAQI{L#8r*zy!$TMtER_1(vn(Zix^{AVB1(jwr$iL6h0Z!2",
+ "8Gb~UW@0~e512{Z%8}Qzdnjl~wJ1{c2>`Z@1A~t&lyL{p{eM{5)QGf7Mo5FW9==mlyXJt2",
+ "UwpntR7H0eSq!(aYq#aqUz&RM*tvuMI)AsM?K3-dV3-TT{t)!Iy#JTo=tXkzAM9~j2YbiO",
+ "ls3(H8Dc>Y|D1aqL51vjLbpYG;GvGTQB4bXuJ%mA;(B4eUpu$$@zv2vVcq-Y)VKbzp^tei",
+ "uzy}R{Luv<C;_cPe*n$Z<jeC9ogWF9=1mvvUYXS>DjpuVb`79O+CBmg{Wx!bvx$eu4zRE&",
+ "PehMb=&G<9$>iZ|bFE)0=4I?KLFGBC0I(0_svgw0%FiMsT%koo*!nEYc6GY@QnU}&4Isg;",
+ "l=|khi(!VaiSE2=Ny`&&tpi~~;{$u<GHlsr3Ze!iYsU205RFKsLnrXwOL?Mq08xffgS{6h",
+ "E|figx+&N%wbO}re@|}$l;g_6J-Wl%j|qev8A<T?NJ)`;2neGi_DHE4ET*W!c*ggPAgU+L",
+ "E9=bH7;maCUikw^R)UM;TdVvNkQ;FGgN=yQER`SZ1nOgPXr0LCebLety&}kVdmVmB=8eSg",
+ "td!1%p=a2wooIL!Da}OPXvKBfRo?YxqS>N}%f|7mBhAy;<Er2&_LfND#qXN~Mkgf!@4VFA",
+ "Hr%$c)wrKA2cJYWK2>s3YT^sy!$eG~?`9mNJC9@4Bac_p^BZh)Yd_rWW5qh-?tKY(>5VHO",
+ "L*iT8P@wCavLj^yYbnDR+4ukhS+xPrpl)iqB?u)bj9a2aW==g6G3lCJd>(+Blf<d4CF%7u",
+ "tlBUDki}J-!_Dy}5S(MrxSXy~$Z+hgH3P^<<w7D72L7I-R%H3(xm&q_DXxkp$owLTS6Wzk",
+ "hc3nn;laROa3)6hl&gH#)2Lif8fZe$@CdeJ-Zn&*>r)~^40F4f>cRZ^UF;RibfZ>0m73hR",
+ "C{$vTfC(STN`g7(B<=Z2556{}0`?p&|Akkst!4Xy4OT;A@c$XTUI3FRRjy*KA7uC56FD)z",
+ "^X{WV*sr(w!c$W357o!&eLO2wTDNOyw@gf(&R<<LF_3URI4=Ei`-%dM3T66j#9!aG7&b_@",
+ "g1-9vo?DzXZ5vGaf~w__p_@_X?OdvQ_r5bvy2hpESTf+{p?jL+!~!{g8-<-5$@d8EZV&-5",
+ "@a|;^1gB*R-~{EHFA-td_G2bt;~Y}>t;=-Tu1TV{>%8ZVATC9tjD8|(&`$9YHvZ9bVe#>w",
+ "|8c;Tg|xE&)`*}LwM*E}q}q8^Qja%p`_U)*5DdLI9O@!e=3jFjOCrCq28b_bb;s>%D#iJB",
+ "CWJi{JH!Js;6nfayos$kq^OEX00HO-lokL0!mqm{vBYQl0ssI200dcD"
+ ],
+ "America/Santiago": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0fRZ<6QtM7KxBg84(fsEAUJ$J{f-TXlPEUec5Ee",
+ "n+hsD4lC(QYax=JdSpoyje8%VM`GW}<Unz6IOY4=y66tfqG2X4E8xIJQ(~?r{`L~T!sI~o",
+ "VBl7Ao!R1A76Y8P6Y<TfwVHf@sl@S-D4OuAy5mq0MKJZ>{bJ8@y$A8O&*$pw{(f~Os#}2w",
+ "eX6^Rgi$IT%n^V^85L>$_c7{cB^#ogV=rHBJGiz-RQNFGK?gdPi|q)j`&8)}KJ{qo6dixa",
+ "9@yYyVg+%lo0nO+Tw0-w2hJ%mafy<Co(;L+24CYl&?rN0mrh90nxG?%1&Ed@za`Yd>WL)|",
+ ")<o0dZL-*?RFtH7dAv%G*O%l?qvq!0F5C?K#_ZoT{P$77IMoj3&8w3f&n36zquu~s`s0T)",
+ ";>?W6Bi%FWuGPA1Dru$XR4SZANsAthU2EoKH<MU4wYvUTlZGcLIDR+hSik>F6oEtKq`rwP",
+ "(VNegnI_NI%;ma$)wj{k!@KFB30Yo)IOr<QX7IQ@TBq9d;e3QAtYU?$PS-WoaiqwFrg4PR",
+ "A->l>)$)D|+(5h&+%2vuwGuy^@S8FT^s21V5};>VA9Iu;?8bHz#r<;JtfZDI1(FT@edh0#",
+ "MYW$A1qkMGIwTZqqdYNE3gl#zp&NbL9Mp=voqN|;?gqR&4$)1`znddtEyuKS*^nMMD=0^>",
+ "7^z6-C4P67UWOXuMBubP>j6i~03aR@jD^-Y`JSYu#Yp0P8dLLJ0QOPE8=BoiuRX59YW7xg",
+ "WiexjHX%&0?`ZQCdxCdL^qd1v@kOjQKaWo2Y1++~LcA%FTq?5o<?(jL(_Uo}I}k_Fwflcr",
+ "aovwSR_(ILA6li<iBLPQ0#rEet;W-*54kj#sZEGK*tAF{)HNkn#&Hc5`#eaRF;N#$<xQU?",
+ "E%zm?2+b5Ho>%}fX1-RIvlB)1#iTNomGnUL=nM!>Ix|AGtON7!F1O?53kqlC2o-`ZGw*+s",
+ "NM$^9znsIJMwlgscE`|O3|;BRgsQMYm~`uv+nvuv`nigRa}X=BX=A5Sw$)WEklF7&c>_~$",
+ "zJ(m--bqXgiN^w-U=BJH9C0Qro(x90zo@rK;&TJ$nI@&k$ORgOb2<MjjIhYfr;pFUGdMd!",
+ "0d&bOvyq3AZPCez8E(XSg2hBu2A&^k?w|1u8v3JE>s%gWbc}ok_27)Eoku~Fq|B-Ps+4J_",
+ "HPJMLJ2^_)cOU$p&3kNAlrV!)%~6r$BJ>OOi~=-<6byle{?zd4J{NG}o8tw|+#ZNLcpNwk",
+ "TuPE~sbJB8_RZb2DopStO+Wwux~F#S59zm%00I98;S&G=b(j+6vBYQl0ssI200dcD"
+ ],
+ "Asia/Tokyo": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~luMgIxeB7KxBg5R*;y?l4Rl4neXH3cv!OtfK@h",
+ "KZzauI)S!FSDREPhhBS6Fb$&Vv#7%;?Te|>pF^0HBr&z_Tk<%vMW_QqjevRZOp8XVFgP<8",
+ "TkT#`9H&0Ua;gT1#rZLV0HqbAKK;_z@nO;6t0L<i8TZ+%T<;ci2bYSG1u!mUSO5S3XcbN8",
+ "dIxbZ00Ex?wE_SDJu@vkvBYQl0ssI200dcD"
+ ],
+ "Australia/Sydney": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0T)o7+nA=7KxBg5R*_t6jS5T`_Ull(nK1_YY;k%",
+ ";_YdTuU3*!K)eKg@^kzjAtbo@Jd|KGai=Q%%sX5FI?*?LG!|m9cKH5~IEwI=PAr_Yc}w35",
+ ">}hOdk<>TdUa07R(LPI6@!GU$ty4=mwqHG-XVe*n(Yvgdlr+FqIU18!osi)48t~eWX8)&L",
+ "G)Ud^0zz@*AF+2r7E}N<P$kOfo*88g)_bOO?7N1Jr|HJyg+HXc7f4}?%Dur3w|~JU?<x4K",
+ "%RRC~q_D87;UyN{nLRu!fEqKeRR*U$vs>f9Y72K~o-T%}D&z%}#7g<qim`EbfhF7ntyAiP",
+ "%LFNc&!$@Kv)Olyf&Y9%(#SkM+%yI}S%b+@ZM2dH7DpmndGMIda<(`#E9q|?H(HzClx+l;",
+ "M?IEz1eF}r?}ay!V9?9rKD^-ayjE@wUMD$2kC!iwH`n=eVrJPmJyNKaW`LdJ68&u;2nF1K",
+ "kZjKCY_A<>2br?oH6ZiYH^%>J3D)TPKV(JY*bwjuw5=DsPB@~CrR<E_U_fJTF9ufU%!cXK",
+ "_4uM#!%%Q1e1G~{E}~vGVE0{Kxecm^NjtJM`c8EFHFTiUIVl@YUD8F+s!u8jz~6hte@oa|",
+ "qayb*^Lwd(etNmBro;aXQjkY8g(*`_JQ0%{V3QP2l!GGQ7D+v&k_PK0F(?f{GziU5>OZeN",
+ "x>A*H&CHrWt0`EP`m!F%waepl#|w#&`XgVc?~2M3uw$fGX~tf_Il!q#Aa<*8xlzQ2+7r6Z",
+ "^;Laa9F(WB_O&Dy2r>~@kSi16W{=6+i5GV=Uq~KX*~&HUN4oz7*O(gXIr}sDVcD`Ikgw#|",
+ "50ssal8s)Qy;?YGCf;*UKKKN!T4!Kqy_G;7<gSrPK{)5#a>PfQapugqvVBKy12v3TVH^L2",
+ "0?#5*VP~MOYfe$h`*L!7@tiW|_^X1N%<}`7YahiUYtMu5XwmOf3?dr+@zXHwW`z}ZDqZlT",
+ "<2Cs(<1%M!i6o&VK89BY0J7HPIo;O62s=|IbV^@y$N&#<x=a876<(U>=>i^F00FcHoDl#3",
+ "Mdv&xvBYQl0ssI200dcD"
+ ],
+ "Europe/Dublin": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0>b$_+0=h7KxBg5R*;&J77#T_U2R5sleVWFDmK~",
+ "Kzj5oh@`<njquRZ&tJIS(cXp1>QKHvW^6V{jU-w>qg1tSt0c^vh;?qAqA0%t?;#S~6U8Qi",
+ "v&f1s9IH#g$m1k1a#3+lylw4mwT4QnEUUQdwg+xnEcBlgu31bAVabn41OMZVLGz6NDwG%X",
+ "uQar!b>GI{qSahE`AG}$kRWbuI~JCt;38)Xwbb~Qggs55t+MAHIxgDxzTJ;2xXx99+qCy4",
+ "45kC#v_l8fx|G&jlVvaciR<-wwf22l%4(t@S6tnX39#_K(4S0fu$FUs$isu<UOJYm|4)2i",
+ "aEpsajn@}B#rnY=Cg_TXsm-A)*adXV&$klNTn3n{XXlaquu}6m{k%oRmY0Yyhlj*<W{D5m",
+ "22}OiqnwHT!tnK`wPqx?wiF%v{ipTrOkcJ5P@7OC4(-l`*&SB$Wd4Vf8gn?>d<i@%mP*e*",
+ "ttDj`9M1;9$YV@dhT)DVcwdq(Ly~KDm_&KL?{_mFwwYtJqRZBk)i1FVQy!40w_KyAg?hIA",
+ "=_{(3#S0eWsF8f%_4Zza$4@$lSmov+Huyn$vP^zJ|8-<C3#q#0kEs9cNg^xUR(m?wEWt-D",
+ "GctAh2nIo~fz%$m$I41=b_WuJ6M9g#A9_Epwqw{d0B|vzmg#_y<=_>9IKzCXB<o`d)**5V",
+ "6g!<<Jw1n5TrN-$)aYz4cLsTmpsUf-6L7ix+kk>78NkARYq@9Dc0TGkhz);NtM_SSzEffN",
+ "l{2^*CKGdp52h!52A)6q9fUSltXF{T*Ehc9Q7u8!W7pE(Fv$D$cKUAt6wY=DA1mGgxC*VX",
+ "q_If3G#FY6-Voj`fIKk`0}Cc72_SD{v>468LV{pyBI33^p0E?}RwDA6Pkq--C~0jF&Z@Pv",
+ "!dx_1SN_)jwz@P$(oK%P!Tk9?fRjK88yxhxlcFtTjjZ$DYssSsa#ufYrR+}}nKS+r384o~",
+ "!Uw$nwTbF~qgRsgr0N#d@KIinx%<pnyQ!|>hQB(SJyjJtDtIy(%mDm}ZBGN}dV6K~om|=U",
+ "VGkbciQ=^$_14|gT21!YQ)@y*Rd0i_lS6gtPBE9+ah%WIJPwzUTjIr+J1XckkmA!6WE16%",
+ "CVAl{Dn&-)=G$Bjh?bh0$Xt1UDcgXJjXzzojuw0>paV~?Sa`VN3FysqF<S*L0RYSAY3jt(",
+ "8wCD04RfyEcP(RNT%x7k(7m-9H3{zuQ`RZy-Rz%*&dldDVFF+TwSAPO1wRX^5W5@xJ9{vW",
+ "w?rc^NH({%Ie<rxKqSVy!Le-_`U&@W_(D+>xTzfKVAu*ucq#+m=|KSSMvp_#@-lwd+q*ue",
+ "FQ^5<D+|jLr?k{O39i8AX2Qb^zi9A<7XD1y!-W2|0Hk8JVkN;gl><|<0R-u4qYMbRqzSn&",
+ "Q7jSuvc%b+EZc%>nI(+&0Tl1Y>a6v4`uNFD-7$QrhHgS7Wnv~rDgfH;rQw3+m`LJxoM4v#",
+ "gK@?|B{RHJ*VxZgk#!p<_&-sjxOda0YaiJ1UnG41VPv(Et%ElzKRMcO$AfgU+Xnwg5p2_+",
+ "NrnZ1WfEj^fmHd^sx@%JWKkh#zaK0ox%rdP)zUmGZZnqmZ_9L=%6R8ibJH0bOT$AGhDo6{",
+ "fJ?;_U;D|^>5by2ul@i4Zf()InfFN}00EQ=q#FPL>RM>svBYQl0ssI200dcD"
+ ],
+ "Europe/Lisbon": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0=rf*IfWA7KxBg5R*;*X|PN+G3LqthM?xgkNUN_",
+ ")gCt1Sc%YT6^TTomk4yVHXeyvQj8}l<;q&s7K}#Vnc8lII1?)AHh$*>OKUU4S;*h>v*ep0",
+ "xTi1cK2{aY*|2D*-~K<;-{_W+r@NvZ7-|NZv($ek_C%VfP0xjWeZP#CPXD`IKkakjh(kUd",
+ "&H)m;^Q(jGjIyiyrcUMtOP)u3A>sw6ux;Bmp3x$4QvQKMx5TrCx_!$srWQuXNs&`9=^IY1",
+ "yc&C31!sQh7P=Mk*#6x8Z@5^%ehR8UW<EvzdWer9z;R6PrdUaWab3G>$OWw0KMw}P1ycI^",
+ "4eh12oBUOV?S>n*d!+EM@>x#9PZD12iD=zaC;7`8dTfkU_6d}OZvSFSbGgXeKw}XyX@D=(",
+ ")D0!^DBGr8pXWBT$S-yhLP>Z3ys^VW<kSQr?{jhl<+{Fki;mTI=&Stgy$rttN?ulQM$lDr",
+ "G7))C7Dx=J6V-e^(Qk|r;f~TvIw1KqRIC{8f^jPy#blstV{-&2a}ZJe!Zr2c_R4NT)L@bs",
+ "+gRRm6Wn)VWVNHeK*TEV=f#2KZqu%y?mTx#EfRiK0)TG7$$~=LGxx@0D|lS2up|oCON{YQ",
+ "oN5-H$!_n-Kx2*=RO!epEX>3}RQ6{NGGVJG6vf*MH93vvNW6yLjie1;{4tVhg-KnSf|G`!",
+ "Z;j$7gJ1ows~RD=@n7I6aFd8rOR_7Y?E-$clI%1o5gA@O!KPa^(8^iFFeFykI-+z>E$mvp",
+ "E_h`vbHPjqkLs`Dn-0FV`R@z|h!S(Lb;M&|Exr<u8#s-T(>!biY`%bfp$6`hK;GDhdP|^Q",
+ "*Ty*}1d41K>H2B{jrjE9aFK>yAQJBX9CD%-384S;0fw`PlprHGS`^b$oS-`I4VH7ji8ou-",
+ "g|060jfb1XcxiInT0oO<S+<vh^)XY;lr@|IeXj}%k;}|kSlDGaYidk^zB|gEYaet~F%QYd",
+ "f7pbnQKLZ0o7=kso86doS;J@aQ>oeR7#%e5Ug5#KW)nV<Rc;|LjUDdhk8*dYJQwYN?hzH%",
+ "0<XB$!(rpf2nxaL22M`L4pKx>SRvLHNe$SQHM@2)`S9L7>RL@<XAlxVQfb2=%lcu!h+Um0",
+ "Q+Z=itevTFy}-Jl<g5crK55BF`VsoPH~qP3QrG%YtrD#s{=gA7p)QI<i=EwY(cel8`B=#u",
+ "Yq<K;4T(QBF_GvrYueSk*}gfrCSg22+YH-1N<WYkp|DA-P-&va<Xu<}^yafJKlzezB-lS{",
+ "a++P_^gYmgrc9FO-K3s~`jAcqVV!k?NV2IFV^86`cr>Qx%fmm7?3u7P5TywFQ}C@S(pq}|",
+ "eLPT{C^{<0Q?uU&kSVd%!~8q3;Z0s3OqzF`$HRkePL5Ywgiwn{R(<RY8ut&RJ;$?J*w*n)",
+ ">zi+jmOBFrVpW;)@UsU#%$8BcV#h@}m$#!Fglo&bwb78aYqOG_W7h{eb(+39&-mk4EIXq_",
+ "_`30=8sfA3=!3TO_TyS5X22~?6nKngZ|bq=grdq=9X)3xAkA42L!~rmS)n3w-~;lgz%Fhn",
+ "(?rXdp2ho~9?wmVs2JwVt~?@FVD%`tN69{(i3oQa;O0<Hp#T5?$WIy3h`IlL00Hv}jT-;}",
+ "Z2tpNvBYQl0ssI200dcD"
+ ],
+ "Europe/London": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0`|pJ6!-O7KxBg5R*;$9DqzW!kQs3DZt(=0_!m1",
+ "4wvE`6N%Vj#u6PS_3S?~(2)&xn8}2}3Wr#kG8n2!x8>$E$lF&~Y#_H6bu6(BiwblJ>;-Fs",
+ "gA$Y$*?=X)n1pFkKn}F~`>=4)+LLQk?L*P!bhAm0;`N~z3QbUIyVrm%kOZ(n1JJsm0pyb8",
+ "!GV{d*C!9KXv;4v<seWRpo=ZZxGf)-5Qsn$3dw`uhF)+6#mgUoNF-Y2jN73pVhdTs*p0`Z",
+ "AbnT1puEtudB{Nul>D4Q>-k#+x(!V5L@w5M>v2V5<gcLskF+p`aGTSn{sY8^@MUc;2o{&V",
+ "R!$180N}BtfYKS)i9w=!<~&l?1Cv^PWs!&a9{s(35^yqGU$72DKX|IkRtDblB>a`B>t(|B",
+ "|Fqr4^-{S*%Ep~ojUtx_CRbSQ(uFwu2=KH)Q@EBs@ZqRXn4mU;B!68;;IQs3Ub=n&UU%*m",
+ "k&zwD36&JSwsN(%k&x?H+tN^6)23c`I0=5^N_R0~1>tsFZ`^`3z~rXSXT&qcwa#n!%+Z#P",
+ "PG}(D^_CCILXnF|GKwabBh*xFS?4rwGo2vtJUwzrbv_$5PO+`?$l{H-jGB@X%S!OAhw;D4",
+ "XFycN3!XqQ&EorJOD3>~^U%Luw!jF<;6_q-f-S|6<EHry?%{@fuyH`_+D%uTA@g0$5e!Yi",
+ "P1vQuevyS;jE(-R>{cQDfZ2(4Xf1MMLr1=SA=MwVf2%Pp%VP;jn)|5Tf!-DbUGn%I-r<KG",
+ "4jJ(Y#L-fJUpUb$yNfvhX*iqWZoG7T*WUfE6iQD9_^EWqExH`rc&jJ<o^E8-mM10WrZ_Vv",
+ "xx9nj<vMlEt*KfP*pyth!c_AKnrKtQTACX08#{pioAFnDq!53+h*hO^f*yrWjg0u2pUcgv",
+ "UlpEZ9G_dlhlW1J^h@gTt7{KPL2mRal;1juJ3Q8-!GXO#IPzT4ciJ-nB+nkphssM}Q7IAT",
+ "pM}AT%y(J!78F?>kYaH7?$$O!t)wwClAisr3eUoeB^~T=U*_P~Y2*KdnO87>B!19sV=xZ5",
+ "yApq26RxgqA|*tmsvtL#OhcF(C<0EGWHP)BF<g*iSWicU6k1<Ps?BQ$IWg-#s2uF-qXgJ_",
+ "!H_mZIMx*L%&a*_6;_trMCULk0ZYM<hfJlYBddHwRyYUDu3!C_lJZWTQ?c-R&@9054pj0k",
+ "kQ{Xi{A$&)&b#^G*}8w^qE5i<@aDxaJQs2E$W)AIqUXO{gQ;U8|FA%BD~sORzq44)AntUu",
+ "QHBO{{Pi<EpK!$x4(~7w)la!dN=M@L_j};6|5G&QfuO~2?Q7996z)78fqW<D#8tKNV(*qc",
+ "mfA>l?h)_*7!{LoJiv%RsOs!q->n+DcV%9~B@Rb<ISu%16c5H-7zQIq+SuS+s<lQOWK5+C",
+ "d*>C_1G_1g6`Yd~8|%-=2l~oGN!~TVv2Bnk>7wW8L@^?vX$f3AiT)(4nrCuTm9%(XC6Nai",
+ "E(;}7&=YZagjAN$O-cN;1u{dTkElmB0GT$|Wa)QMmKrx<|LCJ9qlUoFsUbD^H^6_8(w<0{",
+ "ftj&O1~p_%lh5z;zNV&sP<T$*OgK)_0B#JDtXOkhC;Bo7h)#RUy;vBiVLN-T$*7t*t9@ey",
+ "3Woa&24QZ_z38BQ@A(A<(9n@%R?}B`7%w2wowt~UU;bAlqCzr(H$M5t==jGIqMqCsE=Jwa",
+ "$3P+3^&|~i28@=d_u6Cgthe(Lq(wxKpdSDL|7X6Un<nrt00Gwuz#ISo`BbmvvBYQl0ssI2",
+ "00dcD"
+ ],
+ "Pacific/Kiritimati": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~jCaVO;<!7KxBg5R*{K!`A|q%C5j6({{dSEy5>+",
+ "NF2>iK{8KMUf+)<-)VxXbLxD(alL}N$AT-ogNbJSMMYeX+Z{jS)b8TK^PB=FxyBxzfmFto",
+ "eo0R`a(%NO?#aEH9|?Cv00000NIsFh6BW2800DjO0RR918Pu^`vBYQl0ssI200dcD"
+ ],
+ "UTC": [
+ "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~e#|9bEt_7KxBg5R*|3h1|xhHLji!C57qW6L*|H",
+ "pEErm00000ygu;I+>V)?00B92fhY-(AGY&-0RR9100dcD"
+ ]
+ },
+ "metadata": {
+ "version": "2020a"
+ }
+} \ No newline at end of file
diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py
new file mode 100644
index 0000000..05db03a
--- /dev/null
+++ b/Lib/test/test_zoneinfo/test_zoneinfo.py
@@ -0,0 +1,1994 @@
+from __future__ import annotations
+
+import base64
+import contextlib
+import dataclasses
+import importlib.metadata
+import io
+import json
+import lzma
+import os
+import pathlib
+import pickle
+import re
+import shutil
+import struct
+import tempfile
+import unittest
+from datetime import date, datetime, time, timedelta, timezone
+
+from . import _support as test_support
+from ._support import (
+ OS_ENV_LOCK,
+ TZPATH_LOCK,
+ TZPATH_TEST_LOCK,
+ ZoneInfoTestBase,
+)
+
+py_zoneinfo, c_zoneinfo = test_support.get_modules()
+
+try:
+ importlib.metadata.metadata("tzdata")
+ HAS_TZDATA_PKG = True
+except importlib.metadata.PackageNotFoundError:
+ HAS_TZDATA_PKG = False
+
+ZONEINFO_DATA = None
+ZONEINFO_DATA_V1 = None
+TEMP_DIR = None
+DATA_DIR = pathlib.Path(__file__).parent / "data"
+ZONEINFO_JSON = DATA_DIR / "zoneinfo_data.json"
+
+# Useful constants
+ZERO = timedelta(0)
+ONE_H = timedelta(hours=1)
+
+
+def setUpModule():
+ global TEMP_DIR
+ global ZONEINFO_DATA
+ global ZONEINFO_DATA_V1
+
+ TEMP_DIR = pathlib.Path(tempfile.mkdtemp(prefix="zoneinfo"))
+ ZONEINFO_DATA = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v2")
+ ZONEINFO_DATA_V1 = ZoneInfoData(ZONEINFO_JSON, TEMP_DIR / "v1", v1=True)
+
+
+def tearDownModule():
+ shutil.rmtree(TEMP_DIR)
+
+
+class TzPathUserMixin:
+ """
+    Adds a setUp() (with a matching cleanup) to make TZPATH manipulations thread-safe.
+
+ Any tests that require manipulation of the TZPATH global are necessarily
+ thread unsafe, so we will acquire a lock and reset the TZPATH variable
+ to the default state before each test and release the lock after the test
+ is through.
+ """
+
+ @property
+ def tzpath(self): # pragma: nocover
+ return None
+
+ def setUp(self):
+ with contextlib.ExitStack() as stack:
+ stack.enter_context(
+ self.tzpath_context(self.tzpath, lock=TZPATH_TEST_LOCK)
+ )
+ self.addCleanup(stack.pop_all().close)
+
+ super().setUp()
+
+
+class DatetimeSubclassMixin:
+ """
+ Replaces all ZoneTransition transition dates with a datetime subclass.
+ """
+
+ class DatetimeSubclass(datetime):
+ @classmethod
+ def from_datetime(cls, dt):
+ return cls(
+ dt.year,
+ dt.month,
+ dt.day,
+ dt.hour,
+ dt.minute,
+ dt.second,
+ dt.microsecond,
+ tzinfo=dt.tzinfo,
+ fold=dt.fold,
+ )
+
+ def load_transition_examples(self, key):
+ transition_examples = super().load_transition_examples(key)
+ for zt in transition_examples:
+ dt = zt.transition
+ new_dt = self.DatetimeSubclass.from_datetime(dt)
+ new_zt = dataclasses.replace(zt, transition=new_dt)
+ yield new_zt
+
+
+class ZoneInfoTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+ class_name = "ZoneInfo"
+
+ def setUp(self):
+ super().setUp()
+
+ # This is necessary because various subclasses pull from different
+ # data sources (e.g. tzdata, V1 files, etc).
+ self.klass.clear_cache()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def zone_from_key(self, key):
+ return self.klass(key)
+
+ def zones(self):
+ return ZoneDumpData.transition_keys()
+
+ def fixed_offset_zones(self):
+ return ZoneDumpData.fixed_offset_zones()
+
+ def load_transition_examples(self, key):
+ return ZoneDumpData.load_transition_examples(key)
+
+ def test_str(self):
+ # Zones constructed with a key must have str(zone) == key
+ for key in self.zones():
+ with self.subTest(key):
+ zi = self.zone_from_key(key)
+
+ self.assertEqual(str(zi), key)
+
+ # Zones with no key constructed should have str(zone) == repr(zone)
+ file_key = self.zoneinfo_data.keys[0]
+ file_path = self.zoneinfo_data.path_from_key(file_key)
+
+ with open(file_path, "rb") as f:
+ with self.subTest(test_name="Repr test", path=file_path):
+ zi_ff = self.klass.from_file(f)
+ self.assertEqual(str(zi_ff), repr(zi_ff))
+
+ def test_repr(self):
+ # The repr is not guaranteed, but I think we can insist that it at
+ # least contain the name of the class.
+ key = next(iter(self.zones()))
+
+ zi = self.klass(key)
+ class_name = self.class_name
+ with self.subTest(name="from key"):
+ self.assertRegex(repr(zi), class_name)
+
+ file_key = self.zoneinfo_data.keys[0]
+ file_path = self.zoneinfo_data.path_from_key(file_key)
+ with open(file_path, "rb") as f:
+ zi_ff = self.klass.from_file(f, key=file_key)
+
+ with self.subTest(name="from file with key"):
+ self.assertRegex(repr(zi_ff), class_name)
+
+ with open(file_path, "rb") as f:
+ zi_ff_nk = self.klass.from_file(f)
+
+ with self.subTest(name="from file without key"):
+ self.assertRegex(repr(zi_ff_nk), class_name)
+
+ def test_key_attribute(self):
+ key = next(iter(self.zones()))
+
+ def from_file_nokey(key):
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ return self.klass.from_file(f)
+
+ constructors = (
+ ("Primary constructor", self.klass, key),
+ ("no_cache", self.klass.no_cache, key),
+ ("from_file", from_file_nokey, None),
+ )
+
+ for msg, constructor, expected in constructors:
+ zi = constructor(key)
+
+ # Ensure that the key attribute is set to the input to ``key``
+ with self.subTest(msg):
+ self.assertEqual(zi.key, expected)
+
+ # Ensure that the key attribute is read-only
+ with self.subTest(f"{msg}: readonly"):
+ with self.assertRaises(AttributeError):
+ zi.key = "Some/Value"
+
+ def test_bad_keys(self):
+ bad_keys = [
+ "Eurasia/Badzone", # Plausible but does not exist
+ "BZQ",
+ "America.Los_Angeles",
+ "🇨🇦", # Non-ascii
+ "America/New\ud800York", # Contains surrogate character
+ ]
+
+ for bad_key in bad_keys:
+ with self.assertRaises(self.module.ZoneInfoNotFoundError):
+ self.klass(bad_key)
+
+ def test_bad_keys_paths(self):
+ bad_keys = [
+ "/America/Los_Angeles", # Absolute path
+ "America/Los_Angeles/", # Trailing slash - not normalized
+ "../zoneinfo/America/Los_Angeles", # Traverses above TZPATH
+ "America/../America/Los_Angeles", # Not normalized
+ "America/./Los_Angeles",
+ ]
+
+ for bad_key in bad_keys:
+ with self.assertRaises(ValueError):
+ self.klass(bad_key)
+
+ def test_bad_zones(self):
+ bad_zones = [
+ b"", # Empty file
+ b"AAAA3" + b" " * 15, # Bad magic
+ ]
+
+ for bad_zone in bad_zones:
+ fobj = io.BytesIO(bad_zone)
+ with self.assertRaises(ValueError):
+ self.klass.from_file(fobj)
+
+ def test_fromutc_errors(self):
+ key = next(iter(self.zones()))
+ zone = self.zone_from_key(key)
+
+ bad_values = [
+ (datetime(2019, 1, 1, tzinfo=timezone.utc), ValueError),
+ (datetime(2019, 1, 1), ValueError),
+ (date(2019, 1, 1), TypeError),
+ (time(0), TypeError),
+ (0, TypeError),
+ ("2019-01-01", TypeError),
+ ]
+
+ for val, exc_type in bad_values:
+ with self.subTest(val=val):
+ with self.assertRaises(exc_type):
+ zone.fromutc(val)
+
+ def test_utc(self):
+ zi = self.klass("UTC")
+ dt = datetime(2020, 1, 1, tzinfo=zi)
+
+ self.assertEqual(dt.utcoffset(), ZERO)
+ self.assertEqual(dt.dst(), ZERO)
+ self.assertEqual(dt.tzname(), "UTC")
+
+ def test_unambiguous(self):
+ test_cases = []
+ for key in self.zones():
+ for zone_transition in self.load_transition_examples(key):
+ test_cases.append(
+ (
+ key,
+ zone_transition.transition - timedelta(days=2),
+ zone_transition.offset_before,
+ )
+ )
+
+ test_cases.append(
+ (
+ key,
+ zone_transition.transition + timedelta(days=2),
+ zone_transition.offset_after,
+ )
+ )
+
+ for key, dt, offset in test_cases:
+ with self.subTest(key=key, dt=dt, offset=offset):
+ tzi = self.zone_from_key(key)
+ dt = dt.replace(tzinfo=tzi)
+
+ self.assertEqual(dt.tzname(), offset.tzname, dt)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset, dt)
+ self.assertEqual(dt.dst(), offset.dst, dt)
+
+ def test_folds_and_gaps(self):
+ test_cases = []
+ for key in self.zones():
+ tests = {"folds": [], "gaps": []}
+ for zt in self.load_transition_examples(key):
+ if zt.fold:
+ test_group = tests["folds"]
+ elif zt.gap:
+ test_group = tests["gaps"]
+ else:
+ # Assign a random variable here to disable the peephole
+ # optimizer so that coverage can see this line.
+ # See bpo-2506 for more information.
+ no_peephole_opt = None
+ continue
+
+ # Cases are of the form key, dt, fold, offset
+ dt = zt.anomaly_start - timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_before))
+
+ dt = zt.anomaly_start
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_start + timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end - timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_before))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end
+ test_group.append((dt, 0, zt.offset_after))
+ test_group.append((dt, 1, zt.offset_after))
+
+ dt = zt.anomaly_end + timedelta(seconds=1)
+ test_group.append((dt, 0, zt.offset_after))
+ test_group.append((dt, 1, zt.offset_after))
+
+ for grp, test_group in tests.items():
+ test_cases.append(((key, grp), test_group))
+
+ for (key, grp), tests in test_cases:
+ with self.subTest(key=key, grp=grp):
+ tzi = self.zone_from_key(key)
+
+ for dt, fold, offset in tests:
+ dt = dt.replace(fold=fold, tzinfo=tzi)
+
+ self.assertEqual(dt.tzname(), offset.tzname, dt)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset, dt)
+ self.assertEqual(dt.dst(), offset.dst, dt)
+
+ def test_folds_from_utc(self):
+ tests = []
+ for key in self.zones():
+ zi = self.zone_from_key(key)
+ with self.subTest(key=key):
+ for zt in self.load_transition_examples(key):
+ if not zt.fold:
+ continue
+
+ dt_utc = zt.transition_utc
+ dt_before_utc = dt_utc - timedelta(seconds=1)
+ dt_after_utc = dt_utc + timedelta(seconds=1)
+
+ dt_before = dt_before_utc.astimezone(zi)
+ self.assertEqual(dt_before.fold, 0, (dt_before, dt_utc))
+
+ dt_after = dt_after_utc.astimezone(zi)
+ self.assertEqual(dt_after.fold, 1, (dt_after, dt_utc))
+
+ def test_time_variable_offset(self):
+ # self.zones() only ever returns variable-offset zones
+ for key in self.zones():
+ zi = self.zone_from_key(key)
+ t = time(11, 15, 1, 34471, tzinfo=zi)
+
+ with self.subTest(key=key):
+ self.assertIs(t.tzname(), None)
+ self.assertIs(t.utcoffset(), None)
+ self.assertIs(t.dst(), None)
+
+ def test_time_fixed_offset(self):
+ for key, offset in self.fixed_offset_zones():
+ zi = self.zone_from_key(key)
+
+ t = time(11, 15, 1, 34471, tzinfo=zi)
+
+ with self.subTest(key=key):
+ self.assertEqual(t.tzname(), offset.tzname)
+ self.assertEqual(t.utcoffset(), offset.utcoffset)
+ self.assertEqual(t.dst(), offset.dst)
+
+
+class CZoneInfoTest(ZoneInfoTest):
+ module = c_zoneinfo
+
+ def test_fold_mutate(self):
+ """Test that fold isn't mutated when no change is necessary.
+
+ The underlying C API is capable of mutating datetime objects, and
+ may rely on the fact that addition of a datetime object returns a
+ new datetime; this test ensures that the input datetime to fromutc
+ is not mutated.
+ """
+
+ def to_subclass(dt):
+ class SameAddSubclass(type(dt)):
+ def __add__(self, other):
+ if other == timedelta(0):
+ return self
+
+ return super().__add__(other) # pragma: nocover
+
+ return SameAddSubclass(
+ dt.year,
+ dt.month,
+ dt.day,
+ dt.hour,
+ dt.minute,
+ dt.second,
+ dt.microsecond,
+ fold=dt.fold,
+ tzinfo=dt.tzinfo,
+ )
+
+ key = "Europe/London"
+ zi = self.zone_from_key(key)
+ for zt in self.load_transition_examples(key):
+ if zt.fold and zt.offset_after.utcoffset == ZERO:
+ example = zt.transition_utc.replace(tzinfo=zi)
+ break
+
+ for subclass in [False, True]:
+ if subclass:
+ dt = to_subclass(example)
+ else:
+ dt = example
+
+ with self.subTest(subclass=subclass):
+ dt_fromutc = zi.fromutc(dt)
+
+ self.assertEqual(dt_fromutc.fold, 1)
+ self.assertEqual(dt.fold, 0)
+
+
+class ZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, ZoneInfoTest):
+ pass
+
+
+class CZoneInfoDatetimeSubclassTest(DatetimeSubclassMixin, CZoneInfoTest):
+ pass
+
+
+class ZoneInfoTestSubclass(ZoneInfoTest):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+
+ class ZISubclass(cls.klass):
+ pass
+
+ cls.class_name = "ZISubclass"
+ cls.parent_klass = cls.klass
+ cls.klass = ZISubclass
+
+ def test_subclass_own_cache(self):
+ base_obj = self.parent_klass("Europe/London")
+ sub_obj = self.klass("Europe/London")
+
+ self.assertIsNot(base_obj, sub_obj)
+ self.assertIsInstance(base_obj, self.parent_klass)
+ self.assertIsInstance(sub_obj, self.klass)
+
+
+class CZoneInfoTestSubclass(ZoneInfoTestSubclass):
+ module = c_zoneinfo
+
+
+class ZoneInfoV1Test(ZoneInfoTest):
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA_V1
+
+ def load_transition_examples(self, key):
+ # We will discard zdump examples outside the range epoch +/- 2**31,
+ # because they are not well-supported in Version 1 files.
+ epoch = datetime(1970, 1, 1)
+ max_offset_32 = timedelta(seconds=2 ** 31)
+ min_dt = epoch - max_offset_32
+ max_dt = epoch + max_offset_32
+
+ for zt in ZoneDumpData.load_transition_examples(key):
+ if min_dt <= zt.transition <= max_dt:
+ yield zt
+
+
+class CZoneInfoV1Test(ZoneInfoV1Test):
+ module = c_zoneinfo
+
+
+@unittest.skipIf(
+ not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed"
+)
+class TZDataTests(ZoneInfoTest):
+ """
+ Runs all the ZoneInfoTest tests, but against the tzdata package
+
+ NOTE: The ZoneDumpData has frozen test data, but tzdata will update, so
+ some of the tests (particularly those related to the far future) may break
+ in the event that the time zone policies in the relevant time zones change.
+ """
+
+ @property
+ def tzpath(self):
+ return []
+
+ def zone_from_key(self, key):
+ return self.klass(key=key)
+
+
+@unittest.skipIf(
+ not HAS_TZDATA_PKG, "Skipping tzdata-specific tests: tzdata not installed"
+)
+class CTZDataTests(TZDataTests):
+ module = c_zoneinfo
+
+
+class WeirdZoneTest(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def test_one_transition(self):
+ LMT = ZoneOffset("LMT", -timedelta(hours=6, minutes=31, seconds=2))
+ STD = ZoneOffset("STD", -timedelta(hours=6))
+
+ transitions = [
+ ZoneTransition(datetime(1883, 6, 9, 14), LMT, STD),
+ ]
+
+ after = "STD6"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dt0 = datetime(1883, 6, 9, 1, tzinfo=zi)
+ dt1 = datetime(1883, 6, 10, 1, tzinfo=zi)
+
+ for dt, offset in [(dt0, LMT), (dt1, STD)]:
+ with self.subTest(name="local", dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ dts = [
+ (
+ datetime(1883, 6, 9, 1, tzinfo=zi),
+ datetime(1883, 6, 9, 7, 31, 2, tzinfo=timezone.utc),
+ ),
+ (
+ datetime(2010, 4, 1, 12, tzinfo=zi),
+ datetime(2010, 4, 1, 18, tzinfo=timezone.utc),
+ ),
+ ]
+
+ for dt_local, dt_utc in dts:
+ with self.subTest(name="fromutc", dt=dt_local):
+ dt_actual = dt_utc.astimezone(zi)
+ self.assertEqual(dt_actual, dt_local)
+
+ dt_utc_actual = dt_local.astimezone(timezone.utc)
+ self.assertEqual(dt_utc_actual, dt_utc)
+
+ def test_one_zone_dst(self):
+ DST = ZoneOffset("DST", ONE_H, ONE_H)
+ transitions = [
+ ZoneTransition(datetime(1970, 1, 1), DST, DST),
+ ]
+
+ after = "STD0DST-1,0/0,J365/25"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dts = [
+ datetime(1900, 3, 1),
+ datetime(1965, 9, 12),
+ datetime(1970, 1, 1),
+ datetime(2010, 11, 3),
+ datetime(2040, 1, 1),
+ ]
+
+ for dt in dts:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(dt=dt):
+ self.assertEqual(dt.tzname(), DST.tzname)
+ self.assertEqual(dt.utcoffset(), DST.utcoffset)
+ self.assertEqual(dt.dst(), DST.dst)
+
+ def test_no_tz_str(self):
+ STD = ZoneOffset("STD", ONE_H, ZERO)
+ DST = ZoneOffset("DST", 2 * ONE_H, ONE_H)
+
+ transitions = []
+ for year in range(1996, 2000):
+ transitions.append(
+ ZoneTransition(datetime(year, 3, 1, 2), STD, DST)
+ )
+ transitions.append(
+ ZoneTransition(datetime(year, 11, 1, 2), DST, STD)
+ )
+
+ after = ""
+
+ zf = self.construct_zone(transitions, after)
+
+ # According to RFC 8536, local times after the last transition time
+ # with an empty TZ string are unspecified. We will go with "hold the
+ # last transition", but the most we should promise is "doesn't crash."
+ zi = self.klass.from_file(zf)
+
+ cases = [
+ (datetime(1995, 1, 1), STD),
+ (datetime(1996, 4, 1), DST),
+ (datetime(1996, 11, 2), STD),
+ (datetime(2001, 1, 1), STD),
+ ]
+
+ for dt, offset in cases:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ # Test that offsets return None when using a datetime.time
+ t = time(0, tzinfo=zi)
+ with self.subTest("Testing datetime.time"):
+ self.assertIs(t.tzname(), None)
+ self.assertIs(t.utcoffset(), None)
+ self.assertIs(t.dst(), None)
+
+ def test_tz_before_only(self):
+ # From RFC 8536 Section 3.2:
+ #
+ # If there are no transitions, local time for all timestamps is
+ # specified by the TZ string in the footer if present and nonempty;
+ # otherwise, it is specified by time type 0.
+
+ offsets = [
+ ZoneOffset("STD", ZERO, ZERO),
+ ZoneOffset("DST", ONE_H, ONE_H),
+ ]
+
+ for offset in offsets:
+ # Phantom transition to set time type 0.
+ transitions = [
+ ZoneTransition(None, offset, offset),
+ ]
+
+ after = ""
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf)
+
+ dts = [
+ datetime(1900, 1, 1),
+ datetime(1970, 1, 1),
+ datetime(2000, 1, 1),
+ ]
+
+ for dt in dts:
+ dt = dt.replace(tzinfo=zi)
+ with self.subTest(offset=offset, dt=dt):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ def test_empty_zone(self):
+ zf = self.construct_zone([], "")
+
+ with self.assertRaises(ValueError):
+ self.klass.from_file(zf)
+
+ def test_zone_very_large_timestamp(self):
+ """Test when a transition is in the far past or future.
+
+ Particularly, this is a concern if something:
+
+ 1. Attempts to call ``datetime.timestamp`` for a datetime outside
+ of ``[datetime.min, datetime.max]``.
+ 2. Attempts to construct a timedelta outside of
+ ``[timedelta.min, timedelta.max]``.
+
+ This actually occurs "in the wild", as some time zones on Ubuntu (at
+ least as of 2020) have an initial transition added at ``-2**58``.
+ """
+
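+        # Concretely (illustrative, not asserted below): the -(1 << 62)
+        # transition used here is a valid 64-bit TZif timestamp, but
+        # timedelta(seconds=-(1 << 62)) raises OverflowError, so this test
+        # checks the offsets reported at datetime.min/datetime.max rather
+        # than converting such timestamps to datetimes.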
+ LMT = ZoneOffset("LMT", timedelta(seconds=-968))
+ GMT = ZoneOffset("GMT", ZERO)
+
+ transitions = [
+ (-(1 << 62), LMT, LMT),
+ ZoneTransition(datetime(1912, 1, 1), LMT, GMT),
+ ((1 << 62), GMT, GMT),
+ ]
+
+ after = "GMT0"
+
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf, key="Africa/Abidjan")
+
+ offset_cases = [
+ (datetime.min, LMT),
+ (datetime.max, GMT),
+ (datetime(1911, 12, 31), LMT),
+ (datetime(1912, 1, 2), GMT),
+ ]
+
+ for dt_naive, offset in offset_cases:
+ dt = dt_naive.replace(tzinfo=zi)
+ with self.subTest(name="offset", dt=dt, offset=offset):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ utc_cases = [
+ (datetime.min, datetime.min + timedelta(seconds=968)),
+ (datetime(1898, 12, 31, 23, 43, 52), datetime(1899, 1, 1)),
+ (
+ datetime(1911, 12, 31, 23, 59, 59, 999999),
+ datetime(1912, 1, 1, 0, 16, 7, 999999),
+ ),
+ (datetime(1912, 1, 1, 0, 16, 8), datetime(1912, 1, 1, 0, 16, 8)),
+ (datetime(1970, 1, 1), datetime(1970, 1, 1)),
+ (datetime.max, datetime.max),
+ ]
+
+ for naive_dt, naive_dt_utc in utc_cases:
+ dt = naive_dt.replace(tzinfo=zi)
+ dt_utc = naive_dt_utc.replace(tzinfo=timezone.utc)
+
+ self.assertEqual(dt_utc.astimezone(zi), dt)
+ self.assertEqual(dt, dt_utc)
+
+ def test_fixed_offset_phantom_transition(self):
+ UTC = ZoneOffset("UTC", ZERO, ZERO)
+
+ transitions = [ZoneTransition(datetime(1970, 1, 1), UTC, UTC)]
+
+ after = "UTC0"
+ zf = self.construct_zone(transitions, after)
+ zi = self.klass.from_file(zf, key="UTC")
+
+ dt = datetime(2020, 1, 1, tzinfo=zi)
+ with self.subTest("datetime.datetime"):
+ self.assertEqual(dt.tzname(), UTC.tzname)
+ self.assertEqual(dt.utcoffset(), UTC.utcoffset)
+ self.assertEqual(dt.dst(), UTC.dst)
+
+ t = time(0, tzinfo=zi)
+ with self.subTest("datetime.time"):
+ self.assertEqual(t.tzname(), UTC.tzname)
+ self.assertEqual(t.utcoffset(), UTC.utcoffset)
+ self.assertEqual(t.dst(), UTC.dst)
+
+ def construct_zone(self, transitions, after=None, version=3):
+ # These are not used for anything, so we're not going to include
+ # them for now.
+ isutc = []
+ isstd = []
+ leap_seconds = []
+
+ offset_lists = [[], []]
+ trans_times_lists = [[], []]
+ trans_idx_lists = [[], []]
+
+ v1_range = (-(2 ** 31), 2 ** 31)
+ v2_range = (-(2 ** 63), 2 ** 63)
+ ranges = [v1_range, v2_range]
+
+ def zt_as_tuple(zt):
+ # zt may be a tuple (timestamp, offset_before, offset_after) or
+ # a ZoneTransition object — this is to allow the timestamp to be
+ # values that are outside the valid range for datetimes but still
+ # valid 64-bit timestamps.
+ if isinstance(zt, tuple):
+ return zt
+
+ if zt.transition:
+ trans_time = int(zt.transition_utc.timestamp())
+ else:
+ trans_time = None
+
+ return (trans_time, zt.offset_before, zt.offset_after)
+
+ transitions = sorted(map(zt_as_tuple, transitions), key=lambda x: x[0])
+
+ for zt in transitions:
+ trans_time, offset_before, offset_after = zt
+
+ for v, (dt_min, dt_max) in enumerate(ranges):
+ offsets = offset_lists[v]
+ trans_times = trans_times_lists[v]
+ trans_idx = trans_idx_lists[v]
+
+ if trans_time is not None and not (
+ dt_min <= trans_time <= dt_max
+ ):
+ continue
+
+ if offset_before not in offsets:
+ offsets.append(offset_before)
+
+ if offset_after not in offsets:
+ offsets.append(offset_after)
+
+ if trans_time is not None:
+ trans_times.append(trans_time)
+ trans_idx.append(offsets.index(offset_after))
+
+ isutcnt = len(isutc)
+ isstdcnt = len(isstd)
+ leapcnt = len(leap_seconds)
+
+ zonefile = io.BytesIO()
+
+ time_types = ("l", "q")
+ for v in range(min((version, 2))):
+ offsets = offset_lists[v]
+ trans_times = trans_times_lists[v]
+ trans_idx = trans_idx_lists[v]
+ time_type = time_types[v]
+
+ # Translate the offsets into something closer to the C values
+ abbrstr = bytearray()
+ ttinfos = []
+
+ for offset in offsets:
+ utcoff = int(offset.utcoffset.total_seconds())
+ isdst = bool(offset.dst)
+ abbrind = len(abbrstr)
+
+ ttinfos.append((utcoff, isdst, abbrind))
+ abbrstr += offset.tzname.encode("ascii") + b"\x00"
+ abbrstr = bytes(abbrstr)
+
+ typecnt = len(offsets)
+ timecnt = len(trans_times)
+ charcnt = len(abbrstr)
+
+ # Write the header
+ zonefile.write(b"TZif")
+ zonefile.write(b"%d" % version)
+ zonefile.write(b" " * 15)
+ zonefile.write(
+ struct.pack(
+ ">6l", isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt
+ )
+ )
+
+ # Now the transition data
+ zonefile.write(struct.pack(f">{timecnt}{time_type}", *trans_times))
+ zonefile.write(struct.pack(f">{timecnt}B", *trans_idx))
+
+ for ttinfo in ttinfos:
+ zonefile.write(struct.pack(">lbb", *ttinfo))
+
+ zonefile.write(bytes(abbrstr))
+
+ # Now the metadata and leap seconds
+ zonefile.write(struct.pack(f"{isutcnt}b", *isutc))
+ zonefile.write(struct.pack(f"{isstdcnt}b", *isstd))
+ zonefile.write(struct.pack(f">{leapcnt}l", *leap_seconds))
+
+ # Finally we write the TZ string if we're writing a Version 2+ file
+ if v > 0:
+ zonefile.write(b"\x0A")
+ zonefile.write(after.encode("ascii"))
+ zonefile.write(b"\x0A")
+
+ zonefile.seek(0)
+ return zonefile
+
+
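+# Illustrative sketch (not exercised by the tests above): reading back the
+# fixed-size header that construct_zone() writes.  Field order follows
+# RFC 8536, so any file built by construct_zone() should round-trip here.
+def _read_tzif_header(fobj):
+    magic = fobj.read(4)
+    assert magic == b"TZif", magic
+    version = fobj.read(1)               # b"1", b"2" or b"3"
+    fobj.read(15)                        # reserved padding
+    # Six big-endian 32-bit counts: isutcnt, isstdcnt, leapcnt, timecnt,
+    # typecnt and charcnt (see construct_zone above).
+    return (version, *struct.unpack(">6l", fobj.read(24)))
+
+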
+class CWeirdZoneTest(WeirdZoneTest):
+ module = c_zoneinfo
+
+
+class TZStrTest(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ NORMAL = 0
+ FOLD = 1
+ GAP = 2
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+
+ cls._populate_test_cases()
+ cls.populate_tzstr_header()
+
+ @classmethod
+ def populate_tzstr_header(cls):
+ out = bytearray()
+ # The TZif format always starts with a Version 1 file followed by
+ # the Version 2+ file. In this case, we have no transitions, just
+ # the tzstr in the footer, so up to the footer, the files are
+ # identical and we can just write the same file twice in a row.
+ for i in range(2):
+ out += b"TZif" # Magic value
+ out += b"3" # Version
+ out += b" " * 15 # Reserved
+
+ # We will not write any of the manual transition parts
+ out += struct.pack(">6l", 0, 0, 0, 0, 0, 0)
+
+ cls._tzif_header = bytes(out)
+
+ def zone_from_tzstr(self, tzstr):
+ """Creates a zoneinfo file following a POSIX rule."""
+ zonefile = io.BytesIO(self._tzif_header)
+ zonefile.seek(0, 2)
+
+ # Write the footer
+ zonefile.write(b"\x0A")
+ zonefile.write(tzstr.encode("ascii"))
+ zonefile.write(b"\x0A")
+
+ zonefile.seek(0)
+
+ return self.klass.from_file(zonefile, key=tzstr)
+
+ def test_tzstr_localized(self):
+ for tzstr, cases in self.test_cases.items():
+ with self.subTest(tzstr=tzstr):
+ zi = self.zone_from_tzstr(tzstr)
+
+ for dt_naive, offset, _ in cases:
+ dt = dt_naive.replace(tzinfo=zi)
+
+ with self.subTest(tzstr=tzstr, dt=dt, offset=offset):
+ self.assertEqual(dt.tzname(), offset.tzname)
+ self.assertEqual(dt.utcoffset(), offset.utcoffset)
+ self.assertEqual(dt.dst(), offset.dst)
+
+ def test_tzstr_from_utc(self):
+ for tzstr, cases in self.test_cases.items():
+ with self.subTest(tzstr=tzstr):
+ zi = self.zone_from_tzstr(tzstr)
+
+ for dt_naive, offset, dt_type in cases:
+ if dt_type == self.GAP:
+ continue # Cannot create a gap from UTC
+
+ dt_utc = (dt_naive - offset.utcoffset).replace(
+ tzinfo=timezone.utc
+ )
+
+ # Check that we can go UTC -> Our zone
+ dt_act = dt_utc.astimezone(zi)
+ dt_exp = dt_naive.replace(tzinfo=zi)
+
+ self.assertEqual(dt_act, dt_exp)
+
+ if dt_type == self.FOLD:
+ self.assertEqual(dt_act.fold, dt_naive.fold, dt_naive)
+ else:
+ self.assertEqual(dt_act.fold, 0)
+
+ # Now check that we can go our zone -> UTC
+ dt_act = dt_exp.astimezone(timezone.utc)
+
+ self.assertEqual(dt_act, dt_utc)
+
+ def test_invalid_tzstr(self):
+ invalid_tzstrs = [
+ "PST8PDT", # DST but no transition specified
+ "+11", # Unquoted alphanumeric
+ "GMT,M3.2.0/2,M11.1.0/3", # Transition rule but no DST
+ "GMT0+11,M3.2.0/2,M11.1.0/3", # Unquoted alphanumeric in DST
+ "PST8PDT,M3.2.0/2", # Only one transition rule
+ # Invalid offsets
+ "STD+25",
+ "STD-25",
+ "STD+374",
+ "STD+374DST,M3.2.0/2,M11.1.0/3",
+ "STD+23DST+25,M3.2.0/2,M11.1.0/3",
+ "STD-23DST-25,M3.2.0/2,M11.1.0/3",
+ # Completely invalid dates
+ "AAA4BBB,M1443339,M11.1.0/3",
+ "AAA4BBB,M3.2.0/2,0349309483959c",
+ # Invalid months
+ "AAA4BBB,M13.1.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M13.1.1/2",
+ "AAA4BBB,M0.1.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M0.1.1/2",
+ # Invalid weeks
+ "AAA4BBB,M1.6.1/2,M1.1.1/2",
+ "AAA4BBB,M1.1.1/2,M1.6.1/2",
+ # Invalid weekday
+ "AAA4BBB,M1.1.7/2,M2.1.1/2",
+ "AAA4BBB,M1.1.1/2,M2.1.7/2",
+ # Invalid numeric offset
+ "AAA4BBB,-1/2,20/2",
+ "AAA4BBB,1/2,-1/2",
+ "AAA4BBB,367,20/2",
+ "AAA4BBB,1/2,367/2",
+ # Invalid julian offset
+ "AAA4BBB,J0/2,J20/2",
+ "AAA4BBB,J20/2,J366/2",
+ ]
+
+ for invalid_tzstr in invalid_tzstrs:
+ with self.subTest(tzstr=invalid_tzstr):
+ # Not necessarily a guaranteed property, but we should show
+ # the problematic TZ string if that's the cause of failure.
+ tzstr_regex = re.escape(invalid_tzstr)
+ with self.assertRaisesRegex(ValueError, tzstr_regex):
+ self.zone_from_tzstr(invalid_tzstr)
+
+ @classmethod
+ def _populate_test_cases(cls):
+ # This method uses a somewhat unusual style in that it populates the
+ # test cases for each tzstr by using a decorator to automatically call
+ # a function that mutates the current dictionary of test cases.
+ #
+ # The population of the test cases is done in individual functions to
+ # give each set of test cases its own namespace in which to define
+ # its offsets (this way we don't have to worry about variable reuse
+ # causing problems if someone makes a typo).
+ #
+ # The decorator for calling is used to make it more obvious that each
+ # function is actually called (if it's not decorated, it's not called).
+ def call(f):
+ """Decorator to call the addition methods.
+
+ This will call a function which adds at least one new entry into
+ the `cases` dictionary. The decorator will also assert that
+ something was added to the dictionary.
+ """
+ prev_len = len(cases)
+ f()
+ assert len(cases) > prev_len, "Function did not add a test case!"
+
+ NORMAL = cls.NORMAL
+ FOLD = cls.FOLD
+ GAP = cls.GAP
+
+ cases = {}
+
+ @call
+ def _add():
+ # Transition to EDT on the 2nd Sunday in March at 4 AM, and
+ # transition back on the first Sunday in November at 3AM
+ tzstr = "EST5EDT,M3.2.0/4:00,M11.1.0/3:00"
+
+ EST = ZoneOffset("EST", timedelta(hours=-5), ZERO)
+ EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 9), EST, NORMAL),
+ (datetime(2019, 3, 10, 3, 59), EST, NORMAL),
+ (datetime(2019, 3, 10, 4, 0, fold=0), EST, GAP),
+ (datetime(2019, 3, 10, 4, 0, fold=1), EDT, GAP),
+ (datetime(2019, 3, 10, 4, 1, fold=0), EST, GAP),
+ (datetime(2019, 3, 10, 4, 1, fold=1), EDT, GAP),
+ (datetime(2019, 11, 2), EDT, NORMAL),
+ (datetime(2019, 11, 3, 1, 59, fold=1), EDT, NORMAL),
+ (datetime(2019, 11, 3, 2, 0, fold=0), EDT, FOLD),
+ (datetime(2019, 11, 3, 2, 0, fold=1), EST, FOLD),
+ (datetime(2020, 3, 8, 3, 59), EST, NORMAL),
+ (datetime(2020, 3, 8, 4, 0, fold=0), EST, GAP),
+ (datetime(2020, 3, 8, 4, 0, fold=1), EDT, GAP),
+ (datetime(2020, 11, 1, 1, 59, fold=1), EDT, NORMAL),
+ (datetime(2020, 11, 1, 2, 0, fold=0), EDT, FOLD),
+ (datetime(2020, 11, 1, 2, 0, fold=1), EST, FOLD),
+ )
+
+ @call
+ def _add():
+ # Transition to BST happens on the last Sunday in March at 1 AM GMT
+ # and the transition back happens the last Sunday in October at 2AM BST
+ tzstr = "GMT0BST-1,M3.5.0/1:00,M10.5.0/2:00"
+
+ GMT = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 30), GMT, NORMAL),
+ (datetime(2019, 3, 31, 0, 59), GMT, NORMAL),
+ (datetime(2019, 3, 31, 2, 0), BST, NORMAL),
+ (datetime(2019, 10, 26), BST, NORMAL),
+ (datetime(2019, 10, 27, 0, 59, fold=1), BST, NORMAL),
+ (datetime(2019, 10, 27, 1, 0, fold=0), BST, GAP),
+ (datetime(2019, 10, 27, 2, 0, fold=1), GMT, GAP),
+ (datetime(2020, 3, 29, 0, 59), GMT, NORMAL),
+ (datetime(2020, 3, 29, 2, 0), BST, NORMAL),
+ (datetime(2020, 10, 25, 0, 59, fold=1), BST, NORMAL),
+ (datetime(2020, 10, 25, 1, 0, fold=0), BST, FOLD),
+ (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL),
+ )
+
+ @call
+ def _add():
+            # Australian time zone - DST start is chronologically first
+ tzstr = "AEST-10AEDT,M10.1.0/2,M4.1.0/3"
+
+ AEST = ZoneOffset("AEST", timedelta(hours=10), ZERO)
+ AEDT = ZoneOffset("AEDT", timedelta(hours=11), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 4, 6), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 1, 59), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 1, 59, fold=1), AEDT, NORMAL),
+ (datetime(2019, 4, 7, 2, 0, fold=0), AEDT, FOLD),
+ (datetime(2019, 4, 7, 2, 1, fold=0), AEDT, FOLD),
+ (datetime(2019, 4, 7, 2, 0, fold=1), AEST, FOLD),
+ (datetime(2019, 4, 7, 2, 1, fold=1), AEST, FOLD),
+ (datetime(2019, 4, 7, 3, 0, fold=0), AEST, NORMAL),
+ (datetime(2019, 4, 7, 3, 0, fold=1), AEST, NORMAL),
+ (datetime(2019, 10, 5, 0), AEST, NORMAL),
+ (datetime(2019, 10, 6, 1, 59), AEST, NORMAL),
+ (datetime(2019, 10, 6, 2, 0, fold=0), AEST, GAP),
+ (datetime(2019, 10, 6, 2, 0, fold=1), AEDT, GAP),
+ (datetime(2019, 10, 6, 3, 0), AEDT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Irish time zone - negative DST
+ tzstr = "IST-1GMT0,M10.5.0,M3.5.0/1"
+
+ GMT = ZoneOffset("GMT", ZERO, -ONE_H)
+ IST = ZoneOffset("IST", ONE_H, ZERO)
+
+ cases[tzstr] = (
+ (datetime(2019, 3, 30), GMT, NORMAL),
+ (datetime(2019, 3, 31, 0, 59), GMT, NORMAL),
+ (datetime(2019, 3, 31, 2, 0), IST, NORMAL),
+ (datetime(2019, 10, 26), IST, NORMAL),
+ (datetime(2019, 10, 27, 0, 59, fold=1), IST, NORMAL),
+ (datetime(2019, 10, 27, 1, 0, fold=0), IST, FOLD),
+ (datetime(2019, 10, 27, 1, 0, fold=1), GMT, FOLD),
+ (datetime(2019, 10, 27, 2, 0, fold=1), GMT, NORMAL),
+ (datetime(2020, 3, 29, 0, 59), GMT, NORMAL),
+ (datetime(2020, 3, 29, 2, 0), IST, NORMAL),
+ (datetime(2020, 10, 25, 0, 59, fold=1), IST, NORMAL),
+ (datetime(2020, 10, 25, 1, 0, fold=0), IST, FOLD),
+ (datetime(2020, 10, 25, 2, 0, fold=1), GMT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Pacific/Kosrae: Fixed offset zone with a quoted numerical tzname
+ tzstr = "<+11>-11"
+
+ cases[tzstr] = (
+ (
+ datetime(2020, 1, 1),
+ ZoneOffset("+11", timedelta(hours=11)),
+ NORMAL,
+ ),
+ )
+
+ @call
+ def _add():
+ # Quoted STD and DST, transitions at 24:00
+ tzstr = "<-04>4<-03>,M9.1.6/24,M4.1.6/24"
+
+ M04 = ZoneOffset("-04", timedelta(hours=-4))
+ M03 = ZoneOffset("-03", timedelta(hours=-3), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2020, 5, 1), M04, NORMAL),
+ (datetime(2020, 11, 1), M03, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Permanent daylight saving time is modeled with transitions at 0/0
+ # and J365/25, as mentioned in RFC 8536 Section 3.3.1
+ tzstr = "EST5EDT,0/0,J365/25"
+
+ EDT = ZoneOffset("EDT", timedelta(hours=-4), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 1, 1), EDT, NORMAL),
+ (datetime(2019, 6, 1), EDT, NORMAL),
+ (datetime(2019, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ (datetime(2020, 1, 1), EDT, NORMAL),
+ (datetime(2020, 3, 1), EDT, NORMAL),
+ (datetime(2020, 6, 1), EDT, NORMAL),
+ (datetime(2020, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ (datetime(2400, 1, 1), EDT, NORMAL),
+ (datetime(2400, 3, 1), EDT, NORMAL),
+ (datetime(2400, 12, 31, 23, 59, 59, 999999), EDT, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Transitions on March 1st and November 1st of each year
+ tzstr = "AAA3BBB,J60/12,J305/12"
+
+ AAA = ZoneOffset("AAA", timedelta(hours=-3))
+ BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2019, 1, 1), AAA, NORMAL),
+ (datetime(2019, 2, 28), AAA, NORMAL),
+ (datetime(2019, 3, 1, 11, 59), AAA, NORMAL),
+ (datetime(2019, 3, 1, 12, fold=0), AAA, GAP),
+ (datetime(2019, 3, 1, 12, fold=1), BBB, GAP),
+ (datetime(2019, 3, 1, 13), BBB, NORMAL),
+ (datetime(2019, 11, 1, 10, 59), BBB, NORMAL),
+ (datetime(2019, 11, 1, 11, fold=0), BBB, FOLD),
+ (datetime(2019, 11, 1, 11, fold=1), AAA, FOLD),
+ (datetime(2019, 11, 1, 12), AAA, NORMAL),
+ (datetime(2019, 12, 31, 23, 59, 59, 999999), AAA, NORMAL),
+ (datetime(2020, 1, 1), AAA, NORMAL),
+ (datetime(2020, 2, 29), AAA, NORMAL),
+ (datetime(2020, 3, 1, 11, 59), AAA, NORMAL),
+ (datetime(2020, 3, 1, 12, fold=0), AAA, GAP),
+ (datetime(2020, 3, 1, 12, fold=1), BBB, GAP),
+ (datetime(2020, 3, 1, 13), BBB, NORMAL),
+ (datetime(2020, 11, 1, 10, 59), BBB, NORMAL),
+ (datetime(2020, 11, 1, 11, fold=0), BBB, FOLD),
+ (datetime(2020, 11, 1, 11, fold=1), AAA, FOLD),
+ (datetime(2020, 11, 1, 12), AAA, NORMAL),
+ (datetime(2020, 12, 31, 23, 59, 59, 999999), AAA, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Taken from America/Godthab, this rule has a transition on the
+ # Saturday before the last Sunday of March and October, at 22:00
+ # and 23:00, respectively. This is encoded with negative start
+ # and end transition times.
+ tzstr = "<-03>3<-02>,M3.5.0/-2,M10.5.0/-1"
+
+ N03 = ZoneOffset("-03", timedelta(hours=-3))
+ N02 = ZoneOffset("-02", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2020, 3, 27), N03, NORMAL),
+ (datetime(2020, 3, 28, 21, 59, 59), N03, NORMAL),
+ (datetime(2020, 3, 28, 22, fold=0), N03, GAP),
+ (datetime(2020, 3, 28, 22, fold=1), N02, GAP),
+ (datetime(2020, 3, 28, 23), N02, NORMAL),
+ (datetime(2020, 10, 24, 21), N02, NORMAL),
+ (datetime(2020, 10, 24, 22, fold=0), N02, FOLD),
+ (datetime(2020, 10, 24, 22, fold=1), N03, FOLD),
+ (datetime(2020, 10, 24, 23), N03, NORMAL),
+ )
+
+ @call
+ def _add():
+ # Transition times with minutes and seconds
+ tzstr = "AAA3BBB,M3.2.0/01:30,M11.1.0/02:15:45"
+
+ AAA = ZoneOffset("AAA", timedelta(hours=-3))
+ BBB = ZoneOffset("BBB", timedelta(hours=-2), ONE_H)
+
+ cases[tzstr] = (
+ (datetime(2012, 3, 11, 1, 0), AAA, NORMAL),
+ (datetime(2012, 3, 11, 1, 30, fold=0), AAA, GAP),
+ (datetime(2012, 3, 11, 1, 30, fold=1), BBB, GAP),
+ (datetime(2012, 3, 11, 2, 30), BBB, NORMAL),
+ (datetime(2012, 11, 4, 1, 15, 44, 999999), BBB, NORMAL),
+ (datetime(2012, 11, 4, 1, 15, 45, fold=0), BBB, FOLD),
+ (datetime(2012, 11, 4, 1, 15, 45, fold=1), AAA, FOLD),
+ (datetime(2012, 11, 4, 2, 15, 45), AAA, NORMAL),
+ )
+
+ cls.test_cases = cases
+
+
+class CTZStrTest(TZStrTest):
+ module = c_zoneinfo
+
+
+class ZoneInfoCacheTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def setUp(self):
+ self.klass.clear_cache()
+ super().setUp()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def test_ephemeral_zones(self):
+ self.assertIs(
+ self.klass("America/Los_Angeles"), self.klass("America/Los_Angeles")
+ )
+
+ def test_strong_refs(self):
+ tz0 = self.klass("Australia/Sydney")
+ tz1 = self.klass("Australia/Sydney")
+
+ self.assertIs(tz0, tz1)
+
+ def test_no_cache(self):
+ tz0 = self.klass("Europe/Lisbon")
+ tz1 = self.klass.no_cache("Europe/Lisbon")
+
+ self.assertIsNot(tz0, tz1)
+
+ def test_cache_reset_tzpath(self):
+ """Test that the cache persists when tzpath has been changed.
+
+ The PEP specifies that as long as a reference exists to one zone
+ with a given key, the primary constructor must continue to return
+ the same object.
+ """
+ zi0 = self.klass("America/Los_Angeles")
+ with self.tzpath_context([]):
+ zi1 = self.klass("America/Los_Angeles")
+
+ self.assertIs(zi0, zi1)
+
+ def test_clear_cache_explicit_none(self):
+ la0 = self.klass("America/Los_Angeles")
+ self.klass.clear_cache(only_keys=None)
+ la1 = self.klass("America/Los_Angeles")
+
+ self.assertIsNot(la0, la1)
+
+ def test_clear_cache_one_key(self):
+ """Tests that you can clear a single key from the cache."""
+ la0 = self.klass("America/Los_Angeles")
+ dub0 = self.klass("Europe/Dublin")
+
+ self.klass.clear_cache(only_keys=["America/Los_Angeles"])
+
+ la1 = self.klass("America/Los_Angeles")
+ dub1 = self.klass("Europe/Dublin")
+
+ self.assertIsNot(la0, la1)
+ self.assertIs(dub0, dub1)
+
+ def test_clear_cache_two_keys(self):
+ la0 = self.klass("America/Los_Angeles")
+ dub0 = self.klass("Europe/Dublin")
+ tok0 = self.klass("Asia/Tokyo")
+
+ self.klass.clear_cache(
+ only_keys=["America/Los_Angeles", "Europe/Dublin"]
+ )
+
+ la1 = self.klass("America/Los_Angeles")
+ dub1 = self.klass("Europe/Dublin")
+ tok1 = self.klass("Asia/Tokyo")
+
+ self.assertIsNot(la0, la1)
+ self.assertIsNot(dub0, dub1)
+ self.assertIs(tok0, tok1)
+
+
+class CZoneInfoCacheTest(ZoneInfoCacheTest):
+ module = c_zoneinfo
+
+
+class ZoneInfoPickleTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def setUp(self):
+ self.klass.clear_cache()
+
+ with contextlib.ExitStack() as stack:
+ stack.enter_context(test_support.set_zoneinfo_module(self.module))
+ self.addCleanup(stack.pop_all().close)
+
+ super().setUp()
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ @property
+ def tzpath(self):
+ return [self.zoneinfo_data.tzpath]
+
+ def test_cache_hit(self):
+ zi_in = self.klass("Europe/Dublin")
+ pkl = pickle.dumps(zi_in)
+ zi_rt = pickle.loads(pkl)
+
+ with self.subTest(test="Is non-pickled ZoneInfo"):
+ self.assertIs(zi_in, zi_rt)
+
+ zi_rt2 = pickle.loads(pkl)
+ with self.subTest(test="Is unpickled ZoneInfo"):
+ self.assertIs(zi_rt, zi_rt2)
+
+ def test_cache_miss(self):
+ zi_in = self.klass("Europe/Dublin")
+ pkl = pickle.dumps(zi_in)
+
+ del zi_in
+ self.klass.clear_cache() # Induce a cache miss
+ zi_rt = pickle.loads(pkl)
+ zi_rt2 = pickle.loads(pkl)
+
+ self.assertIs(zi_rt, zi_rt2)
+
+ def test_no_cache(self):
+ zi_no_cache = self.klass.no_cache("Europe/Dublin")
+
+ pkl = pickle.dumps(zi_no_cache)
+ zi_rt = pickle.loads(pkl)
+
+ with self.subTest(test="Not the pickled object"):
+ self.assertIsNot(zi_rt, zi_no_cache)
+
+ zi_rt2 = pickle.loads(pkl)
+ with self.subTest(test="Not a second unpickled object"):
+ self.assertIsNot(zi_rt, zi_rt2)
+
+ zi_cache = self.klass("Europe/Dublin")
+ with self.subTest(test="Not a cached object"):
+ self.assertIsNot(zi_rt, zi_cache)
+
+ def test_from_file(self):
+ key = "Europe/Dublin"
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ zi_nokey = self.klass.from_file(f)
+
+ f.seek(0)
+ zi_key = self.klass.from_file(f, key=key)
+
+ test_cases = [
+ (zi_key, "ZoneInfo with key"),
+ (zi_nokey, "ZoneInfo without key"),
+ ]
+
+ for zi, test_name in test_cases:
+ with self.subTest(test_name=test_name):
+ with self.assertRaises(pickle.PicklingError):
+ pickle.dumps(zi)
+
+ def test_pickle_after_from_file(self):
+        # This may be a bit of paranoia, but this test ensures that no global
+        # state is maintained in order to handle the pickle cache and
+        # from_file behavior, and that the two constructors can be
+        # interleaved with pickling/unpickling without issues.
+ key = "Europe/Dublin"
+ zi = self.klass(key)
+
+ pkl_0 = pickle.dumps(zi)
+ zi_rt_0 = pickle.loads(pkl_0)
+ self.assertIs(zi, zi_rt_0)
+
+ with open(self.zoneinfo_data.path_from_key(key), "rb") as f:
+ zi_ff = self.klass.from_file(f, key=key)
+
+ pkl_1 = pickle.dumps(zi)
+ zi_rt_1 = pickle.loads(pkl_1)
+ self.assertIs(zi, zi_rt_1)
+
+ with self.assertRaises(pickle.PicklingError):
+ pickle.dumps(zi_ff)
+
+ pkl_2 = pickle.dumps(zi)
+ zi_rt_2 = pickle.loads(pkl_2)
+ self.assertIs(zi, zi_rt_2)
+
+
+class CZoneInfoPickleTest(ZoneInfoPickleTest):
+ module = c_zoneinfo
+
+
+class CallingConventionTest(ZoneInfoTestBase):
+ """Tests for functions with restricted calling conventions."""
+
+ module = py_zoneinfo
+
+ @property
+ def zoneinfo_data(self):
+ return ZONEINFO_DATA
+
+ def test_from_file(self):
+ with open(self.zoneinfo_data.path_from_key("UTC"), "rb") as f:
+ with self.assertRaises(TypeError):
+ self.klass.from_file(fobj=f)
+
+ def test_clear_cache(self):
+ with self.assertRaises(TypeError):
+ self.klass.clear_cache(["UTC"])
+
+
+class CCallingConventionTest(CallingConventionTest):
+ module = c_zoneinfo
+
+
+class TzPathTest(TzPathUserMixin, ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ @staticmethod
+ @contextlib.contextmanager
+ def python_tzpath_context(value):
+ path_var = "PYTHONTZPATH"
+ try:
+ with OS_ENV_LOCK:
+ old_env = os.environ.get(path_var, None)
+ os.environ[path_var] = value
+ yield
+ finally:
+ if old_env is None:
+ del os.environ[path_var]
+ else:
+ os.environ[path_var] = old_env # pragma: nocover
+
+ def test_env_variable(self):
+ """Tests that the environment variable works with reset_tzpath."""
+ new_paths = [
+ ("", []),
+ ("/etc/zoneinfo", ["/etc/zoneinfo"]),
+ (f"/a/b/c{os.pathsep}/d/e/f", ["/a/b/c", "/d/e/f"]),
+ ]
+
+ for new_path_var, expected_result in new_paths:
+ with self.python_tzpath_context(new_path_var):
+ with self.subTest(tzpath=new_path_var):
+ self.module.reset_tzpath()
+ tzpath = self.module.TZPATH
+ self.assertSequenceEqual(tzpath, expected_result)
+
+ def test_env_variable_relative_paths(self):
+ test_cases = [
+ [("path/to/somewhere",), ()],
+ [
+ ("/usr/share/zoneinfo", "path/to/somewhere",),
+ ("/usr/share/zoneinfo",),
+ ],
+ [("../relative/path",), ()],
+ [
+ ("/usr/share/zoneinfo", "../relative/path",),
+ ("/usr/share/zoneinfo",),
+ ],
+ [("path/to/somewhere", "../relative/path",), ()],
+ [
+ (
+ "/usr/share/zoneinfo",
+ "path/to/somewhere",
+ "../relative/path",
+ ),
+ ("/usr/share/zoneinfo",),
+ ],
+ ]
+
+ for input_paths, expected_paths in test_cases:
+ path_var = os.pathsep.join(input_paths)
+ with self.python_tzpath_context(path_var):
+ with self.subTest("warning", path_var=path_var):
+ # Note: Per PEP 615 the warning is implementation-defined
+                    # behavior; other implementations need not warn.
+ with self.assertWarns(self.module.InvalidTZPathWarning):
+ self.module.reset_tzpath()
+
+ tzpath = self.module.TZPATH
+ with self.subTest("filtered", path_var=path_var):
+ self.assertSequenceEqual(tzpath, expected_paths)
+
+ def test_reset_tzpath_kwarg(self):
+ self.module.reset_tzpath(to=["/a/b/c"])
+
+ self.assertSequenceEqual(self.module.TZPATH, ("/a/b/c",))
+
+ def test_reset_tzpath_relative_paths(self):
+ bad_values = [
+ ("path/to/somewhere",),
+ ("/usr/share/zoneinfo", "path/to/somewhere",),
+ ("../relative/path",),
+ ("/usr/share/zoneinfo", "../relative/path",),
+ ("path/to/somewhere", "../relative/path",),
+ ("/usr/share/zoneinfo", "path/to/somewhere", "../relative/path",),
+ ]
+ for input_paths in bad_values:
+ with self.subTest(input_paths=input_paths):
+ with self.assertRaises(ValueError):
+ self.module.reset_tzpath(to=input_paths)
+
+ def test_tzpath_type_error(self):
+ bad_values = [
+ "/etc/zoneinfo:/usr/share/zoneinfo",
+ b"/etc/zoneinfo:/usr/share/zoneinfo",
+ 0,
+ ]
+
+ for bad_value in bad_values:
+ with self.subTest(value=bad_value):
+ with self.assertRaises(TypeError):
+ self.module.reset_tzpath(bad_value)
+
+ def test_tzpath_attribute(self):
+ tzpath_0 = ["/one", "/two"]
+ tzpath_1 = ["/three"]
+
+ with self.tzpath_context(tzpath_0):
+ query_0 = self.module.TZPATH
+
+ with self.tzpath_context(tzpath_1):
+ query_1 = self.module.TZPATH
+
+ self.assertSequenceEqual(tzpath_0, query_0)
+ self.assertSequenceEqual(tzpath_1, query_1)
+
+
+class CTzPathTest(TzPathTest):
+ module = c_zoneinfo
+
+
+class TestModule(ZoneInfoTestBase):
+ module = py_zoneinfo
+
+ def test_getattr_error(self):
+ with self.assertRaises(AttributeError):
+ self.module.NOATTRIBUTE
+
+ def test_dir_contains_all(self):
+ """dir(self.module) should at least contain everything in __all__."""
+ module_all_set = set(self.module.__all__)
+ module_dir_set = set(dir(self.module))
+
+ difference = module_all_set - module_dir_set
+
+ self.assertFalse(difference)
+
+ def test_dir_unique(self):
+ """Test that there are no duplicates in dir(self.module)"""
+ module_dir = dir(self.module)
+ module_unique = set(module_dir)
+
+ self.assertCountEqual(module_dir, module_unique)
+
+
+class CTestModule(TestModule):
+ module = c_zoneinfo
+
+
+class ExtensionBuiltTest(unittest.TestCase):
+ """Smoke test to ensure that the C and Python extensions are both tested.
+
+ Because the intention is for the Python and C versions of ZoneInfo to
+ behave identically, these tests necessarily rely on implementation details,
+ so the tests may need to be adjusted if the implementations change. Do not
+ rely on these tests as an indication of stable properties of these classes.
+ """
+
+ def test_cache_location(self):
+ # The pure Python version stores caches on attributes, but the C
+ # extension stores them in C globals (at least for now)
+ self.assertFalse(hasattr(c_zoneinfo.ZoneInfo, "_weak_cache"))
+ self.assertTrue(hasattr(py_zoneinfo.ZoneInfo, "_weak_cache"))
+
+ def test_gc_tracked(self):
+ # The pure Python version is tracked by the GC but (for now) the C
+ # version is not.
+ import gc
+
+ self.assertTrue(gc.is_tracked(py_zoneinfo.ZoneInfo))
+ self.assertFalse(gc.is_tracked(c_zoneinfo.ZoneInfo))
+
+
+@dataclasses.dataclass(frozen=True)
+class ZoneOffset:
+ tzname: str
+ utcoffset: timedelta
+ dst: timedelta = ZERO
+
+
+@dataclasses.dataclass(frozen=True)
+class ZoneTransition:
+ transition: datetime
+ offset_before: ZoneOffset
+ offset_after: ZoneOffset
+
+ @property
+ def transition_utc(self):
+ return (self.transition - self.offset_before.utcoffset).replace(
+ tzinfo=timezone.utc
+ )
+
+ @property
+ def fold(self):
+ """Whether this introduces a fold"""
+ return self.offset_before.utcoffset > self.offset_after.utcoffset
+
+ @property
+ def gap(self):
+ """Whether this introduces a gap"""
+ return self.offset_before.utcoffset < self.offset_after.utcoffset
+
+ @property
+ def delta(self):
+ return self.offset_after.utcoffset - self.offset_before.utcoffset
+
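+    # The anomalous wall-clock interval: for a gap it runs from the transition
+    # to transition + delta; for a fold, delta is negative, so it runs from
+    # transition + delta up to the transition itself.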
+ @property
+ def anomaly_start(self):
+ if self.fold:
+ return self.transition + self.delta
+ else:
+ return self.transition
+
+ @property
+ def anomaly_end(self):
+ if not self.fold:
+ return self.transition + self.delta
+ else:
+ return self.transition
+
+
+class ZoneInfoData:
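+    """Materializes the pinned JSON zone data as TZif files under tzpath.
+
+    Each key's payload is decoded and written out as a real file so that
+    ZoneInfo can load it from disk; when v1 is true, the data is first
+    truncated to a version 1 TZif file.
+    """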
+ def __init__(self, source_json, tzpath, v1=False):
+ self.tzpath = pathlib.Path(tzpath)
+ self.keys = []
+ self.v1 = v1
+ self._populate_tzpath(source_json)
+
+ def path_from_key(self, key):
+ return self.tzpath / key
+
+ def _populate_tzpath(self, source_json):
+ with open(source_json, "rb") as f:
+ zoneinfo_dict = json.load(f)
+
+ zoneinfo_data = zoneinfo_dict["data"]
+
+ for key, value in zoneinfo_data.items():
+ self.keys.append(key)
+ raw_data = self._decode_text(value)
+
+ if self.v1:
+ data = self._convert_to_v1(raw_data)
+ else:
+ data = raw_data
+
+ destination = self.path_from_key(key)
+ destination.parent.mkdir(exist_ok=True, parents=True)
+ with open(destination, "wb") as f:
+ f.write(data)
+
+ def _decode_text(self, contents):
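+        # The JSON stores each zone as lzma-compressed TZif data, base85
+        # encoded and split across a list of strings; reassemble the pieces
+        # and undo both encodings.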
+ raw_data = b"".join(map(str.encode, contents))
+ decoded = base64.b85decode(raw_data)
+
+ return lzma.decompress(decoded)
+
+ def _convert_to_v1(self, contents):
+ assert contents[0:4] == b"TZif", "Invalid TZif data found!"
+ version = int(contents[4:5])
+
+ header_start = 4 + 16
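+        # The six 32-bit counts start at offset 20, after the 4-byte "TZif"
+        # magic, the 1-byte version and 15 reserved bytes.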
+ header_end = header_start + 24 # 6l == 24 bytes
+ assert version >= 2, "Version 1 file found: no conversion necessary"
+ isutcnt, isstdcnt, leapcnt, timecnt, typecnt, charcnt = struct.unpack(
+ ">6l", contents[header_start:header_end]
+ )
+
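+        # Size of the version 1 data block: 4-byte transition times plus
+        # 1-byte type indices (5 bytes per transition), 6-byte ttinfo
+        # records, the abbreviation string, 8-byte leap-second records, and
+        # the standard/wall and UT/local indicator bytes.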
+ file_size = (
+ timecnt * 5
+ + typecnt * 6
+ + charcnt
+ + leapcnt * 8
+ + isstdcnt
+ + isutcnt
+ )
+ file_size += header_end
+ out = b"TZif" + b"\x00" + contents[5:file_size]
+
+ assert (
+ contents[file_size : (file_size + 4)] == b"TZif"
+ ), "Version 2 file not truncated at Version 2 header"
+
+ return out
+
+
+class ZoneDumpData:
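+    """Lazily-built tables of expected offsets and transitions per zone key."""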
+ @classmethod
+ def transition_keys(cls):
+ return cls._get_zonedump().keys()
+
+ @classmethod
+ def load_transition_examples(cls, key):
+ return cls._get_zonedump()[key]
+
+ @classmethod
+ def fixed_offset_zones(cls):
+ if not cls._FIXED_OFFSET_ZONES:
+ cls._populate_fixed_offsets()
+
+ return cls._FIXED_OFFSET_ZONES.items()
+
+ @classmethod
+ def _get_zonedump(cls):
+ if not cls._ZONEDUMP_DATA:
+ cls._populate_zonedump_data()
+ return cls._ZONEDUMP_DATA
+
+ @classmethod
+ def _populate_fixed_offsets(cls):
+ cls._FIXED_OFFSET_ZONES = {
+ "UTC": ZoneOffset("UTC", ZERO, ZERO),
+ }
+
+ @classmethod
+ def _populate_zonedump_data(cls):
+ def _Africa_Abidjan():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-968))
+ GMT = ZoneOffset("GMT", ZERO)
+
+ return [
+ ZoneTransition(datetime(1912, 1, 1), LMT, GMT),
+ ]
+
+ def _Africa_Casablanca():
+ P00_s = ZoneOffset("+00", ZERO, ZERO)
+ P01_d = ZoneOffset("+01", ONE_H, ONE_H)
+ P00_d = ZoneOffset("+00", ZERO, -ONE_H)
+ P01_s = ZoneOffset("+01", ONE_H, ZERO)
+
+ return [
+ # Morocco sometimes pauses DST during Ramadan
+ ZoneTransition(datetime(2018, 3, 25, 2), P00_s, P01_d),
+ ZoneTransition(datetime(2018, 5, 13, 3), P01_d, P00_s),
+ ZoneTransition(datetime(2018, 6, 17, 2), P00_s, P01_d),
+                # On October 28th, 2018, Morocco set standard time to +01,
+ # with negative DST only during Ramadan
+ ZoneTransition(datetime(2018, 10, 28, 3), P01_d, P01_s),
+ ZoneTransition(datetime(2019, 5, 5, 3), P01_s, P00_d),
+ ZoneTransition(datetime(2019, 6, 9, 2), P00_d, P01_s),
+ ]
+
+ def _America_Los_Angeles():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-28378), ZERO)
+ PST = ZoneOffset("PST", timedelta(hours=-8), ZERO)
+ PDT = ZoneOffset("PDT", timedelta(hours=-7), ONE_H)
+ PWT = ZoneOffset("PWT", timedelta(hours=-7), ONE_H)
+ PPT = ZoneOffset("PPT", timedelta(hours=-7), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1883, 11, 18, 12, 7, 2), LMT, PST),
+ ZoneTransition(datetime(1918, 3, 31, 2), PST, PDT),
+ ZoneTransition(datetime(1918, 10, 27, 2), PDT, PST),
+ # Transition to Pacific War Time
+ ZoneTransition(datetime(1942, 2, 9, 2), PST, PWT),
+ # Transition from Pacific War Time to Pacific Peace Time
+ ZoneTransition(datetime(1945, 8, 14, 16), PWT, PPT),
+ ZoneTransition(datetime(1945, 9, 30, 2), PPT, PST),
+ ZoneTransition(datetime(2015, 3, 8, 2), PST, PDT),
+ ZoneTransition(datetime(2015, 11, 1, 2), PDT, PST),
+ # After 2038: Rules continue indefinitely
+ ZoneTransition(datetime(2450, 3, 13, 2), PST, PDT),
+ ZoneTransition(datetime(2450, 11, 6, 2), PDT, PST),
+ ]
+
+ def _America_Santiago():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-16966), ZERO)
+ SMT = ZoneOffset("SMT", timedelta(seconds=-16966), ZERO)
+ N05 = ZoneOffset("-05", timedelta(seconds=-18000), ZERO)
+ N04 = ZoneOffset("-04", timedelta(seconds=-14400), ZERO)
+ N03 = ZoneOffset("-03", timedelta(seconds=-10800), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1890, 1, 1), LMT, SMT),
+ ZoneTransition(datetime(1910, 1, 10), SMT, N05),
+ ZoneTransition(datetime(1916, 7, 1), N05, SMT),
+ ZoneTransition(datetime(2008, 3, 30), N03, N04),
+ ZoneTransition(datetime(2008, 10, 12), N04, N03),
+ ZoneTransition(datetime(2040, 4, 8), N03, N04),
+ ZoneTransition(datetime(2040, 9, 2), N04, N03),
+ ]
+
+ def _Asia_Tokyo():
+ JST = ZoneOffset("JST", timedelta(seconds=32400), ZERO)
+ JDT = ZoneOffset("JDT", timedelta(seconds=36000), ONE_H)
+
+ # Japan had DST from 1948 to 1951, and it was unusual in that
+ # the transition from DST to STD occurred at 25:00, and is
+ # denominated as such in the time zone database
+ return [
+ ZoneTransition(datetime(1948, 5, 2), JST, JDT),
+ ZoneTransition(datetime(1948, 9, 12, 1), JDT, JST),
+ ZoneTransition(datetime(1951, 9, 9, 1), JDT, JST),
+ ]
+
+ def _Australia_Sydney():
+ LMT = ZoneOffset("LMT", timedelta(seconds=36292), ZERO)
+ AEST = ZoneOffset("AEST", timedelta(seconds=36000), ZERO)
+ AEDT = ZoneOffset("AEDT", timedelta(seconds=39600), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1895, 2, 1), LMT, AEST),
+ ZoneTransition(datetime(1917, 1, 1, 0, 1), AEST, AEDT),
+ ZoneTransition(datetime(1917, 3, 25, 2), AEDT, AEST),
+ ZoneTransition(datetime(2012, 4, 1, 3), AEDT, AEST),
+ ZoneTransition(datetime(2012, 10, 7, 2), AEST, AEDT),
+ ZoneTransition(datetime(2040, 4, 1, 3), AEDT, AEST),
+ ZoneTransition(datetime(2040, 10, 7, 2), AEST, AEDT),
+ ]
+
+ def _Europe_Dublin():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-1500), ZERO)
+ DMT = ZoneOffset("DMT", timedelta(seconds=-1521), ZERO)
+ IST_0 = ZoneOffset("IST", timedelta(seconds=2079), ONE_H)
+ GMT_0 = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+ GMT_1 = ZoneOffset("GMT", ZERO, -ONE_H)
+ IST_1 = ZoneOffset("IST", ONE_H, ZERO)
+
+ return [
+ ZoneTransition(datetime(1880, 8, 2, 0), LMT, DMT),
+ ZoneTransition(datetime(1916, 5, 21, 2), DMT, IST_0),
+ ZoneTransition(datetime(1916, 10, 1, 3), IST_0, GMT_0),
+ ZoneTransition(datetime(1917, 4, 8, 2), GMT_0, BST),
+ ZoneTransition(datetime(2016, 3, 27, 1), GMT_1, IST_1),
+ ZoneTransition(datetime(2016, 10, 30, 2), IST_1, GMT_1),
+ ZoneTransition(datetime(2487, 3, 30, 1), GMT_1, IST_1),
+ ZoneTransition(datetime(2487, 10, 26, 2), IST_1, GMT_1),
+ ]
+
+ def _Europe_Lisbon():
+ WET = ZoneOffset("WET", ZERO, ZERO)
+ WEST = ZoneOffset("WEST", ONE_H, ONE_H)
+ CET = ZoneOffset("CET", ONE_H, ZERO)
+ CEST = ZoneOffset("CEST", timedelta(seconds=7200), ONE_H)
+
+ return [
+ ZoneTransition(datetime(1992, 3, 29, 1), WET, WEST),
+ ZoneTransition(datetime(1992, 9, 27, 2), WEST, CET),
+ ZoneTransition(datetime(1993, 3, 28, 2), CET, CEST),
+ ZoneTransition(datetime(1993, 9, 26, 3), CEST, CET),
+ ZoneTransition(datetime(1996, 3, 31, 2), CET, WEST),
+ ZoneTransition(datetime(1996, 10, 27, 2), WEST, WET),
+ ]
+
+ def _Europe_London():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-75), ZERO)
+ GMT = ZoneOffset("GMT", ZERO, ZERO)
+ BST = ZoneOffset("BST", ONE_H, ONE_H)
+
+ return [
+ ZoneTransition(datetime(1847, 12, 1), LMT, GMT),
+ ZoneTransition(datetime(2005, 3, 27, 1), GMT, BST),
+ ZoneTransition(datetime(2005, 10, 30, 2), BST, GMT),
+ ZoneTransition(datetime(2043, 3, 29, 1), GMT, BST),
+ ZoneTransition(datetime(2043, 10, 25, 2), BST, GMT),
+ ]
+
+ def _Pacific_Kiritimati():
+ LMT = ZoneOffset("LMT", timedelta(seconds=-37760), ZERO)
+ N1040 = ZoneOffset("-1040", timedelta(seconds=-38400), ZERO)
+ N10 = ZoneOffset("-10", timedelta(seconds=-36000), ZERO)
+ P14 = ZoneOffset("+14", timedelta(seconds=50400), ZERO)
+
+ # This is literally every transition in Christmas Island history
+ return [
+ ZoneTransition(datetime(1901, 1, 1), LMT, N1040),
+ ZoneTransition(datetime(1979, 10, 1), N1040, N10),
+ # They skipped December 31, 1994
+ ZoneTransition(datetime(1994, 12, 31), N10, P14),
+ ]
+
+ cls._ZONEDUMP_DATA = {
+ "Africa/Abidjan": _Africa_Abidjan(),
+ "Africa/Casablanca": _Africa_Casablanca(),
+ "America/Los_Angeles": _America_Los_Angeles(),
+ "America/Santiago": _America_Santiago(),
+ "Australia/Sydney": _Australia_Sydney(),
+ "Asia/Tokyo": _Asia_Tokyo(),
+ "Europe/Dublin": _Europe_Dublin(),
+ "Europe/Lisbon": _Europe_Lisbon(),
+ "Europe/London": _Europe_London(),
+ "Pacific/Kiritimati": _Pacific_Kiritimati(),
+ }
+
+ _ZONEDUMP_DATA = None
+ _FIXED_OFFSET_ZONES = None