From 9afed08fe97e898b2dce4636728248bb59d14e13 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Thu, 18 Apr 2024 15:44:48 +1000 Subject: [PATCH 01/29] Fix documentatio error - thanks Ariana! --- docs/environment_variables.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/environment_variables.rst b/docs/environment_variables.rst index 506f8d526..b149f1508 100644 --- a/docs/environment_variables.rst +++ b/docs/environment_variables.rst @@ -17,7 +17,7 @@ the ``$DATACUBE_OWS_CFG_ALLOW_S3`` environment variable needs to be set to ``YES Open DataCube Database Connection --------------------------------- -The preferred method of configuring the ODC database is with the ``$DEFAULT_ODC_DB_URL`` +The preferred method of configuring the ODC database is with the ``$ODC_DEFAULT_DB_URL`` environment variable. The format of postgres connection URL is:: postgresql://:@:/ From d41a67f55be61458f08b0cee09afdcd87574a7cc Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Thu, 18 Apr 2024 15:58:29 +1000 Subject: [PATCH 02/29] Put background on diagram so it can be read in dark mode smh --- docs/diagrams/ows_diagram.png | Bin 51867 -> 61327 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/docs/diagrams/ows_diagram.png b/docs/diagrams/ows_diagram.png index 319758fec9fc6feedf49f0743ae4763531f17108..6ce2dd05f6d6cd18551642161b4960e3c9e40429 100644 GIT binary patch literal 61327 zcmdSAXH=9;&>%Vl1?44)7>Ei=jv^u%$sjpOMnRAqharQCk^}@KC&`&1XNFNSNFFi_ zISpaRVSwR2C(MPvO3^!&*N={R@jS;Sx!NWs$xED(oPpfIO=ZMg1nO(W@OE z&-;(2v0DvYEnBV>hdB5G0H;J|wKP`j@}6F}kb1xSOQo54LA9jv1G!$geg{}stV8TE zISzZ3*puqu?(X89oj4aBMjR~AykJ);#iSxF%)Cm;b?>(T`6;z#eQ=Y|Zr7hfksAO) zD?TL(_Rfd8CU+Zb50L$>vbY--LIjLR`jVbV+4K5b5VC|i=o6CRIWd)SvR4H)iAv*` zh%4ln{#+AbY%o-{6PC;b%)Gt7E89*00>Rm5)hAW{Fn*Op%kflXgPlWqI0Ujty)!e>s4S`b3A#8b*hZ|AgNH|WBi}qsjYkR zhAuZ^wwB4(lH>sOpfCoshjZJ@7U5v7-ZxOob@>Y3R{pJV3DEvPr*C55(@yb~x%nIr zhYXzd`&d4@EOii%Qw-qPjlT~uN2~sr+_Q~;FEHa%&AU}kHef|vJy8RJb@?g|q#hn8{lp7E9()u6nXQuz2SXuvl 
z@?n8;kB^JjMz0%?UXkc_Ia?Bp^Zg-@Av{!k$CwKjL)04ZLb2+l(nsh}2cist&qreqxOELoU+^ zfNR@PyE|p;jW5b985Jk)QS8(ge-$~I2oL5*hRUk)hXRMi$I7hGC{z{|P9>->3lAoC zi8NodPy|H%t)s+cmsbs8OxZL<^a^Z|W@bzn$t4 zs6bqgUX5~ZY=M0?6bJZ8$Lb(o{)T<&mZv1eE&zZ}0m^N!oh?#wX7f7r3zBG;^Qj&m z-?IUE;A+_Zo-nsgdHCzymmQ-4X*9FMI6g2%PhYpXD(xRaB0RHOc{m#7&(tRW31Y-Y z68!sQWwpEc?PDP1RWnZYm5CwnH^gVp+JTLNN`l-Z@H(j| zQ8$Q%O6mcCxfM=#|A9{4{5mXb)&}$clc~x7QU9DgiTXY;{O4PX8lHs(KJGHV0qyUS z1mA`Qm{>qJau8p^I;>@(hVC1Mzn0fIp|V>0^5VlanP2~i0Mx(8@qZ-ZYPYIVirJJj z1=vW9@H`#fA3Qcc$v6U^*kO)#UMWKZjD21}hnEB?Z%s7d!Zr5z27Yn|>a^a?%d1Yf`V;it# zPw$~9C%V3sxjmp90fpXFoh}%phqA4Im~wM4D^?I;-9QEBqsCwJY9^30+RPr99ghjL zja@YB2s&z*MQDj~`uvt&jdOC{I*NxpXDP0s5mMGE{+5}1Ir|}P{>X!yjq3`wykM)^ zS{gOYBB*oqwI6Yw9Ex#Qcj+LypPs^M|myywX_bH?sHJ~ zG;U+rX}~V_feFCvhZ~2S7RjZLYWo>F4uxAV3>~`Nv)dV9=9tHi7={N7OkB|~yRRNa zOjlS0*EaL3-0IjmLVTw^c3d*6;etv93$>JfoM9Yas9;2WKV*l{P23MZI1X^r+iD_h zNS>B7)a1Uo8G3`Z`gDVG=W@ld0|t)WO>Tg?kC;{Kshdg~7B|7Nj?amz!HbMw`xDXW zwe7i&-VhK!CDr)4TarmFswW%c+%ry znHz7vTj+z{%2a)=hx)L2zs7dcP;hg`Y+d2}U?4+=Rj4^r)F-WY3CQ6#@oY}he9Kme6xUYMs}r4bac`r% zN{Cbyqn94Uc=+|4NS)^Nm$swXWPGLcd0jygj|?IZ+Bnd4{!2~q6cQkb2qw)PeL%ARw4f(*k{q_ zPG=ITG~TWE=nFkhPe?O14x10bb9>=BF6oZ3>XJ;12{t|V6`tFkDEn18#!55@no*(s zlT0w#bxzdvUFdl$V^LQt`+?_q%#}}^*)?5S ztOl%wdhD5M|IS6tsq@s$0&YX4o_&tib|r`8tNSJsRi8%;YjujNYcX@LjdV4fji1IS zADp73d|%guaJFVlu5rid=s9(GHT{5~UaM{(4T0;d{Q9-_7SHlA_=ufe*uFoouD87R z0FPXs6k#Wo7g_SA=-8tPW>#s~<9AWuW$Se8c#s=kR@Vn}EYzcGdUNG7e!8F5$_A#t zDv+-(W;L?>)Hrt-A}{-A>`yLr6hE(UBJ^|ZliVk|*Y7uHo{ed?YFdC@m{W2d)U+%; zt3f7aGm1fDRXeshpMyXek()mKatOOZiEITs^XxRk)7ETP#mcEC6Zy}XgyllJJnT*v z4|t~okeST|mbp(}*4Yg*3@cn2i`V>juO&X z6|>c6cs572%E+s=7@bk=jyW`Ru5RJk9Z{?e=~TAg9x5xH(e>aFmIKoiZH=}f5Vz2F z7fQj~M6i+SPyt=fV?~qk@|X)@&7Yq_ENeu0<-iVF`@G>tBPFWVk#$ihF=f97*?8B2zE?8p|lKLXK;mJ8JU1!w|i%56s*UYPsDa+ zwH10)X?w4xB1NuUP2wA0q5BP3)z?c;S6h~)nrSz&Wks3|u=G&isSYffT1w_v9Fi9I z@Kdi*FUqgCy1#WYHQe~VvkB8Dry0X-@EJ{;je#C(Yxy=Olld71XYnBFg`GHj2D4E~ zBT~0EnV+|=W+#jKl_%@O+}BK#x9d&3qq;MvnP6#B#JP(x=DslgDPZjzwwtw&R?)A6 
z?rH?Hgo>OVtl#z7(WnUx)ya7xS38j?%!t~ZeAt+&9Ygb6MQ4ja$Zoo_r831_blgl2 z_3{L&VKZCr-~2UK4l*1lQGpq(4&cCKDS2ma*QeAo7&Mw6S$OC1skOFT{2sZlmQM?| zmUmKZTnB>_y|bUiFoB<4EBIZ#hfN#LDXc5TgdOel#Xc9kxIwBeS~|NKwbgoJ-x9A| zx8ygmnd_qMG^8LzK73p;h^jEU*lK1O`NgPjv-{d> z^w~#+*&XdOPPTjxHy zE(F$5j@6?%^7zy_n4VX>BrZNksr>M=NRQ{NDjAyibKXNy=ju!kCE;7C_ky9sPUL*g zUt4uP;%Kj|D7t?48LxrGQy_>imms4ztkMXtqb_(eQaBg>e7{G|Ha@MC4%;iM z*cEf;*1N2jKuRw|n|)DI5NNVi@LBLlKYjMA(}9uN?5D1>tekF>gsK6yIU=5u(PO;cTiHX!Y$is6JV9Bx&&%`9%B@+&EPY<~BqBmD_zm7JeK^A?dB;HC)bUkz8a4f>TAQG!z4gNkIk~?z%vySvMV&IFJoi;~gjI5;uM8WGY~00AFVdUm}Qv`0;?|L7{SF{1W!y{}rE z#tQat`Rz^n(mN|^1r7A{YE4JIuDML8Jm8tiUgab!GMSMvgk9c`^RJQh>PxVhX}J0^ z#w0C*ZD9C=jn%6O(8+KV&9We>{V*25IEEgGIHsG_H9_7hKADint4k=E*!>)Q+&68| zXmUM#oQ*`J-%+ZyqQNeV8D3A`^#gM}P>;7B_;7l6S1%G08VHT?X;1O*2rmEi90-jG zlauP+2bk=ALY7HN3xPw04+Xn^jV1dVG1jQs7GOwI3J(RTjyKbnl%1CUE1&cT%uXlL zydc;Tu*7j?fjJJlI0E;dH$3mp7CF(M4>IO9@TxwFQP8vPSxav`g)vN5TjoZ_`)d}* zm+8jBuE733bW%^#T*<2^zDt^BM=Di>mCeYO@-#}<&AB8Qda^YX@tWz|B#Dm|g2QNp z3ZOx2>qUNtH$`gff=}XGpcJFWETm9T8D1WdrLE5=-GaZbuGV`W?&y(2Sn+{4iZ^lk zwiohIplIGcPTxw(tv3(6GB{z^pqdD-3+eqAEzg;Z2Ge1xXb0h(Y#)6atwXQI^Kt#- z*!;P{@NrbnmVzC4Lcdz#IGH=g4Q>`n za0_j8@RM(x3y0TutW!?4%(gfsKt5a>wYjS)YYRFeR9d~x>u=6MszgN9ofxJ}2{0X- zhvfR0eK==PkMRAFlU~X_{{pTRyT&_DW;LmqfMZ4cm}15@fmsnJmnYT|R>hue|6FYq zt3;%Dr0}SOXvwy`=uJPe247g!N}M=@8vN$y`mu$ryD82MS{6AQc~^p1STO<~m0#q! 
z^-oK!*sF;--z1vx&Ab%H5BZEpU+o6D7=%>IO-odFSgxT{-PXm^;<(wmsO0<;Jv57% z?WT9r_Ex-YM@9HptM%0g?YEuM9y2aP+e05gr-L2T1T2)MZWG#sG=?sx--GA(&HQ+| zLhaY|LcuJ6UnXiUy%g{M6OMyFr`ZUYj1hH-(bfe4G$=m=5^>mz&2%O774-Cet6Q@b zT-WD~kywbtrGNzAuZ3J6e+WVe75J=OhaY;dqY(+oLoGEUBO^cJ(aB#(ll1z-{DWC|)(7ZE4wvIeD2$-B)WjbFez?=O6I?Y23AaR*Ll|x%wbty+PD{ zV^1A(roNwieUa&XnZgwHda-Yn8O2gyqfAn>C1JcQFON!kyL^0pYaQZHG+z#LU(q*X ztk+L15H2SMQU@p#CbP)Ry;Tno>%T{8z$^iKTMq?U zHMDfrAJdH3R^*9cll<9Z!cMryRtiaUob!YSVa-j<8O?dZ=X1wrmB)rn9u8Bj*X--7 z^F90fcHRL{8NkEA^Ltn0IxSg~=N53xm2%Q`+&yw*UM$;MucL4MK zdC$L2=DaIh$8&|1I}O`>kIn%a-t;`?LcO%NFXK{PAUz`=`eJ?4`4_bJ0_&&x;Pk8i zre`7M1sIuZBI!6=gh}JMJ*2*7ud;0SH4rL?Wf)!b-5*~GA}?pq1CpmdMLvPE*336A zn(#!LEh}n8)J5)3nOdy*7BpE{Z+ul#vK`{nEcKZ!vFSVM-TnTgsg~FK)!@L)8=942 zljqp9P1MO4J!x<76J(r4#v0ZdMmKf3K|VL28Ges@;8OHD z&j+l1@T>(p;1S2BCw|jC!EgRm%>XL9y)s{vDYp(&{j$I>l7YrqDNR7gF5IptNQUHi z@>qOKqA-0a*`#E~T45{xAsWSdUw1bAXK&w(a<$W@c4O}Rxw6BE&bo<&X|d5f)XR-= z=!Uf#_B1=5OEm8_beFz7L-k93xQ%bk)Dz)E{9qz{2CVB9b03-?$b<2y<53qlXiej?9}n|V!YcdB<;XbJ8In!)g!~3qP1AJ9Ov~q({v!M$h4v+ zAi#bQleY~)bP_V~@gcVXXXE2SpHlJ_Wr4cg!Rt9w@4aC%%pRj#b)8{~;(y^9aos_? zZjYq~_!f{bcu9$Wra<3W_dK{yb;kFbkYQ}~a=9D*49NWbm<}o2Lk@sH%+K=ExTCoo zL=Bk~+$*02s}3FI%NNPa6+t7CEojVy*9nmv=%+ZpXYaF*)H&6WV1whvB;qLf{?U#g zZ;wva@YB^u$Vo!rRqd3$@4a_{4PT)b{DNfx#=m#GSdoBqO4w8DG%4u_eG{3N zio51b9KKN;?xNpN0WLZ`WHOKRoAZ=>sp__b86}<*XqS>V=``Uv-5JfHma{)u5Rjqd zBbab7B#eJ(k>18F1{4S=2s;;GzXkeut$&WKiGpF5!?)T8b1SLuwH5;uH&SP`?fdki z@*U=SYc(SkK_|j#6Z1Wv_wri({4&bx{m6rq1|FK-eyL_31$fGjx=pA0OV{xq`6dW2Qb67h$N{Z0 zdo*x*SFIl|H6TRTeB-Q<6H*UlYoXU3umf_e@W~L*`LWKPe(`R0M-wzpq%FCGe z3(-VQO*1u6*A_K^} z*e z+_o4hu#hd?@(7>=(s(%G36Anoudu5wSP$yoBf`U_f8i9fpT|etkwz53GP7=4LI}ct z!QaI?%IZvZ{m$E&^HJ?E$vSk{hhU&^fuoeS$o=(7SlA70(ag~d`SNw?sp0VdRz5LX`os)oRA!u>S|{eKN$UI0pGuVZQ* zk#tLc?>*@7f2A0x$vi+cJ~qCJvlO7*EMP-?5@rJ;>h-v7NcB_f6+{Dt$Ba-n_2^x! 
zpoAf=lmIFUHrdd$OKkj$vjE`mo8_Za=5aKBV*V8+=%2tq1r0tw++`g;cRjHB`GRtf zm{{)}ZoWnnu_r=g>90w#jSN(PQ=tBWeE={=)?YCG=RgIihMS_e>kQaang0;0W~OOi z(M!o9yw0}4fy7BSKr2AR>MDQF1;iIvo&gsXri;|z#W%Nok;07#$IXV5u>3Vopa-D8 zIsgj9``2FmzXS;ULI8Vnq1RHt|M}UE@h56v6heRWNP>p{Z9GulI$4r(ap0|*;_l*r_29^|mzvmZicD+nA^ zA%KAGZ!bXgVQdri#kgvt-5P8}ksFzH|Lc3Z>FBq~71+SrWa#k;=#}Zi{o0Wfm(-yw zA^n1xL%SaVmQ7a1wQ}V1S7H8K@jA__yb;~wRryY%K1O7l8%W(`gm1bd>{$0|r6rlu zOX|NaRkQ_ny)(25wEIPCRy0p5`C1Z>4`Qp=vZwawLmqgsP5R(ywngnF$$P z&mm;URT#=VTwcgMJw^}&&=zixBS-QT{5-j~thUk^FE+5DpAnUjs{H*#C@1oNbi|<4)JnZYD*Mmhh=5WB*2$GhKeIn@LTkLoKRLj-!X@W?hY85Lg zGr<36>Ri`K$Tyw+;TDeiBhSsU)z-@rzvG#W*G)sOk30OhQ!(GyB46Ag+}}5k7iODTjToJ-M;Fjl+EmqD6#oocH;l0# zww)|6JwJR1B)coutJ?bpvwGJX4K^KE8?Yrs8mkU@dd-n_@BbPQvp3HcvIkm?6LQst zecZ-3u_TQJku#_CF?2>|BWc<9ice>Gk33p+#%iXH=a@|pkZc(;CCdKf;>`-)<59fe zAEG@pnzUQ{>Js+zZ-gDyQP=?4pzrjn9{MyrvuSBV$rtlpHj;>5m=go}aZ`auxZO z?Zo)1K^UE?-T-2Ls~j9uB_=ji8^d0XL=Q zw-xvNtDjGbZ#Faicy^{+YP@GHXvY7lb?18lX1^5ScV<~?FZO1_Jxog{e>nMBo~*VH z1o6}$0TtFHq2DKQ|0NMZ%hD$zO-jz~Got2`AeN@pki@m)gt z&`6@FVU(yiR26suHh5@G2cG8+(r1M~yC|546V>Ey#o2D{z#Lo}QSpwL%e+Wsn-S&D zNVTD}sZn>`=2Fy!+cJ7pTs$KVi#v7k!liR4>V-oOjqm(}v4GNyn5E@0_l+5myPBUT zNN|_5zv^!C=$InF(#_*x=h`uB#w;s4$B!Ht06 zp#L2JoKFU;oU+gScNLKI*IWP1*Q7Y1I{VudM!Aq*ySW*2Hih%Rq#gmi4FCI<|9{>> zmaXe5u>40uvP6o%GE1bKX>guy^m5ZcNBsw{8rip+wH+NDA35G+iH?qb96EtBoPWOT z7{3%YBl!h{&N!Aq{|7d(1r})V0%v{p?AbHv7UIBpS2s7A7Gj`C{N+FP4hJ?z!6z&% z3>@K4<)oyfzeid0pX2|(#SH&@LKUv=Oe!M@gf@h9l-jSc*awPjJZPM8pFdiiHe)JW zVQulSU=%;wWS@7cw9H2~=SJ-sdHz1UCNk{t??0D;ytlGSze2~*9-$qc;?-Xnn)(A> zdv$+>3VH=OpcVH)k76EgX||ksn$;9Rw9Z9eY|KPZx)P!!>?6jm@_fc>O;mn54u4jx zfvr-v_$!FYr8mqgcC_DIxH|vIB?(?5hKBJ@~V>sxZ}sfI`6 z6Y9;r{M})r8MVT_!mZ~U!YNq-=y6Gr=3-IFo#MJ{zih+!W0&F6!4H#H=Z#xety@Aa zj*z97IZ&jx{8kxbfoW-0&Qibk=GS?<*RAu#`K z$7*lnwfda9V~Mx;S-J#fgSkJ=vtilxlcLp(ma^fV*Mf_~r*ZcK#W8?AqVfbuk-oRI4DzPDyzds> z2@^BnwuV7@O;ZZ)*P*qQT<>3bAYNYC3^f#OEppU$Uj3M}ZcM-W(Bjy7DlJFtTNo8T zHvC?@(Yx9ADWv3&UGUZ==7rL*?NoMC^0i%d?R4(ijKqYsPb2DjccU!lgHl8jsy4-Y 
zv!$zL&NS4c`HINoMgHa0S%|zo3SGrG^pvv3B_M9zG~9$l@HCjL5o}sJEmW;!TUIj6 zeEe&S@xyy;4Gr@7rPWPAubs8EwVTdLfCiBQ8dPvk=4(@U!HNoL1FN>olepCBeDu0R zZZH=6bIEXUWRfhsj542LD#3P|nUUSvowdg7BpRkdSD-JdSLxKmTFcfk7B!aS<>y|V z6&9|e!xcv-QD8>^9qN*zGsDJKWhbaoRx)BI&&XPBIF8g$EC+55^zca) ze3{4E1KGd0=x7(c=JmBTnrF7kC%nV083FpmFVMq_u^!a)ecA&h28Q60I37hPZ%S$O z60NciWWBKaI4pYS`3B0Z8e4ugOyKNA|?L zVJ?+!>b?25yd#^xrKuY)IZqZ74^c?7)jCLAnK0Vt%C0wf+Pz*;D^!gUnVFh9Z{w86 zK!6^!n+Sor0qICS;AK2(_d_|Z{Hz#rD7mSyzGb$2olEaSz7>Sx%yr1Sd3D7;7tg@X z{_4t+Chv>-Y~ky94!yM9{)$;)JDbEZAWcF=JD zXyUDQTmXl3MZ}-GV?!2z=Ws-Kb1Z~8O#e2J~eaI#Hpnv7u z*s9U<=g$uvtl`P(cM5DC-uKA?IpKA?%tr1VXyr+51&KK5r8%;3FG$SdTFOjXcm2P%!yPUQuXa(D-Hk=~PTCiwMBmI0(en+PWe zyg{)m^??sJ`wlRhPk4!*Se)em6ewj5;Ihco6D$UYih+Kz;BaqIqNO}7m;kdu24tVi znSw;kxp2#1|Kb9@uALnK-344~%G?7ReH+ z0Hy*t@fkq>d@U3;7YDX3#k7D2*8s9`A&|-(UOm$a9y}Rv+lbwOUpje} z-yv5FAh;L~q76XKdc{{vu2aB7phVoe__n)1yZX_CQHq{^ow9;W`XLWd6LNp@DilYIjQ=6nG8H z-HZ0Ar$QHy4op1EBoF{3@~9j~5nmfZ8193h5az?F9sXwv;3!Cb)Bn;k=(e=9+?)Pv z4UV24F69tA5dlKG7C$|;6Y!my`pWu0V#6&;Nu2Rd?$iFb3wh? 
zr;(1IfD_oxPbPeZ!1w%M1vVX4q_|1?_m8?12S96IAd$JS${Zz!U?@a=g87~KQ$Sw8 z>2U$dC;gOSe|Hl@_)p2#??mSBfs=V{j*8T{Z3$4NU%H_u+f7i8qq>QsIwlW5#ed(> zlLMf40H_TBEf})`pe9*2^Z*Si#|NP7IOq*#r1+e~FZ^QW6}-fUc;s-M#71U1LID6u zUV#s=^(Pcyiw0neOqvp%z8j^1r%VYzg8--}0OgT>2tX4Q@p^I4Kmhsy2mSpRARze> zKQ#hi*Aie?<{c{1`HtG(k1db@>bDR8^(BC+_M0D>7UKES7T7HF;|072H?<)_DbA4) zpw_)d`j{M0R8P=XID&q$llbp!7{FWsWC=X$kL|;$S>3nT84_2jY-} z6A&8#lq0`AK$El5fuy-&8;b!=_F4L(K>zW)e(_kkBgpSMe_FaLL$0tRVha&t$+ z)+vDY&)U0Rzkwy+ekm0F9rti)E;NLwDC&FN&C^Ga2v3$>;-|Q!0pUAa<>8k8m|V!U za_(2jkcTD)Zk|*(Ak~)w7R{oR`%?X0m?pXI6oR*r&cM|oeh4fHvHH6t@jt+lA-;sb z#eeh!koc^@W1JKoITL%iJszTT1Q>YLfuNT+&;7dwoN1*$s$PK&(1uTdHhhDJ{T0PW zg>G8L%t&WxI6(neW$x`=psxoITLFDga|6XOF*U$W={poz_9J8;z|GF%m55Mtg?ivV zB5vpH^4(c#VFHj%f|^|@5C3ip&JPZbra;FrFMXT_NE0naI#W=iZh*`gc4=vYM1Rxo zo7SYJ(gD$fGKU}RtmUR{@FAoR+_G|s*0@CG=5-QYNkn{F?-W2T5aTQ`{wILfub!v$ zv7V1S6T1oafG|3c1~4$pOvtOQCk(`8&Pm;(bdf*j?tkmCs`}zh$pr*xfwmTZF^Z#< z#3+0L6I~CS*I9(XpSFUK(`10RA>a-v0y-rLu&YUl{wUK5 z;5i^>F`V3(k)MMBa96tc7XbU6FPT_i)n3CcAHnHF#o=KeL4SzTKZtR+#jwFpBI&`0 z`(=x}mrkc}B6B*RIFkAcL2m^od?~i)2Kr4v?iD0`DQG)dEMBhFK5^;qA))X|pCK1QF@^McAPH?XsK@S4G)V+5A`VD*) z^M)8O%s?4Vy{m@+XQ|C80TT}j%!Mb6NB3LBax+P5R*;TWJLsF7 z2Lw#`(B59zJC+X zGhDu@biHGDthi42Uh<%1w>~SO5$u>Ah=3}88t!n~33!{D9bw!&uF+__^A_SdjmgR{p17XCR%(zXJOB zHWH_8<&7etO15tU25rVEZr|35@_B^Pi}x zW(fk6kZlQ%qh;*f>{@-}ym4B$BbjJRGa-el02K6s#kdUdfW6TsAfgq`A-L!5!^bfo zO&-KPzz1X>HBd(%ed!}qG^7bqa$={S`(!a-)`@zj{8S>l6{#8Lia))MN+D6qh zFN*R7CPG3r-CZ(ohv&vzwun&7YjH32@-oac=f*V8G5iyLON`>{-!uNdU%@-61*If^QzoaP`jT%V1%#3&!dHt^GeTAz()b>A0$YctU|B_I&is zxkR+2(@&iWPHV~=DEH%TpJQF|Ui;%6tbb-Y-=$R=)#JESHjP)ZmD6bn zS&c6W&l2RkZN~EsqI|+Xl(ct}?MyeCd>Ke7L5ajUPm%y-*}{W>{P4TOHU(ws*IL@F z&CnfI`bx$#G8u?elrQ@(B}@Q=pbD`2NoXMjIXJohv6G_+doJz>i1cD7rGG{HfiZ zpT+Y-@~a=uJIW^e(kD6%bdB?4-cRjWk_&o{tiA@94Yhi3<9h8;XOqEW0;~7vqm;GQ zw-p3WlYZI0i`5xB0D9r;A$c~}X)^A^MyfwX*fbydmFdg~j`{XxHGQ}g9j&>J6h&nA zxCq+M=|hmubR6ZcW+j3vrV>6(3y(NL@{-V9ReHZ=zeB-U#z%#Kvmd z^##GZaj~W7)-&V=p_m)iaeGSpO~+Hegk&0KAkQ8m#mwR96f~`3pMMFC8pHTa+2<;( 
z5gi<}#bLXV!XWFa*iyLkL|lhAdR9ArwTMeD=h!n79|cfAP18kzstNw zu_yJtf`qLk%u~VP(Gng!FK?pxrxVocF@ogNBlG*-j}!rix#Z7r4X~lMitPf*(+LVE z;QQ+mpbO+vpvBzV_q+RKgtsYyOd<_FLxCLdMuFQ!ZE0};z^^TM`Y+(qmk=V*n;UrK zxM(HNQ(=!1=*}w-QtUw8Hcqfq7-XD3q}YprQOlc@pG^c#sc3q%rkhHVm}aQ)@Gf>vv{=d)<)N% z9eB7on2>LEWW_WhKM;Kj4OEEm3ZHf+Gj;0P76228F(co)%L3w@u(<#nmRE*MAS~cG z2?1~SUTHbp0p1qnlWaz$S_xCCMk@(0bXvQ=t$Y_pD`k-(+v%_w8 zzp85n7O1^+DT*C1-Sh{ocM*$$>R&F=D?Vi2?cbCKXKyh57uP5P#REa{o|KnuS^ zAdeirwYMd`e~!d^NiGjWuW~>_qm%Crx%H2^!D^~2soi!Ql}AhT-E{Ixiz?4tJG?*x zK!66iFVjq5Ak*IVkOD1*l!p@pk~~I!+ z0H&pABOCyjBE_K4Od$L@M8OfD0tX8;`lxtnrw0Pq`o zvz`G5c(gQ^HAK+&d@~M-LS#jwCY(xM%%zVN7|-U^kT|}dwWI}S{m*=q})Kp#BkE-f3qH!$vAkh zG*<+4mug-=4tLM&l=7>0Z>e=O$%jgAK#8CZsNj>wV>$?c*$ z)S{?dynY=fR-{3xRT8&B;=n>NMx9){&X!?w{@#>)dz)zt+U(b_Uu4Eqpp&(=4WNT= zl5C%c9TRv@#V8-YrFAF=PdDy+**T}+2o*DJudSeB%)9HkvZ+|>54wB8au57;3*VcO z2Gpb!-FvRMM$wNFSG=~9P=p0IA}Tob@7fj=IF0nzYOkln{Q?qF+L_yfI1P`+X?RQ=ZJ|nd0&8Et^#d+i{ zHEUFHzL$@Zc&-91rO%h|tX~a&PWuS`nbv(5CCO(v5VHjU_Qcz@`Bzxs(P^s^IG$c{XpDCL+9IfRae~yTQ#!2Km|0C87FMas=bDg=|70tCBHA)H zu8`ZPIviE3Z+DCuGu+pEO|9}q#XjWTH(xWN;)rob$_u;IqusE{y{Z8#tG#)e!gXnP9GEmVIrL93NdhEFKP7HEi*& z@lPC*H~lIqq~@^;v0}I}(Wxl5{n3-imZsOE# zu6&FO@4iAaYlSh%DEM>KrQdGJ#FI4(+T+7QqOtYd+QzQYN3nx4D1m~{v$bVoyL&-K zUToz?>bjhk-&e{iNm0c|J-MZ} z)l6*BX560MPN8inqn^~}Z&#rQy7ki5$6E`~&lizw1jUvK7ZFcn#ayL-!(^KY%EGlb zOiW9#p3uBTmFsyEqCf`jb+)r&B$)vx=%GUM{J@)b1>|^JBaKy@uQrd>>kZv6Uhx{C zzm-~rsCjd<*0LMD+nBtRpVb$dXI?5de`#u({lg#D17~Y|x?NO8%~SR+T}iS}za+@_ zTjs-7=9@z2XZMs1VOj3B%XIy3Ds>fjT?8}74fh?a9$j_ryi8-LZD;%h z0^;$dhhbt_rwQNm98PmG9+JexLGt9oL6fxm{z9XE$GXrBCI`%FVmeR2sm+m|lDwHD zb*NYt$@Nn?*!hT;$&2&eLCy=-{D*#*IxDT(jd$4#oo5HLcuX`BpT2dNUEO6U?w-s! 
zs-693V&{H$>(z}(SF0ozSe3q@gs!_jQ5G#6`PWp%^3|SkiT%qrzAPu z^j>?Zb0 zydP5P>8vAaWd7Ck{d9G9(hY^LzJ+fGo6A0vtBxH+VG3icZwui&CoC*1v=cPJ;ZjTz z(VG7KT^r$pu!&h-tIgFLtkg8i?AIgIM;d**-nCguuEja9Sft*{N)l?qD%CN`SEpji zEg56zHk0bLqGxb5&zM(y zvZ}<0-{Fa!EhqYJmbfylDM#rmA&C)Q0_=G4;9zv_1XF)NRYcgF_=*!%u5w3OZ7(dW zxtm&ki3-0v32oM%r->5FFR|kBGL3WFsNoUnCW(sUrbKra{b~34?NAk9OuWL-aCIiW z=#?-QKTvXZ@`4YaTq!2AJKGyQQ&E&wuJB-XReM}jc~SOHfi~xH&xQ8!a#@=Qgxk1c zt$xs~apFS%?Y%;cLInhLi;Hy2O!oryLc6Uo0`XH$zoyhg?>x4j^(5*RYFNm4Rly$d z>6c);&_4tQZHm5*mSYjE9_;PK@%VjrHMToUl3UCZE>r$5-rh4Ts-|lfY(zmpKtw=^ z3WCxINDdNIBukK-bIv(Od61lwWXaGCG&v_F=h);NBs4)%H%&adec$(-nKS46GxJ^3 zf4tb%u3EKfRjqs7wW{`zGFp)8u%7yeP)mOqD~2j7K!S>wHnH@|AEF$33v*;3mEXCH zGW^Q5pHg@#YpmANw#G&+OkC`=)p6G0@5rG!SScA-5z*+~tk+hmSEM$l7QgDZZ0;Ss zYsqOCS@yE2K5i$I%u;Iz5(p);S~dY@v$4c6cJsySW-x?W|)`qf>+^^c1KNKv$%Ug?b*3cQ^VJxA!SF$7Lhc;S|7-0-9Arm<+AWI^;s zWxi;>%IIuqfm)NdtabQ}UQq_IRNdQip6Rr3wVj}^dWhLfHj8VIR^z9=gml8Z?7eke zX&07frZ%&cVp_aqqb0=D&U(TUFmk}VNVwW(i-Ig(ti&>jXZ1t2$qQQ2T)~rX`luk- zdd(#%mvL+J-fNUCw>Q={GhUK~tlFb(J+<4xEH4yi!HuY>K(#>*VIl;b*{F>VE zGd%Y``!%hWTW_tJGPgDgXPO_hlQLPuw8g=S0&dmqfu5$rI4!7Ann?HNIx^}=$yIR> z2XKQDRUjZga9En-D|GbTi+SCkCGHI*RQ(SB3pSmMOy;zAL~1@IfD{t0>lx5ddba-k zp2Mg{JRtzT#z?R~CYr%LirUlPpnh)(aHuiC0P8#qge=T^B0!D6j*%d6LeCM{_t{%R ztbvTl0F24%AA)Ng4m3)92&x2bE0S4lQd#W>`pN^eLf%6UstofF9$1wnDT@fftqcYV zI*bW`W`Wz27>JMTO+E?6aCg3|40p$k(YY(AEq%-@Lq~%t%7G~g)M*_6y#8V>fDb4M z#9G#&KKI|dKKf%X1%xXAw>vK0+(A~qCJ54cv9enEH87B5Su)hUo`a4?@)=;y07RYv zcKDN%1=k*U)Pw10{#yA;{T=^O;KU7IK1~e3*<%3(S8`lHj(4z#I_cN2=xClV8Q=p; zOZplHRyxLGR!H7Oe}B-D^J-k22+JJzv0LPRpPhvHj~Y{yE-@{yhMj;MDXJAlhxK&XA^w6`h5Z3eIBK4>q9*!@-Khi^W> zh`oIr-5d;*wFlIS1j36LtcXtRh)()LpcBl%g+N~PD*+FIAeWA2IQm~zQUQC2904J? 
zA?|!^Yt*&CcCG!BJ2bs7fqt*8dWX1e+(j~gkU&JC@ra5l{k%Z7vqJ?KD>97sVd2;- z091$}=%de!31$Yu4t#!v!`HNm4Jdlvu) zQ38ztK&Y1m;6TMg-0KDCXd3PT8lYRk+F%4&YSB-E+SJ5E5?3q)tGWPqmJ{ek$*ofJvs4Vi9fJ`2^%HA^`|sZ)zK*iv@DPm^l&n&Ye4aMIgm?R1bD9 z;TpAnmC;?AUNOMTK+m@TOH3yPtd6xqQqaC`0Vycy9))x79>2RR2ixkKZws}u0qNbO zTUZt0#an`7xlxJ?^fgm0d zpeP&4wJNe*#Ixi&_0p*;;FhJ5*=8N0osuT7IxjZdRD?`tM<-En%COIK7w9rXF2-|R z(Kh)dq%OFM`>WL5)Jb+UDXT_;Zy@lp=+^V~#*znZ1oOFU-)a;(c6}iVJ2W-BB46fN z3v}ScWzl)xtE^5xEQ8>XwcIc`cqexH_wI_{kxaPI52n(v;=?=(in-j_IWD^*H`FIH z3u^xoVDsnId*I%)7>7cSAR~P<- zH-}+J)@M0a%Obh4BH9TmG?qumXwO#Z4>jJX7~k9LdRO18`ep(PS^7Tj&-gel6MMHb z(-ADUyECyQnkQrL;eR-ctusnj=T7%+xvz}d2x){1*cx@mtGAX$lPKc`UseA>+oqDk z)lOV>CvOR@Cqm+&r^n1Er_l_F==>VtieN4$eZ5koj}r4&QJ0PBAj_i>J|Rxinlg>} zsjX=iwMnaC*j)X-TO`zv=L7*VCI&3{0YIJk558DBB=g$vDixen4-9rwW+Dn)kg1-q zVyjLxGn65p6wSy|zIGJbuzlMHwWMvCDXstNio?tUZzvFwweD49xydci^)`#KM0-hg zUme5dK5>VcafvOQDlAucZnWOv`0lN2wPG%0s9j9!D$#q@#-K?>^@F?zrNV8;`%LIi zX`WbAln67>3sOCLT)x9#+|Yn^V%Th5H_cf8*hwrAmqitHm~o(>%xF3fSin9wSIqs26Jx3763&)CnCW9;0^ zhxMwY#nyezZcx>%^`t87hyOfkGfF+3!YY4pLv@#rNv;@J9v&P9JzH1Z{Ne(x(!o!r z`jYUni)z{pfBN+s;qNT5w>t&d-w$94+Gv6}^xBAKJ;%$PQ;*ze?{IaOgAFMLZ6nqe ziv=)B5r!Mcy65QOcxs|a&$xvM;nyoo=IBy zEIh)+Ysbx!KF2e3B>>U@XgE2{vgaQ@WfJBkOMvzc z1D7C*r}j0sE0*kbJ4N@MVN3_pwkkLl+J51}et88n2UBt_;x?+qp76&w2nJF18rrL` zc&s+1yDDM3Kev*$O*fqj*j1EoXP2RRtz8t8_4Q!Y7J2I6p|R}SlDMDp8R9-gnqC*h zE>Q8^aj$B*Y4cLA&wW#~3JRs3lViwa%iU~TaM8#you>uzNzE~bWI-4J74d}94k=qk zD-*z~;9}2=tg*dCa#FAP*S;JUrO}sx6dYUlH7$G}bZCxM;N9{TqC4ytYm2PMQa1KN znCTPyA@0mwQZk9LOrfFjrS5AiiS_n-<*M^47hQ@ ze=vVhVE)4V-#6f|4Hs`6Dv%nWY7fAlgLdliPG5L zi|CvO9IY1sCRS`QO@)Qe|0HF9oaNK`ZZAND)r@dNtlb5Wvi_|wIgwhYr1unnUT|48 zaY4nv?manxdq|0o0C(C!0{{t0ApOKYBv^)SjWY_634n_+!a!mWfaJ z2gCtn8N>idCS@TZua*o%w5th09MQld07;kiCemlR2e)7vY5d`S5 z1dui_9B9BXA9!{3{+<;m#set!@BtyAoZt-I+A3K4#agJ_3<*Y@JwO~|>Aye;@7S@E zE|1ky+_)G?gT=s(3Aae$mDMxfUeW?}AY)EH(QR0|ke z`7@x2?q_tEE*=3;F2)IXmj~Eafp7fA=)$h~02_#nRZvI zu>l|Gx7v%fRcVgLm}Y~f^+CNpfFc_RF%_!*eHmlh4J4R?fKf;vM4bO~t9T(4Gl4P5 
z7ED<(Oj*s>#0r0n!jTu#&R@L!NUD<_*2MY!{%bzGOB&FI++TRy8?v&ZfA8*HzN@g_ zE8-VskMN})BVB5xhS$;QjeE9~NIg*RGhpi3Q(!FX!xfNMlK@P(_TrZja?+05uT6k8 zO*AQOIuCWd(RSv(SHDIrJhGUKJ#67dhOYiVaG?4^(2ZQ5({B zt>2a+fb^B+th;5d%2j(Kr19WkJHvP7)?2#$n|g=WogMgmpQ0Y=$9UiB&>F+R^t z5Xb{K9NwO93>++UYV@fLyw+cG6NVwI4 znP={j2TNv!IQg46XPIEKw&I(`xni;SZp*o{!f!)!#WG}4VPj>*qG#~sx@xCmBOu`* zo`$;a3vY>qY_r4LT0DFms<+GaK8bBfeiPs=>QmnYV37|qQh@@AgP?ktEYeKjJnvlG55 zZe+L;Zb27%zRz;h2T2C~lpmVIkq$=fjIu~o_A57se)C4Q@K`UGadUe184an+YLbJY z3vm1M9?wQ^Uv7#DZKxV-iil*ccOYhp>!>7l4n6S!PIH^VW2*sN4g1Yj-n6VNdqpDoSw7FtWUEy%>M0%N zr=S7t0coi#agHjTGI89Bg>`$Q>`EGoTHOgZAF9yb$s)bwf08awj?1RiSbDkz*{Q=g zp9$RxXP^sx|J9)kFJf9!j@v>?VpXY3$kc_wy#&@bZh>Edn4MMI2yX=H4?_%+==ermpo$Lio0Dvz7eQ7 zzgMGY5XZe+P^NXLGR;oAT`?}M5!FM>bvpx8^)POYFjdV}x0pJ)Y3Hasa?YMnVPw#{ zXzd{G3Spt2)qQc)@{kq%Y3}4O^U8mzEi8;2TuPDXWX=il&>pOLyU0bFq=prk_c~*a zPl7Ya8Xg-STS*aF`H!B8`(iH_t8;R!#e#D4r?I@)XG#u<$I)YbqxKpmqdQy$yi3b6 zB;sP`2X#hg8}r#eH}`DQ56VK7Qj9pZA=Xu2WZ?CMl$ws@iZn_58KkWnIu; z53Q|fNo+?ep6GGv9n@qGm&-klwVwS4Oy~#_i3tVK;V3ckARX>DN!|A$w$Nw?J=d&#-IfM z$-Hxh16p}{R}hr!X4W|$uZ3lYMw7<(!D@7A{66?0Qy@V$yNa3@7&Io* z#N(bo+mc2mx=rmMHpS-D*RGkqQ8on*0chiv802ZvL{rVGA``cJQAzWON8fT=xnP9& zvxjlTh7v-Wn;JtIjnXIrL=jk+Q7q^>SA^)FV&@4>@K3!{P^%_EP^_DmSWB@s zuh6uxG_xseYV(--%sdf0^_@#%)VD6l%Gcn%gs-&+)Az!SA*0fzGW!!b9$wD;;$Z}f zg=oEf$%oD<{l(H~tqY?OVlhd@Q;kxcS~0bCkxB|Th0Kd9mO?kH>!pcC8k^YFDC~=Q zY;5~xZ3k+6sW=TkzcC4dUAR)mZz4qBDrfcL!spz!^!%ShWHB#O{_e1pTr~O-ojv1I zXY>7&COrwx;>t1n^W~(_tJewszQ%Iuv(1t^^w06mXL>0ddG6g)e_z^p9pDPo616GM zofIh;2tJ zo(a*=H1CL7XwzKP%Yg#g&*)>&jS>oNzp?NA7^R1*D-$fk5$ zoay4!(}&BUaVq#z(fb|E-Kkl51bL_V%v}AKNr z;_w=dPCj2&jP&*^MrJiaUN;mgqzLRHkMEzsw%Bq@@a8#n?3z)5QM$a$de^+w4vIZ* zYy#Wc+vjW9VpJZC3=Qtd)Orik6OLGb%IH>wUj9440i1n($zaqRg9u!hm*!7e^q>MP z8Md8x{@x9TYOh1}z+2eNIIDkX#LTSAiNYUhuiWm0Cj?4P@RDn|r;qQ3V*~^`j3uc>&++h-z8(B7F!!7@85(`=eTJy611I(kID%F1F3HB2yQ5+E`S1B^(4e_ZYU2+~H-`sXwsR ziHdhm=(+tIWl7!e;bR0&XE)nsF2?8HvOC(EJhvCLPz_V46l1H}eUSDYHWX;(Eo0(g 
zn`D#UrR+ODwD@}3(x{u!5*!jvCIM5Yf@=KMt^~gq&*RRa0YFlf-|Mg&QGDUqqL1>v zKSEm&kdowJLA->~!03pnjSnVgm#6%5#H>s_l1c(@V+kuWol--=A}Vz}B-NaZ2g_3k z+#2Vrs@*wAC60Hf;69nxGN)nqvne!rv|_MoVeiP>t52jTV6~>gmV8) z(zb&dKtWB?!raoqcFH@dHp*w+hjT73u}5AFX!wXYes2xk$zc+)h9e{OcF~4f+^!3F z88Y{st@3=ANed(x*+%0I+r~D&L>D8Axiv5A_-}Ax=TH;kV%&%l@gM@87T}gCOjz)o zw_JSpG>KZk6b$tm`2O;QorQ{BJT2^Wdf}|i5)l?^>F!#fo7ODLJ5H_Oq}H)Yi`Y3v z!_7M#OLbc`MLwI;vssyT=!brTyN;~BBb@2Z$UQ{jq3$Lj3>r=U#I}tr{R1+hUzHy4ZJkE8Mj1U#Y24E16^NURmdJdX7{Dwr(y3RIL~?1 zk{JyyJkGMhVvVoul2z>n3VAZD1}46th(Rmv-Z(TMs_q=6a6N@fQbvtj+ug=9T^H(; z02*eS1oXD}wfF9aGjv|*!4a2aCQr&TXUbUt--X%ip4}JG?q;+wm91O}U zPo($j7i5L5^WSxV?0KRZGrx|_ONf;OyD$ub(MvQ(N#zPb8u&%ltEVtqgI4Dc$vH&k zrpv>rS*J>34Ww#kl&XDtaX)2+TZ0m+6TVuG<|hh!TGf2bgk?+5t6C-dc?}g0)2@J? z!+_wo-5>&dvVD2cyzCR!;H)WN=Xld0V*cy#jnhFW>6Cc<{6w<|#yS$jaS2UPBjHv8W^VJn+ z`1IZyO#EC5C(u|HdHBy;cb7hSAfw0S$`4WPFmXpni+xox%Q^VO!MRx`PS6r^IAG)K zbd=_aNNW+e)Qd2rW8GcpcjGpkVps7n{h{ee`BI5rg~?CGZ;wCczEqenv}T}o+wyAm zs37eB>}7+hS-}>NBDsviv4e*7GaX3+7u#fPwPsup9~EpL6eIJ!-(|vSKeyTtX{poN zRx_9Q?gz}wcBVRD{YvGHaQy)=hLljAR4rXv=+RzsUMF?sd(XD+AIg~led zZk8BSnryKX`5%ce{~uB*K1iIU3(6MAo0%&|6S<(1zMy>$znq!+a5WcWAM(VQ)fc#FXJW~%%)E7# z36tS{0?vNCbaRXtS7*xb0bGzTj?~opix(QH^&dC;;~e{ntw8#?57Zh7@$ek~q;3!d z1O&vn(*_vLtfQ%%Oq*J-(am7z-LeY?$79i*3W3{o`!gNI4_R+$ax4iOeX0L9j|)(W z0EP^%Pqu*+;^g@?S%==Jki3f?XDfhs8XD57Y_6SvV$1*v3xS(_6L9$#QgZMfYe6J@72FA zqWFJT39keUs4EvpzL9|40a31^AA;=30B!YRIoLRCn2)xloNcivLrUC)e~g z%NSDQUu_lJyZ=iz|I=BamD~~Q(T#wN$r34imu+-v>w=$bQs?s0cy^avkzZY{ z_pC!I{0U`VUxt8{w7vD3&5)mOj%{gt7O_6O6d(ZBI5Xf73zC|BAq)YKz`a^GO=P)q zezQAX8A`!zkxK&`nmQQhBg_0c<5y?ZJxXIaUmZ(Pw_S2$$2J}Nr=EXl;ryGXAiv(L zH$YIZ!bAjh&CS;ISp@e%C=tO>u{JRQqt=2IHi;s)U^2_Ok`&EqM8VeY>+Fd1-oHw> z7e*4e176YdgnFM@2$hxw_^eA0`iu!&lm`O|^pkyFL>I1Up8*59kkHdI}K{#OuO zptCeRJ?%+IQEz)&)I|vn<`vKc59Fpo{=(XNO<7oKF92k$94}t38p~ z?d{#-%crezEiPEL+j6tD^AGQw%nF_~Q;=w|(a9BBjr+;=-sRxI z*x_Y|NyC%8l|SO1wOSg~7uQ9GMxxbjZ6CO;)DqItn*KL_0`g>wlNdA5F%k{4P6kP% z(8k{?n)!GPWKDuHiAh=PTsL}O1Zo^=3)q$*^yKs=+!Q7c+8}=#V?zr|mRa^K-l(yH 
zC#xd*&yBfl7L`i?uHY>XsakOkyTCJwGWBSH3UE=Vx^N!9>YPu*PyyVw>dxyZ1Iu>@ zYGN{R5|pw_!cs#YVc@~4JZX0Z8|;5M{v3?s&#nY+nb%t07s`YFZV^z*t@yPwl<_7<^SqDx_~7>Z^0R&vUiJ8OxH@z(G- zQt-#gbcy1ZK-(==B9yvVH{;OgW16x*^EaCMQ~CJGCSP{y-}2~ti zxgR(2ce4>ErvoE68IT2OZhXnO!UKStpsl8+vzj!aR_BoG5Fb&<=U#{N>3ySpj2Suq z_;Q4PY4Jzq8rEldCL)<3l!;Mj2@#h00;{8+)-9xE_j6$f4I}@~z4U2YH6GtN!C@mk zs@?sSQWZ*WC1XdS6hyeIc@{$US*NV6tV=q{Jcwo)25_Ts#p&SWw*2d+=+__Paz-ro z-L#y~YO9uw&ZbdLHAmJ7B}uo=#yf(g(KZFH0KEX2`LYPP%JxuF+c8& z>bfze-)u#ebgoFJ0sF#o$}}Uqvfq0@rZuxg4#+8m`2MV)cb_bRQY8D0uTgg?zZ%uT zL0LkS*6n;QhYS)+NnZQq8uQm^AyAp)XXfD#(ap+}7}A3AnbP^Yzq7O*J?nB;*H-0p z@Dc{LUPE{q*3@0$T^31~iZ^b->h;pV&eP?Y>s*^D2b#JIdiR={!R4wR4a*T(ODj+^ z`Ljvj^cG~3uId=>r!*iUX+~khFT}n>pHk>kz)N$jZcuS>%sQ=rz3d^Wa@7LA$e*sU z+k1Mp_vO=#nDoChJkio#kS~S17dC_Isg7%@5A!LDZ#SkV>&rhV@}H$&RYYXTP8ZQA zx$Go;$rMIy2blVtdQME%eqcCDyQ-yoHKN=$Wydr=Bte=+Fdhu!R{Qn5HCw@dS2c1B zS^gf*YLKA(9+<+agWvp~JaUhYh#RbvvzjoYHQy~(5bKi->Ytoiw-bR~(A4_;YvFDP z{J(8wGV$@ve1qNvoNYRgPV;gWIn%50`vIuz<*EzUza#n;Pcw`?{dal%{}ML%|6c{O znFhYk0sLOY{;wQS%-{Yb_5;J6$O|iA8vAof^1T~N>XH2K!~_4Oc>ljcqyIihcnqhy z7EibpD{x{Lz@2*xIZ*mZY?J zlu(DF?}EtuRdmi{Tf>H;E*~fqK(G*n{4ntJeT5dD#Yw-(H>DyptO)9prpL{n)^UX( zrY=?x%d=F-bXnau-%8b+Gw z4F}cV>FOo$H&x#7^I6{0Pm$jI6?f;WQcY^Rx_e1cLT5cb9U)7eJC}ZOLI<`GzH={X zVQ=+d-{lfg+P0E{DFO<3RhIBIJ>3nDs?HLZs!&!&;R#X84-MuzX7@wa(ktVbJHL3g zw_>Pf!;eFrURQVr5DJ}U`d-^ayAHX3?s}M#9#o1P+IuVDnKiul7eX8BImi6|tPA@! 
z;$1>?$)mu}M&$A2O}=Kd{ODws!%Jui=g;>k}Y%QWk}aZj5z zvdF#Cq%&+~`X8}0z7CD9+F>xf6Jno*hN#TTbNtBdd7TVeU+s9`rXuQ-R~9?jokvU6 zTLuIZOZ;q5tExtbN@0{G@6L}`W;lq0fbpj`es<-*slz90L#g2N)dWmdN9~1qvz`X7-lc!iQPC6}i3GMn#g0QI5AS zYi!>tTiK~kEW4(7E*!!$EwU_5B8}&}%oEc3UJ{Mafv?{?oq5@nb#n_s(G$Lt9^rZ0 z`a@{_8oblX&si;_;&5rtzKIDL1_+l@*3rbNAxoWd-C+z`#Qc|1sWQ~mKUElA#*^6w zd<{icbxWN4?EQW%1>ZaM2hg}38@tF!TKsPiv&(M05)~zGO1CMeEbHkm<1~hgQ_YrZ zXPJ95W7P47qCX5ZmSrQ^81|quSiD$saH^j7Bs;|Djkv7wlHCg^F zKX>WJQgnw&8XACn^t!mL%TX_{V`dyUzYF-A4!<|MZ1qWZ~?d;zh+ifsU z`-(b%+gWXGpZ(5Pb;meH)WIRe695u8?AFp9m6LlT~8f^4~|tI?6Su1 zTA5quqyk5Np~R{ z48&CIXL&p92p!z+kHLbC4l~~d{M0LbHTUTil!MOvY!!DM5 z+$~Oz;$f?eWxBHT8*3@IH&QaU?(YM|U6oYW;v+Im;FjB*o_|&}Z5qBS#2i2ttFkP; z3cX*a9SAN6Zjd>Ny}G&3BBvYVAZ3N7!lio?eqLG%H_C}*6cq=+Tqj#HaZr>4+hRxM==tlre2`zvJY9_amd3f&$@_(W}V16BM#_zKG^hZ(e?k@L_val`A;@-dv zc)1r3jnMvVWF6@?V1uk~QDWM|Vh$Jm3c(Xxbm=s6-!1RbrF?CiUFOkw_YbwW!k6Av~ zjg3)_IbT&$i??LxxylVH9*Q+HoK}9sPf%n!%;SEW_$K#ph2PFGLAWws(4$793{Gbe zrMRKXvN&N&w4{>HTZ{a!dL;z_;bj^V43aI{mV{@r>;kD@OQ*9rA zbH)2nHTTQ!G_@y7&p8ez4@U>^PQbYwLGd&xcTD;rMeJNg6NRstCl**2eQNy27YjLr zMt+UWE~qlStuIzsy^=y~AF%Q|MHs$uK#-``=Sh=EHhQ$KF>EAH{ZDJocMUP%3Qh`>8yg(}$Q7VI{ds=`)aI3Du(^mK!eb`#_^SMU6U&MS}kvHAV zWZQIUoZrIL5c%-#*R-rKPZR4Wob;%Z;mtYY);K)gU5F~XQOz+*7*D-Msf+eT6TZkH z?BfV5Bf)snYHhgVl7t9S8(maW2~lfaDw;tlNZeA}x^)UhH0D45o7~TdsUtfjwZz(M zV8@QjbB_j|S)o8X#ZqvEQ+0EZ2hBX62Se9Tp?@auB2^6yy8h9HbBDhWosdbb=X4=0ZQ_V?XJRw8;gdk-~sf+P|m6=E^p+YLS7%$q?GJS%RnFQ zs*}Nk+WxDHYNcRBFD|<&Ii0XXL)cZBhMsl2P^-fzF+kA)ODe%rm6Am-O-=Uz;T!L- zp5CX?AMLvOQ`^2aMri+V5L#n%G<}^z2#cJ`B&I*L-=YI+<|XX%nwM1mJgQ<}gcgcq zFicEZP)wnPbV9fw{(AgEGmieWRJl^P#_zQGy-Pv#`b68v&-m;Qj%d3{0ZYCeA z#GmpunI6*6ZlQbuz3+*zRjmtjJS)ZN&Py@5EA>UnmhAiSFi15;jt0FJR8kz5ctT6U z2yX!3K9F0?QpXDKr=yP17fwNwJ~9&nsT}-1j&8ySX3j&J3i%M8Df;*%a|NhlpK#J6 zs8N>3K9xj5x%W}#ODQ-AK@E2bqsS?82&d9f$YrGLQO_;|h8&;-m> z4rpD0iKGy2=0cfYy)SZ*&zmIxY|9B)ScK+))$`(Zr^;T}-L7rzsLBQwv=nt?G|6qqQi&dRSFjIf8sU1LV(^I>a2RVhcCFr3SM 
zPF;suNPK@+ya=<4i#GRKrS<(O}Vb@YH^ge;cR2EZBVvMEvP?~VJ${?QLDWVNI_p?-KTqtT}j}rJ2G+d za-jj0?sQ0Xu$AdGGDP^BWN>>agkJB>&LozM4&h^J!5n@VJf8<8F zQW~nAUSy4&2f*J&I~+6y8R`-#KtEVzdLxN~Q2^J?gyESe8v z9XTtIO?ct7y)QPf2__SK1;(&?h}`bwH!qkhau^GOp&zVDs1Rn6sA+ zC()TtAlp)L5B7om*@m32&L2)FVO@dc`wxeW=|yw5@5l(sqFx{c$fFL|>R_+fD-jfg zBbhyqZVhYIVs#uho(=_H@(rzPe>zAk_NMF}cK!|3Lso4K(sLb;C4c@@nU=_Wd9(3D z>-kR<`Kz13tk0Q8ua>e$HeG%?{*xJyOl9KC^zq2B>vhs;merT^hx$?HDdd#AX50Pv z530VT(w}?i8>JFlCgM-=P5F&xX-F8V^8yDX(s&CuLrAr2zMl2V{calzBs;?{B7O8L z7rUTx)q9PE`*+~yhtL81_}?epCyU!uWYQB+qTlP-@Fe{*in}tkhrQ%$p1&=piF`+B zyt`0mF=7AGuM_J?%N-{6+NhRM#Lr~uguOK0H#);4oiBJ*UPNjpSN<*Q6&0mvw9(Hc zt-9%yRe_S^SplQB8L2o*g|<3hDe7{!w%(1q3m@P>#fAEr!-t(5HYMkH?1p~Oju>LC z(NF!^2{+(+cwFde|9VlJMPF}x?}J66gBGNw=Rp4=xJ_ZLf$6hvF?Wr(^y;o=*d=5t z+XVLIGw|($r3sjfz~#mip{aL$gyG0%gMOLPBHbkdH8tLT6)WjOH^#R=qDUTflr^wT z)Y#D9L7cBIzm6bhEZx!`?@+<00)GuHZ~FEMz2tm29>62t+B074Va5moLU*zHBB_Xv(~^&-hmtJmb=mYdwX?^w_myVlcCO8_5}ha!gleJ=9<7 z)%}O({ts7upH!h63jFAlePMAdTbk?8*hS69R8J&3hMDH3Rc8(NXM3Je7m~@1AVU43 z8`7o6VjSM}j8*;|c++NDqw|d``o!wlq1AI_CtLFbbGD!Pan{ezX}5BYxB6CT7UeG1 z#^*Xi<|1>u(8mZ7d*n#-JadA;4Hnl9f6_7?YlY2}{;!W$#l(}TtrKqcEIa6-y#qg% z6f{j3y;Irb*DYO7=~bOij-RhmYP?UK@xLOb@_()y@o(Rf#a~VjA`Cb0-*fLHVa)1z zdZ{bw8u_db^~#9v7>>AFKH~9y+eoU5W0o3`E}jJ`SoFH3yv*>~=heE~rX-FxHV!o!ugt`!oQ5X_27}wyS{f>x zGuoo4wdx+XdgFAYqVtYSSU;pywEa2C%`YBx!GliU^e6NT7;w>=cKhce~Ix zoR~Gbu^f6z8*h00h|+|McDA?sglMdw^iZd&V@@x*8YLBKL^gI zPGxmQSC#fiYn`6x+v!9KDOBy8fH+Aqdc81XlCm!0&z~Cau^T!i+*j4Gp-So+BU&@} zbc!R3=}-ZQr1Cv=MbRZ=#9HDfUcU%a+i!3D$T=6Lk8o3v{qOcl&@{|E!BwWF@;Rzk zam{S|v!>FuQ3P7cDq5FgWL6Im$$JF{-x;juPdNjnVJnely85=(Y^J5N!`&(S4P5I! 
ztpb*{O+uU#w@K&koR&q2iDRpuy`I>$@ztdqMwV*L9!*RCjwW2s=309F<bW3w50IGh==h0awK_MNcMESn#K2@p@rxfMY27}hc;e; zlk&6&xmIEy^?Z}&yppPr+T+ji@Xwlr#rETH+Eb(>KNi_riv;hdmrr_|-y4v-H({nv z($60luO^BkyqxgZHIrywP2B8jqp+w#adn$7&-CiXSEU89C|bVmS#!>UIMO4S?r<4u ziV^Ox1suP2^JN8XQN{&GJ2a<%Q0myIvX* z%ox4nDR`?$-<icUA&TfJL%_enSci=Q6U z;huyk70Q@FhHqm;BYVc6rrq-9TEk-Y!1*Oc!P&1PcDIv*vW3UzdR1R}_3d*$_2u-% zd-K0rH;$qo9-banr(?699m36eLSjzJau`_1KHfSquOe#VEaA@76Y`bsrQK8iK(2^J`Z zwNUjDx*e8W|ffborKsH9Vuk3TQ(UpDsR$z&Hhw4!}5x@509A@`s z0iJgvF5luvokWN}-v9+nx2_k1hmDQ}>)Vdq<4jn(#x565RcpmAR~DA#$dcz4Zkoqg;5trf)4&L<@# zMA9G+#KzyR{43CLbNtztP?7FwJJ~J8li|rFp3vLNmKU@&V5iHC5q*$VnAv2cN%IkS z%AcG}Hh)3~)fyUIy`LyG z_vTbDFdc{zkTQN#=e1DZE2CXu?0A34GnSjVR=-r}{%sD*HJu9ymPy&pxbm8np3;-j z|GkiMqFxl^u64tI<0hQ#BU{@~X5~dY@06bOaOacxf;pMpj%<5eWP;DBl_g`XgKBGd zEz;%Vt$ljcn7`I9Guw=J0}SEeKJ%ZurYa1e1QE{;dLk|%gZ(KtsHF=IV1fN5SvM*Z zRoUI+fS4-XxHU9l*8&*=U%{|1SlS+bMH{smUxz(&abWJtm*Gxk{zlBgjhXnB{Bq`K z@5X#+8Qks9>`c}S?K9l(4bME%|G?*M<#!Xaxu#N5`(WJr;D0y~Rgvk@f#>0?-t1;N z$5OBzEXZYdxlM;~4wj*O#rFJiy>VXw$6N*Bi3p{7Bbv6XQ}gW;myUVpOIC4u@pn$& z`(P~8G&6lAQfay~<9iN_N^J4Eqv!D|J?uZz%S=6iD zD}!ICj20mdn<}C4xmT=M&2MwCzvvN~_D0lDwLTT~_I^knWxJBb4m<7|b}!-04dHNC z-1*7zgxeqe%d=I(S5W0!h}kVlNyCv22Z{zawWarSSiPp+B^y$voU1F1m}}zRKbZej zcPrG-;$I(sb!roa#u+!Ncx1;zLhh8v89a3Y-zIlATlWz}9UGlLJ>2JGSK03+;YP9h z?A<}?M&S;yuu{)<2jIKXjjXZK`=9_(8=war&V?+eK31nx}E zr7W8^ckpV_<|wHr&XeO;9qUxrz+`=G%(tpifY5%!Td-4a;TO5n6tT(B9FGk(W$oPa zj%)K=Sbs_Sw%=h58woKnQv@cid7YWd*{J9x9!C3YVAN(x^V<=72YEgFHiEo(-|`>T zTy$IM;xj=*n&?3fLOpVMO`-X420K*F0TrhhMQayCirT~SnEbnNN;|Uwtvye-B>RZ; zr0rPn*wEhVs2BS#jfH|5&Jek_2X+n?ljd_%;RWtN&l_ER6QnsG%kvW)RC~ERNed?0 z8hh*3yx>!`{DYX(?WOOMGJ2`sd_*AU)&PEva=$$nvt9kvcZrxVT6jb;d-;v6(DF1* zFzP!yi3i7qOO7m!GCp-Lxm|U06$^)PXVd0WvSICBGtPqJwUX@g7Ta$4i7!prbR0^= z^V=>Xh|-0E$V2cquO9A&b zn_nLs$k;2Wq?Z-G&dGi{^bYs2FkJ*uly0|j_5(Iqst=sJ*hzl-cB1hx>iPci7U+cC zuluYm{NKyF4icEYq*8p4EvjbACF*iAo+Fw3Zri@S5#}}9uS{gycZr`_U<2>GtUKJ{ zEB<^Knq@?TVhWt|{**TxG@eMw`Da}~8C71T`^}K+Ql5o}@UZ=RZZf;7`N!K%WKF{| 
z``khglDd%fuCt~54y@3D@0_JT^|J#^o(x!~Q8h{~7xGVs^1s-PGxDWACfyxB6R~OV ztL0w(^})2x0x?q#$0>M-%uSAL1y>!^*vaJH>MR}Z{txorGAgd7TLXj;NP-3n!4oui zf_q1>purQY@kW9>jU|NO?%KFR2X}XOcXxN~IlS+8@BQY^d)Jz^X72o%`UU4y)!B7+ z)vjkhb$ai*GXp9rDz6aaKNLbPyzrjaVHU9$a><_-Fly`fQQ{2t+SbEndY{BzezlOr zn(DlaBJK}ovj*@TR>st*WG|TWm-pFXIg;}2EmCK{4U_n{EFYiMgjTniWc5u^5s_Wp zweI?}@BD;b#FcjIa&!Zi_y>buav5v=fjxnBkOc^lOl9u}Dv1h4!&(`|!T?OJNjcQN z>&|pLIL8O*3VrS)8&D@qhHTB=;o)?aNzT9My00wZ`Ryhx~ye?J)aNix!ZK7-)@cBhRDaEw>L z6D|tBJqD^UR9PoYB;eh2iN14xx8GK@YjWh?dsj(}^TK~=7IZUNI^e;TRc?Hm zJahtI*-@p*Dh~mD`-?-#tY!5{BrB-R0sdu3?}+muAR&3sFM~+$26i#R+F<(3$QLe- zo8VV%v(OUgxZL%;oA@)C0oFe`+9+P#& zjl|mM^No~w`Hq731~R`#C`pb$iaRCivA~U<h4|I&H3qKKf){k>x37 zzW7QIN-Y0{*)%6v7p2wv+ZXeuxZN=YM-|aL2UjuUpBpNa8%nz=-S!LhSX%5Rjlj5} zw_*?ZD{^ z(==?f;;Rz+1j8P`YvT~VZ#MvNo(`C2!Is%dzS`AlkInc25 z47ToFZZ>4?3vYkXA!O4LF2+B% zD_4QWg8>iN-X144iftvmj)-kH8?3gM7;C3LSr6EwkZbw}%#|+js%5=T|9znUE%zOk zb7`UY`%T5#yA#|n7^?$aa*yIDgE5kHgy|i@UZOjsE9zxqq?(z4d_^j0KN`Myed5-{ zkbmORjjx1X>LXuNx8V62I z?AvM;n$tB%P922L)B-ekAo70TM=3O!mu^DR)`+sPASTZC0AG5Fif-1_dnkAK8TM@= zr*6F4+#XW}#|KT=>ujZAhH;_t=x_{K;CRJt@g*d!FGfdbyB}J^Uo{`O%q4ju?}AkEzf6p%fGv-FtL)1K4s|L@Fj>D{N&(7 zPD$Y4b>h-rJdt70>FmFYudd^$3c{RL6`=tQb3B+}uK>jGt<}6+G@M!?bE`vL0u@X$@Vm&P6P1z!X-<_i+>=0m5iXymdA>vD zP$D1Zbx*mRRT>B*_=ok;lS<3J-m!%zgW9cXy2yT|2HWMIOJ%6#zx|KyV|z!H>$x%U1cV(;ajBuI$+tZ=x!N23Gf)WP;{CL1S|T`#KK9IM#M_ zY)PC#m&f9U?FKEcxlhYA5y$r51(cmwi2ALIzKSwBrE|20SG%!_TE6SRjTSQigst2K zggsgvf1@`4L{2ZAoNJ))#^kLtUcx`CQA~&}`XUuMfZN|_I@KU3%c8&Y_otD=OQ%Bu1trkH!!us{lx_0(?M?%v^4FAmG z93>Lm{(1GXgAlhl8R95`y++fbv^w^6$;k6hv7>Hp#^X1p-nl=%(~6fUXFx61LriOkqwUS)8?A3MQH7D(V$?m1U6=2IRI$UoqZ$Q4%}$q8xs&w_u}ykk zDIYsMCPf#d-(G^a$zkTh^&E4FoCC`m#BxPo+xhF(5sOANg-hZ@rDP-b5{;kq9j|nK z>;CFDysttfnwPD0V{c#Q$}jl5m`D`ZH`;BkM8Q0M&%pBvhQ&O03xqCUdFfZZw5NEXLflw)Qqe$|mYYqAbH3kYc;R|O8XcCjF!1}%)RdgK4DKW0mW~SmlF*N>i)z0WRNZyE znP678-)ZC{(rYc=CW2z68@y$nrNsdt^MFj^(&w!(uQRct7hNr<)#JWd%$Jh(|Ad9< z%^Jg12F`W6X6mv9I;2B?TNIgm+!Mf9QVMhn@4)3Sq>}qxD_h#-X_A6+wHz2#nMIT^ 
ztagwdB*=AzAq|sWjQoScORAS11EDpk*lgJ}{AQMXx`Q3dR?#&f2A#sY!^vxJ=(y=% zv&Nv8oCm`wR(=m=KYhQ3xdY2Ei)S2xE^owixOOLVQ{o!ir`WEwW8PL`j|s7lgm?hB zSH{s_Dot#pT#K`f>G4q6Nt#kzsN=KtMcEov@gk>7OA0KTj-EO1Yb+9ZD{<>@`%Z{y zHSynj!1O`9MA{i@@jj_0Q)qlREPCG& zs{1iot&ZgpdaqNE5rIqoq~N1(yotLARN+ z@y%&x%D-fSOTDIAuKYJ#`afev27TT0zO^>&z^A#Yu-_$d0LV%aiRAG> zDkOU>@ndm~7du#K`|0FgM?d%*Z*RBgpFvNbwVO$CttM3VHco*~%;8_QFP{viZ+vz0 zuWT*<=J^ccC`;^%>9y=a6BpC_f5K_&5j+2Md{TAu`^P^4I4<#j0k}wW|F7^}{d-a& zBYBXA8uDvFer-n5{`e71;qz7Nd1x-`e;v7KSKZA7Y)8lv7jREEImZ@oppE1hzqQ*Q zr_Ht!aCWQrOdpqPxZV0b5O#D8sMCGq>4UvZk^jl9=bjRiLVQpyqqBC%|BU|8dW$qE zXF#Oe!O27Mk%S&ioS zHP+*iKS{Mmz1Z8JbN-TX>EzBI41yCIuJtBIU^FFE#j5pM?h~O~CIb5PPPu?>o`4D_ zL^Dxhx3+}FLMGP4<#wLaG_r36gbGQjtfixOwK3^(caZJq$3g6fD%Of0;%ZCzY=IeMy(!Nv)N zvX1wc;(B=nQCGICaa3pwCPIQDp{4^l&IIB$PB&BN$ zE+-;#EOa*UuyZF|4WdU;Tx1;#o-5gNX&+~QTKJ&DTcFkZoAk33AO7eW@y}t510-JD zL7P{56mNpM%J9gipI9oc)# zVL@_!}vu@zr*2M3<{`bgYo{cUOOkqPy26(c)rleieEv^1 z*rSOUM~(+`3r?p)9K32E&B|I?30(}0z{X-FV0dIzbo8~V>xfX17-Ye9(=VdOMe=T; zQbNfyhM*!n{_vDJQE*!OuyJPP#S=C$oi?tO;z`8=8{CANvhZhe<%=gXATFDc9KrjE z{4c^|#}Al6U5;7jGxYvNFJ{>J%5e5NOyv(Xz>TOx2dhm9dk& zbZWL~R&zV=V9^)RbWN}b{%Q%o;T)y(3o5Ev4*EKmSxTFE|i%MZI}3Es|Ss<&pV!l+jzSe(AF&cLQ2#i2$Z5TfuFE2tY zm~*O4>_^h*q9e4x4C+rD>1g{WWH2HT*RBv@Ux8*m`LtacB_6||e^<5N?+CJo0}c?}oq5=kVXr1uPE7 zdp=X&m5s&A01kD3N#5+{E2XEngC;F8ai_4~WNi&S(iYoL!r zA%o>zmA-%LO7q`L(F3va?*z=?Uzk4(5ks-QmZ5%9O#s-DE5Co^MgEJc=^SQ0{men_ z_ctL!U|4n3j@0)TZ3MJge!p*wSL5R|xy$HH@K#v(8SvAicffuqFnZwULx8zGKrTL9 z-!1bOz3{-}SRwr{^F9A7B+~zXI<@THc|>3qhSMC3a_o255?aSwmL z7s7p30UQ%P{}(?eQLplnP4-1QKSWulS|Se#CRJ{+kTfCMsSNyz_x(~{MP1mmxJWTr zXU}2uAXDb9z{iJx=3f(F`8j%vgz0c6R403xiO>>e-bPrOzeCeqAMeeR7;A8y*f%)Pu0`p1M zko^NS@!^5`X>FPm$SoXl051F#{cofv?<@7R9TL|EHqPpb96=$G~pGW<2^=>!6m74pd6YXlWL<<%kK?=N;v) zH|I02=acX|dYPGkL*Tg{uc3T571v?_tOKySj=EJ`q&2fD;b%=caLzbImu8uCoUTCc zN2L(6)*%r6J@WHixsOD@6#1wkLZHrr&CCB$?QBUz+Tr_Z1RGFEzYNjikI$)|Ca^Uo z+N342I!_bW>^~`mtYsZ3doGgxs$2T&Krr>2^6rTFE(5hQ6(GNJ*xm8U0(KWE`e7K0 
zLU6Oc4}op@b?r~v;>g$y_Os3iABlA%0xkAWKp%m!t0JvK1_aX>3fdK_+x%I=$jUK9 zxK$2bM+g4ta>!c6fnrOd>8~W=;E`v*_qph&=qFTETO)63{}RDVb{uG)PB=?A%|UMf z%^+MqtN0*7qjz0HokM(5jim5rQ(szyOjBEXQFBR%ib{oE{^Csmw{%{Cs0cdKT_P^@ zO`}whVm1-3c*(4HMQJZ&~bpzdj{)%ALhGh(OW$D>M8Hs6?}jm!zbV#C1tLtw?Bc`<8kYN7q8`H z`;nfC?*l#emFK)CGu4%*%1w5{y?|bxhVh-2`wdz6BbV0(7TN=OZ1&EHU$(88{_?jZ zWQ%zIY}malFS{$<4!wprX+9|m25Ras{n@Bs`|iBDuCUqT*J>t*b4oaI{bj+dw?zG$umOF(mBt_r^|fW7JC-I}B(0tnaR!ZT9uvkF2F zb^5f&N>#;j#exc9ScMNF8_nOj5 z|1TT4)%m6~>$Yx|r7o1U?BSGuoS~8Ex$|A9S+$v@NOAXgjn#-+;h(PTV3yv!2iufL zu!!;8>++PL)?dQD%jglT**dRM$kpUSeg&`>^F!)+_~HHPfb0SM=`}gwxSKrl2<%8NN|?>2{`h{N%V@pN-FIw$21%_kxSl z%pgw}RUgnIet%~^Er+uoSn&Wdb5iKU3Cdj%(BAF@pRcn$W=*y zNE_V$3RH-N#=hgLvzyJ!pMQ}n``1ZSv7#YbJ=u|l5**pi1^v)V!_ZK={NiFt2Ee*N zS>;wo0be<-s=<+2DhU?XSj+2Mr^pw_j2~_S7;GRhpZHD?H0V^ZqUaGVJwgP2z*gur zyUsF3vV&L7O&R|>rV5iGNTQ{GupoeL9*S=Ke>2*DiTwWpAewwWFom^3Je|RPUvI8d zXifP#(V64EVrBMGV{_M5pJVpg4H+lB`+6fi@+>%q6(2h`e4-Sg`H|A3Dc_yNBa-yV z`ny~x`SxX0^6;m%r$)oSAfQXHk|+&$`oo}oB$dNjbq%_ znR0~J5b9keuh$;VSuMY_27A&e-9m)o2k7Z6dWgBb!OK6oL%+|vypUS1?l;#O@a>i{ z{xbh`D9;J8mGa%yl5@eVx^w96=G$>wd})EkV^fOpuMaO%gr-5kBl@(>e6{2qNTGP4 zLV9cFXLp9*qCzUwk2#Ruc%Jg*PRqsRT^wPSiA)aIgx=k(3eDr}T8x{*tP$gG$L16s zIE(a0%U}mw!=Tx~P%o=3T;pFCs=Re5O`q%ZqOB^Mp3tN;(=<3LVnUwbFYQOLA#ZqR(}UY5zV0CHwWLHoeDr?NKzA zmLqiDHRh#I?GdNOYxZHx2pV1*kDa9*J_5>t`@nbx#RlB9?%&Tfoblbs{pjx-otx8s zK7Ga*`sXLr>)NA*j9QHyW~R*3wWAPgCHR70D8>bTV$B!ZY zsikL+HrVD|4I#dQ1GISp6clug?_;lJa;%+&;(;$L)(_Pma~-zmA7{8}b1uc-FRiYF zd=JeeQRG_o?0BP1KOPK$iw-B!$armk!LK8hCzj@7Dmhq*Oo0--TZM^@Bbqj|+TXWkyBM~<~3NyJ>oZ$halNB)?Q>3$E4GSbIZ)+@Ry zX0vivXfjm`5$ELrg@oA`wxGD0#O1|>30rmsDzET$3ZVZ`%uTuS^a0vm$QHu=XInxq z1x;D#3)}D1)D5L|9L@|M6G~lL6(!SM?kDsH z6tv|KDUfEhDy#Ri8a5;PwEODAHT;68_otuT$KCPe!ByKD3>h$uU%a_MMMJ}T`7&mr z?Rn_Ok2b;jIA=2etop*xD1{afvG>*!wjmh2liOsAewZw|SB-hi?Pni8&YsrssRD(M zfc6bd%q2#2Wy1$*K|K7*A*Nr>Yiw3cYQbX2qULO7Z`K2EA|s_Y7k-7rWjJ;=Ab*PH zA6Y9oApY9IVE4gVvwN`~0AqrH3rFxG4hjIJv$^dGQIYiI?wfR)s|1cU(uVF|?eOWZ 
z{T?^?AgfbfB@p+R{-%XIrc7O_I!Y4x1^$4c-scieF*6YwHrigHViukKP<97ln+VPe ziO2|>;M$U~J_H4=@a>Ba)OT-V^G9(4*3)DVmczi=S-p;Nuh8(Xc@NrkPW1+-Smc%( z`P6M}v2m;Zxo$)1vpL{BdC~c;yg#P;5u=0$oiy2==vSs$WceLrl6zXs9>HCF_cbJ4;?%oqqSO2nv9SK6C>IQaT6e&yU9Q%MC zYKRprbC0m`75jzQb(uMq{qV1E69MV3kI906l`VxnakcbEP^ve`#6Z&_!g0`6tt)GU zug`4MrAlaYi*V!aHz-R<4Jc+(Twsak5=W(eJ^1xG<3_Mo ziFLm~ElGKR=R)CfStFiDL%YP#+8tMRb@q=|+5&_-^Eo@W^J3&njuf z0%LI!gKPMHwxmm5XW)40^`VfP`>E74ovh-|p@#KGR@h3;n>X4thZT_8u=c3zJBD&ZQBc6?`YK~+14V0!iOg! zSj!27t2ddiex#9kf3_gpikMdy6}}7sowp8^>+mzZjYzZWpbKz3!zu4L&DsSu9BssU zZ%rQI`pwq8MO(Z02;g4yjJ=tP89VkE#WcRpGb+i1xLscje?rB2_`5$h0C4EML7B!? zRIv7qj&SoBsY-tI2pC4dihihIMC)Y}xD5aS@VS;{WI~pIe^_)FYx7)>RZKk5@veKG z#I?Do*5dE3LHKG*KWIZprJ_0UwBqDk}Ay2cM0Og`2!vvdC;{DmF;*ctZ6 zM`FI@;-{(FUD>`PUTAGdY=vKXmvhiUbU}w2g!4z?;_6Ho-CFxePjOwXW5+cf(G&0Z z0HtEF3vCH>*-NZ@e|K3itM3QgbY=C;(J0{M8;+=2)~|(=^{zF|)jkr!nQm$#Ko(JT z(L%X{rp0fjd{!(`m84ly_1Z|T!a^TgCdBC*lpi%L%<=C^utzZ>v?yaN5%Q+Xa?YbH4Ut=*M@(VPWT`>3t+^MUg#x+Ri>v7xd(GDhb`7Zc?zj?BX4*FCS;HjvT0m z6lY(zAKw7qe5Oe3Xac7C?edIr)XINlGC=$p)P|1b1=bpJk9p=PWoLPa#pTM4ilfGr zr`(P0(yuDTX5fRL*b7~Vp_5Bveqdep`Y~HW-GW!sr`m7VW}y9nwH_I-E#j+7A&a5h z3F7%#;qWi&@aJ5Vy2oj+cU`x@hW5-+7+y1d9cF4ax|0mTrqGMpys(ev2c)5bDlyl_ zxK<+ewaTrVEgTpYhXI;997}|^RC*20p+!@Q@2%bgUvUxilEn`tHHtGT^Bg3%6mj&_0!>=P}t+-<>p_M!+{LuzC?c}WsTH?4Q=Oi8t@yM1b)+Jx|LB*b) zN6+04#iNo;BoV4=^5et_b#d>q-PI*&=7sd{Z`A9ja2q`wk}{nc=mQFI2AwZ^E)TAM zj6qKJx!vXp8K~YjYrNK!TvSXUyNlLpk6TuiZ@9>iMI~aiyZp6sKFg^u+`3wxbgGHx zYGJ@mxpab^BzQAoH`9|@=3)qfeLjNDr#ku7r;=o#SD-yG+sM4b)RTjD){j;$b!qu$ z32hWF$5*yQ+F~Wq?ixCaB8e(LnmVi&>vM4b{B)~0c`hEAhS%aEw?)^s4(2_G)<+9 zebjg)p0B2G9{bBTr&Fr^Q#O}NTO_;O$V?MEFa^|(4q8Z`e>5GoKWcR2erty2qj$2& zsFZ(9c@LdJ|5{a@jWO2JZqw`%+NW4EME8emEZ0e~PL?Gp=8|a3kWk$&aG|dwMeCI0 z2AA!^NM-~3p3@jsT=;E@=W1mr;%7^%D^8ToNwa7|gDaBOa)noe!g5OY>UM48xZ`qx z7UvcyNb6p-q#S!i$W0hF!~qay3WE%dJ>&^~P=)pxZ^<$N4(ny6h+DTeiVm}n8?ANu z;aK1vPii-VyK?u*>b2)5u)LqocF3vax86BMwXAF#_qWp^cE_jOt0H2^u;zswYq&S| 
zz`=hEdWKpNvVR~8FifYfa!~#F%Q}~N1nvECd}mx8Wb#&qk`+ei(fV>aG4`ej~cOTIJvQy4!u_zI2BSL6A;x*Uxu)Pj}=Fr=~f` z_#6|!?H3tQ@rfrHYH45Z?&wHlr@NBw#A8Ob1o5>T6U!zf+)kIrM|!6ngBl#*dX87L zm=x^(id&i&z6(7Yt}w&cj;C(ZxLr-4jiKAo8Nr)%_QJ0z`gJcx1&e}Yh?up7TiaRa z7@ZHaTgqQ9z7XL4-c<7eznlC%fd?|1x`ETd=eQ{d9cNhzjEdYZ&?^%wSqjcOO(^$+ zMVYPK+`laV>uQ!1TQGk!qU`br9WH>S4yC}qxv-}ovuwEOE|qIn3p#`STwy3g>S61| z#5_NuYkm#pe*4sJ?x%fMI&tMZAtO*8kJjTQ0J7-2OpPe;|3C~TBLFZr6mVm=tevyj zJ|Xa1CSY!T2xe{Hs(vg_9vl@0DS4Ebfdbw6DWMN*InsR^u~a~T)Lq6-2Q{C;PI&u1vL33 znxgUcDS4Y-d)sX-1HV+TRx9Q04^i=*li^N!r{ox9`T?}sQxk^-RoA}i9ql(GF=9Bo zx}Pq&dLa`fSI(rrPxG74u>^emXZqSLQJP+JS=n1R@tu>2Ur?+{M-bCjJb9Y$v>+7J z%&%Cnf%Iq&(*u8P1}8o31kY`rz^o*3Bn(U`W2OH)tYCAR|0kt8zXve~z1wvhwG1rN zU@L+s-Fj2kpUV$4p_P6U0J<`pdogbw<2;N`N;LA_2NcsJ%Aiiv^IeN9B7*(Q@q@jP zwGRAY%T$X(X0ll8KPQu}pLf`bqJ~gL9ciS&YPkR!BTu#I)xAL7fb9P39L-Xd1yhqLTj!}DiF-nT~omUyRX zM1%PrDN?9pdW~1{UKqBD-J|rw&%nZJshO*nSlC%-eY+X)W$J-hCUT+J>@S}D#a*dX zOLb92U-6?W3vX?i&GJ4MRT^jLPs)=So?kZaZgFHEm#1omw^y29c{T*Y^%V7mNX8ia zx*VQ|3!|^yMh^_OFL%-n=rhIa(jUZM0mj?#}&7Zu9XR;r^tTh(={>=URbPF4(KzC_Z z&S`DL>{e*yncpK`q2@nh8~ZT~vr0~Hq!3yXS*_2C{FSzvy#0f%HV-(ytKWgIRVroE z#hq&L{_w6&X}1nOP<=4Z4@CRs=&C9vR$b*l2w2 zeR3|ysPoo?t4)*;IOmM03uc$6=Ov6Ck9DqTV@PAYcHE|Fr#x>VYRp?vEd1J$IUenxR#k> z64d!iOjp;zno1T*u&}W>g^OE`jB9gvmtjJ%;7J~F*&QG+?cjwwxRfuA^7g_Er zjU@n0jBy1a68RG~CN32d1jE0mn!Y#>q>$+%mgRCDt{Ro=g`J}MV`^@ZL|muydb5_l zOgLg*k-d)PWg=P(%W#+Pwg+MflQGW%WJOxPP#FFVYt05-k)??t#w|sBt1kigw9*WY zuaNgds!;8~P2+b!Pl(>lCz0mJhX|~H#|7Arz0a3qEw(nekmOFN3?i;1Eb=>2bkF#{ zgMG;;zDkm1f=Kz~b=G@x{;C3LIjGojo=rnmP_gJL`&SVs#lFh@#Q>-R@qXqyARO#a zJu%+|vFOux&?oz9ZhbU}cAi+`p{Q)4;;+*~Dd6+SN2ov8Cff2Ss1&!O@JjES^TSV9 z(4mG1O8;en!>=e4%tFpfN_$rPLZ!aOlq}1uGE7IIOVS8i9&h-Q1?%9HUiToL6|X zduM_PA`fmi3DWO4En+UG8RcTS5ev>-eGY^%0ey=kgt0W993`c@-I=Sf{+ezo{+LfYHV4B;xHuH%qyFFn zXgHsqK-lTak`#SvlCf&hQU`pJdB|hQFFdWV$xVdCLH$ZDGXfOUF!1OC>2a$WW2NXU zM&FMr)&BZIc1W!8jC}0Et?`1j!Fo1x8K)UX-7a!r& zCIvqUev)8QXanA;#@U!XxT%K^>}9CgNp{h6S*^sV1X$!PN}{e(e)&kgF*#;){1P?G 
zw(6R*ewo9LW`ALI<+UdAeSfz#6_DX5Vlqoy_<`0o8am)vk3MnJPY}W6&d*xq z>NbVmP1S@OFieYCxK4>#WSk=D>70FYQFg_PqB@}Hu}c+i{?tqO#B}_)$rzlSP^)6P zYdza@I*QQRb#TyYxho)@>$q&>`xLaCs|I(R1%FmF+3kkeO8Yh zm3`Lemq1}R75U;f4n>P10vVE7^9_}WoLl5@Rz2>t5}b z$ne;?Y(Ystra9ON^>p4{G&5ront2EL=Mvp!0m0elokc)e#Pz7RiMq#o9qM*H8Y0T@ zNm~OOab-7SuT(IE49}Q;}lb*>QEZF?|Y#GJO_hLGiY8<&PEln1UYZUu=t&u zQ_YA>bKSMMV~=Z-yYLHlS_Ys@`j_9<7$-F5!~-Uhuj{MJvs*NK%px=KvBTg()|;~p z#Wg~=cF~Uf-E%9D=vzEEhj-!ZS@nzTdmM*`aXLBg%{d36H>YdwQS%ewU~5o86;~MS zWWN>%r&<^tZ#iRS|7F%OVUE>eCH7jn}-%>`-ePbmxIb{ z%oIbRDD=!TF%y@}BmIE%b4b5oyM08%g&V00S^ zX0Ni9sMD#6{ZV6rMVS=9!`~ zmYUn^B@2jv)4t0sxx2s&!`gSqw{3wZC54k)s2zG)M6HSJ_yTpW?05Lxwm${(<#83r zelXzpYGwjwUE6C+!8=f(MJr+bXEkW+OJE)CMaB`8$*pb6sP21cTvf%j z{)xz0v!=N*X6NLn@8qc_QsdoOdu+K8%?v@tTP-MXob9zXYmJFKMiD9 zuCx zRdz)b%O*V43pUjHEPl*c#JvxGS!Mzp-XBmLB+EsW(bhtA6Fvfg-VUG2LtE`kg9ZKU zE;>!Z^>I`>qRiEM>QOu;h z%{$rR>d#VtxjJ=7II41&L|s4)xZ;9L|(MgkzIevgfy|C@7&}n zQx&t6E~BxByHi6w)-b}&NcjW_7`mFMN2Je)XI*pdbNY0A?o$kFxS#{7-7a(RKnGL)xPG7H9-c7W z#?E1AesF%LwtS%=AD(1Lbyb&cuu4GR)oEO*OhsowGKG+`;dvgq>1nL9YJaF39E-U5 zRhQLmXDHM}se1t&W_xWL$M!wJ;OeUCbJ)jEpD4Rdo}i+Bc5bvgI(T$eIIEtQ7S=GM zam~e|E}#+_J~O8`2lsv5M{wWWMH3xd(`PR|GBZA0Sif{#`)U8?VCBQi@ggD;+`c+}*4*(FyA~b+fwhT9P+5><4V> zm)<55-)oX)DkNACFViwLgG^fsuS|$l%?`)e26Eb^&|FI$qJ9R~r-cMP;c|pBjHHLkAB@N zQh9$z+~?fU&rOa1;e{!LO@bG7*spUgQhT1MGJRgZGhn{`vPLgl3KfW%cMSo>uEn=ht ze}qm}&tCq~UiV$i=>po-n0Y(DjOkjA@IuCs?mqC+MwXunCRFCENG*(9ev`6WJmun( zrkg4h+g7_Hm8=~06-C9kqTR(}Cse_Oy&)I4IMB=*63ELmX1w`HCs$dB13D-OS%O67 zY1r6NzSHvDCZcIY>2$%Ycg{7%2qb-{YAWuPEt5(mT4C>!b1=X?cxeA4_sb@-#%L_K%=wGn{E%94!s$#L2MnxzZOb3hw9N=s>Ho3+PpC%Y$QWRiS z3~!;Lyyx#3$*xe_&`7gT5uurW=PXM4!VRu2cdmi~KbhuS;p&@-KKu})#uDltZLfH> zez21*1Eeu#DT8VFUA2Iw2R&A5FWeNdXAH<=PU_wMlq=LsPaQZ5L&iAG-1EnAfE?=zrJBFL zU#;#Xs+!Y<2LxZMy+jI;FCJ5j4)Q9qS>oY$s!7^lcRlg1QM1R8PZ9tjmQ8&{&W3kc zLiwxR)3jJBmDsQo3&QOg9Qx>$U%RB0CzeQ+w@eq0q*Kj9OPwxcKFd#+IAztE@`N0O zPR3CSy?}k0Tq@!JW0f;+q!54w1eT`IbCIJ~UiY{s8JQ>~-pkE4@P2LwwMo1+i3+DZ 
z^2FpDtF*B*b58MNjAqM7k~-ejKVkyhN`p3&xA5Vp%-_Pwy&(Lpgr5HmDr`1?1pkg3 zUwUWa^ZWcw#$X?m5WfVhZvRp#%^dYxHs2#8j~*Rt!7z5moSoX9YE6uh#e!=%^Vo0I zFyZfCZ`sznoelP;CKtdi_xl?hk>km0L}9E{WhQZdL(z)`cO=UXnagvdW$icDjzji! zywi#cY_kQ!iJ7@!qQm6TCw3}QB5rcovgA0f^-kXXGu&?kv$K*+7E`9+=7U4{FU)Gb zD&C=#bVYc_Q3H8DNge?%DskV}FDy0OBR6{Wqt%&OYA&gnNqb-Ere2YQrSuQCEyiAh zk5Bx&Ckp60W|+9xOkmw(@rARi{iRvGWZ>bnZk!>L#fc5L-DyI(7Xikngnfzi*1C)SC+H%NrYX zpV6_fT=jdNqN9hkUR%<+X-Wj!yQZV=U^-szZ=6IS+BybW?&tN%QDQ&cLRvg;*+2ms z@AEqhm^rE`k9ry=1YJSP?3F*L4`U^kJ^a!svJSCiCPY)jIXb?w-_J}N_nB2NF1SH{y$%oB2eE?G20&(59pB~13k^G?5 zU8^IU!GZl0FmOP8R*Xk3boP*t=zZ+!LXu6zc&hhC54A&xaTiNGT7Pg`#Y8J$HwP>J zk?V1B`zQe1^5_jhg0;;O+a6$^9u4EgQHpgoEa*ZX+XMGz{YSFWEPx+yCSa>nk@y)v zm>;>kh*o$m%P>hFrG=>dWd~Fel>T*j6HaJiDzyLfP$H^iFd@nV@iyuqAA=zCk@!+Z zKC~_KM%2+=@cE(@_~|@w-BR>uDf|_TJ2vB~z2S$4xXVZ(Yc#?nY`ClVQs%kLE60wn z{>sPF8MZG-;=Xb4jiI!W2S^l{pQMt^V*wq*Sb`x9Vc zyz24C2|UUgvEdYUrJ2vBtwga@&T$2&LmW4tOr{UwKc_!Ra13e(@srs-2~j>I|LGn0 z;6G4xkp~8M{GPm%+J8gPM2_SE{R9l_6|n=FlkZ!1xB|aj#3D9QA)LhYHmu=>8SE}7*pe^XO|RJm%-QByF>#4k590*#7#HQ|dN+zD0LuHgDi^SZNB2UM>G)+uh*^ly<~CGAb}#PNb8_aL|VSft#W(QS&SA6>z*G+p=6FY3<2 z)-_FYt^CKgCa^%J`!PZ2o#YdfR#NKGd&*uD>&APT1|+IG>B32sBJ_J=(KEl?K#-yIhW)u4Ere6WScXw}Q-Wpa*W z4A2`b0pfZ~3!ic6Z|fG!mwsjv-lwY|OgnW4fdeoGu{HVsd<4UJvzfZ+wkOy2rtOu< z+Xn{otH7q$?&(f&_w<5^VcxGn@zce-9=UaO5~ky96W)ESCg-Nc-w(bsBO^7Uyl#Xf z=NEs7Es?}Ry{%J+_RrAXz^3a4vOgy2OqYRxG(*^X{50@fXl-BJn2aU7+um90wP;C~ zc5mA8+zmr(rLfFiVlXX|pke+ES5Z+g8a$wTGfDsD&W|Azr5xC}H9;@!VPM>k;%>Bz zrjp!TBBA_5D7?pwTPbB?C)?tNiobH_PyRUdblF5QOtuRQ`r%UM{59W%~m3h z>1K1Y{5J0JSF`J&(G7wAF4iu$`+Qb?w)EyJ#mm#TUn2&nqcM-%YpQzsUZlN@*B2m7 zDH*!qs#7&BGZ~XCP%Bf|*&ng(Br7mQ@Z(Ky$QfB!mvn~lG#t;(&ur51@G#J|GkT{I z)Enk%ZH>qmsFi-{Q87C^UT(0vYwuf?(~*n%OhIvlCyar0wHC>yM#QQmw_+2swxa$j zrLIfdP3xVp$${-(Q)7vg8^s_NOHvOP6?JW-N%M)F&VhMi?pLWm6J_Oc*P62qa0+^z zhWr>)<(y3eW=;Sl5>2EgZ9LY{O-Zy-ADcgCd6TFsmvwcy=w~tbqvKj9HH3yNR3r#; zdKv~fCvkRaVrK!@3+`_~3clTVC9$|3Y@kdb?p5>_W-?MeAx2$(uu|Qt;JDE$A8G5^ zPaYMF%eMN>kQAd@6LGk((0&5}`j6-a+ah1{HH 
z0HDa{9ZqJjapl(^>d~Jbs#G=lZF3r%Hjj?_t8+oV>W$@W<_zDvafmr3eU!0zwcFq$o&H5J>1H^rG})0zv>GB&aANRq4IABy~?oP?jJ z7-r(6BK3`q-_ye8VzRqf(kB&_zh^Yol3GKv7$;XTsm+>B!=aae~Em23I2yt4s*oasX&BH_ud4Pd7`Ts?39Og*t0t26i2F!$l77o|3oRt}S<%EvSX@ z3=8?eIlR1PqZ4}h{GN(Nd4nL|{2f*jo%=6P zS1f3OJPrD@Ts}g-V*(DoC(Kv4D@X?Kfy*j#M(M+;hx&*rUen)s<`Nl-Be;iPZ`#zT zhl7HTmyQ>E4&#h%uIg{z_H5HrJEb|=SF`*Q&jsww*&^b|?p#2N{Cz~*tNh1|JfEoO z2Bxk7fRicQZhD=LCdIEduu7QSwNpa1{J3#Z+DHF_gb;~MJ+R52eMAJ&Tk9uF7zT0{ z%4Jx+yjS+45s8ky2gPMh`w#e58NHh>C-?N;-LI$@p-!|K?Gt(1Jf68-@aS2}HIFCo z=qT?v_hZh*EHse#9+n5+Wz1on>i@7Y8C7=n;bg&?K?Z5jYsBE|SIj`$8ARYgEHz&0MSzDvasmCP zE_?FfDSQp=Ys2&=j*^;v^=4-NQAF9wBnu?vI8jJ&k9P`bXZ7*2^4o5CBL}~}d`o)p z6P(&52{^`>R4r=Qj`41~VZ2M>VnpeKcOJ@!{*o%UB#-;>4hkfcLlNs)NrT!#_QkCg zd6H{{_5Ez2>EoQP6}bhOE-!SyttuHr>xUZ&5YM46AG0hcXow*oi~>LBfUW zJ*CGxC5t~LRQ#(QpICnYdDL9B*lQL=~19%6XX>N0x}&jMUQl_)HX7!m+2cxdZN<+}RiC`KB&v8(}cC zkDk9#d&*H8D{IwXpe7q9Sc4wH5&}6-xKC?oy!_^l9%&K>N12+J1`ny`4q=2*?KMrt zaCqZzx+6jfMY&OF(`62Vj11`XccK>`KQgRYv7NoNl>L(@l-x6w5blb0C3B#`_fzhl zkJyM@M%A`g;aIGn#=Z%ITw>w`R$&;9T$c?H6Q)JoST z#0Qf`CUT1$*`Y3gX~XSd>kqc{DZhAr_#suJ z2YOHw^z0C?I+475j~w2aUftwKt}Kq4O_^xB(E`zm&!+t~Dt)Xu2W5%s%*X|t-RTcF zzcb$boxWXRKh@bzaD82;t2&E;VfRA^PO|lf1fS${3^-5OIBg6kLb z9xIQHL6qZBpbjYfi4VJ3NQ>x7TQ~-qbBoq`-5r^ojc;oduxJoV zdo?r*rSZY)CCZ2S0lAR)9&{P+1oQ<-(Cr$SU>Q?AJq+h9lcZ_TA5(&1yG<;kvnxVP zicPI)3Hw(6={54b8$y(KUVH;s4q?6n6LG%A*pb}RFop|w5@N>#4BfnjkHu94au4$- zWT*hWS|(Os<$am@ZH_>s-6`YItepw_*fLz=#Q zlIA#ZYH=5lnnIF4s+g6x)JQa*31;lOP^JG1x`>T%VknJ2Gw=+M7%mf8FZf&(EhJLL zQ_%xYT^tC$wLWh&>?@*FGSRo!nBdOWwvDLkV1=;2@ej=d?N zd!aCUDi((-+wVBk=*IizKh@kv!Q$Sdhwx#tV@&*HgsDs8tsV=j{bBJ8Ge+#vAL&r% zf(VOF+Kxz#vv~nY#O-K+_seGPbN<@~E_?6MWA5>zCNMn_N!=x$1VONvb+cmBeq~Vn z2Qi(Nl|r)O>>rF_wB4_7TYZGR{4(AU4BVfOxh7x!*ko#(+GJ|I^+RrY&!Wb2Q2$AWF=zBZ1K3JWHbOnMYkueX;l`@C z=0sSYZkCB9yvM5Jw{i9uRq4oOdDmgyR*(loHIbkC-JPxoP9yUdJ%qa zZ79^iTR@Ytx8m40Vud8-N@2zLmsVjMYzbYf$tZ1tBS!z@Awd6rSi3$Z8`?Nd>?``_ zsuE5utC>&K4B=f0A^Rx|B2zFp{5e38nh+zr!2rxArj>GBp!GopGFHrSGhZoc-vQF$ zw~8L( 
zyprKQ-(ryq`u0}aKOJ@2Gq>2-s=5kprAmw)=jgsGTp&jI^4-60>hx0+yb5B*`gqxI zNYg{}NN|GrKtXuM7NvYs>S)B-c)v|WTzG|pZTHtSTew5jNRy1W7EPFq&iTN)jZlip zxVkvWso#`noVHFt10LM21;q$tcr@BA)R8Le=N}a=6Y&78>7io%y0UNn2^Pe8Qzw+Y zQEq195JTnZa^-KY{{|KeCvI`+!*+43Ln0ntBv!fnFxx8{hUO)C7fnh*%}RodFF-BD zpNG*7tM_$Qj)JxpRG0NCi$1x|tZBVG9=^D+nPywlXBchm?G*Gq$B$VR170n^xYog| zDxt|JG@)jOm(;Fj7?)YMy8ho_0Q8;xwL;p%Dhz~b2{4hWJv@U!4}ORWECPaF6S4<* zlNE5GNYpjRAy}kN^&OWCWvE8rwKPj&nP*9;F2eU@H07m$$B)-Souu}O7pYL6XPOL` ziCaiQ+32)80ymf5fK0+D-D}D1Zk@$1eaU-C6nC|a^u8~fR@7+c{$g5((3=?>?5dmL z>DyEHHqqO4^y%`bm#GP_yN)>-sm}gDu}VO5&|kr>%>9VY%mS}6^|>^98jm#dH6OD* zLW{F`I;!RC#!DCZ*((31e`LFV|8|?V7V*O6fo)gavXvedh{*%_!5ASme6_SuW0|d4 zY7<*PLIL@4GW!^ zp}I+JaQ8`^)qdWx5n}7m$@sXi)JDpvrThk}M((5|Zxb?g-{Ka+;shD=Dgyw~f|*RZ zU8sc$)S~yaG}Wa#`aEH;3^engb;-!Y&)+k`hV%qd4PfbjmMAJO?$$p@s)R;7*8Ke( zn2%fcqH6Kqnwu=w<=WN7lfAUXH=1f-BfaP1Yje=&YsXY)H{xwN%%63dBY@y-!GYo5 z;AljVzu-XF6up@J8#l%>6{eJCZ-*CdvtrzXRTW=%H}B;i;6BIee-7(icqkG2Z4Ndx znRS|D9Wz=`YbS@YL_#+<-E?~=wTDwoMLi2Kk~L=8F#x6cITW?OxS+6|z3!7_gvGSkCs6#-BeL!D+TPZW_j?qw!UaDtV24Z zlGc?Gwafm*ZB$5B>CTCYq05Y^x^h~Mm6wsc9#zAxqs@dfZKs7`=?J$RP@aF*bq5eB z%(WhW7}z$5H?S-GZUfl-A3U3C61h@Biu0D^cL1?6QxcvNcQ(Y5)hp^>e9bYCZ4&O~ zSjXK-FL!u`>5KO{c&a}p%1mQ6JmT83M=!MO{btr-qKEu^9WGI8xdE!K=3d%|FLz6s ztkQ$sA}qTFG*gO2W&~1wLbZ~r>|?6c!h-=Dqf%5#q5^XbG=ue_&U^`XknWh@w$3C{j^eg|S% z3y(W*J~3v4Hen@ILKUO>FDKV*RJek!N#1}41@4+KhjTY(erll~M{abDt61rC61$RG zrjZTqlRbs3#si6toR`gMFKpntMpfqj%zt>#T{er`Q7+5f=~yS}3%)(N%gSooIeow8 z<-S+bDo9miH}YmFvwo*JmBiN#{K!t-DR|~_Uf9CRR2mc1`#a7^SaH#HH1>O=94WwH z5Hmx@U>$Z+hOwJPHA^NS-p5e6!6byHDq9;mz_*M0toMkV6yt(W;c2fKmtol`_oXfB zUs~g?!d1@rGg9c~8zW*?4OjQc(Fy}W(pwA1HQF%ZisKiP5^Axr+$&kMYOe=N$!rA9 z)4VcHO_Y_JW%JwJ6ouo%y}4z1N1>6Y`4^pg?nNNAt%Didpc&a+hA1agq^jfvm_Zn5 zWUF_+QtAztakx58=vMClIDrx-Ly?U=GmHWEhV0K|h`;_elFFZw*H2-Ho_mA?IfV}D zmKs;~AzsbsEBm#;25KKlIZd_*Y{CBoH+{E+32vm8uN+G^WlU@ba8h~?9%5H|EbuOO zl|+vHoyds`#&B#ITCWu&ZFlKcB~$A0Rkzi&!x`L6gMJqDWL1b1PgJ9w&O1llpUrr6 zzKy98bA7_*em$` 
z7!OM|$*1o2>|yq2z*z>ZtU2cSlFC5ySP|g6G%BNMO{$}367LQZJGi4W^2ex490=3h zvAUIaFvz-QvIO9!f|$B^GQzPN`_B-cTb;}aGU3xA;rN|F$?r2*Ld@IJ_rL5d)cl9R{8n1gzFP-!^|8N zc3w2V`EADxcoOv$SY=pxLR65r~UUYxASwDB?S1Au)vwMI~qZ~KfCUik#w{ZD`J;Kk0gKIJM^l^EoV-#X` z3j2SqVM>63QUnmEhKXQ`ZN46FMvlNpAwV-)hC zb0XEl(2jUnt>!_ed;fW^qc&NaNx!sZXbJj<#g&q!5==JML$X zcE8@eTE%&CO?R8|)=3~gng#m*RP+C-{V%@Ew9adsG>7i39?KEu475G@OA4jxBm?+d LL$To5i}(KmMRS$e literal 51867 zcmce;1yr2P(k?oL1PGoGToW`9Ah=sdaCdiicMa|Y4G`Q0clQt=xDKu%xCI$(ko!*d zK6~G@*8cx>*ZR-7U)HxWZ*_Ndb#-_3Q`OapP>>VHKqW#2fj}6N5+9X7AjCls=qVZs z5Szf$a}|jKYoOIf3NJNM5#^nNb{@Y zJZFtHN5017AV-Q|Q=sn5msSD-HZ@ls-hc+uy6GiG67`Q7Km%dM8H@KM7U-nEFf8K) z+i4C$d{0BQ=O#NAjyIa~P?4!`DS*~bo?Bv*9M`aN6+N@Bg>rL}q>8J;ML^IvrfE?5`o5OZ=9W{N9i1xid3GiJ_YnU1@Otr#m&e-@$)0mMqvrHmO?wy72m%8K4( zgqBw#0F!Q266YYT@3rTDkhH?eL~B1P0Ae{j7hequt0{N>v#l9)fap}>gp7Yn1B~yS zo0TFpuk;YzancT?@SO61RE3^wuwqsaV9(&}V+g)zNJbl0Bbk1KvRb|CeAkoLy5F2` z6M`}7hYSKO_q!VF73nYGC!27~W|Dd`kj(`7O!_28q5k2+{07KUqJtk_NLiH{Su5}& z!BCt-@O$X_y3Vwz>dVSu=4eDZKr&Q!>P=LXq01i`uXHQ<%hZy!RLrRtoVC?X(-(vg zKmjW(tGLQlN1;-z?m@je5yQT+nm_&nXD%zlod z3UaooG0Zj5!|jl>2%na28iIoq<-~x)0$cULgQi1`*;)+efOxNlL|&mcK{>$EeRsSI zO0BVjAS}cz(RN=}FMG^7Vbl_8>o9lXtH9TIz zHv}jFVRPbk@+~Uqds;<^X=qFoNE3+FE64(J<9(tggcvnAGA94tRBh#|%SQxi3wtD& z(Q6x}n9L9xyhgxz-$S5g)=*~$h}jTJ$j%ljCaYm;BTdaC%xzW-@BmSTY=|`oV|;AL zJKT;RSzrQ4IWjTGKcWj?VB*?s<8s)c4+a|pREi%8BOZi`FB`0!E{wfQPW!eUUNQK0 zkpSu`phxbB{$luSvXgUxG1;Vaz=5hvMiK!8N*R!~3dC-CO{FXV^c6t-B07-B!RvWs z0Qx}Gr3T*`e~3Jh8-M~$nu!p2=<+|?XHCl@@|Afy`9kC+l!q}p*ssSHhMw^<o|j@pM%x>B+mSr6<>#xdbW2>-*}^P-3T?IeUYg&P^Jy;D^qMlHz#_KTK%`}=fqW=hU26bEwgTaZZ5F0_;3z0x4B zz!aNiP%-W`x!%TFL*If3Fhc&Pzp;~l`&Df5Em`kkh|avt5Bx=3R3e)iNW`<<+qL#z*p|_}-LrDOeJ|^C z=#{y6p*8xJ_sqS{I-)4{Fdv64A^GdF43B94!M(mztiK}IITm9uUhC?p?2sJKVp7fj z8hW4iDf@6rlJ~&;7)(gvAHW`-UzD?5U<%ThL z=}ET=Rj7LMHTWH4EDAZdaweIrCt!FvT~i2XaVu3-Emim~;aPu(YI1>wn_no$^WtHn zQVw<#IA0^47vsI%PEbFOVd=kilV=l6$Kn{j+hLt{+AX7b{K-68|9Y?p5_2w_%hKO; z&UQW@zPcTJL6sPvWXg;6km1QZ=WXKIr=Q`NsXQ8woXwi=)xbvhcCct+PyOpa=d!U6 
z!rWs-3p5z1Xt=p>d&1HGf-rlWVDo4Zy-O*xBD@p8IsW8~bZ5Xxl?60D>^OF3`_O}!M*rF3Fz8Yv%Nyses!^-q_i%#*m zOGc%}^<&YO=DV2dK8Ka|PQ^jD3?)PKcQ>gW_kHc$A!u9ZDLgqmp)WE80pNu(;lf?$ka6@{^?+SbDo}E0TEm z1ZMjTx1vU+;ow>*yTzmw;RCx`LYrnwn9y`{mThuoX2s#5O~a8_ac8(tdB;ss0%c76 zp@k$bY}sxRHu$9C%nLd?Af4nXN8Vtus@*XyIQ>~w&r_Xgf)+Ne@TeC-uPGq=_|Trw zFGpB1O*HVfNRbaC{SQ9$+No%DlyQY;bLRdQ#(q~IcJ_l)El&Fc^5-iTbm;SD)eZD$ zXqrhrw3gLsb@YR^vM-d_H*rkjRHC#P2RW;_(K=;sse3@6_%$OUXq`;VDBace2fJ0F z;GBLHni8kwmOIRZ%9CH|MwTcst?=SryPmr1Fm*MXi=K-~Su;j@pkV;B$LAn>#x;MZ z>^_Lxa&((yddk9xA&M(4pj|Q8SyzSu6>yjLzx>sJ7e)8NyVB?!WCUkobhzF^-@Q17IsQk^XT;y2G4uIt(hdMM|eOxtd*Xu)mpJSJg4L zoBl>#2B(mVWwHH1{N^OPuz9T4#kI0s?MMnCLWlA zMwvDcqfXDb~@*sQDDb&g$<0k#08+lRd9j?88PDJ7kxI3xx{Hpc%qA%Uy zz*#FxovP$x*x9UA&r-GpTgdNlXCnz3?j#&f=>_MFx{ltB> zC=W7GY@HSEIOZZDtv|3EKI}+Zl=ipn>^}^9m-vcNQnT}k&W%OSaZKw?Ke|RF@~08` zED=x(F3A&?lvwc|njXdS$r_t?b6R=F@J(!lLOhkoQ1rr;inBf%R|Pi9~rkt z;_2#hotnIRK8Xuf1vmPBIZJp+Un}hOtmR~^%#q~3*{a#C*hfZQW>lEDjz!Xno#y9)FFnk9*!p?*bd?6Wk&wnLkKjMK>x;#%uVDQe2a zv~H#gfE;{d9pr_p#VwkSbMA9j$9ioefylG3a{!zf^zRQW&7k6#(A>F9a)&WB94(3O zJ;=UHglPL}9Z$JAr;G{IJ$%s*bwV0W%vsZ!7A4n%NNQTzx@-)`xbBR+y_L1fu_etf z?-}2n=J8jIlJDPZBArV9x+o^VoB@{4K%b2td#&=>+FOmWdqJeFWFk)BS2PiT=SB&R z^mx_Jxn6-kPHixrvp;NjT5(#$^EwTlvXsYfhlc(VEI)VgjmiNmQN?NIg(8N1N*>)5Il4mHsei z)w7s9woW?SxU*)O<$6j-^HmG3H`;+?r9mak6Pv4(SAZNCo0pvf=G6zmUOwmdWzd zl>4#&jD58CSTX${OIuFA*-UfujMg9X&hTICX3K<)eA!<4uAsfa)@H6}HkF}c^l-jU zE?f3PKaG|%^&0;n@J2c6mGfZPjW}70cRJye57={CC^aobRG;HU+|RNILz1(??wdVo z49n`tqn#J~z8RU0u4l$@+3}|ADdzcPMW#$p?A2G(zCtujQyLd*Grz~}ECqd+LX{x& zLevFmPC~4hZW!9v8U=clj)kJTqeiijf_H1c;rk2nZ(or^#F0s@T{zlf$3a_Is z=U3hv5F*asR=SHeHCY*ra?}YbU14Jad}7jq4^klss9j&H3Q2FR*Kd3whI<3IXVnGY znJ-QgxS@hgZTpguwky)US{1W6#>W$zb=%f`MIC2qHMbLF_Vt^E(h%OVlJo-h>8dQ= z95nkjEuO+P!&ggsJD1WEVJALXG5BOUmY=xi<#URMjFih9+c(E7f~9re;f%e``1T1{ zaw*&Mr`v0C((L%1@%|TUtI3No zWVF0?lTA*p;=X2KXlD=$@>@i|X2FoV5jr>h0>EUMW92T^=cLE1(os>PJN^O? 
z7viodj}C^>YrD*mOnuypU(EgKi8yh#&XeP4BZ*B|T!^iWEN`JkvPw&S+o3yM954n; zQMx#0p5`T+rPjW3ZPpc;<_)~_ANXyuEfpqb56*#-yO+Wi%)BV7x`d6Y4bAj+PmHI~$3wKQULqG_C4OBo(P#%-~nW zlu@*|5Q0qYIdk#tf!B2U1LN%;GUHu=LdR`Z-u!%`>zF1obnrWQgW=>GTb_olTeGAN zuKx~eYQGrTG$!Iyn=Vh`E!i<8@-{7h@Z8J1{bR7FFCZl&mvnjj92%lNsN(CqC@j?Y z`!ZgKem#R=pZ&>a7OT(j@Wl7-FCsh~EgxZ5q_SR-gI1O0!3gQ?ceH><)w*Zm*q9n`Pvm%mHpb#yim_a~5SJqn~2REd} z>u8~6(YRn92UcQVXAhm!KH1%rH81sV@{okiRGbhlzJFxw5b#gdx;!d&s|i$pF%3?> z)$#9Ol2|B6UOPhLB|twqCA)DepSu+(^ygUYvhPYtAo_)dHP*boT_610IAn7q>}dQO zg%dmoLxCRqd`J#uSeKV;>Xt)@qyveo^8CbgWiN_jV|J&Wze++W0&}YZ^0`T9(kz=Q z*+jR#`zf3|&XG{UZmQ6pA>{?hms_@ql8oT2e2`&k#Eh+3e^0M=)+-wUvgNUYX z*%yiXkbP2rSh6Pv2(G&=-AsL)5yGbhkGB+; zuz!a+t<40tWlb$tanYX$*F;`E6g*QVpZn%}?H?C!I` z(ET<>3^A;|(Tym_IX#7Gw2XYVT-|7g$mEh zJ!@KI+1I9hqGj~3=DL%fm_d{Aqd;Z)BA9LDlYf>ar$-FScJ<9#I8D*Ux@A%KX9P zy&tVVa;#}NfRc~of|e+DH?bI55}A31>&K-yF@7DS{TR92{#Hhle&x@^q#b_6cW!X? z&bjZPp{vbRmHx7W?ikU~@NrsZ)~rBk=NNGSKl$Dh5j3@VRC(2Gkf*Nq2>owSFRI${@I1L97MQ;{~71BQc(=sOqX|c z<;PyF=IXsb`I+F{U)Lym#VGmQl^q~oViG|CebxpE3?mmP(CQ2ZGpM3n{()yDP_h5hu!is4TY8x=@G*x zKvBhlt&Owq8e$Av^H(=@PW-5+2ZQ-DMW`p#2V!$T5@inhVTV;!aScekg5I=wW zEjN7rmdd>`=QtlSiO=OodVyX}MZ$brs=>0E`-lIP&&4fM4*v z3VpZq9(glB36WztjeezDZQ&C42`h{esDRVS+>)~FWW1>42Fywy@s7dMEi2SsOw*EX z87P1lxk1K@TBZ9hMQH7mKd^r0DVpPRq(w>P1vjd-)Eu03fPV}B^g{uG4kN9c7@wlV zG`6|CS8;p(GXaBW_et;;{YML4Ckf&XniJW0;_#YBK$C6x={nM?!Qul?&DwO0EN+lm ze>e)P#AXV8%i||)Vd4^m0ijX{qR&o@!5W4Qwg!#b!YMXW68JT?J6N=6`c>MAT;J(! 
z0e}RSe!4|e+QV+5ZGwarw-;0^E2{ewFH=aTSq&cuh+d)$aVQL1yE3K`9%=|Mjk{sE z<>mZm0CgY;S@*aRTXgv2Y=kbY0cLpZzzvVwT(q zuUm<@%1jS!MB6P@1R2`^B`s(}dtIXwHz90clX>|a?l@2s=sX4#s9Ha zy5r)3;}Kv&{R(Zv~&8%rE$&p=q`^N z_g6;k2tWr0mbgKzs>bkIv0~JOxT?jhGPrO>1>|G@Ye;wjx$<+|ACb)BetrrIKw zFItv8=W3pvGcxmv9c{F)2mx6V5`s3v+wlwxN(d2o#{7un$54*Ufs6S_-}QMR&!czyEEId2^vSA`q`5mto^8OxpqA7 zyZm4*g_(B5fS+#Kl|-O?*FZQ|k|lY_zbFG^T6=|W$8qv_!=C6eD>TAe#nRulde?P6 zHTt~w(OT`0iIWRWSbgzk}#E`V(B+fyaYI_dnG~Tki#x+l^~H zbNpK^J)zoWsm#H1N_R176O4T>tsA6uH`U{>9{A^$7a0dIP4yEXU3op)Om(KtP)^vS*jt9Yk1;PCb_9JBF6AdcV&A*1m8y;*oUaw88D&0^_? z9o-jM?{95GMf*L}W4Uw_kkZW?Q!3V)V4Z<8NB=7q;FGhWln|S%GfHmBhoA5C+?w{j z!aLGlI77u>HCimvG16PX`EIQVFYw9r6s$VD@h-w*E7?;G`5n4__?CENau$z?ovTTf zt;_#FIER*`u5(k=DSiOo-?raXiXC6DtIfvS?!dz8+UFBM&_l+TqUf{W9L>Ly|jlL4^tyz zR0lh4=?NUxf3p9s5VH0*;>(Oz96WTE7br9@8dAPJLBde&0^+#TN|MKBTVxM1n_i8O zAo1*VNba>7FWVYzL+NU?+ghyckhD$3Y)o9GtYbwj*b-Pxhog(LzU}a%#2ENtkr7!E*F06Fc{Ts!SRs#R2l$@nXqrd?~ zx!vFUw$y|9v)zH2cuJ|AWC-;}=0G1{j`PueVH3T@WfJh$MQ za`@fYQsqi#>EgEP@wWbNEAg^m3an7Xt%dzaQUsP3gR~%!-wB} z;U^nRvi7n4$CkFP^M70Zut3CjgNiFdSUODpklnfBCfwJDTyG17d7Tw8Tuhkvosqs% z5Roa8$l1$a- z`p{9y=UYRweEThB2Un7QGPk`!>(3?a;$b3Hm0Ijcp4bYx65W++k-jGe3)VTd54V?{>fBd3wu6 zyv!gOW5#WO9u#xxPbWrDWmVq3;6wAAexLUXA8lkM=p|O`$X6OEd!u^K`#AcJXBz(= zy@XnHEs{{a{LbAcMnS^jH8%-$BP(KPyG@>;Evtqo@4(y2b7(qyfAL@k-`+?kBKx0y zEJ&&Rz|@B_ASA;z>Lbm?=^>e#wfs>sM!R(2XK}rcJxas){3Q|F0qM%JL3-uxx7>o? 
zE)4<)RzV zRj?CDB+1(U$uQ3&;&}HaUe52pHl}nNI*5XpYUdxfvs*dp;{fdGp=2bSei`n_cyw-T zI8dRW3N|4b5Gdq;vjKgk_!mm~>&g7j(HS!n5d+kNe^P_-ZwIP*|1ap06n`{${WuH* z0674E!2hSa{x6YbmeZqs-Yn{0f%1ka6(#?UbsgLs+4Vm3Gi&VDDM^}x@ETAakobNq zL}eCB>YsJ~=lb|Bh5A2^m-{}!olomiK>2o06m0vyT5utN*0ELo30m`ki9iV-A~_65 z!vY03z(3&w{vW{bXnIe6GRXuMnjuCw5DsYlzYX!{fUyDaS0+jON!jcFuzDOo|6fv8 za+zyQGRgMR%6q0!pFHv{`?Po&3Hy5TXBqqNXI3JsCC~WXxUTqtKqPON7BPU-omosD za2IClCw+7;wUaKNTsb-Z9Q{uH(i>^xSB(s#My#%@M~O3Ui$$7K2!y}!-nn)P61!_H zAlK8XL5cfj{U%>n3V}eE;?#s718?dUee0XCOCqmcY}X-+oa<0at7+35u~=StM&n5K zli4VCO9;YfM@dj^T^+>l2fFXMhJRs}fabw4u4NS7TG6F!*g=B;LSBP?nBZ5Jwyc}| zKVSU4)pu9F^%J?ALBG2_!~|0sh;( zp3Q;Y{>O*`v&t7Y8NQ^^Tij72Xf5TRMIfx4w@F zdJmNsUyv_O(dt~T-6<~)ROBrfIDv0*Zw{~R;#;RJtq!g?@|*U4T0%(@^#3_OWQN8# z)Z15;vC1SUi$qW&4lj1*;K0p#t+6kC;uoW?z%Ro4-(`K)fLZwk_@4p!QeBw6ZbTQe zKjOCcv!q_RcB zG;ejo1eVnP*rudm0X@x z0Hx-RKA(zz%k4A9=B{EnZ`iYrxkFT6AG3}Lbs!PKNs5wm#p$2d=+ys8dvJ0zJd=!X4*-j7SAR> z-&!0h2pkT$?o8%a4kQvX`D**l_t4h$+#ao)6~Q#SgaJy`5mAgWvJwVm{|fx=<(w>L zpFB);dBx)QzO4NwCU1SmwYJFOle~WajdPEycMa|IQJa^38cu;c1XS+b((CG}Ih#_W zPt0%G?+($-o>2CGwSt;{X94A~j&Wn4gTPc0b#FOq@!ngSKn};5yHX^1;A`FY13HJ6~5?AoTD2J|o;qbwox32rGk!&-R zhjE-CK_ySr)4E~(NWOl-tFD4Ngv{q?aU22kMFiL%_@y3P!nvdV9gXBC?TA(a?sKAn}Q8gHdPSi%je$Eg}X2F+v*3vwcK zXQB)syn2w&U)0N;`_;O)jenxhWVJlQtJ0_aP+TKgH5KYBn077W+IHbM+T7OmtGBBZ zTs}TT-}Xh_@{#$TIGGA0{7I%sESZ2Gbg9Nd6gek&}M6bT$oA(Q&bLdeBh7aN<29T9ataV&qt0<5)dj*_L0WpdKNN<9w(Lu-X zg+ZL>h5iv4i12)a;{i@lC_%U&&;d6-yy54476SnwBiexs2!+jqAjaK`Kws-5KJu&} z4H5*xYOGFkLeOEH6!7_Kx(or9SBnn1`~kF_o&;`aB{-$&wpzTEpLbn%SF0r4f#33Y!1Ck#Aa8NiHkX(Tf zVDk}<4c-LrvjivrRVn^fMQ}s^iMxbOaR#u$_5@ggjb*Bk6tsbU!byOBW?t~A#ZklR zaUAl`VCH))p@0T908s;Yg`ot(x@8)=H180FJ%C6WMf9PT0Fr?|&=Q3iXc_z*VhoxD z62Jrgz@=aWq%fcc3c;jswoiaUwNF4{I-p!(3DDGXz=2p}#v*m-T>#Q`eL1ymOZgeECaN(VsSq5tH7cdR<*R0ehLb^e)fk(%VO|?!1h%N_TgO$A(Rx=I4 zv<5@+iiP7nl3znf^}bnz^EZ0_p5JN~rX~z1d*4CiT2V`gipyXZFs-18wEha%Yt4W@ zK7Ocvd)H4uKjgqqKfKX%S?YZ;`m<)~IUD^Th$L>G<<09E>*W%i4fYo+WkQd 
zj5irhMuL-@<@&LcHq?MIr~xx;;H?t(!gAx-{;6p*Ay5zAe4YrXr=^=2|C+A%2v{lQ zpX1TH8^L6hQ~RysFOm?xvj*$}s;wq1Ql}((1KB|t>V5&RoBRNp4htrm=}+MWN2_h19tvfhz5Vo zAbf7v#ID1H?BswJ%j5<^jZl;3LE|S)o+^q4vgXsWjKHjnC9ST(`AOA z>@fcJy+ZMBNjqm8k;_jk#|ap3!*^i3xc6r$pIH^$fd*{AE|?*J7;E4n+-LxD*}X_P zD!zmD?L4K2OUzk1P|bR4W&o5N1J=g;?{&mH8p2aGvB&asmhD~6GY@_n%ADAwW9_Z0 zrn9a%_Rn6BdBMPz8@A68=NZ3TP zuy&*~R>9#-7*LxYs7*Q{ZXFuQitVYAg5V999F%ZA^YW3PM|Ua8&y79`((p=Qw#IqMA8Yj zjx*%Kh1uw$zdULspMeIXohTyZjnl8yp*KHEHt9A61hNCCew_6+j`Jz-p5->f@`>s*!~#&K>Q1MFcsZePA^} z1gt9HGyVh=k0%Dy5zlTEY*U{{0D&t3#}y=O@D#Y#FmRVg2Q$YFP|C|h|01F)+lyF% z9tiP+=hX4(et(f2Boder&=fT%Dfblp>&1QMjnQVZA5DG9dQ#g4DcNgCSEefYm$Ub# z+AJ6m5HBZkx(xm~>$k41_qJb6VVH_Wu-0ZrQR6h1o%P@f;=jE*5&(SZWrwIIOtD z>@BIuDosJQFAUSK=b^j}dkq}kj$lq;768ivJP%^{z^59ku#1rP_L$e8gH>jP}5x-Q+GcQB_TnOBB5mkzcm#i8GK z=t*};qdB7`bzPe3{@D~D@|osawEf(0^7H)B@rE4gI51G}G$3dDl})Qn&+ij)DeB>N zfZ&5gmDu>5ihiX|*_GID%eztegRqy9eGC_Ww&tuQuZkP!?Tcr}J49gSa#dcQD8NyM zZTeU4r5X8g1*bu>EqnsW=lg<(^qJF4s!;wKsU!Xm##JLz32~_<%~RAfzj*=y-)*MM zdVGo$hk07^Ia#0S#T2Q3o6@O$SHacQGyPQeEg9Lz+&DXO|6DD|6<99{?V^c~Ng<#9 z#9KrEUTm#^O^H8YfA2@&PRUWa>20INS>?CFG$H7m?dF*JxtEg9^h4`;;~HHAj7|*Q zy$LBV+yRKMEQ1})vo8BM)i$Z(3PiQ|oZF8d=9aV?O}k`l%B3!Cm3smZO_fvPloNNJ zZvQ*Iz4_hW^!8M1?M`{Ax8+{Cd_Wr1X5*0DTW`M)OKwAv3yL>V0`Q1;LH;;cUH+8% z6FvPGAJ3(FwuWRank0dCxVHzk0(3<`DEtZlOYnDYjR*d9ct0<8i?d>=)^2HCS9Wi# z9@oXS>O7-3ZFQ;^jOCQdMLP}nqLh$f582BPI=nDa>|WAS2EDBN$)-5tFNoJ zN$c}mYG`Ysx?vEXg@wOlip77%a+fc_Fz4#Z*MN#*Hu$$V4`pm0? 
zp0`SgSt24VoNXVfe#lS~NC7qoE4$Bez)n;(Yao8F|9Qb}NU~ zO)WOcpU(7_!~0?5N>jaCeyaXC4;`MB{bODt$abyb9krE{fz9Y!=&kbh56;z7{0W~M zm^;h*<+Afg=P*=LQ>T)W6Dp@+cUrVe6&o~BC#8JF@{m}?{F&EB0SQZ4`3IGRMQ(}Q zYGP71ilbk=n@&YSoTQLDFQOJ-Q}hm6IqiW4@L=HKtH@!wfK5oB-~h|q1L{ArdX!d& zUOZFIDAW^g6Uj6o?V~`9xk+=8Hu`6* zLEL#B!;kE4yQ+)T*VbWh^nd?Y89|9c8=HtE=0$DFKz*?etirkDq8>o;9&rV|i zPL76;Py#yN!LwlB$S`kti~ggg8Qc) zyZJ!E5s29?w^7M=?B4_v{+dsWW5zY6C41gKv4RHj}#>P zS(f=Ha=g%+-ca<$g6R+koHsj+16l!p0P^bx0y{7yLIgrLu}_5SR4)|p!Bm^vc0?us zq{(;@zwuNBz~(A3fGa8?_VXKic;$7^uYx(G;Li_Ov;Z8JMHvuYp`@aupv6j0)5B<@ zr09(Ruz%G4n9n9RmJ0bC6(yA%PxnjAb+^lCBM_=sA+`?@L4^t1;8!tCUFA<$_>z z;t-Ae20&9w;A3IHIcPHY^HhaggwU;rg)E%ENV^xWlr3Nb@u}VS_2*!oBE4R@-E86C z*a?QxJ_?|xCKMznL)EOuO;0`o4tryF7^1_wC#=|lPfaymAP|O=RRZ@E8Luf2HwHL{ zKmkes@T`9E1P-Q$s)=IHg*Mdlb0^Ujf_iq$KDf{sD&drN=%d#FJ+nIdO z;f%Rb)iG7C>nsr~QsU4)b(8k9l$mu;mNedzOGzSMv7lbq=2BMV1 zy8KG-K0m>I^$MJ0kj*LhzyOJIs{=j#Bz(!BQ{J_98Ts<@F5PUE?W3-CVX0!iN(289 zSL@#3UTd*>J5`Ctw9SeDo%pCSPIA($oUfQbg(>WAX|(Ob)i(QVjyE|39B5u9F2`q@ z?N&mjtwmv(y-EO)RBykIY8<}};x#j805X_qzwvzJ;{OxN3G21lXx$^4ZmLKxC@34< z>t#Q!IdHX_w&p07P&)mvvfP+yr>Wvozp#3ONCQNc{_*buFc0h}O?AQM{3itxIgGl~ z_7`iy`k5B<#~(e@yt1csWD{N{Pw|Tf^^dCBMNr}Xr_2xLgRF=lZvfbjB-&pI< zJ`-Ba@8RmmUn$HOCjl~Xk085g+55xE*3i{({+O;C%X@`{@~Fo;DQ2Cl+{4MLgD?O| z0+$B9k84rMI<4uyaJ{b24jKwz(^|<*$_G2EIrnVE)}pSZJpfotPc0T2(aNh*XP*20 za2jdtnM;;lr?qr>1Sz_*QwvJ1oQkQkNmUfk{ZqBj+M)c}Prx;X({35;I<8;ZkISZ2 zq37X9ZV^@r~G zvCe(XQE4XZaX{a@=~dy1Vq6ij5i+{O3>`g^akL`)m%KbN*_p5N2^Qh~MA1cbEUzbj zTsQoPmPzXYr%z1;N^hx-3<>(|XZ7aojY2_$+#aDnlPTItMhb=qpX5_H_ zij6Is4CcIni(`2{$LC=rd#8Lv;M&jQQzQM! 
zi}$znA)FnU8+qiUBE?>rYmKJA355(y=B}Mw41P2xa$6V>+ENxKVeo`V;xKsJq-mNc z7Mf|=L0IV+=J;o&D^g##tgD`nB~7ZhoXA}ruk5WJMCn{w)xxf(vdcd$*5{9I7moP) zVv_r+$<_}GUlPwUzor_{I6qnYGdhW1bLRbMaH|>?zwM)@gqp?ft0&n+<`TnbzIT?M zlwG3P7`#R5ZV(n+IcxDKb*si`JXq?`DnZQazK~UsZLyTX3CURa_9@N^$SNzRJCN_f zY^Z8DS?cl|XptcHx_DQXDMb90?}(P9?eSRU@?<Wt@y{>9aAj5Teyr9IZDD$=5%nO;>zHF?w*6vGTY&fsd ztP8azZ||@Lm!`Q>7H6Nbx82>nUDd0O$J!a+oUC&%N+LKNeDgwL?p3;13)B)J7Z`R5TkLPjr zC}`m}1G&mY=VfGEj=yW>&Tq{}n|u;bb=nZWx~x1e&ty&6^gDH5)v1kT)Sj&|8N*rj z7<;aBKZTbk+V8kr$?UjNh;{l9N}$SM#=14sw%{}Kv2%*&{2glqCh71zY4-MF*#kMb zeITQB_E!1=?-8;{QIyuKS(iV6L{YfR^t>t;g*J_`!ph zYn3atV(vD{rONz*GV+{>G%@Hglk5XU|=ur6lyH z+t$Ugu2|cYlGIDTk8w6aUr$!Ao~pav2#CkFq^Fe$TuVa@u~g})RY;t1nE5Y7Zw39| z>aTV1kq*0=bUmN@^I^{Tx<8(-ej!ToYHjRmUuvEIC)y!*fh=*D{#dVaT&^~gnrVii zj?0P4W!ep?+sSNZCMJyZsEa2y&{jHVN$$-Gf%ho%kZ+UflBks#nZ)nK36@ZFpk-)W zBqmRk^?2n*>zAN~tMy4AC`^9pv0K0r=TW$TcHnN;hIf@UlFZcfg2>(csJFRswk#r5 zTk?k8h$G2Xd7ocguw~1sJ++yus7{ zEXmn^gzPP&fB*}lNbg!9d zMghuGv^4gYTP1M<+I5q?A%*2Ag;^9C^udIgA;P4>6hmP(QNsAbJ2MG2z4rv*d4RTU zDy+sd?^keVBB8eWzi2=`V>rzmsK@e;dPSz)ipCx~DX2lOH9dO_C?HP$@#&!mnlT%` zKb1D#Q!&^RpciNzV=37Y#zP2@OiHQ=5|Y)?k(3y(kl%8l|Fl9!tX1(q9~|OCLM%yv z119Gqwy1*wRfg|HtOyWmo%%SbaPdTI&r*7XOSc{X&3(WI8M;DGO+{1sAN2R`Xd889Thk zq;GIe!YPqK0)rJ_k=Je`s;Yn%5%G~(0z*@ccX|rOfNp*n%GaXCc33EN`--3v#6vWI zp)?aDsK%os!%2N(0p&Wiq>_C-!xgI4?aL~t>;s(!AZ8!gmY9tFYa6&SWB(uW-ZCnV zrdt@r-2%ZqxC8)!9r*Ne4!*xgmN_pYjKRlO_!k2z3D#y^$x^}qXOSOt_A%&{Pq@J%X^aofk8 zOR;$~E`9Be9@6!?8S0|QX?($4%03&-ioQs$wT<^<2%UbCg;ZFs=2OI~3{Dg8_cxUB z$+#F6pa=_6(w^YQ&L(fZ@G)?aad&*QMevxqU2H@Ufdo?hak68&sLBxECT2BWc2tEL5utr#Lp#LqGVx6bXGnd0` z*%QxQ@J~5cyyrtKaVFpzONA8vXMfuZK$Iig1#8g*1#vOZ2Jd7>2Y?3o8I($RQNIKeHRSC24(PFFv;Tl50Z_Kknsw|VSFYm$vV|1aQ6ZM%4uAyI zUIZ6fFf2*A)FhJa8aOsSNw+B2;>p!aVnxZ$Rh5~oV}Ez;Ysfb|hU;mVQ-TX#pPbl{ ziuc4)4GRH4DGN~ZJB1Y2&b%_St-kID~SJY}w+wV2x<}O$SNqxZutQK)KK*24qlKesog_MjmS$%N+;<2nz zy7uS|N2qUhzj zu&?BMi6ijwaRGp_mD!sV=HgYnAH@zCUi4d^sbsuU9nV(wGH?G}Cr{e@dr1le3Ir## 
zo`10``s}_Vki?gIpZIDDDKkCbUfmErEq3occBrMJQ!~slp+~ADtdeKd7kztJ7dqE$ zZ7t@p(-p+4y1v<1YC4QJM*A%OG-sjayjHtX*{spQr@`Dl&5gQf-~KTv>aDYa}Y8%GatF zjLjw;oNC-R{OHoFk_Oj!-rd;d-AFo01mHoI+&Kw5;MjI4Z9mpK;+#)7gpXxaKArrY zDzX)VQ29O~bcdu-Mo-t+9%j?r-?PxTjjdcU>low~74mK^TtY_n+>F;+Ldm8f1f1*< z7RBhZrQ9Hm3qP=jt#)mZ3+#blJ%lSRKd=?c0ZunYN=`R+VW7q<_RMS%MDx5VrXfLN zC?cynKZ>;Nc7(ejpIh+mxwg~N=x8N@`ES^mS#FNQmwZXdNSa>YTaVY#>`Y@VS!-U) zISMwS@T@?}fmDf+rSXa)pE09b@6Bc93|CIyWOq~<1syv)R?GwZ|S7KkOvfsNZ9ohk!e@E2}NE@!gT0IBfr_jk#hg zAulME!#``xt^NtjBQgtM9&rd(HVl>JTca<{jG6bb=BXUbr)(%)=dWVtDL3!kd%5B$vN*voQ7zb^MQ%xXa3gxUVe$*9c=gDKG%tC-SlNQEgrZ+ z8~P^ewh787lA@dP))U7m>Syd+-)5S`^#w$Fo)@$_kJZUv1;E8AAa=T9$(QE2nh!th zO3jIO`e*GbtXC-6s%L)si$ag0KLlQx;&H2dcgshW_p_tVk+6A2y5zb3LwvaT{ruz4 z?9nniJXt_UL8S4{DC=21X117@b!V!o)ldwJR)H5mFw`sBc?i02M9}GN{%V?J4(tph_ zT)*co76k0MA%W$hGBe8sCn>;!Bwt9&pZ*cgy}{TM@+sK?vIAJ3llJD|x8i{AEyBa) z)M>aYH26V+EL`WQ`#dj2~J|w(6QvlR3{uH$)BLi;ldCMppURHky$A%lB?SoQI za@uVX1sC}kN{a!5VuJ+GuPCe79}z9M0Jf~{;WwZK1kkcF0|5jyqpdAK*EN6&DbYWu z+ygMx1_97Dz#dYHzc6WE0AqARha| z;#FU7URDOO{%98*XdfU(3rZYX6D-@;op^sHWA^s0rz}^CdBbtAN8v03{uhAEvH`75ygYg-&(yPKW0I18?fB;4Vz%PKE z_UbzYYN(d-!Sc_qn`Q=yS|1VBaTazm^pVQ73fFyae!MHi3>cxsA4%oj#qV;idpgPJ zxB*567$==kpVKv*h9X}Jr+SGxe##IqZ*a01^)+NMFgOxl*j^%f3&7e8z#8qb&b#;; zZLEXkpt6*AzKL2t02if!Gpv(gN>}PHmWc-{8r1-n^REHZ$Oq+d)XNi|=L+}t_8Jc!0^voPH09a$F=qJO*3#mG9fV4z}-53Kve-A|h zPz^iBQvee-1SUWyB&pjG;A?2Uk97c0*|Y#SI0C#{^P5U8bTa9!W~MMhX~FC!Gbxi? 
zO@3K@A<$an^v_LzwT=ydp-foRRsg(sSuXn1hK<%)z{6< zV?!`XgaruS?2S&f&6p5^VW$AYw1$DgU3f#BLZiR_RtWUfd9`>c=iXac{hE1m1i zDuKoCpRyVamzv{Nq?myOY1t%u8EZTe7LST=5CJpI99i2f3%vtomO5RHFAk zI<(I*Qt=OldgX-#^EbiyG{9ULGAOUq6&=bUhjd4bX6iTY<2#Dmk_L(#D?8qOFD&3< zEnX(R*h;298mi+GZQGRXI4zKCHrup*Y9tEMAVG{8yK)}()Fk5A*ETcFVBTC06q7}X z%$ZFL|H0~HUI8H41?A*70b21(t8L+m^{Bon1Gc5-FsU(G!JrDGmf9ZU6^j$6j$yiX z)wSx;&J@Zl@plYcl8u&oTcyV7y^hD10?G6SU2YJHf#thabsb~X#*S9EAKJV%>7&Zh z59}5GcC-%?)?S=kTyq;qHly_o`O{B3q`kKLKleb!yc;F@VT5-<9MX5VxVA{s0;j?x z6wLQVfL7u#mS{#wgbC*^BNP^MQl5*AeyGn>#vTI_ZcNN3mqgmAf!OC1VTM z6?u{Cq|B>4jPDtvm2q&M@q=?rD_)Z>-wDXTJp9nqxM8b;a;P#xGTAM#Hen*y_(zF$ zsh-#tz2WIOEl0uANRtP4a=gRNnZl0I+v`*B%Z6i>W~=;L(&Bd`Vd^i9y(7~1kj-Ag zX$Pm`MokAZru?q1n(H8I%pHN38fF|9?CVpnMhLjy%*7y zO|orrb}l^l=>eL8fZ-afK%SJzOQU-_=0xz#rjQuM>n0 z-q(6_xS8bk$meR49Pr2AMV#ZRX0ce78(q@5SKKFQ+nLK?zK#)(3dKnIC8VJ^7q6N2 zygDQ`H@z<VI5QJg?H9rOiJuqSK^-EbYGc#B{je9E2lrH^dDbK zC9!hRenrnI7Gw3>j*zc0WHJ1ULl;ZAqlebz+LTM z@eXqE#cDY=E1-WO#_x*bkY+0K;E8ryz}zzeAMu9s-#aaE9ia&s(r7a7!U$y9Hx+Mt zBcm67)#_TCQ5M1CT}`C9*IZtJ(ExjlQ|9T)H=2)!KCG$PRl%7RSGduafDQj|O>aRW zT37Bh42hol#N2nsR1$U0$tmHZ2|V(o zEK+N%v=&oU_^Y4MIgFfKPSLkM$8;Q_M2z3EKzu3)tydm|#~cF5+3)h2!W z45NRAcZtQ}y4nMF0_~Hpk+0-$Njf*5gxOvl9!RfMUHm_T=mO>R9L+k%`939Auqk+z z(L#g0ZU>WZ;98NyTfqqF_A_SPfMO=3fA5~KIsAFWJl8mlH}(u)=D+hUEV8WU{Uv*V zo_?%wOZG|jhC~Tudr%|UtlGxiT2FUbVs)3qOLNQzc}rnPoK5%^E~b6|-N~TlU?XQw zOQTvrBrqoE5*Q*x<=?hI8Woh2fI1u{l z%D#)kw7z#qN;?;87K~zZd$uAo#ef^JF%ACoqJ-o<=$h$x4%q|P69Q5J6l*biU5e?K zPvLudqkRpmpdmpRC`HTNtU9e-4N$#`L-E`oQP-Yr&#s1wDU+5W*cJ1}lEQxm>yl+IWE=Ai7wtjTUyPD>9-_J%l?z`Z6bsp#11tsuv z*j*UOmCp5YqE_=B^|ILkfhPK7IiZj-BO1x&$@7 z7Vgst462&A*FJQkHYO%xl7ZggqcZywJB#--7cuZ@M)x19{EA?W(=rg)weY{ung1E1 zl9c^*pL|ySwjZpG-)i0L^9zN{!7obbl3K>nk|jAg8IQvA(SvPm!qhQ86C!|x2hUAX zBOZ`(9CN18{auf_WHzi{b9)N68vSI+9p{nX#(m13fxjRx?lx;KQarF4}engX+C-Gzpnh$@sg|mB-%iF-2|R9z{@uBrz8hhLbfNv)$PmE zbrv)H*Xpt{N?n1`0_U_tF+Q50-E@@@BmK)K@oD6WoHuWNnLbZ6_Wc-Tl-LX9?EFwB zF-hDdqV7Ik>HHpM=j}Xi&(r3r*Ud6B)%MW&+<57bVT|HegfiSg?()|KX4X(|O{ELm 
ze8n8HkZXOJSoCaGKnP?j1SmQUlBoZilA{@LY3=CsNR{e^Q=4g(rwt}?!K_ZzNv;*G z8Cr0AFeB z5Ty;yhnPJV3a!QaBSafrZji-))t2GYB`%{~_Lm^kmKz)Hd#&P;hUZV_+4IWo!o4WTXz2bLLLH zMvNOmF6qQWlf9NleGLRGW8OoUns2H_x%F!^29yK2KD2BoN2I>0DRr}_ja#}P{^sbL zSirS)`Xj7Bna&G&LKk%k=CXEdesxf&>3y`MrdPZEiuKYzEK#jFiqKXgNf%htij_>j zY%v*$d1a?Bc&Q5H(k1>Zy5;0Y96H7%FJQkcFrue{I^}daf?I(1J*`iZ*8xfBxU%Ts zRbLn81gyL`3!WL9b$|9kr~GZ-Z_x*6MkP$aCRF${zyRiAMk*R`sg>H=@&oM;bX_{o zgXg393&|`l9*L}j#?2{$7z!_9OnmNd4*5wnj786E%+PtI1)`sFD5*%ROc{8&otE~b zowe4^7Fipx+o+8*V)C_lHAFwM?A1?oyB%8~;dpADd&;|Z1&(Zn&z)i_v+%T8p@@5ZBs!>;CO76w5DIJ$Wt*1C zpON+R-kR~=y%yBs&FG2|jC4ER2~E2`v$LN&!BCT?H(h6xdp}V~>mbBydAR3xwOwob z)d-_zr7GZ`Ib~~wPcSGroKp4G4=LHm*4G8G^wc*^Ed0!CUQNXFVX1h3C6|^9)8#Do zRIk;Owph$cKt=PvEIrLTY&XiS%!hxz_*OH{n!;lwH}w^rd;W?z^-wqnaBYD=2<5Z= zC-+lUOr?nBr;Bgz>7$6EUVgL}&cPHy-+SjhKs)=T&6D;eT=#dQEv1sql*|sp>)74? zks|Awvuo=QPLQ6feq91HIC;)01ST~cLkd6M@L$ku8Sq}bACS{OfhW$kdJ%e3c|Vzv zXihsL_xWcamv=EX|6QQsF@M$F^R z{#oGt?GcPAg(omQm%u^px{=P(oEwP$GBub|nJhKHeyAeR&YE{mEy5QUxQ#Bz0!3XZ zXH?q4285s6FDM`;EmdI`qo~-rUjM-rxA3w=WGqtV!@z;C#L&<)-sv)v;dyY*ewzXM z!mGTSwVlNeD~2TvW6fMkt^In>V0Yz&--n*7xF5Y#UGOD?NIaygcg;?b28(*HH*Ub#$dhkqLB&$pXq_lu#ntfp#I`01h6*Taw%~3l`-Z_9A)&{uo&}oT(ifuVacvD^7zvIs?VCOq z*)GJs_)Dp%1dxxHk@WqYz0|%;v?6~D;79AA(Bka|Ci!Lku~!p?&M)=R6unmW;Sc@I z2e3b3jFQ`v&ElvX?);io++R5L2Lxp@+tb`14r28EMB%#bU;Sp*yz3q|BqYu$TEdZq ze{aZiNcfTLaS~Wf60pd~6Np$)WX_yJ^VG#PaeAlQheOHf2kF{}wl${X)FGv-t&I;c zJ@djJ_Wl_i4AqtV7(~$piebQD>0=?rR!w&G4oKp)RMe>cNH2$d>hi50VTk~;FCK_p zAy|n+`Tz0#74g0Jv{`-p-&QPN0gH8rW)Oc#;S|k~c^;FYr2MZI|FR0?YW_ZC{6F&k zn-X6Fy$o3j)flN`x%ZSC-3o;y^ZR}vDoIA(zTrqCM{2ti*x~XU@iH&=N*uCa=j(TL zM}eJmpdoRjEAcAgS+q7(WehPvmGj1gQil%Vs>kmaVoqt0+tAC9=rEqdaua40#j;eC zPX8{fwNgN=3vdpg#399@h@-Rz8w9roX=c37NjQSNg-)?%$XC3=3X$O%vcVtDv29&J z_G=xpu4Kh}u@rQScKyfl9VBd=249&COVI>rsAt?0GeT(U#p92f*wY4HP---3#Yy>! 
zc`NhDpC=9o>r<5Y+dW%BxQ3ND)E_#;49h0u#t=hw2qFv$C0<^Mbnsnlc@$Q*fhL(q zi0a>c`L)v?${CU~DBAhOuVahDi1HDXt@Fh@(kiSb<~~;ymhE#xj1f#n+@h67SJX!w zvQAs0t0ydpWn+;R-_2{T8wR^-atS5I8?sH{k*6HvW;ZST4wo!4l=YH>iFsbA$Sc>zg9dsY z?g?cpy%%}V76Js1sGJ@3a)1H;R<8qXzL4EbqzZXfV9@uj`;vgsO@RkJ2kDR%%V7Z( zLTJLpC}J9r2>m@-v3lmARw9sGtt$e_?iu$&jwsNput@ECjqTIyh%AOA8B`R!0HXZ^ zo)R^p)f{cC9$t=wMh5pDxXU$h&ncVe3N83QFP7s+YwxuS{*1Q3sO??cJ2On(Sz($@ zk8+P`h*QUQ#rV!c&rxn#vQy+8LzfWU3!;~5ir;%6!O}!%cO9z`-8a&oY$1zzb(%- z@%zlLBXYs#GULp!dcu`Vnay~KusrL^C#!d^A#|nplR3|F6OICw5_gp?_hpG550IeM zx`V@0SJA#y=yuy-hHrSSo&rQ_(oTf0_jCJ(G05CY_*F8=s@*W^t0Ksh4AA!|iND=A za%&ezN>%i0&&oNPM+@1oCBGN%kc#_ex+j&Ms|jE0b`Bw0ZdavENxaEf@7#>Al=nEb74CJqx7Og!z9H~*-A9JiM3=EuZB~yQDkuQ~#!>0_ zf~eDn$dEtdyYJOrnhoXehh(7~_b zzm{9die=0-k(8<$;~@8|u&Qh~>{3``!EG+yLpldq1HCtG&c+UrMsqMDU2udGVjWhF zTHR<-@vD4QcJQE0U(oU&Hq8FAJXcoQV&)+QNoP<_C9X?W>#>Y7Ncz&21+1R|1f@R^ zqs<<7Zg?zHD>-8yKzah5Shv+b?JgC$`{7H03v4+S;CFlRk$>&C*g|8>!bAp&&i5mx=G`& zIW|62&&KW7zm`uoj`UZQ-n>=)8NC=hPygpe=xdwL>CQAT&J?Xkj?AHvXkTh$pf9hU zhmfq>VfkE?U`2cv<<6?0J(9E?7g%9#f*kpTkXUy=NN4PizXGNfR~Jji0oIG-0kn3b zK_1ykMl6lc-6VQ?@O$x)oS(zkIfrem@*c`UD~3t9vK0PAX)m4A_m|GIq|{pu0?@lq ze-T$Qd|c7(knCz-6@F9~uGDS0jCRY;4P!$~ec8y^)aJ?A)!r9774HG@zq&CD@%)|X zz#AJtaz%8ddIf7hI}ihJoZ+WiHw@3uHoRTor*5s-we!?lWO0ca=VLCF z`eVMza_@kFVGSHZ76Y2qSfjD}J!#%0*!RKP1x_L{=PcUQU?g*t{{xs2xsI82>sKFD zo-Q>-ozu$Ki01nYt@FilH^w)*$W2)*kNW+78wVL`$Hel9`-~y=WcyW?;?d-)UC($g zRt~@4v#x3jG1+%fCS(RGKckxxpcX#(T|}~&iFaOlh5TYfc(3Y`7&^0+oFQxd%$9vv zbXe|hujb&`A@?F7k?)=|iUd4G8)Jm~whK)0&H2^p-`mfuWOoLm!8d*J)1=gw4my(F zZ5KH`k~kOn)rYo)SBf>J^c8K3gqO{4^HIofCn4H0tKK~v>&1Fzx{eGPRn6hblMhWZ zxM*tX_o!W(-D?d*i?Cb#ket&Uv^}Baij|RP{&Jh?u}`0OAO?u?aE6VMV7}DABRr-C zFjO%r5^Fwva#sTV77TTbxiZ155$_~dQur*b?abZvZ7;K%{_2)o%Rirr7e*1no2-Nt zfxInq)cd0rBxx^aRNOH53+1dZ?{(N6z>}O0HP8EecO$uE|NG!!aGe57$c4zxA zv|dwXTuHphm83!)8@Oo}vPvPva>a5LUEfz!ZqQ)@(t+|H<@J(z$@F^poEgU}*h=l+;MXcVzRHCb4@5SAWsnDt7)L{*!FrRwEnu1Va zb`r7uDJFK#GksznU8fqmZ^z?p)F~#4&eHR@&Qj^SDWgJTld9C>W7Z{}IgSQJ)RBCd 
zchYnGg?7~G)3PlGX(|e18);whMenD*q({gR`U%)1p`i)K=<>Q?LtA;OiG7%w@;18& z%ZE*e`L=zyp$kfmc9~K3Zo&Aw1)J8t$a!&cRMpm-xM^NY+xT>)-!rU`l9zXmB4SJ3 z_qQM$cM;|7h%Hnpk%fvKmN4q3(7LY`UU_Vc3lt>qK3J!zx&x%QM3^|8u(3`?FPLd;vX zf4hgemA*j-pag#qoWvy6q;&2fa1d8qONUt+{(4zb)tnm}q_?nLHdZRaeDsTrz}A{y2Sj4P6^wLgT+AFzJ*V8do; zxp>IeSgoj>oGE2_)#Yu~l;PC}gfFj!8qAHFDR^NJwBT*M5RxT8JrbA-tua}NJ3@oP z9vtPWvW_jsJZXhIW4aAv(e=D&2K(z0;{de1LOhn$n|WV2xLo6sKt$q{qTBlvHJD)m54sT zs}4|0{|nOTXoPP$@rSt>KPagCPn((#u-k9z1*P{V*ei>BM?+-|W=QNGh(o^2`OKI% zGi;+TM!mbsA({4iP3AFdg&@F9bd1en`7fto5(57kP3~nSm4zbrLKk|hAdS*@3OfFG zVbFE9`$jqne7|rg8hrhzJ`%e>v^5qQ#xbYoBKX{KvDdzzIwr1GCf z0{h+p(=$)zzKvnI20!Oi=8ccQmaBX-9WR>xT%s6r!znXCz?gVCWHQ78>MN5zLh5or zHwdzsB#`^-B!lzOu3c^FQVo5^wUP*xawANW8%kUPu_CE>dp##lLlG0J|>{Zt?$bcXq_r8qA~5P7Nm|;PE3Pchw2E64N^P+D+mk~IgS`3I4iH$~r&Xpjq;OwBo&f`vB6_If& zD8duXlDB)zh~;qoWM$Vtm-&{fAF-H@T79Gjd|I;WHnJ3+clqJ+F>JsWHxOBwZ9CZ) z{mQJ{Isj<|eHtY?cyKbJv>0%dDWB0dJ>{--ge`wYGriM%$KovTQ&gorF_{{<1-TJr z7~7|x9^+iw7CXPJ-m^ENKGUs*xV_Oj=H@s4r&fn(l{+1=WCRdeeZ)qAy0He z{0ci{d`~9zsQwtul=U9w(kT-X#23a;m-DzIb#!A|Jp}53D?9E<81mD@SI>&Sc*q~6 ziJBd=0W*NULkC>N?S#}C0h30%C2V?bNDe1KYm%(hyHGy=toGoPXZJ!I%+EG-e!(#ZL;S z4V5PQYg;Y}n*0wQW(+R~8rMjoGdvyUHXXW@329S(RkaU+D6&qb ztzsK=@|%|9TCl4#>wLY4rj|V}BZcMAyht~RpVec&=_!Aa_Tmk(+|RU_x5%ZZnXdE) zj}sE^fv-Zls?j7{=EBzP8=cJjcPqw~E%OZ^8cIhrVuap^*weMsqppqq9&D5zr(o$v zXh}^4*bkx2=8XT^3t%KiWdCQ?Swa@J`T~ec)CQ@Zrv<7(1wx#O8{-+yl1R`luA=^6 z*rGZZ`D1s_ja1Q{V*3O)hHVg~R7;pA6 z6l)Sgx_gu^mcIC<6YC7GrJDz30?(jw>yD ztx^d5!AZ&Qm2_o*sWa&VeGN4WQTl9xYeFA?lrAJA9daas)r5V8(bZna2er2MjdP2c zOjVI1vK1o^OZvcYfT{H;tYo1F^0ELl1Q@ot$Ij41=2M$(Z>16hlPzlWP0|<1?;S9+ zFc;Caz~0x?{IEFelxp!V$&uJPIU zsY0Idv#Zc9GdG2K^UFTlAuuG6P;|Dj=N}yMPh^-hQF9J_w)#M?vNom^OB?;}OBhR2{)Ksnk}Dgr1QaJE$eF8)BwsfI z7RS~Ucd8PCN#8C*MDu_ok^3e9uZck9>Fw>EUx%(YZWn674bjpBtzq2#g+lp(cMmT>teOi~57As&zb`6oH*hOH%Qcgu5^ZpRgN-Kpo+~w+`dT3QL&VoYuX-OgEZ7XHMpY# zD(qXfA`uy6qH~7{O*%_=tL(MPH;kr~cl+jm5+vJ5yav#n+)DEmr~<-iBWzeBNa84g zs8Z-U5xILb9po%n8M?@7$ZkP$yGhg!Bj}xY?`V$<^$x>32TbS4)fe3EP~SP~<@R-2 
zge;Huo&*3wAM3ml$wax@P%}8Tv&OmE5qJ2 z=ya}uO>~JLK$^Md`F{y34&Sh4nt&osnCrR6zZ9Ts67YaVi#i4trBNZOS6+`lkT#)j zBb_1TU^ud2*{DNRLX^B(0nhF}T{y@n_#ikk=m9wuz)P_8|s)RQFaRJnE@}Ek>PJ0W9HHR6u!O0v6XXeu~hW9 zc5AV@c!ISm9{F&Hf;U`qKe(A%Apv?i{=f8tjlBXL5dWCtTCE4Nf z!@6D~G0v3v8J)Qc1jH&((w=?i(Om?;N=;MufehvATop z_nIi@4;gXT?~|hyzK~(hiKs_t{Y`+Nb3_Qbg!!KmbRRgfX8fSsd79b$sjRnNDtZCD zuHSfN2`@*S?*Z{x(fFoyG0KOy?AsJ}r9aU$7f+^T3D?qdx1fjd-~w^)-c5)1SW87qBfjm~i_2n}0sbrH^*#A3le@@ zds`tzi1Y?e8Avf4bSYXmQu=9ih@gNEY~S)+BsCucyqr2|l5Xx7n;!+H+VU}Jr+J`< za_fFQvC^tbZ;9h{+jqAEvEd%yn-Aet$`0Xu)z~cG(I)TEte0jf<#w-?boM=8 z6l{j89`GPBt^0E;d)#Xn8R_~DkXaaMd5hd~xnBa5-US?Fy2U)IyLr%7@CD3Y*?FjC z6<_+L%}K7qM1#o^qhla>j$A)s08TT6_X+>gn+OwT2k==7e@{i$L%X*3rFTF=oylBn z2qx#L=TRBau5IQC=eJVJij+yY^sd`B{>uCLY?z1keH#J;WTa2QK$4=M%fUCB>*|8Z z-X15ymdiKrkBYn!cV9<9g*sdUXB{jy+7*`+mSc+&n(2)Wgr4_VV{4}|`NiPZVkQrU z6_uO|$<0gTSsS2~Ivou|sbMR2HwlkG5OuQX-UWu`is~T(hczMcVKgNl{dC=NH6<#h z9XjiDqh#sI^YF(<-au~4$9y|TNJ+F#_HMOw$WxVVxF|@LKd2YnDhK}KfUPVbzw6)Y zRd4wdq&J@L-H0PVG?<58lM?orx6Cy`yLp^A=9-KG%*K0t^GDNu`5+L<5;$j2x_98A-Ao~;I`WF%+ z|8cyUGmtwK0f+vp@&6Mms7dhxa*6(k?la?nJxLPZWeDs~JK;e=>Qs6Jjt&~b@qprF z14kGjb=D%d<~T~OCS{o&f7a*V6`oihv~}9IHcQ@kt(bJVBK^XZFX>X1i_m=*=XDPz zQ04>X-yR6!-@+v~Y5J-4{LmFhB3KTz2b|{izlZr7z&G|uUXOr`syv8MFcii2nk>5D zr)e`g3>^un&-^J2aUSJ<=}>=~pUE)H12_-bwF$_0Eh9Qa>*I!DRso!8{RT+g3J`-; z0USg3<_7UQNuZk-3F#1#A}k1#K>+{@A;3+@FP;J5zmqbPNd^FW{sm)q1KTUFuv9aw`+FQ@gN zaXg@A)fyxDWwX_XRtzb!;Ug)&9t)_;_j{l}1Q1Ha5$oJBL_YF-buimx(7Ii6>)n3n zxBy7B2+!&EM_`xqMe8o4&eX?+cc=MdDzcWNP6nckI7oeR((^zTV|9D?45SaJCLOeM zQy-e!0=7oak&!HeJtyxTffLxqLXeNF(CeK#mc3mB3DJWok#Js1&!0X5t8=@@Abt3H zEB3U`=9-KB`Dqd=5kbiIj821RqS}Ll`c4%&5lBcKp91QvnV7%**hWJ> zdK|X)J6Ssw$WQGY+wjZnD%lGex-620{Gp`H>V2UK7j5%iN+}zXjP!gW&<-=rK9Yp& z;b6i0)}hyjdCEYVbLCsw6YemJre0b6&ctVM*Xuv|Xh;^{B+ry%K>e@6Mc2m7HIbSG zP-@^mGV1!xs;hXBKGcDmxHc7~Lp|&4AA$W}>Mx$?qHqzWSGjP z(+MWtqMgWw9NiR1c?=czs!etewRx@^%d9t(ETQrH!4lqv&c++VcD@w4z!6+QTpNRT zo83R!@zKMroMxA6UMO4;E&FAj8&&rx5II)$&_Lgtd&Zl=zPjhL8Q`3PJI?2-E9-|(0izQTBuvMt=2 
zqRj?8JLQ6*FgP_25FTx`x6y3}A^*TVczHeKBUJNp09R%%Z^iKWKQ%C7%#z21QU!4} zFJRAqs_9zcMY4?bEV|m(+=`3ew-Wh@8`**kRW!C9+CfYDyy&4sji1N;^UzRq_o-8u zg>WDFbp7HQClUj%_vjg@)n%h*H(`MWIZ1dvn@cYD#ucI4k9@p6keX1`-8*jeNRfhI z49q>|E)BRc>dijAhKHAV(1w&26r>MKFKROX{9-|R(}F{icIs2v=;nd)J9QTY37UyD z=opiiB@Ikvc2I@Y)!8)EFs)&dx`8Cf>)W`W<+iTL=lK0ik!{{SS-iZu4Z&Z!#uBSS z>woIf4>dfI*l}(~`s7x`5cpcgG+z~r-c<6^Uo{iHvsy)cqjO^Ci)$MwQQ&a00pUVw zBlO?wK23sXZiL70_q$@snq=wq8#uY3L9cOaqZbQKWqq+b$=BPOm>;F!c83>rGcx-k znfSV6$59|dcivYb<=K-?niQ&Rc5A9X0!u0cSo@raGp%E%y&?tVCrjT94-aV=u38_^ z!7>%B$u2XfU#l8jlDtF#`3jxGfu~73?RxoihkZBG==0mTGBT?H`ne(p{?t9v#7DF!4 z2nCqOjy2f9>`-s)f8rJ{b)PO3P*y}0hRN~WmDcDX)$6?P6u;j7_Vi}OhRLq>aF?nB zG_1p>1RhDBdV66Ye|H|WSMN4w%VK@6A4!&KsFN ztSJZY#Mg@pN_>7mnnZ*S`x8xAeRtDNMsM>KIn;oC8?1Y)9SD6_FIlSxaH*$&#&~OUq z&@#BlO(>TTB1SY*fy6i$DWXa@gRPYRm5F!dAYkh2V;j+ed=gJk@>H=rb+&%J;ex%; zU7LOR*A`Fp@{H^$QM1?-r`o{=k7k|hcGV^E2#Wxj@Ue`xb!y3po9~+{bj6ObPqgX- zyd)*;GBt)h1&}*qpAn)VA>~BG@5|kmyyontnfbMrdYMN98gO3>D}5UVVU8~GI;gO_ zsQfw94zcSidF~O@`uhG}w{-7(tIoEMe!kYVfIw)cqDf)6Zyf78X%XiAbrhM?d{JW2 zg>Bb2(~Q6ehV~|@Z+MNw!1z6t0p?W{)}U1Hd|j&J_i7`PekbjDmde@l0rKN{qIo&t zdhB@SYvJv}CMwAL!L|q6x%-RVWmXxDy!l?EB1xsK9ciUT4%UjyUHDcz4@9wkYNHEt zO57dNaG#-EgdYSmr2{0q1aY9EKj@A1-2r{5yVX3=(p|sCfoOKcSyJd*I1Kao(Y4X; z#UHm_lFc@LnxBf2Em}W!Z7S{Lq7Qc|vaa`_dux;KcXmDpk?%ZqT*}MF;yjuC%0X(7 zQYRtU#PK9vLGE1XhQRV<_xkIW3hnRQ-elkR^|{IPxeqm{kd|Y-834;$88I7*GEg7X zxFB=|&V&)6ozI5w>~C05<_k*}&G z1FFpNqRwFZ;BI0-XRpJ8XI?nyG=+bwK|X?gd+6X*!E&77_{T|?gAa7&<2eh^e#eed zZKgKUN2?T=h@hC^Ay;uBk7a#z5RI0C$-1%YvFbpr_-fTwZO#=cpfY1;4JlhE5L4CE z`xOyb&f4a<40Z(jFrSe4lkUH!(%h=e@MIv-8o$-OBNinJjBpDW>^kH%-3>DM=7VKvq| z*o}m|70Ak6r#w;(=?e3fASE!rKDI*L&;B$EhVXpH0I0XJR_HL)OP(|JvN!2PJ~F}{ zmyPxZ({OHg+#>1G(bfPxJTTLUznVRm5W85`iJQw_u*7Hk8WAHG#QfM_>ic~-O15$R zM+^68qdR5k%63%EtGv(QckR=-i2E_7Zd#ef5^eZ4n&SxYIbLhtwwdJn18+D2DS@Xp0QmSBub)9Qf{du;&@eb!K|uX@!q}o z=B%8SGJRMG^e414)C<#i0H3ZAaI9@|H$I;J2aGB%hU!yj+x0)hi2r6_vzp_0H_<+^ zaK=E#-{)5z3G!*oK@hfTkO0mgtsh3Q)X 
z4cNl?xQkq6#LD{C?`=mHIm>Cl=dJvkLL|r9iJiEq18nc6-pdzS)NNR( zI$<~61q(YK=N`W^5n+1Xv=+GSc-Yzra716nH@FIgeMAEyo+Ka|9i$IkwqinAB;&qX zX??4Ll-xIoz<=?9DSoNEshEmE!!o)!Wjb2+5H@4aTGIAUc@r&!CFaKDVhrDoM89OW ztWF!^{&|q*Gx92P`x_n?Z~wp(dAis1l8zx;SiKdOAzD%;{bU&?t}chTXCssTvKG#l znw;dqzs>Aw>ObeY$KtMPCnB&IjPI!%rm8@xSTZ-xyiBb8Z9gi==`LMwH3Kqp(4D%Q znqiDt=I^iytVoZ@5N|~-ZuItSDd9r)m(Zkon#5khn zr{y+V#QS%?uc}lT>`wp+q|hBa3<%J=ljJzJHg=qO=rrmXcF@uGd_gGJ(z1QL9Nt(- z?tv9YCdJ1?qiB42u0Zb_um8D*pr(b8_%)7f#xI^|vxTAc)!rY`bB#gaWy-jX+oXx_ zEn?U`ASIKNhV304P%~J`f#vrbS)zcz#eDL*qbv1Y=3shxhvmKs4?&pyi)VsCD|Jve zaC24G($J)pfHp?RGja>LwMQC46}dpoyUnf^%hhYDev>3o=uK5hb+{bF*MkS`BA`{_ z|4KiPSqf?p)bv={Z6i>zV9Nxmz{fKjdyNzkv;gX7p3}PE4@>9%&c|6Z7M&2PdV? z7td<7`oH}Hh=o+}9-~TPy3=yJ*#o~p5dq3GoAHTxNpT%L^dkn$SzQtq-k8xVk!!Ey zk(*IR+QVo+527rU7;phKZL`y z#-cv!+8KxC;e{p*S>?RMt5{%T6UT}8CI+eZentm(*E0DE$#O7pp;%_HP+_|*QE{9@ zP$GR4zi1k%bDlr8lWqs7Q95E-z+?@2_N?Fv_@yh_Uz3+1^>oe7%@p|yqkk@^OMvrq zGVyed`$$3PA!Yi(WW=RM<^S)vYq`F=`>Y~$*EhYuvRZ|#8$Fy8hPhv z^j)y}IeCAu9K41;J3OVI;Wi9PuarGI+8ixU3#A6RzBHRdsk)GF!R&2sIu6vb7G;V- zmz#sv9M$Nv7OssA zOkwby0+{2jAn=G1K0*-&@b)PYiep;rwmLTe zG`53^5b_(SsOl}B%keK{6rnK4`INb^sxxiC4p2o&mbdt$^fT&QM#ex5whBByXYbwk z=FIbpctH%^r)nQRo9Wnf@a*pXes$7oZMha8xKr(3WX7vJ@PHp6g zvec#voh$Hzu~|BYx-B!ISV%Dplvz$zN3Mf^P3WUY4PuG8IB%iXwTRtvVk+$cKuhNj zy&R__3#wc#a(_;$W=Caz*@KQ(oiyOx0)Z_m3 z9po|N)7A`0gr?6o_$+6RK1-dHCUvL>X>I0TWosdebeIRg9jR)SE1{dAgX^*3Nmjd* z2FT8AG-IGSHeq?yYC%eoU_7C)SR6Bi_^p2mfL5Rqm<*7IyT)I>c#+jO$4r!nS4<=% zL_jRQ$&Yu%`tF_Pk(~^7;&_d!^tPc)tS^qrU;De;dLiExbn==}>n;I$U2aCn}%1uNVvl>ym&(M-nWR_?q=eQdU z`fzCAzb8dvB>X?|OJx-?>jsT?R>#2zI}CcK@$OPZh>EM|Q&bDo9eo~~_2*H`6!#7P z5@6CGc;;&EtatYi*>@mu)sbY*BU!@=m|;httkkm1nL+ib;-#6(BIB)`OFx@vI%u@yM)jVF(83Vi<`~b_S4(6QvqfI|+Y{?c ztc)k8T$1`C^2%rjNaKEnb?LMWWpj(1cwDx&${q*fT(cP=OIYd9$Hw=w>mzY!;f`-4 zdH+_!O{H*k(SO$hOu#DEq7=QJbYzk2jg=ZrE1x0>3Rw*98f7SW^PI3VhxR?uBce$qR zMn&o)5-nAF?oIQ@KEo95Q#x97WSem73|It}C-1_vRndc6==s7X(Cu4#TDj_UbW;_g z@d8)aMkEAU1~OV*z&#FY@~xhN}zSWPlAi6M7HYXWMabYf5lAxAG4M{w3>v 
zV1w-E&f4_?sZK%gnt-v#eEh2KojN$3*tg@iN3}MOhL|Jx>(Eq|3opi75>G_NhL8*x ztbw%l60j}ttec@-G?uKcf(*s&SLCj^M9A_4N*OpuXt#4hv}Tn!ZX_TCs%ooEE={bzh5J!$mRJdE;JW?$91jHQ0VZ{OXu~S zVkw4&+MPP5#Ym$J%AwrG-opXX8L&+JNYF%C-75BDHR-0?MuRy30CVtod(2ETNQE@+ zn0A6F)J{PRnEf;v;`v!SZ!pf(s$|5yVs&IeICyHnT4o^C@rkvH)eG4rKUKxZ=y2?oiI+ZW2}PLW?ywKG$=c^fNxk-w*IG5A7BE zxQwVM_t^Rrf*LLFS%o_9THkfwoweXCs@ztwq>VO@xu72p$dSTZsjln>dVaPcbmm{Q z`Es(x9V0nqPq!#3mz9QQBty|ot;KKZ8XBnWo5W|yp3oV5&&S{wMl{djJ@%HAvs7kF z+Xcd*uI+ogDRH;$id@}G)-e^NQ0oj{%=nqhH2O%>;n#W83enzdD)?+#_?I9rT`R*> zC}lc)$%Z$1*QHSL&+7VbGG@meGyotfp|GjY1XH(-w)#Buy}JU2kF;p4mG=W<@QD|8 z0gFX-lO`eHeaX!lZ$klj#W6t6)NfzTz;qBrHI)mu-**K4@hSJA8-^7j?r z?pN#jKG-K}8C~ft!dC}(0-XsS{@E)}?w;H$v%lvS>Lc>Q)#t;=L~3P>Lk1~L3Gf^+ zeN35}{xxm+C0$1UN1G(IRr?^?vv!d(!MO3L7qY8G>CycU)b_sx+8lkBkU|EF+1($R z@qaJqmsXP$m+qT)t*@jc3taWSu++7mWu?KQvUo6}Gn&QuvkxTDizougX0KVDS^0t= z8T80heF1xR%zsatr7y(&w5(oh#(3D2Ft|`%ci74=&{*znqH3`~U2;Ljc4*>mzA3hX zav{7BaJelqAw{U}?(*$F%J4x1Si564mfob~=^ZJ)yv&n8iPDlwc*iz+kQS46m|Z!g zX2)a$O515X?TumV2lr#s8hLJ}i3F;k>bt@gR?`*#yi+$!g0dIO2MMer&eRv5uX>Hr zE-D#Y7E^9Jl|%KXt{cP(_n%3+JY<`Ue%Ud3)Gvs47=P*2!8~xjHGPz3gm+AzM4|*0 zRZd_@G+uTlb9uyM%Ic+5;q|>2eeLcM4T&8nE`-I4q{j8~D<9I2F|xjX%R-IVf#M#Q zmCvn-#4jIF@$$cc&=ZSuz-Wf!3ctGfpo)}0|0O=$#FidjETFz)>rpPH&4Gd7q4~-s z$4Mgp;{{C|C5rUxnD!HAWs`VSCx6?&x+;)2n-}Jo8@zxO~anTuDj9eU&Ogad(kS=&#j(hq34jcGUii6)T zVnF(jbe5yh{t0NxOH+Gw-0^VjD<60U>IYc;pn}^tUXOIo22{cc$n*s_HC@UJ6A|ua zgO>q|%LdiZszK374=F@#2e%P;x0LiqvI6E;{j3IY%#FW510%T$qBnS*9eq+*9Ptu- zKN-U2%Ln6e*4d(h&klCKv!}cD`#bxWW|hTrUwnXeWnZ%FDsn59`<}K^!h>}20?ZjH zbd6O^NDH@eTp4ZsiIn3ILxzC$cJM^D1Vw!(Wx6x1= z*4q2IB=PGsWx#f~$~$9nMsYOTNptg zy;Fy^+oRhw7gV5@Vk(Z1R_>$t!49}%TdrM~*7AJ!r&K>fJVZIP12ZGw+iEaS=P^@pr=#swP{jWi5zgk@Mb1yvwt*xpE z8trKrAC!oSN9n|wxo2pgJYO>(i+XgYTAlGdK-Z4aOV(f+V%#*EZ8ZYQOt5$pk&7xh z%HpPcima8=Q9l>#=M0llPo?gT#XY9FU!Y}@rq7)*%EF35)!Rb!=6GHF(T^Oe_Ag60{ zxB*a6VIk)k$BiqIrMg^ay%sUrPW^&x?Cj>Ay$D#u#J-j;Ur{8n zMJ<-ZNw@Vw+f?3EaxOk1lnT(?WBq{CO8w0X@Tl45Ox@tAyVB>*2{GICJNgMv_#C%- 
z7*!-{)tQNnNJ?dMhV%EWYkrVug}?EO8_WG7kK5^G5!H~$9?nlQ$DH@GfKA3+1Ej6X z<_R2UyVq{?W8eRm*@YMJ(Ha3_ukSII*wtm7G80{a*wC7J@l{GbRm*dsCU#?Lo_Ks$ zo$qRORuyos{4V}ri?w}Ql+RKc9wT5jR#J;76*dA~P=}_X-Q4u$Ree{he6^XO8(Tt^kwrr*QPInf3S%5ktcoHvis`!1bkn6Kb$85=C#TGKF(y1V)C0CLl~ z&Le@Jzt(P@GfK?Mr93$J@r}Y~4y!AhVjknXj&=cHJ8ELLVv=0i)Oy%03AaDIe5<^N z%e;TRRDCw7{cCQ}+^k%M@I_8w*8RNF%^Cao;GCMH(vH2l?MAD;wgZ=KfZBrhDY^{0 z;T|EJr3r_>RMoflzf*wF6h`L?j3T7A0)T`~N!)1#!x;YJb-*iu*Ti$?qp1Dj&&%}J0Uk)MS!c(^2jk1SG_CFS3-*S`Q(n(nc#+DtL^jdEc`Ae>r zbov*>Di;@6wSdu*bg6l?jOA9N*x+u%_WBDq`~0tWT5j`{m7j4 zjTkji5WW@MdR_9hv4$!S*NccJe7%{{WkN1@C*VZqM4Vl~Gx(S}h?kL!7#E2WpXX7~ z3!g&J{+Ui(OnRwhvx0+`+I$cQ*pGdzOf7bx7tvOIfNP}%Z___f{RPGhUl=rt1~k)q z#Oc;B?@JRY<@n1RU}DX$EqYf)Fx$rQD4fMb8*$zWjq+h+{ogJI`v}DQwDLP*w0gll z)og`M(z`fp54@(5-5IOg!A6^K_pwL^E0Wo(w0DnJaU9nWzhP5_`bYHIV>11DQUPKN zCfSxz^0d6Ze4cb{#D4jF{_~fOaJQS%QSEOG2cFml{PX3KgH9AA3MG0NNdhFfSLXw- zUaIASN4~qq&l#F6CIoD9I+#7X>2E#r_fTRJNz&)fxmW$?7?!*cvl+ESO%+M-*;7`( zFSC|O`#3_=Ja)iz?#&j*-{Zd%)j^Vafgc)j7naS+xv4!Rgz{*KpLi~glGxS`BDZ6D(y*r z&FnQoGtY#~m%24uIr#pi2Q*{xG917YQ2+chx2Y^bq>+i|P`UA7o#eUZsU*LUSMb4s za1R>3oQ(=tzLa$B(9gPUO3_l@`ku8!le&-dGV866YORHD8G35IAE^bT4J}lNSf3wrYqygx{2zOu*T*VJV4IE6_4Yx^lY>2-tShLH~Mz=iVVfM5B3o?@LNyKiiB>oy^1}W}@nTa-Y znZ*tNW)RUSVlK~VBp(1Ot_yFg^6$G%yaNy!GLm4j<5@M$mgV~A)!%%3gs+fjja>Tj z(vylWspehWPAA;(Uq<)i9zUVFM)KuSH2M0;G7ak;1|3z5PkOujKg4F}LYK-v1IV}E z)irPUf76fz$NvpA(f`<7A$cG4KcWix9}s))FxK+;*2)9Ovr_cu**F-=YbQ{ugf%UB z{7T^x-i4@Y`RBgKEwT7`DetwAAvY!|X_~D|4CC6$&F*s2(zv ze=?3i@;d&d;~a+)jeJ?lQLP~`2xy{H(1q|4ezE!( zQFbps(t;dqyGJSJ%(|XA-YhJaRZ&_BO_HTDt`5U#oG9EjlK(2Jh{`A~nKy)5vd3-K zI1g9zw0i6>m=EvFOHxhbrP0?hK40J)x7P=~OP94E zrCT5dEP{N~LOUf`p3Xk%JisSb`Vva+Oto-+3txbyI_Wl?EpCvjmKR>qwG4HKAv%=DQjdcVtAAy*zNlnFDP2LOqRv#?v; zY+VLo&i_C1@i=O@>5Xw_lBKt|Z|S`D&G!V&m&@92R)vonMCiZVOR@TIQp{HP?mJqY z!wc8@E9d= z`?Zq6mA2p|ib6w;MHz`-E9F%UjK&)L+>N3P%R?}fByM{ydk~vuztC8esG75b90oez zWxw~QtxDDH5s^3hf@|dD&yjXbehr8s8*O*Yqvqu7$}aanT4}z@{dmVwFi%#JNCa{U 
z3+o>mOBOL-<6Z0HP(0fEI@N0u8bU>TugJkDCW%dnnp0`ro{nFvB5OBjA~-gN+5}mP z=@1@*Z*S6wdCZ5ca%0vLJ)r%}sB?6>> zf(})_40=qTGeAu0Zoy3&H9o_V?KLA9&@BcA0OMx0mM!CQYGuZ;f`G86DZLfJxNE_x zH(=dgQiQygDcG21isNvPM#u67?S_52D-?JD(ntfp1TvZ@?tA|=br^=|%2}Pn$12R# z>BX2C=QEO0I1o^C>C%mP=hkF&@7XwHNspNNKsf>wYjLtNkx(=Fhc`H%4;4p8Z)m!+ zKI0;GL965V{r&gGvh(ffpjVEkAwXr|)z_7$UH>Rd^s4?B;sDmpq(A?etTY|Tu{Hbr z=Lu`dbMy6@8xZR_Ubix1$T5wxepEIi^>i6hEZel0Z-sZdZAXTrENaxC@W`SJ(d}#F<4x43Ar*|{9;I~vq@3EqG`^}8sQez z0%Jn1a$EpJy>pap3Vd;*!9y72WPE;Jv=H@ZRVVQeO;PxS94ueHiuqK3+rL(~cEhWF z?X`0Jf68$dRBU~MJvU+~1ys8m!&&;@HY|JnD^ev zX6Cd7TR;}8+f5ph(*&qUNrge^mTM$$hX^c}=pUJC7~X(W4`!iKrsAP*wBU@bp=;Ib zm=3b|bB6V=CqBOQt)2A#U;uz3P8)e|+hH{AP3Fx5m%`y3f`KJx-*8qx3!p(o2#ve| zshha!)xI-TLkI_H=_-f|BHRTKaoExfCbP0>U^?foKjLIBT~pW$gP4YmxWYvY4nO%~ zXqbz3cSk+kQ{}7V?0zjxpaa`q0B7=>#SQ~if@)COhj>2V#s3%njPHd(;@%PC098W8 zT?-FHlQVWTa$yFjsaCHobgd{DGj}-lKm=A*eOgPT!e$qcb{lsB^u)XJzZ*9no+mT| zII!TAr02Ey=HYTEesSl5!^r_TpjPmk3QYE#y{?U7oA40_ISQxDSa*XXe^@9PA`KqG^&(1{&b2?Df=O5acv(nF#&F^5v}e6}_vn~IXt4QcaHrFlK~`l%<1O0V(` zcwjba`df05ipN@dW{dH%NQfd#M{OfIy@W_xP(nAy`V1DkBr= ze7~M1fpF6jAuM{fkPKx!7&=oW{`&;%E%%ZhW)vjg z-O8I#Oy*f-omqq?i$8DhV!I=nX~Ig&S1~#$FR_ zEq=Rcnah$%r!AV&?@wDVVh-xS-KPYT8)!C?#Mr(k(Y8Xn{T2@UDRKbRk@sIqkPOWMCWwBl zv>678^wGDz`d2P1&e}PJ0DWvEODi7KELV+0>&Zu{XXH2+m5Y;!d>lsa1%4fddis{! 
z*I^5@=5>GaJ2zH5gum_?r!iW`lZ!*Wr%|3`(K}y5jK8jbn+(%4;c9$|Y=UQYlAc7D zC)d@|LOy^~9D1S_uu9>HU+Ya1N_d-Ii54JENywo{({2B6KpIvKyjQ=kA4fS}r}lUw;MfDVMdX!4 ziCC_suk-1jaek$U zSm**K%m4+5K?R`4?p&)9&-49z47v5A^PCRyN5zue6luZF5p?I5e3ft~&)jqJSrG90 za=~2Q|mZenUrBgeB{K1v?q& z+&`meo~@0%8R)sCB30yZnaE~@LKZR;RfBSfiArH77)>41^TozW>}0yvq1^&jPnus& z<^weVp`kh__%2`SY`G?yRrLOW=ipnRjBnjWHwzMTq{mo9gbeyQWyr{4wbT}4F~?*6 zY_G@+9$iW`^aBbX7Mm+i-7)_0P;w?88a|-Pb$*#HAkn%S>BBY!E!M1~1k0-g&FbI1F3A8)NM9sJ>*d}mnKaM}re5C>}ilZn-2D46L_#+upKDATW&;?*DN4HE~A~ z*)3%`89nPS(_^t(hTip09fb6cX@ayGM-DeF>+E`J^#b;lEH4FO!#mzt{5sR+4fKo# zngCf?DF`c*E`gU3>TZm*{|nEs$+pD~-h)RD=$fn!xLS&6rM`fIhjwTeCt0bEEDx?F zcbj4cnD|6@dTUUy*klEVG4;z}Z$5f`gxj+k9VY~EwanK+8W4A}E3K5i%by#ddp}cf z8)-9RwIIL^QUkzTF(VkeCUS84lYuH#(SGxG27;sX>rJUK^Zut?&vV~>#MCw^vute^ zrTU}DjllPr=<}v0=8FyVv$Q;yq)WgR%>Luw|O`==b z|EhZLqoh0C3}|1!!eIp}elh4Inr5+4>ets4VUmt%o)-?9BwBgVR-jZpwZXIRs{4!n z)PuH#jAt_jFRvJ3tMN8w&gN6IV&bC6XgX=)m_mYaa20`$Ku8~k-EvW&*gi~nrQO{5 z#&+4PwOC7!gezEH7uIRsAAQ1RvyU20YZ=t+7_`yJ&>G#yBcxxWdBnasB(hQ|oK$`M zLDVrYlF(H)>E2T)>a;R89+V~nX6b`7GYZ9?AHiVO#LN@q_ngg9x!&TpEJ111^r+l; z^8~lVrtfJ8w(8gRC5uC{keCz|MVw3J3uZ{|m&3!6WN|vklvr#sor|Lth6d-cdg$x6 z_m9xs+AjJjZA~d@y(@%M!v!|ga08_Ve|ec<=zE*Z-O$*Vjw(@Mk~N zsr$72H5j#BfiV?w*WH*|T{Fi{+s)S8YZRX4Y*9HxdyOnQ)Jd7$x&P=!@{d{}Gj}hq z(S$*~^2-l8uVWYle|OJfdjStx_=Z|Xn>~8LMib7x#h^#e;;@B9=|l)L9Y(R~R{#sS z+*0uHhMJf2jTF=;K@num08$Q47mrCC%QUTA%*$JvuN6$o zJiIZEfv@Ilz*T6ltcK2{m%juh zC<(x%BBXa!?I^TF{z^Q1ENL z!e(k%aGMdsLBx@jm6gy53`zO zj}7gq1rax{K%vSy?{xIP?dxt7a3$G15<8OFmT*A~+}GmLpgiAyz%yT?`Al;Y9irGK zNVt&kb;U1N3Ri0oM-xQO{y4;N;ofCsP7Iv1IKJ-eh=c=8z?KYa<6fRzY;1s2#er7t z2SjNTqN*i#oVI;jP50fqs_3ZYJ5W`F15}V?iTQ1K*8duTp7tFZbH{hT^h-3iB7RJ&@Z+PDSA0Yz>#k~5Ur~w= zKHrg3tiGfeGz|+lI}#0|;MhGej&ZTz`F1x@o48)XHkOrW6&?$^epxYym$a?6fkTUCS58{Z^xPqmucm=FVl#}2&Iet=EA$qcO}*R4B z>n>tnSH2Xzt9KZ?rTe9&<(xbI911lC6kJ0R>S=LhJU1QQ&K0Ha{W%tke`3wbJF!%H zCrRhKVBh$Pm%SdAS#Z6Q60z>x$ch|bUMO8A7!_TM4e5RBEcH4R!!w$wys-Yhk+=v| z+Z!S%M(UrkH?r@Rm0i6{w-4-6$!`5mD!G8XkIT)(-yP 
z?)Pz9FarG@gm*%mnYgLVi^dqt*y(y6kojz-Z6jJTK~|7?CY(X)C#(B&!+uf4hW8=l za$2sRXzTm#Oktir_4-{!dfnoWUB>5=V2$|4&u@)AzIxBvAoOnQJqD@VnNz>Y7Gv+} z22l@Fl(`52#=OASLLjWP9Um_SCHavYyA18N(}BYL?zag19Qx?L(#L0|b6o*+v}b*c zc-GRSGDNJrM=*nF-Nd3-Su8pWZKpv!J1_wcwg6u?L@)bdLic z=*H=NrHRtk3X1;lnR;W>!FGMn#PCZ(Z!e58{?t>l+N|qj0@-NQSG0a&?`h*%RmmM! zX5Y|P<{wwfeDU^`(uizL$V$1ap4q&L@|In5_aQ@S);GiNIWz-%Cd|~y(OeXrdOUVrpJ?%T!9cmE z1*N=*gr&qgwDtbk+h^glSGEq1NA{HY Date: Thu, 18 Apr 2024 16:00:23 +1000 Subject: [PATCH 03/29] Import from odc.geo consistently, thanks Ariana. --- integration_tests/utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/integration_tests/utils.py b/integration_tests/utils.py index fff4849cc..391984686 100644 --- a/integration_tests/utils.py +++ b/integration_tests/utils.py @@ -5,7 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 import enum -from odc.geo.geom import BoundingBox, Geometry +from odc.geo.geom import BoundingBox, Geometry, point from shapely.ops import triangulate, unary_union from datacube_ows.cube_pool import cube @@ -67,8 +67,6 @@ def native_bbox(self): ) def bbox_crs(self, crs): - from odc.geo.geom import point - low = point(*self.lower_corner, crs=self.native_crs).to_crs(crs) up = point(*self.upper_corner, crs=self.native_crs).to_crs(crs) return ( From e6c571a20346984d099e750eb27fb3271800f712 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Thu, 18 Apr 2024 17:03:03 +1000 Subject: [PATCH 04/29] Split DataStacker into a separate file, start on typehints. 
--- datacube_ows/data.py | 336 +-------------------------------- datacube_ows/loading.py | 368 +++++++++++++++++++++++++++++++++++++ datacube_ows/wcs1_utils.py | 4 +- datacube_ows/wcs2_utils.py | 2 +- datacube_ows/wms_utils.py | 10 +- tests/test_data.py | 4 +- 6 files changed, 381 insertions(+), 343 deletions(-) create mode 100644 datacube_ows/loading.py diff --git a/datacube_ows/data.py b/datacube_ows/data.py index bae1ba141..0bf8d0e33 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -6,11 +6,9 @@ import json import logging import re -from collections import OrderedDict from datetime import date, datetime, timedelta from itertools import chain -import datacube import numpy import numpy.ma import pytz @@ -22,10 +20,10 @@ from pandas import Timestamp from rasterio.features import rasterize from rasterio.io import MemoryFile -from rasterio.warp import Resampling from datacube_ows.cube_pool import cube -from datacube_ows.mv_index import MVSelectOpts, mv_search +from datacube_ows.loading import DataStacker +from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ogc_utils import (ConfigException, dataset_center_time, solar_date, tz_for_geometry, @@ -33,341 +31,13 @@ from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.resource_limits import ResourceLimited -from datacube_ows.startup_utils import CredentialManager from datacube_ows.utils import default_to_utc, log_call from datacube_ows.wms_utils import (GetFeatureInfoParameters, GetMapParameters, - img_coords_to_geopoint, solar_correct_data) + img_coords_to_geopoint) _LOG = logging.getLogger(__name__) -class ProductBandQuery: - def __init__(self, products, bands, main=False, manual_merge=False, ignore_time=False, fuse_func=None): - self.products = products - self.bands = bands - self.manual_merge = manual_merge - self.fuse_func = fuse_func - self.ignore_time = ignore_time 
- self.main = main - self.key = ( - tuple((p.id for p in self.products)), - tuple(bands) - ) - - def __str__(self): - return f"Query bands {self.bands} from products {self.products}" - - def __hash__(self): - return hash(self.key) - - @classmethod - def style_queries(cls, style, resource_limited=False): - queries = [ - cls.simple_layer_query(style.product, style.needed_bands, - manual_merge=style.product.data_manual_merge, - fuse_func=style.product.fuse_func, - resource_limited=resource_limited) - ] - for fp in style.flag_products: - if fp.products_match(style.product.product_names): - for band in fp.bands: - assert band in style.needed_bands, "Style band not in needed bands list" - else: - if resource_limited: - pq_products = fp.low_res_products - else: - pq_products = fp.products - queries.append(cls( - pq_products, - tuple(fp.bands), - manual_merge=fp.manual_merge, - ignore_time=fp.ignore_time, - fuse_func=fp.fuse_func - )) - return queries - - @classmethod - def full_layer_queries(cls, layer, main_bands=None): - if main_bands: - needed_bands = main_bands - else: - needed_bands = set(layer.band_idx.band_cfg.keys()) - queries = [ - cls.simple_layer_query(layer, needed_bands, - manual_merge=layer.data_manual_merge, - fuse_func=layer.fuse_func, - resource_limited=False) - ] - for fpb in layer.allflag_productbands: - if fpb.products_match(layer.product_names): - for band in fpb.bands: - assert band in needed_bands, "main product band not in needed bands list" - else: - pq_products = fpb.products - queries.append(cls( - pq_products, - tuple(fpb.bands), - manual_merge=fpb.manual_merge, - ignore_time=fpb.ignore_time, - fuse_func=fpb.fuse_func - )) - return queries - - @classmethod - def simple_layer_query(cls, layer, bands, manual_merge=False, fuse_func=None, resource_limited=False): - if resource_limited: - main_products = layer.low_res_products - else: - main_products = layer.products - return cls(main_products, bands, manual_merge=manual_merge, main=True, 
fuse_func=fuse_func) - - -class DataStacker: - @log_call - def __init__(self, product, geobox, times, resampling=None, style=None, bands=None, **kwargs): - super(DataStacker, self).__init__(**kwargs) - self._product = product - self.cfg = product.global_cfg - self._geobox = geobox - self._resampling = resampling if resampling is not None else Resampling.nearest - self.style = style - if style: - self._needed_bands = list(style.needed_bands) - elif bands: - self._needed_bands = [self._product.band_idx.locale_band(b) for b in bands] - else: - self._needed_bands = list(self._product.band_idx.measurements.keys()) - - for band in self._product.always_fetch_bands: - if band not in self._needed_bands: - self._needed_bands.append(band) - self.raw_times = times - if product.mosaic_date_func: - self._times = [product.mosaic_date_func(product.ranges["times"])] - else: - self._times = [ - self._product.search_times( - t, self._geobox) - for t in times - ] - self.group_by = self._product.dataset_groupby() - self.resource_limited = False - - def needed_bands(self): - return self._needed_bands - - @log_call - def n_datasets(self, index, all_time=False, point=None): - return self.datasets(index, - all_time=all_time, point=point, - mode=MVSelectOpts.COUNT) - - def datasets(self, index, - all_flag_bands=False, - all_time=False, point=None, - mode=MVSelectOpts.DATASETS): - if mode == MVSelectOpts.EXTENT or all_time: - # Not returning datasets - use main product only - queries = [ - ProductBandQuery.simple_layer_query( - self._product, - self.needed_bands(), - self.resource_limited) - - ] - elif self.style: - # we have a style - lets go with that. - queries = ProductBandQuery.style_queries(self.style) - elif all_flag_bands: - queries = ProductBandQuery.full_layer_queries(self._product, self.needed_bands()) - else: - # Just take needed bands. 
- queries = [ProductBandQuery.simple_layer_query(self._product, self.needed_bands())] - - if point: - geom = point - else: - geom = self._geobox.extent - if all_time: - times = None - else: - times = self._times - results = [] - for query in queries: - if query.ignore_time: - qry_times = None - else: - qry_times = times - result = mv_search(index, - sel=mode, - times=qry_times, - geom=geom, - products=query.products) - if mode == MVSelectOpts.DATASETS: - result = datacube.Datacube.group_datasets(result, self.group_by) - if all_time: - return result - results.append((query, result)) - elif mode == MVSelectOpts.IDS: - if all_time: - return result - results.append((query, result)) - else: - return result - return OrderedDict(results) - - def create_nodata_filled_flag_bands(self, data, pbq): - var = None - for var in data.data_vars.variables.keys(): - break - if var is None: - raise WMSException("Cannot add default flag data as there is no non-flag data available") - template = getattr(data, var) - data_new_bands = {} - for band in pbq.bands: - default_value = pbq.products[0].measurements[band].nodata - new_data = numpy.ndarray(template.shape, dtype="uint8") - new_data.fill(default_value) - qry_result = template.copy(data=new_data) - data_new_bands[band] = qry_result - data = data.assign(data_new_bands) - for band in pbq.bands: - data[band].attrs["flags_definition"] = pbq.products[0].measurements[band].flags_definition - return data - - @log_call - def data(self, datasets_by_query, skip_corrections=False): - # pylint: disable=too-many-locals, consider-using-enumerate - # datasets is an XArray DataArray of datasets grouped by time. - data = None - for pbq, datasets in datasets_by_query.items(): - if data is not None and len(data.time) == 0: - # No data, so no need for masking data. 
- continue - measurements = pbq.products[0].lookup_measurements(pbq.bands) - fuse_func = pbq.fuse_func - if pbq.manual_merge: - qry_result = self.manual_data_stack(datasets, measurements, pbq.bands, skip_corrections, fuse_func=fuse_func) - else: - qry_result = self.read_data(datasets, measurements, self._geobox, resampling=self._resampling, fuse_func=fuse_func) - if data is None: - data = qry_result - continue - if len(data.time) == 0: - # No data, so no need for masking data. - continue - if pbq.ignore_time: - # regularise time dimension: - if len(qry_result.time) > 1: - raise WMSException("Cannot ignore time on PQ (flag) bands from a time-aware product") - elif len(qry_result.time) == len(data.time): - qry_result["time"] = data.time - else: - if len(qry_result.time) == 0: - data = self.create_nodata_filled_flag_bands(data, pbq) - continue - else: - data_new_bands = {} - for band in pbq.bands: - band_data = qry_result[band] - timeless_band_data = band_data.sel(time=qry_result.time.values[0]) - band_time_slices = [] - for dt in data.time.values: - band_time_slices.append(timeless_band_data) - timed_band_data = xarray.concat(band_time_slices, data.time) - data_new_bands[band] = timed_band_data - data = data.assign(data_new_bands) - continue - elif len(qry_result.time) == 0: - # Time-aware mask product has no data, but main product does. 
- data = self.create_nodata_filled_flag_bands(data, pbq) - continue - qry_result.coords["time"] = data.coords["time"] - data = xarray.combine_by_coords([data, qry_result], join="exact") - - return data - - @log_call - def manual_data_stack(self, datasets, measurements, bands, skip_corrections, fuse_func): - # pylint: disable=too-many-locals, too-many-branches - # manual merge - if self.style: - flag_bands = set(filter(lambda b: b in self.style.flag_bands, bands)) - non_flag_bands = set(filter(lambda b: b not in self.style.flag_bands, bands)) - else: - non_flag_bands = bands - flag_bands = set() - time_slices = [] - for dt in datasets.time.values: - tds = datasets.sel(time=dt) - merged = None - for ds in tds.values.item(): - d = self.read_data_for_single_dataset(ds, measurements, self._geobox, fuse_func=fuse_func) - extent_mask = None - for band in non_flag_bands: - for f in self._product.extent_mask_func: - if extent_mask is None: - extent_mask = f(d, band) - else: - extent_mask &= f(d, band) - if extent_mask is not None: - d = d.where(extent_mask) - if self._product.solar_correction and not skip_corrections: - for band in non_flag_bands: - d[band] = solar_correct_data(d[band], ds) - if merged is None: - merged = d - else: - merged = merged.combine_first(d) - if merged is None: - continue - for band in flag_bands: - # REVISIT: not sure about type converting one band like this? 
- merged[band] = merged[band].astype('uint16', copy=True) - merged[band].attrs = d[band].attrs - time_slices.append(merged) - - if not time_slices: - return None - result = xarray.concat(time_slices, datasets.time) - return result - - # Read data for given datasets and measurements per the output_geobox - # TODO: Make skip_broken passed in via config - @log_call - def read_data(self, datasets, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): - CredentialManager.check_cred() - try: - return datacube.Datacube.load_data( - datasets, - geobox, - measurements=measurements, - fuse_func=fuse_func, - skip_broken_datasets=skip_broken, - patch_url=self._product.patch_url) - except Exception as e: - _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) - raise - # Read data for single datasets and measurements per the output_geobox - # TODO: Make skip_broken passed in via config - @log_call - def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): - datasets = [dataset] - dc_datasets = datacube.Datacube.group_datasets(datasets, self._product.time_resolution.dataset_groupby()) - CredentialManager.check_cred() - try: - return datacube.Datacube.load_data( - dc_datasets, - geobox, - measurements=measurements, - fuse_func=fuse_func, - skip_broken_datasets=skip_broken, - patch_url=self._product.patch_url) - except Exception as e: - _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) - raise - - def datasets_in_xarray(xa): if xa is None: return 0 diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py new file mode 100644 index 000000000..966413d63 --- /dev/null +++ b/datacube_ows/loading.py @@ -0,0 +1,368 @@ +from collections import OrderedDict + +import datetime +from typing import Iterable + +import datacube +import numpy +import xarray +from rasterio.enums import Resampling + +from odc.geo.geom import Geometry 
+from odc.geo.geobox import GeoBox +from odc.geo.warp import Resampling +from datacube_ows.data import _LOG +from datacube_ows.mv_index import MVSelectOpts, mv_search +from datacube_ows.ogc_exceptions import WMSException +from datacube_ows.ows_configuration import OWSNamedLayer +from datacube_ows.startup_utils import CredentialManager +from datacube_ows.styles import StyleDef +from datacube_ows.utils import log_call +from datacube_ows.wms_utils import solar_correct_data + + +class ProductBandQuery: + def __init__(self, + products: list[datacube.model.Product], + bands: list[datacube.model.Measurement], + main: bool = False, manual_merge: bool = False, ignore_time: bool = False, + fuse_func: datacube.api.core.FuserFunction | None = None + ): + self.products = products + self.bands = bands + self.manual_merge = manual_merge + self.fuse_func = fuse_func + self.ignore_time = ignore_time + self.main = main + self.key = ( + tuple((p.id for p in self.products)), + tuple(bands) + ) + + def __str__(self): + return f"Query bands {self.bands} from products {self.products}" + + def __hash__(self): + return hash(self.key) + + @classmethod + def style_queries(cls, style: StyleDef, resource_limited: bool = False) -> list["ProductBandQuery"]: + queries = [ + cls.simple_layer_query(style.product, style.needed_bands, + manual_merge=style.product.data_manual_merge, + fuse_func=style.product.fuse_func, + resource_limited=resource_limited) + ] + for fp in style.flag_products: + if fp.products_match(style.product.product_names): + for band in fp.bands: + assert band in style.needed_bands, "Style band not in needed bands list" + else: + if resource_limited: + pq_products = fp.low_res_products + else: + pq_products = fp.products + queries.append(cls( + pq_products, + tuple(fp.bands), + manual_merge=fp.manual_merge, + ignore_time=fp.ignore_time, + fuse_func=fp.fuse_func + )) + return queries + + @classmethod + def full_layer_queries(cls, + layer: OWSNamedLayer, + main_bands: 
list[datacube.model.Measurement] | None = None) -> list["ProductBandQuery"]: + if main_bands: + needed_bands = main_bands + else: + needed_bands = set(layer.band_idx.band_cfg.keys()) + queries = [ + cls.simple_layer_query(layer, needed_bands, + manual_merge=layer.data_manual_merge, + fuse_func=layer.fuse_func, + resource_limited=False) + ] + for fpb in layer.allflag_productbands: + if fpb.products_match(layer.product_names): + for band in fpb.bands: + assert band in needed_bands, "main product band not in needed bands list" + else: + pq_products = fpb.products + queries.append(cls( + pq_products, + tuple(fpb.bands), + manual_merge=fpb.manual_merge, + ignore_time=fpb.ignore_time, + fuse_func=fpb.fuse_func + )) + return queries + + @classmethod + def simple_layer_query(cls, layer: OWSNamedLayer, + bands: list[datacube.model.Measurement], + manual_merge: bool = False, + fuse_func: datacube.api.FuserFunction | None = None, + resource_limited: bool = False) -> "ProductBandQuery": + if resource_limited: + main_products = layer.low_res_products + else: + main_products = layer.products + return cls(main_products, bands, manual_merge=manual_merge, main=True, fuse_func=fuse_func) + + +class DataStacker: + @log_call + def __init__(self, + product: OWSNamedLayer, + geobox: GeoBox, + times: list[datetime.datetime], + resampling: Resampling | None = None, + style: StyleDef | None = None, + bands: list[str] | None = None): + self._product = product + self.cfg = product.global_cfg + self._geobox = geobox + self._resampling = resampling if resampling is not None else Resampling.nearest + self.style = style + if style: + self._needed_bands = list(style.needed_bands) + elif bands: + self._needed_bands = [self._product.band_idx.locale_band(b) for b in bands] + else: + self._needed_bands = list(self._product.band_idx.measurements.keys()) + + for band in self._product.always_fetch_bands: + if band not in self._needed_bands: + self._needed_bands.append(band) + self.raw_times = times + if 
product.mosaic_date_func: + self._times = [product.mosaic_date_func(product.ranges["times"])] + else: + self._times = [ + self._product.search_times( + t, self._geobox) + for t in times + ] + self.group_by = self._product.dataset_groupby() + self.resource_limited = False + + def needed_bands(self) -> list[str]: + return self._needed_bands + + @log_call + def n_datasets(self, + index: datacube.index.Index, + all_time: bool = False, + point: Geometry | None = None) -> int: + return self.datasets(index, + all_time=all_time, point=point, + mode=MVSelectOpts.COUNT) + + def datasets(self, index: datacube.index.Index, + all_flag_bands: bool = False, + all_time: bool = False, + point: Geometry | None = None, + mode: MVSelectOpts = MVSelectOpts.DATASETS) -> int | Iterable[datacube.model.Dataset]: + if mode == MVSelectOpts.EXTENT or all_time: + # Not returning datasets - use main product only + queries = [ + ProductBandQuery.simple_layer_query( + self._product, + self.needed_bands(), + self.resource_limited) + + ] + elif self.style: + # we have a style - lets go with that. + queries = ProductBandQuery.style_queries(self.style) + elif all_flag_bands: + queries = ProductBandQuery.full_layer_queries(self._product, self.needed_bands()) + else: + # Just take needed bands. 
+ queries = [ProductBandQuery.simple_layer_query(self._product, self.needed_bands())] + + if point: + geom = point + else: + geom = self._geobox.extent + if all_time: + times = None + else: + times = self._times + results = [] + for query in queries: + if query.ignore_time: + qry_times = None + else: + qry_times = times + result = mv_search(index, + sel=mode, + times=qry_times, + geom=geom, + products=query.products) + if mode == MVSelectOpts.DATASETS: + result = datacube.Datacube.group_datasets(result, self.group_by) + if all_time: + return result + results.append((query, result)) + elif mode == MVSelectOpts.IDS: + if all_time: + return result + results.append((query, result)) + else: + return result + return OrderedDict(results) + + def create_nodata_filled_flag_bands(self, data, pbq): + var = None + for var in data.data_vars.variables.keys(): + break + if var is None: + raise WMSException("Cannot add default flag data as there is no non-flag data available") + template = getattr(data, var) + data_new_bands = {} + for band in pbq.bands: + default_value = pbq.products[0].measurements[band].nodata + new_data = numpy.ndarray(template.shape, dtype="uint8") + new_data.fill(default_value) + qry_result = template.copy(data=new_data) + data_new_bands[band] = qry_result + data = data.assign(data_new_bands) + for band in pbq.bands: + data[band].attrs["flags_definition"] = pbq.products[0].measurements[band].flags_definition + return data + + @log_call + def data(self, datasets_by_query, skip_corrections=False): + # pylint: disable=too-many-locals, consider-using-enumerate + # datasets is an XArray DataArray of datasets grouped by time. + data = None + for pbq, datasets in datasets_by_query.items(): + if data is not None and len(data.time) == 0: + # No data, so no need for masking data. 
+ continue + measurements = pbq.products[0].lookup_measurements(pbq.bands) + fuse_func = pbq.fuse_func + if pbq.manual_merge: + qry_result = self.manual_data_stack(datasets, measurements, pbq.bands, skip_corrections, fuse_func=fuse_func) + else: + qry_result = self.read_data(datasets, measurements, self._geobox, resampling=self._resampling, fuse_func=fuse_func) + if data is None: + data = qry_result + continue + if len(data.time) == 0: + # No data, so no need for masking data. + continue + if pbq.ignore_time: + # regularise time dimension: + if len(qry_result.time) > 1: + raise WMSException("Cannot ignore time on PQ (flag) bands from a time-aware product") + elif len(qry_result.time) == len(data.time): + qry_result["time"] = data.time + else: + if len(qry_result.time) == 0: + data = self.create_nodata_filled_flag_bands(data, pbq) + continue + else: + data_new_bands = {} + for band in pbq.bands: + band_data = qry_result[band] + timeless_band_data = band_data.sel(time=qry_result.time.values[0]) + band_time_slices = [] + for dt in data.time.values: + band_time_slices.append(timeless_band_data) + timed_band_data = xarray.concat(band_time_slices, data.time) + data_new_bands[band] = timed_band_data + data = data.assign(data_new_bands) + continue + elif len(qry_result.time) == 0: + # Time-aware mask product has no data, but main product does. 
+ data = self.create_nodata_filled_flag_bands(data, pbq) + continue + qry_result.coords["time"] = data.coords["time"] + data = xarray.combine_by_coords([data, qry_result], join="exact") + + return data + + @log_call + def manual_data_stack(self, datasets, measurements, bands, skip_corrections, fuse_func): + # pylint: disable=too-many-locals, too-many-branches + # manual merge + if self.style: + flag_bands = set(filter(lambda b: b in self.style.flag_bands, bands)) + non_flag_bands = set(filter(lambda b: b not in self.style.flag_bands, bands)) + else: + non_flag_bands = bands + flag_bands = set() + time_slices = [] + for dt in datasets.time.values: + tds = datasets.sel(time=dt) + merged = None + for ds in tds.values.item(): + d = self.read_data_for_single_dataset(ds, measurements, self._geobox, fuse_func=fuse_func) + extent_mask = None + for band in non_flag_bands: + for f in self._product.extent_mask_func: + if extent_mask is None: + extent_mask = f(d, band) + else: + extent_mask &= f(d, band) + if extent_mask is not None: + d = d.where(extent_mask) + if self._product.solar_correction and not skip_corrections: + for band in non_flag_bands: + d[band] = solar_correct_data(d[band], ds) + if merged is None: + merged = d + else: + merged = merged.combine_first(d) + if merged is None: + continue + for band in flag_bands: + # REVISIT: not sure about type converting one band like this? 
+ merged[band] = merged[band].astype('uint16', copy=True) + merged[band].attrs = d[band].attrs + time_slices.append(merged) + + if not time_slices: + return None + result = xarray.concat(time_slices, datasets.time) + return result + + # Read data for given datasets and measurements per the output_geobox + # TODO: Make skip_broken passed in via config + @log_call + def read_data(self, datasets, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): + CredentialManager.check_cred() + try: + return datacube.Datacube.load_data( + datasets, + geobox, + measurements=measurements, + fuse_func=fuse_func, + skip_broken_datasets=skip_broken, + patch_url=self._product.patch_url) + except Exception as e: + _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) + raise + # Read data for single datasets and measurements per the output_geobox + # TODO: Make skip_broken passed in via config + @log_call + def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): + datasets = [dataset] + dc_datasets = datacube.Datacube.group_datasets(datasets, self._product.time_resolution.dataset_groupby()) + CredentialManager.check_cred() + try: + return datacube.Datacube.load_data( + dc_datasets, + geobox, + measurements=measurements, + fuse_func=fuse_func, + skip_broken_datasets=skip_broken, + patch_url=self._product.patch_url) + except Exception as e: + _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) + raise diff --git a/datacube_ows/wcs1_utils.py b/datacube_ows/wcs1_utils.py index 26c9bee25..7783380e9 100644 --- a/datacube_ows/wcs1_utils.py +++ b/datacube_ows/wcs1_utils.py @@ -14,7 +14,7 @@ from rasterio import MemoryFile from datacube_ows.cube_pool import cube -from datacube_ows.data import DataStacker +from datacube_ows.loading import DataStacker from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import 
WCS1Exception from datacube_ows.ogc_utils import ConfigException @@ -23,7 +23,7 @@ from datacube_ows.wcs_utils import get_bands_from_styles -class WCS1GetCoverageRequest(): +class WCS1GetCoverageRequest: version = Version(1, 0, 0) # pylint: disable=too-many-instance-attributes, too-many-branches, too-many-statements, too-many-locals def __init__(self, args): diff --git a/datacube_ows/wcs2_utils.py b/datacube_ows/wcs2_utils.py index 4e4d998c8..6f09f6e93 100644 --- a/datacube_ows/wcs2_utils.py +++ b/datacube_ows/wcs2_utils.py @@ -12,7 +12,7 @@ from rasterio import MemoryFile from datacube_ows.cube_pool import cube -from datacube_ows.data import DataStacker +from datacube_ows.loading import DataStacker from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WCS2Exception from datacube_ows.ows_configuration import get_config diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index b8027e89b..38b016b43 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -18,7 +18,7 @@ from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ogc_utils import ConfigException, create_geobox -from datacube_ows.ows_configuration import get_config +from datacube_ows.ows_configuration import get_config, OWSNamedLayer from datacube_ows.resource_limits import RequestScale from datacube_ows.styles import StyleDef from datacube_ows.styles.expression import ExpressionException @@ -126,7 +126,7 @@ def img_coords_to_geopoint(geobox, i, j): geobox.crs) -def get_product_from_arg(args, argname="layers"): +def get_product_from_arg(args, argname="layers") -> OWSNamedLayer: layers = args.get(argname, "").split(",") if len(layers) != 1: raise WMSException("Multi-layer requests not supported") @@ -168,7 +168,7 @@ def get_times_for_product(product): return ranges['times'] -def get_times(args, product): +def get_times(args, product: OWSNamedLayer) -> list[datetime]: # Time parameter times_raw = args.get('time', '') times = 
times_raw.split(',') @@ -176,7 +176,7 @@ def get_times(args, product): return list([parse_time_item(item, product) for item in times]) -def parse_time_item(item, product): +def parse_time_item(item: str, product: OWSNamedLayer) -> datetime: times = item.split('/') # Time range handling follows the implementation described by GeoServer # https://docs.geoserver.org/stable/en/user/services/wms/time.html @@ -319,7 +319,7 @@ def __init__(self, args): def method_specific_init(self, args): pass - def get_product(self, args): + def get_product(self, args) -> OWSNamedLayer: return get_product_from_arg(args) diff --git a/tests/test_data.py b/tests/test_data.py index d3d06a174..328e4ea68 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -12,7 +12,8 @@ from xarray import Dataset import datacube_ows.data -from datacube_ows.data import ProductBandQuery, get_s3_browser_uris +from datacube_ows.data import get_s3_browser_uris +from datacube_ows.loading import ProductBandQuery, DataStacker from datacube_ows.ogc_exceptions import WMSException from tests.test_styles import product_layer # noqa: F401 @@ -299,7 +300,6 @@ def test_user_date_sorter(): def test_create_nodata(dummy_raw_calc_data): - from datacube_ows.data import DataStacker ds = DataStacker.__new__(DataStacker) data_in = dummy_raw_calc_data prod = MagicMock() From 2bb0efd0f4d2dd813e8ce919b4f590d1019512b5 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Fri, 19 Apr 2024 11:25:07 +1000 Subject: [PATCH 05/29] Fixing typehints, cleaning up import structure. 
--- datacube_ows/cfg_parser_impl.py | 3 +- datacube_ows/config_utils.py | 119 ++++++++++++++++++- datacube_ows/cube_pool.py | 12 +- datacube_ows/data.py | 3 +- datacube_ows/loading.py | 18 +-- datacube_ows/mv_index.py | 70 +++++------ datacube_ows/ogc_utils.py | 185 ++++++------------------------ datacube_ows/ows_configuration.py | 5 +- datacube_ows/resource_limits.py | 4 +- datacube_ows/styles/base.py | 3 +- datacube_ows/styles/component.py | 3 +- datacube_ows/styles/expression.py | 2 +- datacube_ows/styles/hybrid.py | 3 +- datacube_ows/styles/ramp.py | 3 +- datacube_ows/tile_matrix_sets.py | 3 +- datacube_ows/utils.py | 31 ++--- datacube_ows/wcs1_utils.py | 2 +- datacube_ows/wms_utils.py | 3 +- mypy.ini | 4 + setup.py | 7 +- tests/test_cfg_bandidx.py | 3 +- tests/test_cfg_cache_ctrl.py | 2 +- tests/test_cfg_global.py | 2 +- tests/test_cfg_inclusion.py | 4 +- tests/test_cfg_layer.py | 2 +- tests/test_cfg_metadata_types.py | 2 +- tests/test_cfg_tile_matrix_set.py | 2 +- tests/test_cfg_wcs.py | 2 +- tests/test_multidate_handler.py | 2 +- tests/test_ows_configuration.py | 22 ++-- tests/test_style_api.py | 2 +- tests/test_styles.py | 3 +- 32 files changed, 273 insertions(+), 258 deletions(-) create mode 100644 mypy.ini diff --git a/datacube_ows/cfg_parser_impl.py b/datacube_ows/cfg_parser_impl.py index a2fdcc22f..353aca80c 100755 --- a/datacube_ows/cfg_parser_impl.py +++ b/datacube_ows/cfg_parser_impl.py @@ -15,8 +15,9 @@ from deepdiff import DeepDiff from datacube_ows import __version__ -from datacube_ows.ows_configuration import (ConfigException, OWSConfig, +from datacube_ows.ows_configuration import (OWSConfig, OWSFolder, read_config) +from datacube_ows.config_utils import ConfigException @click.group(invoke_without_command=True) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 8a331a554..db7e33fd2 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -7,8 +7,9 @@ import logging import os from importlib import 
import_module +from itertools import chain from typing import (Any, Callable, Iterable, List, Mapping, MutableMapping, - Optional, Sequence, Set, Union, cast) + Optional, Sequence, Set, TypeVar, Union, cast) from urllib.parse import urlparse import fsspec @@ -17,7 +18,6 @@ from xarray import DataArray from datacube_ows.config_toolkit import deepinherit -from datacube_ows.ogc_utils import ConfigException, FunctionWrapper _LOG = logging.getLogger(__name__) @@ -32,6 +32,9 @@ CFG_DICT = MutableMapping[str, RAW_CFG] +F = TypeVar('F', bound=Callable[..., Any]) + + # inclusions defaulting to an empty list is dangerous, but note that it is never modified. # If modification of inclusions is a required, a copy (ninclusions) is made and modified instead. @@ -137,6 +140,12 @@ def import_python_obj(path: str) -> RAW_CFG: return cast(RAW_CFG, obj) +class ConfigException(Exception): + """ + General exception for OWS Configuration issues. + """ + + class OWSConfigNotReady(ConfigException): """ Exception raised when someone tries to use an OWSConfigEntry that isn't fully initialised yet. @@ -920,3 +929,109 @@ def create_mask(self, data: DataArray) -> DataArray: if mask is not None and self.invert: mask = ~mask # pylint: disable=invalid-unary-operand-type return mask + + +# Function wrapper for configurable functional elements +class FunctionWrapper: + """ + Function wrapper for configurable functional elements + """ + + def __init__(self, + product_or_style_cfg: OWSExtensibleConfigEntry, + func_cfg: F | Mapping[str, Any], + stand_alone: bool = False) -> None: + """ + + :param product_or_style_cfg: An instance of either NamedLayer or Style, + the context in which the wrapper operates. + :param func_cfg: A function or a configuration dictionary representing a function. + :param stand_alone: Optional boolean. + If False (the default) then only configuration dictionaries will be accepted. 
+ """ + self.style_or_layer_cfg = product_or_style_cfg + if callable(func_cfg): + if not stand_alone: + raise ConfigException( + "Directly including callable objects in configuration is no longer supported. Please reference callables by fully qualified name.") + self._func = func_cfg + self._args = [] + self._kwargs = {} + self.band_mapper = None + self.pass_layer_cfg = False + elif isinstance(func_cfg, str): + self._func = get_function(func_cfg) + self._args = [] + self._kwargs = {} + self.band_mapper = None + self.pass_layer_cfg = False + else: + if stand_alone and callable(func_cfg["function"]): + self._func = func_cfg["function"] + elif callable(func_cfg["function"]): + raise ConfigException( + "Directly including callable objects in configuration is no longer supported. Please reference callables by fully qualified name.") + else: + self._func = get_function(func_cfg["function"]) + self._args = func_cfg.get("args", []) + self._kwargs = func_cfg.get("kwargs", {}).copy() + self.pass_layer_cfg = func_cfg.get("pass_layer_cfg", False) + if "pass_product_cfg" in func_cfg: + _LOG.warning("WARNING: pass_product_cfg in function wrapper definitions has been renamed " + "'mapped_bands'. 
Please update your config accordingly") + if func_cfg.get("mapped_bands", func_cfg.get("pass_product_cfg", False)): + if hasattr(product_or_style_cfg, "band_idx"): + # NamedLayer + from datacube_ows.ows_configuration import OWSNamedLayer + named_layer = cast(OWSNamedLayer, product_or_style_cfg) + b_idx = named_layer.band_idx + self.band_mapper = b_idx.band + else: + # Style + from datacube_ows.styles import StyleDef + style = cast(StyleDef, product_or_style_cfg) + b_idx = style.product.band_idx + delocaliser = style.local_band + self.band_mapper = lambda b: b_idx.band(delocaliser(b)) + else: + self.band_mapper = None + + def __call__(self, *args, **kwargs) -> Any: + if args and self._args: + calling_args: Iterable[Any] = chain(args, self._args) + elif args: + calling_args = args + else: + calling_args = self._args + if kwargs and self._kwargs: + calling_kwargs = self._kwargs.copy() + calling_kwargs.update(kwargs) + elif kwargs: + calling_kwargs = kwargs.copy() + else: + calling_kwargs = self._kwargs.copy() + + if self.band_mapper: + calling_kwargs["band_mapper"] = self.band_mapper + + if self.pass_layer_cfg: + calling_kwargs['layer_cfg'] = self.style_or_layer_cfg + + return self._func(*calling_args, **calling_kwargs) + + +def get_function(func: F | str) -> F: + """Converts a config entry to a function, if necessary + + :param func: Either a Callable object or a fully qualified function name str, or None + :return: a Callable object, or None + """ + if func is not None and not callable(func): + mod_name, func_name = func.rsplit('.', 1) + try: + mod = import_module(mod_name) + func = getattr(mod, func_name) + except (ImportError, ModuleNotFoundError, ValueError, AttributeError): + raise ConfigException(f"Could not import python object: {func}") + assert callable(func) + return cast(F, func) diff --git a/datacube_ows/cube_pool.py b/datacube_ows/cube_pool.py index c46f6f122..2739a68aa 100644 --- a/datacube_ows/cube_pool.py +++ b/datacube_ows/cube_pool.py @@ -6,7 +6,7 
@@ import logging from contextlib import contextmanager from threading import Lock -from typing import Generator, MutableMapping, Optional +from typing import Generator from datacube import Datacube @@ -28,11 +28,11 @@ class CubePool: A Cube pool is a thread-safe resource pool for managing Datacube objects (which map to database connections). """ # _instances, global mapping of CubePools by app name - _instances: MutableMapping[str, "CubePool"] = {} + _instances: dict[str, "CubePool"] = {} _cubes_lock_: bool = False - _instance: Optional[Datacube] = None + _instance: Datacube | None = None def __new__(cls, app: str) -> "CubePool": """ @@ -54,7 +54,7 @@ def __init__(self, app: str) -> None: self._cubes_lock: Lock = Lock() self._cubes_lock_ = True - def get_cube(self) -> Optional[Datacube]: + def get_cube(self) -> Datacube | None: """ Return a Datacube object. Either generating a new Datacube, or recycling an unassigned one already in the pool. @@ -77,7 +77,7 @@ def _new_cube(self) -> Datacube: # Lowlevel CubePool API -def get_cube(app: str = "ows") -> Optional[Datacube]: +def get_cube(app: str = "ows") -> Datacube | None: """ Obtain a Datacube object from the appropriate pool @@ -90,7 +90,7 @@ def get_cube(app: str = "ows") -> Optional[Datacube]: # High Level Cube Pool API @contextmanager -def cube(app: str = "ows") -> Generator[Optional["datacube.api.core.Datacube"], None, None]: +def cube(app: str = "ows") -> Generator[Datacube | None, None, None]: """ Context manager for using a Datacube object from a pool. 
diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 0bf8d0e33..54f69920f 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -25,9 +25,10 @@ from datacube_ows.loading import DataStacker from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import (ConfigException, dataset_center_time, +from datacube_ows.ogc_utils import (dataset_center_time, solar_date, tz_for_geometry, xarray_image_as_png) +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.resource_limits import ResourceLimited diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py index 966413d63..1d68200b8 100644 --- a/datacube_ows/loading.py +++ b/datacube_ows/loading.py @@ -1,6 +1,7 @@ from collections import OrderedDict import datetime +import logging from typing import Iterable import datacube @@ -11,7 +12,6 @@ from odc.geo.geom import Geometry from odc.geo.geobox import GeoBox from odc.geo.warp import Resampling -from datacube_ows.data import _LOG from datacube_ows.mv_index import MVSelectOpts, mv_search from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ows_configuration import OWSNamedLayer @@ -20,6 +20,8 @@ from datacube_ows.utils import log_call from datacube_ows.wms_utils import solar_correct_data +_LOG: logging.Logger = logging.getLogger(__name__) + class ProductBandQuery: def __init__(self, @@ -104,7 +106,7 @@ def full_layer_queries(cls, def simple_layer_query(cls, layer: OWSNamedLayer, bands: list[datacube.model.Measurement], manual_merge: bool = False, - fuse_func: datacube.api.FuserFunction | None = None, + fuse_func: datacube.api.core.FuserFunction | None = None, resource_limited: bool = False) -> "ProductBandQuery": if resource_limited: main_products = layer.low_res_products @@ -125,7 +127,7 @@ def __init__(self, self._product = product 
self.cfg = product.global_cfg self._geobox = geobox - self._resampling = resampling if resampling is not None else Resampling.nearest + self._resampling = resampling if resampling is not None else "nearest" self.style = style if style: self._needed_bands = list(style.needed_bands) @@ -335,7 +337,7 @@ def manual_data_stack(self, datasets, measurements, bands, skip_corrections, fus # Read data for given datasets and measurements per the output_geobox # TODO: Make skip_broken passed in via config @log_call - def read_data(self, datasets, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): + def read_data(self, datasets, measurements, geobox, skip_broken = True, resampling="nearest", fuse_func=None): CredentialManager.check_cred() try: return datacube.Datacube.load_data( @@ -344,14 +346,15 @@ def read_data(self, datasets, measurements, geobox, skip_broken = True, resampli measurements=measurements, fuse_func=fuse_func, skip_broken_datasets=skip_broken, - patch_url=self._product.patch_url) + patch_url=self._product.patch_url, + resampling=resampling) except Exception as e: _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) raise # Read data for single datasets and measurements per the output_geobox # TODO: Make skip_broken passed in via config @log_call - def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broken = True, resampling=Resampling.nearest, fuse_func=None): + def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broken = True, resampling="nearest", fuse_func=None): datasets = [dataset] dc_datasets = datacube.Datacube.group_datasets(datasets, self._product.time_resolution.dataset_groupby()) CredentialManager.check_cred() @@ -362,7 +365,8 @@ def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broke measurements=measurements, fuse_func=fuse_func, skip_broken_datasets=skip_broken, - patch_url=self._product.patch_url) + 
patch_url=self._product.patch_url, + resampling=resampling) except Exception as e: _LOG.error("Error (%s) in load_data: %s", e.__class__.__name__, str(e)) raise diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index 482f8eaa9..5c9cfda45 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -17,12 +17,18 @@ from sqlalchemy.dialects.postgresql import TSTZRANGE, UUID from sqlalchemy.sql.functions import count, func +from datacube.index import Index +from datacube.model import Product, Dataset +from sqlalchemy.engine.base import Engine +from sqlalchemy.sql.elements import ClauseElement + + from datacube_ows.utils import default_to_utc -def get_sqlalc_engine(index: "datacube.index.Index") -> "sqlalchemy.engine.base.Engine": +def get_sqlalc_engine(index: Index) -> Engine: # pylint: disable=protected-access - return index._db._engine + return index._db._engine # type: ignore[attr-defined] def get_st_view(meta: MetaData) -> Table: @@ -32,6 +38,8 @@ def get_st_view(meta: MetaData) -> Table: Column('spatial_extent', Geometry(from_text='ST_GeomFromGeoJSON', name='geometry')), Column('temporal_extent', TSTZRANGE()) ) + + _meta = MetaData() st_view = get_st_view(_meta) @@ -53,33 +61,28 @@ class MVSelectOpts(Enum): DATASETS = 4 INVALID = 9999 - def sel(self, stv: Table) -> Iterable["sqlalchemy.sql.elements.ClauseElement"]: + def sel(self, stv: Table) -> list[ClauseElement]: if self == self.ALL: return [stv] if self == self.IDS or self == self.DATASETS: return [stv.c.id] if self == self.COUNT: - return [cast("sqlalchemy.sql.elements.ClauseElement", count(stv.c.id))] + return [cast(ClauseElement, count(stv.c.id))] if self == self.EXTENT: return [text("ST_AsGeoJSON(ST_Union(spatial_extent))")] - assert False + raise Exception("Invalid selection option") -TimeSearchTerm = Union[ - Tuple[datetime.datetime, datetime.datetime], - datetime.datetime, -] -def mv_search(index: "datacube.index.Index", +DateOrDateTime = datetime.datetime | datetime.date 
+TimeSearchTerm = tuple[datetime.datetime, datetime.datetime] | tuple[datetime.date, datetime.date] | DateOrDateTime + +MVSearchResult = Iterable[Iterable[Any]] | Iterable[str] | Iterable[Dataset] | int | None | ODCGeom + +def mv_search(index: Index, sel: MVSelectOpts = MVSelectOpts.IDS, - times: Optional[Iterable[TimeSearchTerm]] = None, - geom: Optional[ODCGeom] = None, - products: Optional[Iterable["datacube.model.DatasetType"]] = None) -> Union[ - Iterable[Iterable[Any]], - Iterable[str], - Iterable["datacube.model.Dataset"], - int, - None, - ODCGeom]: + times: Iterable[TimeSearchTerm] | None = None, + geom: ODCGeom | None = None, + products: Iterable[Product] | None = None) -> MVSearchResult: """ Perform a dataset query via the space_time_view @@ -98,16 +101,16 @@ def mv_search(index: "datacube.index.Index", raise Exception("Must filter by product/layer") prod_ids = [p.id for p in products] - s = select(*sel.sel(stv)).where(stv.c.dataset_type_ref.in_(prod_ids)) + s = select(*sel.sel(stv)).where(stv.c.dataset_type_ref.in_(prod_ids)) # type: ignore[call-overload] if times is not None: or_clauses = [] for t in times: if isinstance(t, datetime.datetime): - t = datetime.datetime(t.year, t.month, t.day, t.hour, t.minute, t.second) - t = default_to_utc(t) - if not t.tzinfo: - t = t.replace(tzinfo=pytz.utc) - tmax = t + datetime.timedelta(seconds=1) + st: datetime.datetime = datetime.datetime(t.year, t.month, t.day, t.hour, t.minute, t.second) + st = default_to_utc(t) + if not st.tzinfo: + st = st.replace(tzinfo=pytz.utc) + tmax = st + datetime.timedelta(seconds=1) or_clauses.append( and_( func.lower(stv.c.temporal_extent) >= t, @@ -115,11 +118,11 @@ def mv_search(index: "datacube.index.Index", ) ) elif isinstance(t, datetime.date): - t = datetime.datetime(t.year, t.month, t.day, tzinfo=pytz.utc) - tmax = t + datetime.timedelta(days=1) + st = datetime.datetime(t.year, t.month, t.day, tzinfo=pytz.utc) + tmax = st + datetime.timedelta(days=1) or_clauses.append( and_( - 
func.lower(stv.c.temporal_extent) >= t, + func.lower(stv.c.temporal_extent) >= st, func.lower(stv.c.temporal_extent) < tmax, ) ) @@ -139,13 +142,13 @@ def mv_search(index: "datacube.index.Index", with engine.connect() as conn: if sel == MVSelectOpts.ALL: return conn.execute(s) - if sel == MVSelectOpts.IDS: + elif sel == MVSelectOpts.IDS: return [r[0] for r in conn.execute(s)] - if sel in (MVSelectOpts.COUNT, MVSelectOpts.EXTENT): + elif sel in (MVSelectOpts.COUNT, MVSelectOpts.EXTENT): for r in conn.execute(s): if sel == MVSelectOpts.COUNT: return r[0] - if sel == MVSelectOpts.EXTENT: + else: # MVSelectOpts.EXTENT geojson = r[0] if geojson is None: return None @@ -159,6 +162,9 @@ def mv_search(index: "datacube.index.Index", else: intersect = uniongeom return intersect - if sel == MVSelectOpts.DATASETS: + elif sel == MVSelectOpts.DATASETS: ids = [r[0] for r in conn.execute(s)] return index.datasets.bulk_get(ids) + else: + raise Exception("Invalid Selection Option") + raise Exception("Unreachable code reached") diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index d15867fc1..1eb3aca00 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -5,32 +5,33 @@ # SPDX-License-Identifier: Apache-2.0 import datetime import logging -from importlib import import_module from io import BytesIO -from itertools import chain -from typing import (Any, Callable, Mapping, MutableMapping, Optional, Sequence, - Tuple, TypeVar, Union, cast) +from typing import (Any, Mapping, Optional, Sequence, cast) from urllib.parse import urlparse import numpy +import xarray from affine import Affine from dateutil.parser import parse -from flask import request +from flask import request, Request from odc.geo.geobox import GeoBox from odc.geo.geom import CRS, Geometry from PIL import Image from pytz import timezone, utc from timezonefinder import TimezoneFinder +from datacube.model import Dataset +from datacube_ows.config_utils import OWSExtensibleConfigEntry + _LOG: 
logging.Logger = logging.getLogger(__name__) tf = TimezoneFinder(in_memory=True) -def dataset_center_time(dataset: "datacube.model.Dataset") -> datetime.datetime: +def dataset_center_time(dataset: Dataset) -> datetime.datetime: """ Determine a center_time for the dataset - Use metadata time if possible as this is what WMS uses to calculate it's temporal extents + Use metadata time if possible as this is what WMS uses to calculate its temporal extents datacube-core center time accessed through the dataset API is calculated and may not agree with the metadata document. @@ -43,7 +44,7 @@ def dataset_center_time(dataset: "datacube.model.Dataset") -> datetime.datetime: center_time = parse(metadata_time) except KeyError: try: - metadata_time: str = dataset.metadata_doc['properties']['dtr:start_datetime'] + metadata_time = dataset.metadata_doc['properties']['dtr:start_datetime'] center_time = parse(metadata_time) except KeyError: pass @@ -65,7 +66,7 @@ def solar_date(dt: datetime.datetime, tz: datetime.tzinfo) -> datetime.date: return dt.astimezone(tz).date() -def local_date(ds: "datacube.model.Dataset", tz: Optional[datetime.tzinfo] = None) -> datetime.date: +def local_date(ds: Dataset, tz: datetime.tzinfo | None = None) -> datetime.date: """ Calculate the local (solar) date for a dataset. 
@@ -79,7 +80,7 @@ def local_date(ds: "datacube.model.Dataset", tz: Optional[datetime.tzinfo] = Non return solar_date(dt_utc, tz) -def tz_for_dataset(ds: "datacube.model.Dataset") -> datetime.tzinfo: +def tz_for_dataset(ds: Dataset) -> datetime.tzinfo: """ Determine the timezone for a dataset (using it's extent) @@ -89,7 +90,7 @@ def tz_for_dataset(ds: "datacube.model.Dataset") -> datetime.tzinfo: return tz_for_geometry(ds.extent) -def tz_for_coord(lon: Union[float, int], lat: Union[float, int]) -> datetime.tzinfo: +def tz_for_coord(lon: float | int, lat: float | int) -> datetime.tzinfo: """ Determine the Timezone for given lat/long coordinates @@ -109,7 +110,7 @@ def tz_for_coord(lon: Union[float, int], lat: Union[float, int]) -> datetime.tzi return timezone(tzn) -def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> Tuple[datetime.datetime, datetime.datetime]: +def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: """ Converts a date to a local solar date datetime range. @@ -123,7 +124,7 @@ def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> Tuple[datetim return (start.astimezone(utc), end.astimezone(utc)) -def month_date_range(date: datetime.date) -> Tuple[datetime.datetime, datetime.datetime]: +def month_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: """ Take a month from a date and convert to a one month long UTC datetime range encompassing the month. @@ -142,7 +143,7 @@ def month_date_range(date: datetime.date) -> Tuple[datetime.datetime, datetime.d return start, end -def year_date_range(date: datetime.date) -> Tuple[datetime.datetime, datetime.datetime]: +def year_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: """ Convert a date to a UTC datetime range encompassing the calendar year including the date. 
@@ -156,7 +157,7 @@ def year_date_range(date: datetime.date) -> Tuple[datetime.datetime, datetime.da return start, end -def day_summary_date_range(date: datetime.date) -> Tuple[datetime.datetime, datetime.datetime]: +def day_summary_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: """ Convert a date to a UTC datetime range encompassing the calendar date. @@ -208,26 +209,6 @@ def resp_headers(d: Mapping[str, str]) -> Mapping[str, str]: return get_config().response_headers(d) -F = TypeVar('F', bound=Callable[..., Any]) - - -def get_function(func: Union[F, str]) -> F: - """Converts a config entry to a function, if necessary - - :param func: Either a Callable object or a fully qualified function name str, or None - :return: a Callable object, or None - """ - if func is not None and not callable(func): - mod_name, func_name = func.rsplit('.', 1) - try: - mod = import_module(mod_name) - func = getattr(mod, func_name) - except (ImportError, ModuleNotFoundError, ValueError, AttributeError): - raise ConfigException(f"Could not import python object: {func}") - assert callable(func) - return cast(F, func) - - def parse_for_base_url(url: str) -> str: """ Extract the base URL from a URL @@ -240,7 +221,7 @@ def parse_for_base_url(url: str) -> str: return parsed -def get_service_base_url(allowed_urls: Union[Sequence[str], str], request_url: str) -> str: +def get_service_base_url(allowed_urls: list[str] | str, request_url: str) -> str: """ Choose the base URL to advertise in XML. 
@@ -263,9 +244,8 @@ def get_service_base_url(allowed_urls: Union[Sequence[str], str], request_url: s # Collects additional headers from flask request objects -def capture_headers(req: "flask.Request", - args_dict: MutableMapping[str, Optional[str]]) \ - -> MutableMapping[str, Optional[str]]: +def capture_headers(req: Request, + args_dict: dict[str, str | None]) -> dict[str, Optional[str]]: """ Capture significant flask metadata into the args dictionary @@ -282,104 +262,7 @@ def capture_headers(req: "flask.Request", return args_dict -class ConfigException(Exception): - """ - General exception for OWS Configuration issues. - """ - - -# Function wrapper for configurable functional elements - - -class FunctionWrapper: - """ - Function wrapper for configurable functional elements - """ - - def __init__(self, - product_or_style_cfg: Union[ - "datacube_ows.ows_configuration.OWSNamedLayer", "datacube_ows.styles.StyleDef"], - func_cfg: Union[F, Mapping[str, Any]], - stand_alone: bool = False) -> None: - """ - - :param product_or_style_cfg: An instance of either NamedLayer or Style, - the context in which the wrapper operates. - :param func_cfg: A function or a configuration dictionary representing a function. - :param stand_alone: Optional boolean. - If False (the default) then only configuration dictionaries will be accepted. - """ - self.style_or_layer_cfg = product_or_style_cfg - if callable(func_cfg): - if not stand_alone: - raise ConfigException( - "Directly including callable objects in configuration is no longer supported. 
Please reference callables by fully qualified name.") - self._func = func_cfg - self._args = [] - self._kwargs = {} - self.band_mapper = None - self.pass_layer_cfg = False - elif isinstance(func_cfg, str): - self._func = get_function(func_cfg) - self._args = [] - self._kwargs = {} - self.band_mapper = None - self.pass_layer_cfg = False - else: - if stand_alone and callable(func_cfg["function"]): - self._func = func_cfg["function"] - elif callable(func_cfg["function"]): - raise ConfigException( - "Directly including callable objects in configuration is no longer supported. Please reference callables by fully qualified name.") - else: - self._func = get_function(func_cfg["function"]) - self._args = func_cfg.get("args", []) - self._kwargs = func_cfg.get("kwargs", {}).copy() - self.pass_layer_cfg = func_cfg.get("pass_layer_cfg", False) - if "pass_product_cfg" in func_cfg: - _LOG.warning("WARNING: pass_product_cfg in function wrapper definitions has been renamed " - "'mapped_bands'. Please update your config accordingly") - if func_cfg.get("mapped_bands", func_cfg.get("pass_product_cfg", False)): - if hasattr(product_or_style_cfg, "band_idx"): - # NamedLayer - named_layer = cast("datacube_ows.ows_configuration.OWSNamedLayer", - product_or_style_cfg) - b_idx = named_layer.band_idx - self.band_mapper = b_idx.band - else: - # Style - style = cast("datacube_ows.styles.StyleDef", product_or_style_cfg) - b_idx = style.product.band_idx - delocaliser = style.local_band - self.band_mapper = lambda b: b_idx.band(delocaliser(b)) - else: - self.band_mapper = None - - def __call__(self, *args, **kwargs) -> Any: - if args and self._args: - calling_args = chain(args, self._args) - elif args: - calling_args = args - else: - calling_args = self._args - if kwargs and self._kwargs: - calling_kwargs = self._kwargs.copy() - calling_kwargs.update(kwargs) - elif kwargs: - calling_kwargs = kwargs.copy() - else: - calling_kwargs = self._kwargs.copy() - - if self.band_mapper: - 
calling_kwargs["band_mapper"] = self.band_mapper - - if self.pass_layer_cfg: - calling_kwargs['layer_cfg'] = self.style_or_layer_cfg - - return self._func(*calling_args, **calling_kwargs) - - -def cache_control_headers(max_age: int) -> str: +def cache_control_headers(max_age: int) -> dict[str, str]: if max_age <= 0: return {"cache-control": "no-cache"} else: @@ -388,7 +271,7 @@ def cache_control_headers(max_age: int) -> str: # Extent Mask Functions -def mask_by_val(data: "xarray.Dataset", band: str, val: Optional[Any] = None) -> "xarray.DataArray": +def mask_by_val(data: xarray.Dataset, band: str, val: Any = None) -> xarray.DataArray: """ Mask by value. Value to mask by may be supplied, or is taken from 'nodata' metadata by default. @@ -401,7 +284,7 @@ def mask_by_val(data: "xarray.Dataset", band: str, val: Optional[Any] = None) -> return data[band] != val -def mask_by_val2(data: "xarray.Dataset", band: str) -> "xarray.DataArray": +def mask_by_val2(data: xarray.Dataset, band: str) -> xarray.DataArray: """ Mask by value, using ODC canonical nodata value @@ -410,14 +293,14 @@ def mask_by_val2(data: "xarray.Dataset", band: str) -> "xarray.DataArray": return data[band] != data[band].nodata -def mask_by_bitflag(data: "xarray.Dataset", band: str) -> "xarray.DataArray": +def mask_by_bitflag(data: xarray.Dataset, band: str) -> xarray.DataArray: """ Mask by ODC metadata nodata value, as a bitflag """ return ~data[band] & data[band].attrs['nodata'] -def mask_by_val_in_band(data: "xarray.Dataset", band: str, mask_band: str, val: Any = None) -> "xarray.DataArray": +def mask_by_val_in_band(data: xarray.Dataset, band: str, mask_band: str, val: Any = None) -> xarray.DataArray: """ Mask all bands by a value in a particular band @@ -427,7 +310,7 @@ def mask_by_val_in_band(data: "xarray.Dataset", band: str, mask_band: str, val: return mask_by_val(data, mask_band, val) -def mask_by_quality(data: "xarray.Dataset", band: str) -> "xarray.DataArray": +def mask_by_quality(data: 
xarray.Dataset, band: str) -> xarray.DataArray: """ Mask by a quality band. @@ -439,7 +322,7 @@ def mask_by_quality(data: "xarray.Dataset", band: str) -> "xarray.DataArray": return mask_by_val(data, "quality") -def mask_by_extent_flag(data: "xarray.Dataset", band: str) -> "xarray.DataArray": +def mask_by_extent_flag(data: xarray.Dataset, band: str) -> xarray.DataArray: """ Mask by extent. @@ -448,7 +331,7 @@ def mask_by_extent_flag(data: "xarray.Dataset", band: str) -> "xarray.DataArray" return data["extent"] == 1 -def mask_by_extent_val(data: "xarray.Dataset", band: str) -> "xarray.DataArray": +def mask_by_extent_val(data: xarray.Dataset, band: str) -> xarray.DataArray: """ Mask by extent value using metadata nodata. @@ -457,7 +340,7 @@ def mask_by_extent_val(data: "xarray.Dataset", band: str) -> "xarray.DataArray": return mask_by_val(data, "extent") -def mask_by_nan(data: "xarray.Dataset", band: str) -> "numpy.NDArray": +def mask_by_nan(data: xarray.Dataset, band: str) -> numpy.ndarray: """ Mask by nan, for bands with floating point data """ @@ -467,8 +350,8 @@ def mask_by_nan(data: "xarray.Dataset", band: str) -> "numpy.NDArray": # Example mosaic date function def rolling_window_ndays( available_dates: Sequence[datetime.datetime], - layer_cfg: "datacube_ows.ows_configuration.OWSNamedLayer", - ndays: int = 6) -> Tuple[datetime.datetime, datetime.datetime]: + layer_cfg: OWSExtensibleConfigEntry, + ndays: int = 6) -> tuple[datetime.datetime, datetime.datetime]: idx = -ndays days = available_dates[idx:] start, _ = layer_cfg.search_times(days[idx]) @@ -486,7 +369,7 @@ def rolling_window_ndays( # Method for formatting urls, e.g. for use in feature_info custom inclusions. -def lower_get_args() -> MutableMapping[str, str]: +def lower_get_args() -> dict[str, str]: """ Return Flask request arguments, with argument names converted to lower case. 
@@ -504,9 +387,9 @@ def lower_get_args() -> MutableMapping[str, str]: def create_geobox( crs: CRS, - minx: Union[float, int], miny: Union[float, int], - maxx: Union[float, int], maxy: Union[float, int], - width: Optional[int] = None, height: Optional[int] = None, + minx: float | int, miny: float | int, + maxx: float | int, maxy: float | int, + width: int | None = None, height: int | None = None, ) -> GeoBox: """ Create an ODC Geobox. diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index 162d0d081..ef54ed79e 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -33,10 +33,9 @@ OWSExtensibleConfigEntry, OWSFlagBand, OWSMetadataConfig, cfg_expand, get_file_loc, import_python_obj, - load_json_obj) + load_json_obj, ConfigException, FunctionWrapper) from datacube_ows.cube_pool import ODCInitException, cube, get_cube -from datacube_ows.ogc_utils import (ConfigException, FunctionWrapper, - create_geobox, local_solar_date_range) +from datacube_ows.ogc_utils import (create_geobox, local_solar_date_range) from datacube_ows.resource_limits import (OWSResourceManagementRules, parse_cache_age) from datacube_ows.styles import StyleDef diff --git a/datacube_ows/resource_limits.py b/datacube_ows/resource_limits.py index ed8a7b530..bccb83282 100644 --- a/datacube_ows/resource_limits.py +++ b/datacube_ows/resource_limits.py @@ -11,8 +11,8 @@ from odc.geo.geobox import GeoBox from odc.geo.geom import CRS, polygon -from datacube_ows.config_utils import CFG_DICT, RAW_CFG, OWSConfigEntry -from datacube_ows.ogc_utils import (ConfigException, cache_control_headers, +from datacube_ows.config_utils import CFG_DICT, RAW_CFG, OWSConfigEntry, ConfigException +from datacube_ows.ogc_utils import (cache_control_headers, create_geobox) diff --git a/datacube_ows/styles/base.py b/datacube_ows/styles/base.py index b5e713130..ca35a4912 100644 --- a/datacube_ows/styles/base.py +++ b/datacube_ows/styles/base.py @@ -21,10 +21,9 @@ 
OWSExtensibleConfigEntry, OWSFlagBandStandalone, OWSIndexedConfigEntry, - OWSMetadataConfig) + OWSMetadataConfig, ConfigException, FunctionWrapper) from datacube_ows.legend_utils import get_image_from_url from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import ConfigException, FunctionWrapper _LOG: logging.Logger = logging.getLogger(__name__) diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 686149b28..0aa0a9964 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -9,8 +9,7 @@ import numpy as np from xarray import DataArray, Dataset -from datacube_ows.config_utils import CFG_DICT -from datacube_ows.ogc_utils import ConfigException, FunctionWrapper +from datacube_ows.config_utils import CFG_DICT, ConfigException, FunctionWrapper from datacube_ows.styles.base import StyleDefBase # pylint: disable=abstract-method diff --git a/datacube_ows/styles/expression.py b/datacube_ows/styles/expression.py index dee244226..27a86c136 100644 --- a/datacube_ows/styles/expression.py +++ b/datacube_ows/styles/expression.py @@ -8,7 +8,7 @@ import lark from datacube.virtual.expr import formula_parser -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException # Lark stuff. 
diff --git a/datacube_ows/styles/hybrid.py b/datacube_ows/styles/hybrid.py index 9020dd783..f13ff53da 100644 --- a/datacube_ows/styles/hybrid.py +++ b/datacube_ows/styles/hybrid.py @@ -7,8 +7,7 @@ from xarray import DataArray, Dataset -from datacube_ows.config_utils import CFG_DICT -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import CFG_DICT, ConfigException from datacube_ows.styles.base import StyleDefBase from datacube_ows.styles.component import ComponentStyleDef from datacube_ows.styles.ramp import ColorRampDef diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index 9363d9d85..2f9e448eb 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -24,8 +24,7 @@ from numpy import ubyte from xarray import Dataset -from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig -from datacube_ows.ogc_utils import ConfigException, FunctionWrapper +from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper from datacube_ows.styles.base import StyleDefBase from datacube_ows.styles.expression import Expression diff --git a/datacube_ows/tile_matrix_sets.py b/datacube_ows/tile_matrix_sets.py index f322432fc..a6707e0f9 100644 --- a/datacube_ows/tile_matrix_sets.py +++ b/datacube_ows/tile_matrix_sets.py @@ -3,8 +3,7 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from datacube_ows.config_utils import OWSConfigEntry -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import OWSConfigEntry, ConfigException # Scale denominators for WebMercator QuadTree Scale Set, starting from zoom level 0. # Currently goes to zoom level 14, where the pixel size at the equator is ~10m (i.e. 
Sentinel2 resolution) diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index b056b73ac..d6f28d66c 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -7,12 +7,18 @@ import logging from functools import wraps from time import monotonic -from typing import Any, Callable, List, Optional, TypeVar +from typing import Any, Callable, TypeVar, cast import pytz from numpy import datetime64 from numpy import datetime64 as npdt64 +from sqlalchemy.engine.base import Connection + +from datacube import Datacube +from datacube.api.query import GroupBy, solar_day +from datacube.model import Dataset + F = TypeVar('F', bound=Callable[..., Any]) def log_call(func: F) -> F: @@ -27,7 +33,7 @@ def log_wrapper(*args, **kwargs): _LOG = logging.getLogger() _LOG.debug("%s args: %s kwargs: %s", func.__name__, args, kwargs) return func(*args, **kwargs) - return log_wrapper + return cast(F, log_wrapper) def time_call(func: F) -> F: @@ -40,23 +46,22 @@ def time_call(func: F) -> F: For debugging or optimisation research only. Should not occur in mainline code. """ @wraps(func) - def timing_wrapper(*args, **kwargs): + def timing_wrapper(*args, **kwargs) -> Any: start: float = monotonic() result: Any = func(*args, **kwargs) stop: float = monotonic() _LOG = logging.getLogger() _LOG.debug("%s took: %d ms", func.__name__, int((stop - start) * 1000)) return result - return timing_wrapper + return cast(F, timing_wrapper) -def group_by_begin_datetime(pnames: Optional[List[str]] = None, - truncate_dates: bool = True) -> "datacube.api.query.GroupBy": +def group_by_begin_datetime(pnames: list[str] | None = None, + truncate_dates: bool = True) -> GroupBy: """ Returns an ODC GroupBy object, suitable for daily/monthly/yearly/etc statistical/summary data. 
(Or for sub-day time resolution data) """ - from datacube.api.query import GroupBy base_sort_key = lambda ds: ds.time.begin if pnames: index = { @@ -87,8 +92,7 @@ def group_by_begin_datetime(pnames: Optional[List[str]] = None, ) -def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.GroupBy": - from datacube.api.query import GroupBy, solar_day +def group_by_solar(pnames: list[str] | None = None) -> GroupBy: base_sort_key = lambda ds: ds.time.begin if pnames: index = { @@ -106,15 +110,14 @@ def group_by_solar(pnames: Optional[List[str]] = None) -> "datacube.api.query.Gr ) -def group_by_mosaic(pnames: Optional[List[str]] = None) -> "datacube.api.query.GroupBy": - from datacube.api.query import GroupBy, solar_day +def group_by_mosaic(pnames: list[str] | None = None) -> GroupBy: base_sort_key = lambda ds: ds.time.begin if pnames: index = { pn: i for i, pn in enumerate(pnames) } - sort_key = lambda ds: (solar_day(ds), index.get(ds.type.name), base_sort_key(ds)) + sort_key: Callable[[Dataset], tuple] = lambda ds: (solar_day(ds), index.get(ds.type.name), base_sort_key(ds)) else: sort_key = lambda ds: (solar_day(ds), base_sort_key(ds)) return GroupBy( @@ -125,7 +128,7 @@ def group_by_mosaic(pnames: Optional[List[str]] = None) -> "datacube.api.query.G ) -def get_sqlconn(dc: "datacube.Datacube") -> "sqlalchemy.engine.base.Connection": +def get_sqlconn(dc: Datacube) -> Connection: """ Extracts a SQLAlchemy database connection from a Datacube object. @@ -133,7 +136,7 @@ def get_sqlconn(dc: "datacube.Datacube") -> "sqlalchemy.engine.base.Connection": :return: A SQLAlchemy database connection object. 
""" # pylint: disable=protected-access - return dc.index._db._engine.connect() + return dc.index._db._engine.connect() # type: ignore[attr-defined] def find_matching_date(dt, dates) -> bool: diff --git a/datacube_ows/wcs1_utils.py b/datacube_ows/wcs1_utils.py index 7783380e9..c17b562a1 100644 --- a/datacube_ows/wcs1_utils.py +++ b/datacube_ows/wcs1_utils.py @@ -17,7 +17,7 @@ from datacube_ows.loading import DataStacker from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WCS1Exception -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import get_config from datacube_ows.resource_limits import ResourceLimited from datacube_ows.wcs_utils import get_bands_from_styles diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index 38b016b43..168631bcd 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -17,7 +17,8 @@ from rasterio.warp import Resampling from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import ConfigException, create_geobox +from datacube_ows.ogc_utils import create_geobox +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import get_config, OWSNamedLayer from datacube_ows.resource_limits import RequestScale from datacube_ows.styles import StyleDef diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..bb3e1a2d0 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,4 @@ +[mypy] +python_version = 3.10 +ignore_missing_imports = True +allow_redefinition = True diff --git a/setup.py b/setup.py index 46e27a6cd..ef5cb8cee 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,12 @@ 'pylint', 'sphinx_click', 'pre-commit', - 'pipdeptree' + 'pipdeptree', + 'mypy', + 'types-pytz', + 'types-python-dateutil', + 'types-requests', + ] operational_requirements = [ diff --git a/tests/test_cfg_bandidx.py b/tests/test_cfg_bandidx.py index 
be311491c..1611be831 100644 --- a/tests/test_cfg_bandidx.py +++ b/tests/test_cfg_bandidx.py @@ -7,8 +7,7 @@ import pytest -from datacube_ows.config_utils import OWSConfigNotReady -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import OWSConfigNotReady, ConfigException from datacube_ows.ows_configuration import BandIndex diff --git a/tests/test_cfg_cache_ctrl.py b/tests/test_cfg_cache_ctrl.py index 21fa6d4b0..95d0b1462 100644 --- a/tests/test_cfg_cache_ctrl.py +++ b/tests/test_cfg_cache_ctrl.py @@ -7,7 +7,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.resource_limits import CacheControlRules diff --git a/tests/test_cfg_global.py b/tests/test_cfg_global.py index 4ef73f19d..ccf0c38b7 100644 --- a/tests/test_cfg_global.py +++ b/tests/test_cfg_global.py @@ -5,7 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import ContactInfo, OWSConfig diff --git a/tests/test_cfg_inclusion.py b/tests/test_cfg_inclusion.py index 10feeb061..422ff65a2 100644 --- a/tests/test_cfg_inclusion.py +++ b/tests/test_cfg_inclusion.py @@ -8,8 +8,8 @@ import pytest -from datacube_ows.config_utils import get_file_loc -from datacube_ows.ows_configuration import ConfigException, read_config +from datacube_ows.config_utils import get_file_loc, ConfigException +from datacube_ows.ows_configuration import read_config src_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) if src_dir not in sys.path: diff --git a/tests/test_cfg_layer.py b/tests/test_cfg_layer.py index 87e0e951d..cfd8e58d7 100644 --- a/tests/test_cfg_layer.py +++ b/tests/test_cfg_layer.py @@ -9,7 +9,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from 
datacube_ows.ows_configuration import OWSFolder, OWSLayer, parse_ows_layer from datacube_ows.resource_limits import ResourceLimited diff --git a/tests/test_cfg_metadata_types.py b/tests/test_cfg_metadata_types.py index 34f62d7d1..ecf46ca66 100644 --- a/tests/test_cfg_metadata_types.py +++ b/tests/test_cfg_metadata_types.py @@ -7,7 +7,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import AttributionCfg, SuppURL diff --git a/tests/test_cfg_tile_matrix_set.py b/tests/test_cfg_tile_matrix_set.py index 5e641dc8e..85b010010 100644 --- a/tests/test_cfg_tile_matrix_set.py +++ b/tests/test_cfg_tile_matrix_set.py @@ -7,7 +7,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.tile_matrix_sets import TileMatrixSet diff --git a/tests/test_cfg_wcs.py b/tests/test_cfg_wcs.py index 396e02bc0..fe7b2aad7 100644 --- a/tests/test_cfg_wcs.py +++ b/tests/test_cfg_wcs.py @@ -7,7 +7,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import WCSFormat, parse_ows_layer diff --git a/tests/test_multidate_handler.py b/tests/test_multidate_handler.py index fc6b6b3e6..ae584371e 100644 --- a/tests/test_multidate_handler.py +++ b/tests/test_multidate_handler.py @@ -8,7 +8,7 @@ import pytest import xarray as xr -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.styles.base import StyleDefBase diff --git a/tests/test_ows_configuration.py b/tests/test_ows_configuration.py index 24373a743..5ede2d99c 100644 --- a/tests/test_ows_configuration.py +++ b/tests/test_ows_configuration.py @@ -16,14 +16,14 @@ def test_function_wrapper_lyr(): lyr = MagicMock() func_cfg = "tests.utils.a_function" - f = 
datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) assert f(7)[0] == "a7 b2 c3" assert f(5, c=4)[0] == "a5 b2 c4" assert f.band_mapper is None func_cfg = { "function": "tests.utils.a_function", } - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) assert f(7, 8)[0] == "a7 b8 c3" func_cfg = { "function": "tests.utils.a_function", @@ -32,12 +32,12 @@ def test_function_wrapper_lyr(): "c": "ouple" } } - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) result = f("pple", "eagle") assert result[0] == "apple beagle couple" assert result[1]["foo"] == "bar" assert f.band_mapper is None - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) result = f(a="pple", b="eagle") assert result[0] == "apple beagle couple" assert result[1]["foo"] == "bar" @@ -46,11 +46,11 @@ def test_function_wrapper_lyr(): "function": "tests.utils.a_function", "args": ["bar", "ouple"] } - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) result = f("pple") assert result[0] == "apple bbar couple" assert f.band_mapper is None - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) result = f() assert result[0] == "abar bouple c3" assert f.band_mapper is None @@ -59,25 +59,25 @@ def test_function_wrapper_lyr(): "args": ["bar", "ouple"] } with pytest.raises(datacube_ows.config_utils.ConfigException) as e: - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, func_cfg) + f = datacube_ows.config_utils.FunctionWrapper(lyr, func_cfg) assert "Could not import python object" in str(e.value) assert "so_fake.not_real.not_a_function" in str(e.value) def test_func_naked(): lyr = MagicMock() with 
pytest.raises(datacube_ows.config_utils.ConfigException) as e: - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, { + f = datacube_ows.config_utils.FunctionWrapper(lyr, { "function": a_function, }) assert str("Directly including callable objects in configuration is no longer supported.") with pytest.raises(datacube_ows.config_utils.ConfigException) as e: - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, a_function) + f = datacube_ows.config_utils.FunctionWrapper(lyr, a_function) assert str("Directly including callable objects in configuration is no longer supported.") - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, { + f = datacube_ows.config_utils.FunctionWrapper(lyr, { "function": a_function, }, stand_alone=True) assert f("ardvark", "bllbbll")[0] == "aardvark bbllbbll c3" - f = datacube_ows.ogc_utils.FunctionWrapper(lyr, a_function, stand_alone=True) + f = datacube_ows.config_utils.FunctionWrapper(lyr, a_function, stand_alone=True) assert f("ardvark", "bllbbll")[0] == "aardvark bbllbbll c3" diff --git a/tests/test_style_api.py b/tests/test_style_api.py index 461eb9384..c950d453c 100644 --- a/tests/test_style_api.py +++ b/tests/test_style_api.py @@ -7,7 +7,7 @@ import pytest -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import ConfigException from datacube_ows.styles.api import ( # noqa: F401 isort:skip StandaloneStyle, apply_ows_style, diff --git a/tests/test_styles.py b/tests/test_styles.py index 409523d71..136772284 100644 --- a/tests/test_styles.py +++ b/tests/test_styles.py @@ -11,8 +11,7 @@ from xarray import DataArray, Dataset, concat import datacube_ows.styles -from datacube_ows.config_utils import OWSEntryNotFound -from datacube_ows.ogc_utils import ConfigException +from datacube_ows.config_utils import OWSEntryNotFound, ConfigException from datacube_ows.ows_configuration import BandIndex, OWSProductLayer From 378d2afd6d2c792a9eba1932b48ae833f8c1259c Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Fri, 19 Apr 2024 
16:16:20 +1000 Subject: [PATCH 06/29] More fixing typehints, cleaning up import structure. --- datacube_ows/config_utils.py | 105 +++++++++++++++++++---------------- 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index db7e33fd2..037062e68 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -14,11 +14,20 @@ import fsspec from datacube.utils.masking import make_mask +from datacube import Datacube +from datacube.model import Product from flask_babel import gettext as _ +from babel.messages import Catalog, Message from xarray import DataArray from datacube_ows.config_toolkit import deepinherit +TYPE_CHECKING = False +if TYPE_CHECKING: + import datacube_ows.ows_configuration.OWSConfig + import datacube_ows.ows_configuration.OWSNamedLayer + import datacube_ows.styles.base.StyleMask + _LOG = logging.getLogger(__name__) RAW_CFG = Union[ @@ -35,7 +44,6 @@ F = TypeVar('F', bound=Callable[..., Any]) - # inclusions defaulting to an empty list is dangerous, but note that it is never modified. # If modification of inclusions is a required, a copy (ninclusions) is made and modified instead. # pylint: disable=dangerous-default-value @@ -223,7 +231,7 @@ def __setattr__(self, name: str, val: Any) -> None: super().__setattr__(name, val) # Validate against database and prepare for use. - def make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: + def make_ready(self, dc: Datacube, *args, **kwargs) -> None: """ Perform second phase initialisation with a database connection. @@ -277,14 +285,14 @@ class OWSMetadataConfig(OWSConfigEntry): # Class registries, mapping metadata paths to their default value and whether the metadata value is # unique to that path, or has been inherited from a parent metadata path. 
- _metadata_registry: MutableMapping[str, str] = {} - _inheritance_registry: MutableMapping[str, bool] = {} + _metadata_registry: dict[str, str] = {} + _inheritance_registry: dict[str, bool] = {} - _msg_src: Optional["babel.messages.Catalog"] = None + _msg_src: Catalog | None = None # Inaccessible attributes to allow type checking abstract: str = "" - attribution: MutableMapping[str, str] = {} + attribution: dict[str, str] = {} def get_obj_label(self) -> str: """Return the metadata path prefix for this object.""" @@ -301,14 +309,14 @@ def can_inherit_from(self) -> Optional["OWSMetadataConfig"]: # Holders for managing inheritance. @property - def default_title(self) -> Optional[str]: + def default_title(self) -> str | None: return None @property - def default_abstract(self) -> Optional[str]: + def default_abstract(self) -> str | None: return None - _keywords: Set[str] = set() + _keywords: set[str] = set() def parse_metadata(self, cfg: CFG_DICT) -> None: """ @@ -398,7 +406,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: self.register_metadata(self.get_obj_label(), f"lbl_{tick}", lbl) @property - def keywords(self) -> Set[str]: + def keywords(self) -> set[str]: """ Return the keywords for this object (with inheritance, but without metadata separation or translation) :return: A set of keywords. @@ -406,14 +414,14 @@ def keywords(self) -> Set[str]: return self._keywords @classmethod - def set_msg_src(cls, src: "babel.messages.Catalog") -> None: + def set_msg_src(cls, src: Catalog) -> None: """ Allow all OWSMetadatConfig subclasses to share a common message catalog. :param src: A Message Catalog object """ OWSMetadataConfig._msg_src = src - def read_metadata(self, lbl: str, fld: str) -> Optional[str]: + def read_metadata(self, lbl: str, fld: str) -> str | None: """ Read a general piece of metadata (potentially from another object). 
Resolution order: @@ -432,12 +440,12 @@ def read_metadata(self, lbl: str, fld: str) -> Optional[str]: if trans != lookup: return trans if self._msg_src is not None: - msg = cast("babel.messages.Catalog", self._msg_src).get(lookup) + msg: Message | None = cast(Catalog, self._msg_src).get(lookup) if not msg: - msg = self._metadata_registry.get(lookup) + msg_: str | None = self._metadata_registry.get(lookup) else: - msg = msg.string - return msg + msg_ = cast(str, msg.string) + return msg_ return self._metadata_registry.get(lookup) def read_inheritance(self, lbl: str, fld: str) -> bool: @@ -463,7 +471,7 @@ def register_metadata(self, lbl: str, fld: str, val: str, inherited: bool = Fals self._metadata_registry[lookup] = val self._inheritance_registry[lookup] = inherited - def read_local_metadata(self, fld: str) -> Optional[str]: + def read_local_metadata(self, fld: str) -> str | None: """ Read a general piece of metadata for this object. Resolution order: @@ -635,7 +643,7 @@ class OWSFlagBandStandalone: def __init__(self, band: str) -> None: self.pq_band = band self.canonical_band_name = band - self.pq_names: List["datacube.model.DatasetType"] = [] + self.pq_names: list[str] = [] self.pq_ignore_time = False self.pq_manual_merge = False self.pq_fuse_func: Optional[FunctionWrapper] = None @@ -657,33 +665,33 @@ def __init__(self, cfg: CFG_DICT, product_cfg: "datacube_ows.ows_configuration.O cfg = cast(CFG_DICT, self._raw_cfg) self.product = product_cfg pq_names = self.product.parse_pq_names(cfg) - self.pq_names = pq_names["pq_names"] + self.pq_names = cast(list[str], pq_names["pq_names"]) self.pq_low_res_names = pq_names["pq_low_res_names"] self.main_products = pq_names["main_products"] - self.pq_band = cfg["band"] + self.pq_band = str(cfg["band"]) self.canonical_band_name = self.pq_band # Update for aliasing on make_ready if "fuse_func" in cfg: self.pq_fuse_func: Optional[FunctionWrapper] = FunctionWrapper(self.product, cast(Mapping[str, Any], cfg["fuse_func"])) else: 
self.pq_fuse_func = None self.pq_ignore_time = cfg.get("ignore_time", False) - self.ignore_info_flags = cfg.get("ignore_info_flags", []) + self.ignore_info_flags = cast(list[str], cfg.get("ignore_info_flags", [])) self.pq_manual_merge = cfg.get("manual_merge", False) self.declare_unready("pq_products") self.declare_unready("flags_def") self.declare_unready("info_mask") # pylint: disable=attribute-defined-outside-init - def make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: + def make_ready(self, dc: Datacube, *args, **kwargs) -> None: """ Second round (db-aware) intialisation. :param dc: A Datacube object """ # pyre-ignore[16] - self.pq_products: List["datacube.model.DatasetType"] = [] + self.pq_products: list[Product] = [] # pyre-ignore[16] - self.pq_low_res_products: List["datacube.model.DatasetType"] = [] + self.pq_low_res_products: list[Product] = [] for pqn in self.pq_names: if pqn is not None: pq_product = dc.index.products.get_by_name(pqn) @@ -709,14 +717,14 @@ def make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: # A (hopefully) representative product product = self.pq_products[0] try: - meas = product.lookup_measurements([self.canonical_band_name])[self.canonical_band_name] + meas = product.lookup_measurements([str(self.canonical_band_name)])[str(self.canonical_band_name)] except KeyError: raise ConfigException( f"Band {self.pq_band} does not exist in product {product.name} - cannot be used as a flag band for layer {self.product.name}.") if "flags_definition" not in meas: raise ConfigException(f"Band {self.pq_band} in product {product.name} has no flags_definition in ODC - cannot be used as a flag band for layer {self.product.name}.") # pyre-ignore[16] - self.flags_def: Mapping[str, RAW_CFG] = meas["flags_definition"] + self.flags_def: dict[str, dict[str, RAW_CFG]] = meas["flags_definition"] for bitname in self.ignore_info_flags: bit = self.flags_def[bitname]["bits"] if not isinstance(bit, int): @@ -725,7 +733,9 @@ def 
make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: self.info_mask &= ~flag super().make_ready(dc, *args, **kwargs) -FlagBand = Union[OWSFlagBand, OWSFlagBandStandalone] + +FlagBand = OWSFlagBand | OWSFlagBandStandalone + class FlagProductBands(OWSConfigEntry): """ @@ -742,7 +752,7 @@ def __init__(self, flag_band: FlagBand, super().__init__({}) self.layer = layer self.bands: Set[str] = set() - self.bands.add(flag_band.canonical_band_name) + self.bands.add(str(flag_band.canonical_band_name)) self.flag_bands = {flag_band.pq_band: flag_band} self.product_names = tuple(flag_band.pq_names) self.ignore_time = flag_band.pq_ignore_time @@ -782,17 +792,18 @@ def add_flag_band(self, fb: FlagBand) -> None: self.declare_unready("low_res_products") # pylint: disable=attribute-defined-outside-init - def make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: + def make_ready(self, dc: Datacube, *args, **kwargs) -> None: """ Second round (db-aware) intialisation. :param dc: A Datacube object """ for fb in self.flag_bands.values(): + fb = cast(OWSFlagBand, fb) # pyre-ignore [16] - self.products: List["datacube.model.DatasetType"] = fb.pq_products + self.products: list[Product] = fb.pq_products # pyre-ignore [16] - self.low_res_products: List["datacube.model.DatasetType"] = fb.pq_low_res_products + self.low_res_products: list[Product] = fb.pq_low_res_products break if self.main_product: self.bands = set(self.layer.band_idx.band(b) for b in self.bands) @@ -810,7 +821,7 @@ def build_list_from_masks(cls, masks: Iterable["datacube_ows.styles.base.StyleMa :param layer: A named layer object :return: A list of FlagProductBands objects """ - flag_products = [] + flag_products: list["FlagProductBands"] = [] for mask in masks: handled = False for fp in flag_products: @@ -824,7 +835,7 @@ def build_list_from_masks(cls, masks: Iterable["datacube_ows.styles.base.StyleMa @classmethod def build_list_from_flagbands(cls, flagbands: Iterable[OWSFlagBand], - layer: 
"datacube_ows.ows_configuration.OWSNamedLayer") -> List["FlagProductBands"]: + layer: "datacube_ows.ows_configuration.OWSNamedLayer") -> list["FlagProductBands"]: """ Class method to instantiate a list of FlagProductBands from a list of OWS Flag Bands. @@ -834,7 +845,7 @@ def build_list_from_flagbands(cls, flagbands: Iterable[OWSFlagBand], :param layer: A named layer object :return: A list of FlagProductBands objects """ - flag_products = [] + flag_products: list["FlagProductBands"] = [] for fb in flagbands: handled = False for fp in flag_products: @@ -858,14 +869,13 @@ def context(self) -> str: return "a mask rule" VALUES_LABEL = "values" - def parse_rule_spec(self, cfg: CFG_DICT): - self.flags: Optional[CFG_DICT] = None + def parse_rule_spec(self, cfg: CFG_DICT) -> None: + self.flags: CFG_DICT | None = None self.or_flags: bool = False - self.values: Optional[List[int]] = None - self.invert: bool = cfg.get("invert", False) + self.values: list[int] | None = None + self.invert = bool(cfg.get("invert", False)) if "flags" in cfg: flags = cast(CFG_DICT, cfg["flags"]) - self.or_flags: bool = False if "or" in flags and "and" in flags: raise ConfigException( f"ValueMap rule in {self.context} combines 'and' and 'or' rules") @@ -874,10 +884,9 @@ def parse_rule_spec(self, cfg: CFG_DICT): flags = cast(CFG_DICT, flags["or"]) elif "and" in flags: flags = cast(CFG_DICT, flags["and"]) - self.flags: Optional[CFG_DICT] = flags + self.flags = flags else: self.flags = None - self.or_flags = False if "values" in cfg: val: Any = cfg["values"] @@ -890,9 +899,9 @@ def parse_rule_spec(self, cfg: CFG_DICT): self.values = None else: if isinstance(val, int): - self.values: Optional[List[int]] = [cast(int, val)] + self.values = [cast(int, val)] else: - self.values: Optional[List[int]] = cast(List[int], val) + self.values = cast(List[int], val) if not self.flags and not self.values: raise ConfigException( @@ -901,7 +910,7 @@ def parse_rule_spec(self, cfg: CFG_DICT): raise ConfigException( 
f"Mask rule in {self.context} has both a 'flags' and a 'values' section - choose one.") - def create_mask(self, data: DataArray) -> DataArray: + def create_mask(self, data: DataArray) -> DataArray | None: """ Create a mask from raw flag band data. @@ -909,7 +918,7 @@ def create_mask(self, data: DataArray) -> DataArray: :return: A boolean DataArray, True where the data matches this rule """ if self.values: - mask: Optional[DataArray] = None + mask: DataArray | None = None for v in cast(List[int], self.values): vmask = data == v if mask is None: @@ -919,11 +928,11 @@ def create_mask(self, data: DataArray) -> DataArray: elif self.or_flags: mask = None for f in cast(CFG_DICT, self.flags).items(): - f = {f[0]: f[1]} + d = {f[0]: f[1]} if mask is None: - mask = make_mask(data, **f) + mask = make_mask(data, **d) else: - mask |= make_mask(data, **f) + mask |= make_mask(data, **d) else: mask = make_mask(data, **cast(CFG_DICT, self.flags)) if mask is not None and self.invert: From d1b8cfb41b398f630aa821432f3dbcc5486f1fb5 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Fri, 19 Apr 2024 16:51:04 +1000 Subject: [PATCH 07/29] More fixing typehints, cleaning up import structure. 
--- datacube_ows/config_toolkit.py | 8 +++--- datacube_ows/config_utils.py | 30 ++++++++++----------- datacube_ows/ogc_exceptions.py | 4 +-- datacube_ows/resource_limits.py | 43 +++++++++++++++++-------------- datacube_ows/styles/expression.py | 30 +++++++++++++++------ 5 files changed, 66 insertions(+), 49 deletions(-) diff --git a/datacube_ows/config_toolkit.py b/datacube_ows/config_toolkit.py index dc5ff6339..f8a4e736d 100644 --- a/datacube_ows/config_toolkit.py +++ b/datacube_ows/config_toolkit.py @@ -4,10 +4,10 @@ # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 from copy import deepcopy -from typing import Any, MutableMapping +from typing import Any -def deepinherit(parent: MutableMapping[str, Any], child: MutableMapping[str, Any]) -> MutableMapping[str, Any]: +def deepinherit(parent: dict[str, Any], child: dict[str, Any]) -> dict[str, Any]: """ Implements inheritance for configuration. @@ -15,12 +15,12 @@ def deepinherit(parent: MutableMapping[str, Any], child: MutableMapping[str, Any :param child: The child configuration to override the parent config :return: A new dictionary reflecting the inherited configuration """ - expanded: MutableMapping[str, Any] = deepcopy(parent) + expanded: dict[str, Any] = deepcopy(parent) deepupdate(expanded, child) return expanded -def deepupdate(target: MutableMapping[str, Any], src: MutableMapping[str, Any]) -> None: +def deepupdate(target: dict[str, Any], src: dict[str, Any]) -> None: for k in src: if isinstance(src[k], dict): if k not in target: diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 037062e68..a6e6f26cd 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -8,7 +8,7 @@ import os from importlib import import_module from itertools import chain -from typing import (Any, Callable, Iterable, List, Mapping, MutableMapping, +from typing import (Any, Callable, Iterable, List, Mapping, Optional, Sequence, Set, TypeVar, Union, cast) 
from urllib.parse import urlparse @@ -35,11 +35,11 @@ str, int, float, - List[Any], - MutableMapping[str, Any] + list[Any], + dict[str, Any] ] -CFG_DICT = MutableMapping[str, RAW_CFG] +CFG_DICT = dict[str, RAW_CFG] F = TypeVar('F', bound=Callable[..., Any]) @@ -48,7 +48,7 @@ # If modification of inclusions is a required, a copy (ninclusions) is made and modified instead. # pylint: disable=dangerous-default-value def cfg_expand(cfg_unexpanded: RAW_CFG, - cwd: Optional[str] = None, inclusions: List[str] = []) -> RAW_CFG: + cwd: str | None = None, inclusions: list[str] = []) -> RAW_CFG: """ Recursively expand config inclusions. @@ -356,7 +356,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: self._keywords = keyword_set.union(local_keyword_set) if self.METADATA_ATTRIBUTION: inheriting = False - attrib = cast(MutableMapping[str, str], cfg.get("attribution")) + attrib = cast(dict[str, str], cfg.get("attribution")) if attrib is None and inherit_from is not None: attrib = inherit_from.attribution inheriting = True @@ -377,7 +377,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: acc = "none" self.register_metadata(self.get_obj_label(), FLD_ACCESS_CONSTRAINTS, acc) if self.METADATA_CONTACT_INFO: - cfg_contact_info: MutableMapping[str, str] = cast(MutableMapping[str, str], cfg.get("contact_info", {})) + cfg_contact_info: dict[str, str] = cast(dict[str, str], cfg.get("contact_info", {})) org = cfg_contact_info.get("organisation") position = cfg_contact_info.get("position") if org: @@ -385,7 +385,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: if position: self.register_metadata(self.get_obj_label(), FLD_CONTACT_POSITION, position) if self.METADATA_DEFAULT_BANDS: - band_map = cast(MutableMapping[str, List[str]], cfg) + band_map = cast(dict[str, List[str]], cfg) for k, v in band_map.items(): if len(v): self.register_metadata(self.get_obj_label(), k, v[0]) @@ -575,10 +575,10 @@ class OWSExtensibleConfigEntry(OWSIndexedConfigEntry): A configuration object that can 
inherit from and extend an existing configuration object of the same type. """ def __init__(self, - cfg: RAW_CFG, keyvals: MutableMapping[str, str], global_cfg: "datacube_ows.ows_configuration.OWSConfig", + cfg: RAW_CFG, keyvals: dict[str, str], global_cfg: "datacube_ows.ows_configuration.OWSConfig", *args, - keyval_subs: Optional[MutableMapping[str, str]] = None, - keyval_defaults: Optional[MutableMapping[str, str]] = None, + keyval_subs: dict[str, str] | None = None, + keyval_defaults: dict[str, str] | None = None, expanded: bool = False, **kwargs) -> None: """ @@ -599,8 +599,8 @@ def __init__(self, @classmethod def expand_inherit(cls, cfg: CFG_DICT, global_cfg: "datacube_ows.ows_configuration.OWSConfig", - keyval_subs: Optional[MutableMapping[str, str]] = None, - keyval_defaults: Optional[MutableMapping[str, str]] = None) -> RAW_CFG: + keyval_subs: dict[str, str] | None = None, + keyval_defaults: dict[str, str] | None = None) -> RAW_CFG: """ Expand inherited config, and apply overrides. @@ -614,7 +614,7 @@ def expand_inherit(cls, lookup = True # Precludes e.g. defaulting style lookup to current layer. 
lookup_keys = {} - inherits = cast(MutableMapping[str, str], cfg["inherits"]) + inherits = cast(dict[str, str], cfg["inherits"]) for k in cls.INDEX_KEYS: if k not in inherits and keyval_defaults is not None and k not in keyval_defaults: lookup = False @@ -629,7 +629,7 @@ def expand_inherit(cls, parent_cfg = parent._raw_cfg else: parent_cfg = cfg["inherits"] - cfg = deepinherit(cast(MutableMapping[str, Any], parent_cfg), cfg) + cfg = deepinherit(cast(dict[str, Any], parent_cfg), cfg) cfg["inheritance_expanded"] = True return cfg diff --git a/datacube_ows/ogc_exceptions.py b/datacube_ows/ogc_exceptions.py index 094d2beb7..84376acd6 100644 --- a/datacube_ows/ogc_exceptions.py +++ b/datacube_ows/ogc_exceptions.py @@ -17,8 +17,8 @@ class OGCException(Exception): CURRENT_UPDATE_SEQUENCE = "CurrentUpdateSequence" INVALID_UPDATE_SEQUENCE = "InvalidUpdateSequence" - version = None - schema_url = None + version: str | None = None + schema_url: str | None = None # pylint: disable=super-init-not-called def __init__(self, msg, code=None, locator=None, http_response=400, valid_keys=None): diff --git a/datacube_ows/resource_limits.py b/datacube_ows/resource_limits.py index bccb83282..cd4f756b4 100644 --- a/datacube_ows/resource_limits.py +++ b/datacube_ows/resource_limits.py @@ -4,7 +4,7 @@ # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 import math -from typing import Any, Iterable, List, Mapping, Optional, Tuple, Union, cast +from typing import Any, Iterable, Mapping, cast import affine import numpy as np @@ -15,6 +15,10 @@ from datacube_ows.ogc_utils import (cache_control_headers, create_geobox) +TYPE_CHECKING = False +if TYPE_CHECKING: + import datacube_ows.ows_configuration.OWSConfig + def parse_cache_age(cfg, entry, section, default=0): try: @@ -36,11 +40,11 @@ class RequestScale: def __init__(self, native_crs: CRS, - native_resolution: Tuple[Union[float, int], Union[float, int]], + native_resolution: tuple[float | int, float | int], geobox: 
GeoBox, n_dates: int, - request_bands: Optional[Iterable[Mapping[str, Any]]] = None, - total_band_size: Optional[int] = None) -> None: + request_bands: Iterable[Mapping[str, Any]] | None = None, + total_band_size: int | None = None) -> None: self.resolution = self._metre_resolution(native_crs, native_resolution) self.crs = native_crs self.geobox = self._standardise_geobox(geobox) @@ -66,11 +70,10 @@ def _standardise_geobox(self, geobox: GeoBox) -> GeoBox: width=geobox.width, height=geobox.height ) - def _metre_resolution(self, crs: CRS, resolution: Tuple[Union[float, int], Union[float, int]]) \ - -> Tuple[float, float]: + def _metre_resolution(self, crs: CRS, resolution: tuple[float | int, float | int]) -> tuple[float, float]: # Convert native resolution to metres for ready comparison. if crs.units == ('metre', 'metre'): - return cast(Tuple[float, float], tuple(abs(r) for r in resolution)) + return cast(tuple[float, float], tuple(abs(r) for r in resolution)) resolution_rectangle = polygon( ((0, 0), (0, resolution[1]), resolution, (0, resolution[0]), (0, 0)), crs=crs) @@ -80,7 +83,7 @@ def _metre_resolution(self, crs: CRS, resolution: Tuple[Union[float, int], Union abs(proj_bbox.top - proj_bbox.bottom), ) - def pixel_span(self) -> Tuple[float, float]: + def pixel_span(self) -> tuple[float, float]: bbox = self.geobox.extent.boundingbox return ( (bbox.right - bbox.left) / self.geobox.width, @@ -103,7 +106,7 @@ def base_zoom_level(self) -> float: def load_adjusted_zoom_level(self) -> float: return self.base_zoom_level - self.zoom_lvl_offset - def res_xy(self) -> Union[int, float]: + def res_xy(self) -> int | float: return self.resolution[0] * self.resolution[1] def __truediv__(self, other: "RequestScale") -> float: @@ -140,7 +143,7 @@ def __init__(self, cfg: RAW_CFG, context: str, max_datasets: int) -> None: :param max_datasets: Over-arching maximum dataset limit in context. 
""" super().__init__(cfg) - self.rules = cast(Optional[List[CFG_DICT]], self._raw_cfg) + self.rules = cast(list[CFG_DICT] | None, self._raw_cfg) self.use_caching: bool = self.rules is not None self.max_datasets = max_datasets if not self.use_caching: @@ -149,7 +152,7 @@ def __init__(self, cfg: RAW_CFG, context: str, max_datasets: int) -> None: # Validate rules min_so_far: int = 0 max_max_age_so_far: int = 0 - for rule in cast(List[CFG_DICT], self.rules): + for rule in cast(list[CFG_DICT], self.rules): if "min_datasets" not in rule: raise ConfigException(f"Dataset cache rule does not contain a 'min_datasets' element in {context}") if "max_age" not in rule: @@ -186,18 +189,18 @@ def cache_headers(self, n_datasets: int) -> Mapping[str, str]: if n_datasets == 0 or n_datasets > self.max_datasets: return cache_control_headers(0) rule = None - for r in self.rules: - if n_datasets < r["min_datasets"]: + for r in cast(list[CFG_DICT], self.rules): + if n_datasets < cast(int, r["min_datasets"]): break rule = r if rule: - return cache_control_headers(rule['max_age']) + return cache_control_headers(cast(int, rule['max_age'])) else: return cache_control_headers(0) class ResourceLimited(Exception): - def __init__(self, reasons: List[str], wcs_hard=False): + def __init__(self, reasons: list[str], wcs_hard=False): self.reasons = reasons self.wcs_hard = wcs_hard super().__init__(f"Resource limit(s) exceeded: {','.join(reasons)}") @@ -220,13 +223,13 @@ def __init__(self, cfg = cast(CFG_DICT, self._raw_cfg) wms_cfg = cast(CFG_DICT, cfg.get("wms", {})) wcs_cfg = cast(CFG_DICT, cfg.get("wcs", {})) - self.zoom_fill = cast(List[int], wms_cfg.get("zoomed_out_fill_colour", [150, 180, 200, 160])) + self.zoom_fill = cast(list[int], wms_cfg.get("zoomed_out_fill_colour", [150, 180, 200, 160])) if len(self.zoom_fill) == 3: self.zoom_fill += [255] if len(self.zoom_fill) != 4: raise ConfigException(f"zoomed_out_fill_colour must have 3 or 4 elements in {context}") - self.min_zoom = 
cast(Optional[float], wms_cfg.get("min_zoom_factor")) - self.min_zoom_lvl = cast(Optional[Union[int, float]], wms_cfg.get("min_zoom_level")) + self.min_zoom = cast(float | None, wms_cfg.get("min_zoom_factor")) + self.min_zoom_lvl = cast(int | float | None, wms_cfg.get("min_zoom_level")) self.max_datasets_wms = cast(int, wms_cfg.get("max_datasets", 0)) self.max_datasets_wcs = cast(int, wcs_cfg.get("max_datasets", 0)) self.max_image_size_wcs = cast(int, wcs_cfg.get("max_image_size", 0)) @@ -248,7 +251,7 @@ def check_wms(self, n_datasets: int, zoom_factor: float, request_scale: RequestS :param request_scale: Model of the resource-intensiveness of the query :raises: ResourceLimited if any limits are exceeded. """ - limits_exceeded: List[str] = [] + limits_exceeded: list[str] = [] if self.max_datasets_wms > 0 and n_datasets > self.max_datasets_wms: limits_exceeded.append("too many datasets") if self.min_zoom is not None: @@ -272,7 +275,7 @@ def check_wcs(self, n_datasets: int, :param n_datasets: The number of datasets for the query :raises: ResourceLimited if any limits are exceeded. """ - limits_exceeded: List[str] = [] + limits_exceeded: list[str] = [] hard = False if self.max_datasets_wcs > 0 and n_datasets > self.max_datasets_wcs: limits_exceeded.append(f"too many datasets ({n_datasets}: maximum={self.max_datasets_wcs}") diff --git a/datacube_ows/styles/expression.py b/datacube_ows/styles/expression.py index 27a86c136..22acf77c7 100644 --- a/datacube_ows/styles/expression.py +++ b/datacube_ows/styles/expression.py @@ -5,10 +5,15 @@ # SPDX-License-Identifier: Apache-2.0 from typing import Any, Type, cast +from xarray import Dataset +import operator import lark from datacube.virtual.expr import formula_parser from datacube_ows.config_utils import ConfigException +TYPE_CHECKING = False +if TYPE_CHECKING: + import datacube_ows.styles.StyleDef # Lark stuff. 
@@ -34,7 +39,16 @@ class ExpressionEvaluator(lark.Transformer): """ Standard expression evaluator """ - from operator import add, floordiv, mod, mul, neg, pos, pow, sub, truediv + add = operator.add + floordiv = operator.floordiv + mod = operator.mod + mul = operator.mul + neg = operator.neg + pos = operator.pos + pow = operator.pow + sub = operator.sub + truediv = operator.truediv + not_ = inv = or_ = and_ = xor = not_supported("Bitwise logical operators") eq = ne = le = ge = lt = gt = not_supported("Comparison operators") lshift = rshift = not_supported("Left and right-shift operators") @@ -62,11 +76,11 @@ class BandListEvaluator(ExpressionEvaluator): """ Expression evaluator that returns a list of needed bands for the expression. """ - neg = pos = identity - add = sub = mul = truediv = floordiv = mod = pow = union + neg = pos = identity # type: ignore[assignment] + add = sub = mul = truediv = floordiv = mod = pow = union # type: ignore[assignment] - float_literal = empty_gen - int_literal = empty_gen + float_literal = empty_gen # type: ignore[assignment] + int_literal = empty_gen # type: ignore[assignment] def var_name(self, key): return set([self.ows_style.local_band(key.value)]) @@ -104,7 +118,7 @@ def __init__(self, style: "datacube_ows.styles.StyleDef", expr_str: str) -> None if len(self.needed_bands) == 0: raise ExpressionException(f"Expression references no bands: {self.expr_str}") - def eval_cls(self, data: "xarray.Dataset") -> ExpressionEvaluator: + def eval_cls(self, data: Dataset) -> ExpressionEvaluator: """" Return an appropriate Expression Evaluator for a given Dataset """ @@ -114,13 +128,13 @@ def eval_cls(self, data: "xarray.Dataset") -> ExpressionEvaluator: evaluator_cls = ExpressionEvaluator @lark.v_args(inline=True) - class ExpressionDataEvaluator(evaluator_cls): + class ExpressionDataEvaluator(evaluator_cls): # type: ignore[valid-type, misc] def var_name(self, key): return data[self.ows_style.local_band(key.value)] # pyre-ignore[19] return 
cast(ExpressionEvaluator, ExpressionDataEvaluator(self.style)) - def __call__(self, data: "xarray.Dataset") -> Any: + def __call__(self, data: Dataset) -> Any: evaluator: ExpressionEvaluator = self.eval_cls(data) return evaluator.transform(self.tree) From 4bc292a4ebbb4ccdd0b3df853814d2ee84f62570 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Mon, 22 Apr 2024 14:29:29 +1000 Subject: [PATCH 08/29] More fixing typehints, cleaning up import structure - getting into style package. --- datacube_ows/config_utils.py | 18 ++-- datacube_ows/styles/base.py | 95 ++++++++++---------- datacube_ows/styles/component.py | 47 +++++----- datacube_ows/styles/ramp.py | 149 +++++++++++++++---------------- 4 files changed, 156 insertions(+), 153 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index a6e6f26cd..9b51e60f7 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -577,8 +577,8 @@ class OWSExtensibleConfigEntry(OWSIndexedConfigEntry): def __init__(self, cfg: RAW_CFG, keyvals: dict[str, str], global_cfg: "datacube_ows.ows_configuration.OWSConfig", *args, - keyval_subs: dict[str, str] | None = None, - keyval_defaults: dict[str, str] | None = None, + keyval_subs: CFG_DICT | None = None, + keyval_defaults: CFG_DICT | None = None, expanded: bool = False, **kwargs) -> None: """ @@ -599,8 +599,8 @@ def __init__(self, @classmethod def expand_inherit(cls, cfg: CFG_DICT, global_cfg: "datacube_ows.ows_configuration.OWSConfig", - keyval_subs: dict[str, str] | None = None, - keyval_defaults: dict[str, str] | None = None) -> RAW_CFG: + keyval_subs: CFG_DICT | None = None, + keyval_defaults: CFG_DICT | None = None) -> RAW_CFG: """ Expand inherited config, and apply overrides. @@ -613,7 +613,7 @@ def expand_inherit(cls, if "inherits" in cfg: lookup = True # Precludes e.g. defaulting style lookup to current layer. 
- lookup_keys = {} + lookup_keys: CFG_DICT = {} inherits = cast(dict[str, str], cfg["inherits"]) for k in cls.INDEX_KEYS: if k not in inherits and keyval_defaults is not None and k not in keyval_defaults: @@ -870,10 +870,10 @@ def context(self) -> str: VALUES_LABEL = "values" def parse_rule_spec(self, cfg: CFG_DICT) -> None: - self.flags: CFG_DICT | None = None - self.or_flags: bool = False - self.values: list[int] | None = None - self.invert = bool(cfg.get("invert", False)) + self.flags: list[CFG_DICT] | CFG_DICT | None = None + self.or_flags: bool | list[bool] = False + self.values: list[list[int]] | list[int] | None = None + self.invert: bool | list[bool] = bool(cfg.get("invert", False)) if "flags" in cfg: flags = cast(CFG_DICT, cfg["flags"]) if "or" in flags and "and" in flags: diff --git a/datacube_ows/styles/base.py b/datacube_ows/styles/base.py index ca35a4912..950114486 100644 --- a/datacube_ows/styles/base.py +++ b/datacube_ows/styles/base.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 import io import logging -from typing import (Any, Iterable, List, Mapping, MutableMapping, Optional, - Set, Sized, Tuple, Type, Union, cast) +from typing import (Any, Iterable, Mapping, MutableMapping, Optional, Sized, Type, Union, cast) import datacube.model import numpy as np @@ -32,7 +31,9 @@ class LegendBase(OWSConfigEntry): """ Legend base class. 
""" - def __init__(self, style_or_mdh: Union["StyleDefBase", "StyleDefBase.Legend"], cfg: CFG_DICT) -> None: + def __init__(self, + style_or_mdh: Union["StyleDefBase", "StyleDefBase.Legend", "StyleDefBase.MultiDateHandler"], + cfg: CFG_DICT) -> None: super().__init__(cfg) raw_cfg = cast(CFG_DICT, self._raw_cfg) self.style_or_mdh = style_or_mdh @@ -49,24 +50,26 @@ def __init__(self, style_or_mdh: Union["StyleDefBase", "StyleDefBase.Legend"], c if self.show_legend and not self.legend_urls and self.style.auto_legend: self.parse_common_auto_elements(raw_cfg) - def parse_urls(self, cfg: RAW_CFG) -> MutableMapping[str, str]: + def parse_urls(self, cfg: RAW_CFG) -> dict[str, str]: if not cfg: return {} def_loc = self.global_config().default_locale if isinstance(cfg, str): - cfg = { + cfg_d: CFG_DICT = { def_loc: cfg } - if def_loc not in cfg: + else: + cfg_d = cast(CFG_DICT, cfg) + if def_loc not in cfg_d: raise ConfigException( f"No legend url for {self.get_obj_label()} supplied for default language {def_loc}" ) - urls = {} + urls: dict[str, str] = {} for locale in self.global_config().locales: - if locale in cfg: - urls[locale] = cfg[locale] + if locale in cfg_d: + urls[locale] = str(cfg_d[locale]) else: - urls[locale] = cfg[def_loc] + urls[locale] = str(cfg_d[def_loc]) return urls def parse_common_auto_elements(self, cfg: CFG_DICT): @@ -159,7 +162,7 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", global_cfg=product.global_cfg, keyvals={ "layer": product.name, - "style": style_cfg.get("name", "stand_alone") + "style": str(style_cfg.get("name", "stand_alone")) }, keyval_subs={ "layer": { @@ -172,31 +175,33 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", raw_cfg = cast(CFG_DICT, self._raw_cfg) self.stand_alone: bool = stand_alone if self.stand_alone: - self._metadata_registry: MutableMapping[str, str] = {} + self._metadata_registry: dict[str, str] = {} self.user_defined: bool = user_defined - 
self.local_band_map = cast(MutableMapping[str, List[str]], raw_cfg.get("band_map", {})) + self.local_band_map = cast(MutableMapping[str, str], raw_cfg.get("band_map", {})) + if self.local_band_map: + pass self.product: "datacube_ows.ows_configuration.OWSNamedLayer" = product if self.stand_alone: self.name = cast(str, raw_cfg.get("name", "stand_alone")) else: self.name = cast(str, raw_cfg["name"]) self.parse_metadata(raw_cfg) - self.masks: List[StyleMask] = [ + self.masks: list[StyleMask] = [ StyleMask(mask_cfg, self) - for mask_cfg in cast(List[CFG_DICT], raw_cfg.get("pq_masks", [])) + for mask_cfg in cast(list[CFG_DICT], raw_cfg.get("pq_masks", [])) ] if self.stand_alone: - self.flag_products: List[FlagProductBands] = [] + self.flag_products: list[FlagProductBands] = [] else: - self.flag_products: List[FlagProductBands] = FlagProductBands.build_list_from_masks(self.masks, - self.product) + self.flag_products = FlagProductBands.build_list_from_masks(self.masks, + self.product) - self.raw_needed_bands: Set[str] = set() - self.raw_flag_bands: Set[str] = set() + self.raw_needed_bands: set[str] = set() + self.raw_flag_bands: set[str] = set() self.declare_unready("needed_bands") self.declare_unready("flag_bands") - self.legend_cfg = self.Legend(self, raw_cfg.get("legend", {})) + self.legend_cfg = self.Legend(self, cast(CFG_DICT, raw_cfg.get("legend", {}))) if not defer_multi_date: self.parse_multi_date(raw_cfg) @@ -219,11 +224,11 @@ def make_ready(self, dc: "datacube.Datacube", *args, **kwargs) -> None: :param dc: A datacube object """ # pyre-ignore[16] - self.needed_bands: Set[str] = set() + self.needed_bands: set[str] = set() # pyre-ignore[16] - self.pq_product_bands: List[FlagProductBands] = [] + self.pq_product_bands: list[tuple[list[str], set[str]]] = [] # pyre-ignore[16] - self.flag_bands: Set[str] = set() + self.flag_bands: set[str] = set() for band in self.raw_needed_bands: self.needed_bands.add(self.local_band(band)) if not self.stand_alone: @@ -280,11 +285,11 
@@ def local_band(self, band: str) -> str: def parse_multi_date(self, cfg: CFG_DICT) -> None: """Used by __init__()""" - self.multi_date_handlers: List["StyleDefBase.MultiDateHandler"] = [] - for mb_cfg in cast(List[CFG_DICT], cfg.get("multi_date", [])): + self.multi_date_handlers: list["StyleDefBase.MultiDateHandler"] = [] + for mb_cfg in cast(list[CFG_DICT], cfg.get("multi_date", [])): self.multi_date_handlers.append(self.MultiDateHandler(self, mb_cfg)) - def to_mask(self, data: xr.Dataset, extra_mask: Optional[xr.DataArray] = None) -> Optional[xr.DataArray]: + def to_mask(self, data: xr.Dataset, extra_mask: xr.DataArray | None = None) -> xr.DataArray | None: """ Generate a mask for some data. @@ -293,7 +298,7 @@ def to_mask(self, data: xr.Dataset, extra_mask: Optional[xr.DataArray] = None) - :return: A spatial mask with same dimensions and coordinates as data (including time). """ - def render_mask(data: xr.Dataset, mask: StyleMask) -> xr.DataArray: + def render_mask(data: xr.Dataset, mask: StyleMask) -> xr.DataArray | None: """ Calculate a style mask. 
:param data: Raw Data @@ -304,12 +309,12 @@ def render_mask(data: xr.Dataset, mask: StyleMask) -> xr.DataArray: odc_mask = mask.create_mask(pq_data) return odc_mask - result = extra_mask + result: xr.DataArray | None = extra_mask for mask in self.masks: mask_data = render_mask(data, mask) if result is None: result = mask_data - else: + elif mask_data is not None: result = result & mask_data return result @@ -326,7 +331,7 @@ def apply_mask_to_image(self, img_data: xr.Dataset, mask: Optional[xr.DataArray] """ if "alpha" not in img_data.data_vars.keys(): - nda_alpha = np.ndarray(img_data["red"].shape, dtype='uint8') + nda_alpha: np.ndarray = np.ndarray(img_data["red"].shape, dtype='uint8') nda_alpha.fill(255) alpha = xr.DataArray(nda_alpha, coords=img_data["red"].coords, @@ -382,7 +387,7 @@ def transform_single_date_data(self, data: xr.Dataset) -> xr.Dataset: """ raise NotImplementedError() - def render_legend(self, dates: Union[int, List[Any]]) -> Optional["PIL.Image.Image"]: + def render_legend(self, dates: int | list[Any]) -> Image.Image | None: """ Render legend, if possible :param dates: The number of dates to render the legend for (e.g. 
for delta) @@ -397,6 +402,7 @@ def render_legend(self, dates: Union[int, List[Any]]) -> Optional["PIL.Image.Ima locale = get_locale().language if locale not in self.global_config().locales: locale = self.global_config().default_locale + assert isinstance(locale, str) url = legend.legend_urls[locale] return get_image_from_url(url) if not legend.style_or_mdh.auto_legend: @@ -407,7 +413,7 @@ def render_legend(self, dates: Union[int, List[Any]]) -> Optional["PIL.Image.Ima return Image.open(bytesio) @staticmethod - def count_dates(count_or_sized_or_ds: Union[int, Sized, xr.Dataset]) -> int: + def count_dates(count_or_sized_or_ds: int | Sized | xr.Dataset) -> int: if isinstance(count_or_sized_or_ds, int): return count_or_sized_or_ds elif isinstance(count_or_sized_or_ds, xr.Dataset): @@ -419,14 +425,13 @@ def count_dates(count_or_sized_or_ds: Union[int, Sized, xr.Dataset]) -> int: else: return len(count_or_sized_or_ds) - def get_legend_cfg(self, count_or_sized_or_ds: Union[int, Sized, xr.Dataset] - ) -> LegendBase: + def get_legend_cfg(self, count_or_sized_or_ds: int | Sized | xr.Dataset) -> LegendBase: mdh = self.get_multi_date_handler(count_or_sized_or_ds) if mdh: return mdh.legend_cfg return self.legend_cfg - def get_multi_date_handler(self, count_or_sized_or_ds: Union[int, Sized, xr.Dataset] + def get_multi_date_handler(self, count_or_sized_or_ds: int | Sized | xr.Dataset ) -> Optional["StyleDefBase.MultiDateHandler"]: """ Get the appropriate multidate handler. 
@@ -454,9 +459,9 @@ def register_subclass(cls, subclass: Type["StyleDefBase"], triggers: Iterable[st if isinstance(triggers, str): triggers = [triggers] if priority: - style_class_priority_reg.append([subclass, triggers]) + style_class_priority_reg.append((subclass, triggers)) else: - style_class_reg.append([subclass, triggers]) + style_class_reg.append((subclass, triggers)) @classmethod def determine_subclass(cls, cfg: CFG_DICT) -> Optional[Type["StyleDefBase"]]: @@ -503,16 +508,16 @@ def __init__(self, style: "StyleDefBase", cfg: CFG_DICT) -> None: self.style = style if "allowed_count_range" not in raw_cfg: raise ConfigException("multi_date handler must have an allowed_count_range") - if len(cast(List[int], cfg["allowed_count_range"])) > 2: + if len(cast(list[int], cfg["allowed_count_range"])) > 2: raise ConfigException("multi_date handler allowed_count_range must have 2 and only 2 members") - self.min_count, self.max_count = cast(List[int], cfg["allowed_count_range"]) + self.min_count, self.max_count = cast(list[int], cfg["allowed_count_range"]) if self.max_count < self.min_count: raise ConfigException("multi_date handler allowed_count_range: minimum must be less than equal to maximum") self.animate = cast(bool, cfg.get("animate", False)) self.frame_duration: int = 1000 if "aggregator_function" in cfg: - self.aggregator: Optional[FunctionWrapper] = FunctionWrapper(style.product, + self.aggregator: FunctionWrapper | None = FunctionWrapper(style.product, cast(CFG_DICT, cfg["aggregator_function"]), stand_alone=self.style.stand_alone) elif self.animate: @@ -522,7 +527,7 @@ def __init__(self, style: "StyleDefBase", cfg: CFG_DICT) -> None: self.aggregator = None if self.non_animate_requires_aggregator: raise ConfigException("Aggregator function is required for non-animated multi-date handlers.") - self.legend_cfg = self.Legend(self, raw_cfg.get("legend", {})) + self.legend_cfg = self.Legend(self, cast(CFG_DICT, raw_cfg.get("legend", {}))) self.preserve_user_date_order 
= cast(bool, cfg.get("preserve_user_date_order", False)) def applies_to(self, count: int) -> bool: @@ -589,8 +594,8 @@ def lookup_impl(cls, # Style class registries -style_class_priority_reg: List[Tuple[Type[StyleDefBase], Iterable[str]]] = [] -style_class_reg: List[Tuple[Type[StyleDefBase], Iterable[str]]] = [] +style_class_priority_reg: list[tuple[Type[StyleDefBase], Iterable[str]]] = [] +style_class_reg: list[tuple[Type[StyleDefBase], Iterable[str]]] = [] class StyleMask(AbstractMaskRule): @@ -612,7 +617,7 @@ def __init__(self, cfg: CFG_DICT, style: StyleDefBase) -> None: else: self.flag_band = cast(FlagBand, self.style.product.flag_bands[self.band]) - def create_mask(self, data: xr.DataArray) -> xr.DataArray: + def create_mask(self, data: xr.DataArray) -> xr.DataArray | None: mask = super().create_mask(data) return mask diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 0aa0a9964..4ea6d52e6 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -3,8 +3,7 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from typing import (Any, Callable, Hashable, List, MutableMapping, Optional, - Union, cast) +from typing import Any, Callable, Hashable, cast import numpy as np from xarray import DataArray, Dataset @@ -12,9 +11,13 @@ from datacube_ows.config_utils import CFG_DICT, ConfigException, FunctionWrapper from datacube_ows.styles.base import StyleDefBase +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSNamedLayer + # pylint: disable=abstract-method -LINEAR_COMP_DICT = MutableMapping[str, Union[float, List[float]]] +LINEAR_COMP_DICT = dict[str, float | list[float]] class ComponentStyleDef(StyleDefBase): @@ -22,7 +25,7 @@ class ComponentStyleDef(StyleDefBase): Style Subclass that allows the behaviour of each component (red, green, blue, alpha) to be specified independently. 
""" - def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", + def __init__(self, product: OWSNamedLayer, style_cfg: CFG_DICT, stand_alone: bool = False, defer_multi_date: bool = False, @@ -33,8 +36,8 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", super().__init__(product, style_cfg, stand_alone=stand_alone, defer_multi_date=defer_multi_date, user_defined=user_defined) style_cfg: CFG_DICT = cast(CFG_DICT, self._raw_cfg) - self.raw_rgb_components: MutableMapping[str, Union[Callable, LINEAR_COMP_DICT]] = {} - raw_components = cast(MutableMapping[str, Union[Callable, CFG_DICT]], style_cfg["components"]) + self.raw_rgb_components: dict[str, Callable | LINEAR_COMP_DICT] = {} + raw_components = cast(dict[str, Callable | LINEAR_COMP_DICT], style_cfg["components"]) for imgband in ["red", "green", "blue", "alpha"]: components = raw_components.get(imgband) if components is None: @@ -48,7 +51,7 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", if not self.stand_alone: if "additional_bands" not in style_cfg: raise ConfigException(f"Style with a function component must declare additional_bands.") - for b in cast(List[str], style_cfg.get("additional_bands", [])): + for b in cast(list[str], style_cfg.get("additional_bands", [])): self.raw_needed_bands.add(b) else: components = cast(LINEAR_COMP_DICT, components) @@ -56,11 +59,11 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", for k in components.keys(): if k != "scale_range": self.raw_needed_bands.add(k) - self.rgb_components = cast(MutableMapping[str, Union[None, Callable, LINEAR_COMP_DICT]], {}) + self.rgb_components = cast(dict[str, None | Callable | LINEAR_COMP_DICT], {}) - self.scale_factor = style_cfg.get("scale_factor") + self.scale_factor = cast(float, style_cfg.get("scale_factor")) if "scale_range" in style_cfg: - self.scale_min, self.scale_max = cast(List[Optional[float]], style_cfg["scale_range"]) + 
self.scale_min, self.scale_max = cast(list[float | None], style_cfg["scale_range"]) elif self.scale_factor: self.scale_min = 0.0 self.scale_max = 255.0 * self.scale_factor @@ -68,18 +71,18 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", self.scale_min = None self.scale_max = None - self.component_scale_ranges: MutableMapping[str, MutableMapping[str, float]] = {} + self.component_scale_ranges: dict[str, dict[str, float]] = {} for cn, cd in raw_components.items(): if not callable(cd) and "scale_range" in cd: - scale_range = cast(List[float], cd["scale_range"]) + scale_range = cast(list[float], cd["scale_range"]) self.component_scale_ranges[cn] = { "min": scale_range[0], "max": scale_range[1], } else: self.component_scale_ranges[cn] = { - "min": self.scale_min, - "max": self.scale_max, + "min": cast(float, self.scale_min), + "max": cast(float, self.scale_max), } # pylint: disable=attribute-defined-outside-init @@ -91,18 +94,16 @@ def make_ready(self, dc, *args, **kwargs) -> None: :param dc: A datacube object """ - self.rgb_components = cast(MutableMapping[str, Union[None, Callable, LINEAR_COMP_DICT]], {}) + self.rgb_components = cast(dict[str, None | Callable | LINEAR_COMP_DICT], {}) for band, component in self.raw_rgb_components.items(): if not component or callable(component): self.rgb_components[band] = component else: self.rgb_components[band] = self.dealias_components(component) super().make_ready(dc, *args, **kwargs) + self.raw_rgb_components = {} - - self.raw_rgb_components: MutableMapping[str, Union[Callable, LINEAR_COMP_DICT]] = {} - - def dealias_components(self, comp_in: Optional[LINEAR_COMP_DICT]) -> Optional[LINEAR_COMP_DICT]: + def dealias_components(self, comp_in: LINEAR_COMP_DICT | None) -> LINEAR_COMP_DICT | None: """ Convert a component dictionary with band aliases to a component dictionary using canonical band names. 
@@ -119,7 +120,7 @@ def dealias_components(self, comp_in: Optional[LINEAR_COMP_DICT]) -> Optional[LI for band_alias, value in comp_in.items() if band_alias not in ['scale_range'] } - def compress_band(self, component_name: str, imgband_data: "xarray.DataArray") -> "xarray.DataArray": + def compress_band(self, component_name: str, imgband_data: DataArray) -> DataArray: """ Compress dynamic range of a component data array to uint8 range (0-255) @@ -134,15 +135,15 @@ def compress_band(self, component_name: str, imgband_data: "xarray.DataArray") - return normalized * 255 - def transform_single_date_data(self, data: "xarray.Dataset") -> "xarray.Dataset": + def transform_single_date_data(self, data: Dataset) -> Dataset: """ Apply style to raw data to make an RGBA image xarray (single time slice only) :param data: Raw data, all bands. :return: RGBA uint8 xarray """ - imgdata = cast(MutableMapping[Hashable, Any], {}) - for imgband, components in self.rgb_components.items(): + imgdata = cast(dict[Hashable, Any], {}) + for imgband, components in cast(dict[str, Callable | LINEAR_COMP_DICT], self.rgb_components).items(): if callable(components): imgband_data = components(data) imgband_data = imgband_data.astype('uint8') diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index 2f9e448eb..ec2413767 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -8,8 +8,7 @@ from collections import defaultdict from decimal import ROUND_HALF_UP, Decimal from math import isclose -from typing import (Any, Hashable, List, MutableMapping, Optional, Tuple, - Union, cast) +from typing import (Any, Hashable, MutableMapping, Union, cast, Iterable) import matplotlib import numpy @@ -17,20 +16,20 @@ from matplotlib import pyplot as plt from matplotlib.colors import LinearSegmentedColormap, to_hex -try: - from numpy.typing import NDArray -except ImportError: - NDArray = numpy.ndarray from numpy import ubyte -from xarray import Dataset +from xarray import 
Dataset, DataArray -from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper +from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper, RAW_CFG from datacube_ows.styles.base import StyleDefBase from datacube_ows.styles.expression import Expression +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSNamedLayer + _LOG = logging.getLogger(__name__) -RAMP_SPEC = List[CFG_DICT] +RAMP_SPEC = list[CFG_DICT] UNSCALED_DEFAULT_RAMP = cast(RAMP_SPEC, [ @@ -71,7 +70,7 @@ ) -def scale_unscaled_ramp(rmin: Union[int, float, str], rmax: Union[int, float, str], unscaled: RAMP_SPEC) -> RAMP_SPEC: +def scale_unscaled_ramp(rmin: int | float | str, rmax: int | float | str, unscaled: RAMP_SPEC) -> RAMP_SPEC: """ Take a unscaled (normalised) ramp that covers values from 0.0 to 1.0 and scale it linearly to cover the provided range. @@ -99,10 +98,10 @@ def scale_unscaled_ramp(rmin: Union[int, float, str], rmax: Union[int, float, st ] -def crack_ramp(ramp: RAMP_SPEC) -> Tuple[ - List[float], - List[float], List[float], - List[float], List[float], +def crack_ramp(ramp: RAMP_SPEC) -> tuple[ + list[float], + list[float], list[float], + list[float], list[float], ]: """ Split a colour ramp into separate (input) value and (output) RGBA lists. @@ -110,22 +109,22 @@ def crack_ramp(ramp: RAMP_SPEC) -> Tuple[ :param ramp: input (scaled) colour-ramp definition :return: A tuple of four lists of floats: representing values, red, green, blue, alpha. 
""" - values = cast(List[float], []) - red = cast(List[float], []) - green = cast(List[float], []) - blue = cast(List[float], []) - alpha = cast(List[float], []) + values = cast(list[float], []) + red = cast(list[float], []) + green = cast(list[float], []) + blue = cast(list[float], []) + alpha = cast(list[float], []) for r in ramp: if isinstance(r["value"], float): value: float = cast(float, r["value"]) else: - value = float(cast(Union[int, str], r["value"])) + value = float(cast(int | str, r["value"])) values.append(value) color = Color(r["color"]) red.append(color.red) green.append(color.green) blue.append(color.blue) - alpha.append(float(cast(Union[float, int, str], r.get("alpha", 1.0)))) + alpha.append(float(cast(float | int | str, r.get("alpha", 1.0)))) return values, red, green, blue, alpha @@ -177,15 +176,15 @@ def __init__(self, style: StyleDefBase, if "color_ramp" in ramp_cfg: raw_scaled_ramp = ramp_cfg["color_ramp"] else: - rmin, rmax = cast(List[float], ramp_cfg["range"]) + rmin, rmax = cast(list[float], ramp_cfg["range"]) unscaled_ramp = UNSCALED_DEFAULT_RAMP if "mpl_ramp" in ramp_cfg: unscaled_ramp = read_mpl_ramp(cast(str, ramp_cfg["mpl_ramp"])) raw_scaled_ramp = scale_unscaled_ramp(rmin, rmax, unscaled_ramp) - self.ramp = raw_scaled_ramp + self.ramp = cast(list[CFG_DICT], raw_scaled_ramp) - self.values = cast(List[float], []) - self.components = cast(MutableMapping[str, List[float]], {}) + self.values = cast(list[float], []) + self.components = cast(MutableMapping[str, list[float]], {}) self.crack_ramp() # Handle the mutual interdepencies between the ramp and the legend @@ -200,7 +199,7 @@ def __init__(self, style: StyleDefBase, leg_begin_before_idx = None leg_end_before_idx = None for idx, col_point in enumerate(self.ramp): - col_val = col_point["value"] + col_val = cast(int | float, col_point["value"]) if not leg_begin_in_ramp and leg_begin_before_idx is None: if isclose(col_val, fleg_begin, abs_tol=1e-9): leg_begin_in_ramp = True @@ -248,23 
+247,23 @@ def crack_ramp(self) -> None: "alpha": a } - def get_value(self, data: Union[float, "xarray.DataArray"], band: str) -> NDArray: + def get_value(self, data: float | DataArray, band: str) -> numpy.ndarray: return numpy.interp(data, self.values, self.components[band]) - def get_8bit_value(self, data: "xarray.DataArray", band: str) -> NDArray: - val: NDArray = self.get_value(data, band) - val = cast(NDArray, val * 255) + def get_8bit_value(self, data: DataArray, band: str) -> numpy.ndarray: + val: numpy.ndarray = self.get_value(data, band) + val = cast(numpy.ndarray, val * 255) # Is there a way to stop this raising a runtime warning? return val.astype(ubyte) - def apply(self, data: "xarray.DataArray") -> "xarray.Dataset": + def apply(self, data: DataArray) -> Dataset: imgdata = cast(MutableMapping[Hashable, Any], {}) for band in self.components: imgdata[band] = (data.dims, self.get_8bit_value(data, band)) imgdataset = Dataset(imgdata, coords=data.coords) return imgdataset - def color_alpha_at(self, val: float) -> Tuple[Color, float]: + def color_alpha_at(self, val: float) -> tuple[Color, float]: color = Color( rgb=( self.get_value(val, "red").item(), @@ -288,11 +287,11 @@ def __init__(self, style_or_mdh: Union["StyleDefBase", "StyleDefBase.Legend"], c if "begin" not in raw_cfg: self.begin = Decimal("nan") else: - self.begin = Decimal(cast(Union[str, float, int], raw_cfg["begin"])) + self.begin = Decimal(cast(str | float | int, raw_cfg["begin"])) if "end" not in raw_cfg: self.end = Decimal("nan") else: - self.end = Decimal(cast(Union[str, float, int], raw_cfg["end"])) + self.end = Decimal(cast(str | float | int, raw_cfg["end"])) # decimal_places, rounder def rounder_str(prec: int) -> str: @@ -305,32 +304,32 @@ def rounder_str(prec: int) -> str: rstr += "1" return rstr - self.decimal_places = raw_cfg.get("decimal_places", 1) + self.decimal_places = cast(int, raw_cfg.get("decimal_places", 1)) if self.decimal_places < 0: raise ConfigException("decimal_places 
cannot be negative") self.rounder = Decimal(rounder_str(self.decimal_places)) # Ticks - Non-explicit tick values deferred until we have parsed the associated ramp ticks_handled = False - self.ticks_every: Optional[Decimal] = None - self.tick_count: Optional[int] = None - self.ticks: List[Decimal] = [] + self.ticks_every: Decimal | None = None + self.tick_count: int | None = None + self.ticks: list[Decimal] = [] if "ticks_every" in raw_cfg: if "tick_count" in raw_cfg: raise ConfigException("Cannot use tick count and ticks_every in the same legend") if "ticks" in raw_cfg: raise ConfigException("Cannot use ticks and ticks_every in the same legend") - self.ticks_every = Decimal(cast(Union[int, float, str], raw_cfg["ticks_every"])) + self.ticks_every = Decimal(cast(int | float | str, raw_cfg["ticks_every"])) if self.ticks_every.is_zero() or self.ticks_every.is_signed(): raise ConfigException("ticks_every must be greater than zero") ticks_handled = True if "ticks" in raw_cfg: if "tick_count" in raw_cfg: raise ConfigException("Cannot use tick count and ticks in the same legend") - self.ticks = [Decimal(t) for t in cast(List[Union[str, int, float]], raw_cfg["ticks"])] + self.ticks = [Decimal(t) for t in cast(list[str | int | float], raw_cfg["ticks"])] ticks_handled = True if not ticks_handled: - self.tick_count = int(cast(Union[str, int], raw_cfg.get("tick_count", 1))) + self.tick_count = int(cast(str | int, raw_cfg.get("tick_count", 1))) if self.tick_count < 0: raise ConfigException("tick_count cannot be negative") # prepare for tick labels @@ -338,16 +337,16 @@ def rounder_str(prec: int) -> str: defaults = self.cfg_labels.get("default", {}) self.lbl_default_prefix = defaults.get("prefix", "") self.lbl_default_suffix = defaults.get("suffix", "") - self.tick_labels: List[str] = [] + self.tick_labels: list[str] = [] # handle matplotlib args - self.strip_location = cast(List[float], - raw_cfg.get("strip_location", [0.05, 0.5, 0.9, 0.15])) + self.strip_location = 
cast(tuple[float, float, float, float], + tuple(cast(Iterable[float], raw_cfg.get("strip_location", [0.05, 0.5, 0.9, 0.15])))) # throw error on legacy syntax self.fail_legacy() def fail_legacy(self) -> None: if any( - legent in self._raw_cfg + legent in cast(CFG_DICT, self._raw_cfg) for legent in ["major_ticks", "offset", "scale_by", "radix_point"] ): raise ConfigException( @@ -357,18 +356,18 @@ def fail_legacy(self) -> None: def register_ramp(self, ramp: ColorRamp) -> None: if self.begin.is_nan(): for col_def in ramp.ramp: - if isclose(col_def.get("alpha", 1.0), 1.0, abs_tol=1e-9): - self.begin = Decimal(col_def["value"]) + if isclose(cast(float, col_def.get("alpha", 1.0)), 1.0, abs_tol=1e-9): + self.begin = Decimal(cast(int | float, col_def["value"])) break if self.begin.is_nan(): - self.begin = Decimal(ramp.ramp[0]["value"]) + self.begin = Decimal(cast(int | float, ramp.ramp[0]["value"])) if self.end.is_nan(): for col_def in reversed(ramp.ramp): - if isclose(col_def.get("alpha", 1.0), 1.0, abs_tol=1e-9): - self.end = Decimal(col_def["value"]) + if isclose(cast(int | float, col_def.get("alpha", 1.0)), 1.0, abs_tol=1e-9): + self.end = Decimal(cast(int | float, col_def["value"])) break if self.end.is_nan(): - self.end = Decimal(ramp.ramp[-1]["value"]) + self.end = Decimal(cast(int | float, ramp.ramp[-1]["value"])) for t in self.ticks: if t < self.begin or t > self.end: raise ConfigException("Explicit ticks must all be within legend begin/end range") @@ -400,7 +399,7 @@ def register_ramp(self, ramp: ColorRamp) -> None: self.tick_labels.append( self.lbl_default_prefix + str(tick) + self.lbl_default_suffix ) - self.parse_metadata(self._raw_cfg) + self.parse_metadata(cast(CFG_DICT, self._raw_cfg)) # Check for legacy legend tips in ramp: for r in ramp.ramp: @@ -421,20 +420,20 @@ def tick_label(self, tick): _LOG.error("'%s' is a not a valid tick", tick) return None - def create_cdict_ticks(self) -> Tuple[ - MutableMapping[str, List[Tuple[float, float, float]]], + def 
create_cdict_ticks(self) -> tuple[ + MutableMapping[str, list[tuple[float, float, float]]], MutableMapping[float, str], ]: normalize_factor = float(self.end) - float(self.begin) - cdict = cast(MutableMapping[str, List[Tuple[float, float, float]]], dict()) - bands = cast(MutableMapping[str, List[Tuple[float, float, float]]], defaultdict(list)) + cdict = cast(MutableMapping[str, list[tuple[float, float, float]]], dict()) + bands = cast(MutableMapping[str, list[tuple[float, float, float]]], defaultdict(list)) started = False finished = False for index, ramp_point in enumerate(self.style_or_mdh.color_ramp.ramp): if finished: break - value = cast(Union[float, int], ramp_point.get("value")) + value = cast(float | int, ramp_point.get("value")) normalized = (value - float(self.begin)) / float(normalize_factor) if not started: @@ -476,7 +475,7 @@ def render(self, bytesio: io.BytesIO) -> None: plt.rcParams.update(self.mpl_rcparams) fig = plt.figure(figsize=(self.width, self.height)) ax = fig.add_axes(self.strip_location) - custom_map = LinearSegmentedColormap(self.plot_name(), cdict) + custom_map = LinearSegmentedColormap(self.plot_name(), cdict) # type: ignore[arg-type] color_bar = matplotlib.colorbar.ColorbarBase( ax, cmap=custom_map, @@ -488,7 +487,7 @@ def render(self, bytesio: io.BytesIO) -> None: # For MetadataConfig @property - def default_title(self) -> Optional[str]: + def default_title(self) -> str | None: return self.style.title @@ -500,7 +499,7 @@ class ColorRampDef(StyleDefBase): auto_legend = True def __init__(self, - product: "datacube_ows.ows_configuration.OWSNamedLayer", + product: OWSNamedLayer, style_cfg: CFG_DICT, stand_alone: bool = False, defer_multi_date: bool = False, @@ -511,23 +510,23 @@ def __init__(self, super(ColorRampDef, self).__init__(product, style_cfg, stand_alone=stand_alone, defer_multi_date=True, user_defined=user_defined) style_cfg = cast(CFG_DICT, self._raw_cfg) - self.color_ramp = ColorRamp(self, style_cfg, self.legend_cfg) + 
self.color_ramp = ColorRamp(self, style_cfg, cast(ColorRampDef.Legend, self.legend_cfg)) self.include_in_feature_info = bool(style_cfg.get("include_in_feature_info", True)) if "index_function" in style_cfg: - self.index_function = FunctionWrapper(self, + self.index_function: FunctionWrapper | Expression = FunctionWrapper(self, cast(CFG_DICT, style_cfg["index_function"]), stand_alone=self.stand_alone) if not self.stand_alone: - for band in cast(List[str], style_cfg["needed_bands"]): + for band in cast(list[str], style_cfg["needed_bands"]): self.raw_needed_bands.add(band) elif "index_expression" in style_cfg: self.index_function = Expression(self, cast(str, style_cfg["index_expression"])) for band in self.index_function.needed_bands: self.raw_needed_bands.add(band) if self.stand_alone: - self.needed_bands = [self.local_band(b) for b in self.raw_needed_bands] - self.flag_bands = [] + self.needed_bands = set(self.local_band(b) for b in self.raw_needed_bands) + self.flag_bands = set() else: raise ConfigException("Index function is required for index and hybrid styles. Style %s in layer %s" % ( self.name, @@ -536,7 +535,7 @@ def __init__(self, if not defer_multi_date: self.parse_multi_date(style_cfg) - def apply_index(self, data: "xarray.Dataset") -> "xarray.DataArray": + def apply_index(self, data: Dataset) -> DataArray: """ Caclulate index value across data. 
@@ -547,7 +546,7 @@ def apply_index(self, data: "xarray.Dataset") -> "xarray.DataArray": data['index_function'] = (index_data.dims, index_data.data) return data["index_function"] - def transform_single_date_data(self, data: "xarray.Dataset") -> "xarray.Dataset": + def transform_single_date_data(self, data: Dataset) -> Dataset: """ Apply style to raw data to make an RGBA image xarray (single time slice only) @@ -558,7 +557,8 @@ def transform_single_date_data(self, data: "xarray.Dataset") -> "xarray.Dataset" return self.color_ramp.apply(d) class Legend(RampLegendBase): - pass + def plot_name(self): + return f"{self.style.product.name}_{self.style.name}_{self.style_or_mdh.min_count}" class MultiDateHandler(StyleDefBase.MultiDateHandler): auto_legend = True @@ -572,13 +572,13 @@ def __init__(self, style: "ColorRampDef", cfg: CFG_DICT) -> None: """ super().__init__(style, cfg) if self.animate: - self.feature_info_label: Optional[str] = None + self.feature_info_label: str | None = None self.color_ramp = style.color_ramp else: - self.feature_info_label = cast(Optional[str], cfg.get("feature_info_label", None)) - self.color_ramp = ColorRamp(style, cfg, self.legend_cfg) + self.feature_info_label = cast(str | None, cfg.get("feature_info_label", None)) + self.color_ramp = ColorRamp(style, cfg, cast(ColorRampDef.Legend, self.legend_cfg)) - def transform_data(self, data: "xarray.Dataset") -> "xarray.Dataset": + def transform_data(self, data: Dataset) -> Dataset: """ Apply image transformation @@ -586,12 +586,9 @@ def transform_data(self, data: "xarray.Dataset") -> "xarray.Dataset": :return: RGBA image xarray. 
May have a time dimension """ xformed_data = cast("ColorRampDef", self.style).apply_index(data) - agg = self.aggregator(xformed_data) + agg = cast(FunctionWrapper, self.aggregator)(xformed_data) return self.color_ramp.apply(agg) - class Legend(RampLegendBase): - def plot_name(self): - return f"{self.style.product.name}_{self.style.name}_{self.style_or_mdh.min_count}" # Register ColorRampDef as Style subclass. StyleDefBase.register_subclass(ColorRampDef, ("range", "color_ramp")) From 72976783741f4228fde43701714d5accbf197a00 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Mon, 22 Apr 2024 16:27:42 +1000 Subject: [PATCH 09/29] Styling engine cleanup. --- datacube_ows/config_utils.py | 72 +++++++++++------------ datacube_ows/mv_index.py | 2 +- datacube_ows/styles/base.py | 5 +- datacube_ows/styles/colormap.py | 99 ++++++++++++++++---------------- datacube_ows/styles/component.py | 2 +- datacube_ows/styles/hybrid.py | 17 +++--- datacube_ows/styles/ramp.py | 6 +- tests/conftest.py | 6 +- tests/test_legend_generator.py | 2 +- 9 files changed, 107 insertions(+), 104 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 9b51e60f7..1537eb3fa 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -8,8 +8,7 @@ import os from importlib import import_module from itertools import chain -from typing import (Any, Callable, Iterable, List, Mapping, - Optional, Sequence, Set, TypeVar, Union, cast) +from typing import (Any, Callable, Iterable, Mapping, Optional, Sequence, TypeVar, cast) from urllib.parse import urlparse import fsspec @@ -30,14 +29,7 @@ _LOG = logging.getLogger(__name__) -RAW_CFG = Union[ - None, - str, - int, - float, - list[Any], - dict[str, Any] -] +RAW_CFG = None | str | int | float | list["RAW_CFG"] | dict[str, "RAW_CFG"] CFG_DICT = dict[str, RAW_CFG] @@ -60,20 +52,20 @@ def cfg_expand(cfg_unexpanded: RAW_CFG, if cwd is None: cwd = os.getcwd() - if isinstance(cfg_unexpanded, Mapping): + if 
isinstance(cfg_unexpanded, dict): if "include" in cfg_unexpanded: if cfg_unexpanded["include"] in inclusions: raise ConfigException("Cyclic inclusion: %s" % cfg_unexpanded["include"]) - ninclusions: List[str] = inclusions.copy() - ninclusions.append(cfg_unexpanded["include"]) + raw_path = cast(str, cfg_unexpanded["include"]) + ninclusions: list[str] = inclusions.copy() + ninclusions.append(cast(str, raw_path)) # Perform expansion if "type" not in cfg_unexpanded or cfg_unexpanded["type"] == "json": # JSON Expansion - raw_path: str = cfg_unexpanded["include"] try: # Try in actual working directory json_obj: Any = load_json_obj(raw_path) - abs_path: str = os.path.abspath(cfg_unexpanded["include"]) + abs_path: str = os.path.abspath(raw_path) cwd = os.path.dirname(abs_path) # pylint: disable=broad-except except Exception: @@ -91,7 +83,7 @@ def cfg_expand(cfg_unexpanded: RAW_CFG, return cfg_expand(json_obj, cwd=cwd, inclusions=ninclusions) elif cfg_unexpanded["type"] == "python": # Python Expansion - return cfg_expand(import_python_obj(cfg_unexpanded["include"]), cwd=cwd, inclusions=ninclusions) + return cfg_expand(import_python_obj(raw_path), cwd=cwd, inclusions=ninclusions) else: raise ConfigException("Unsupported inclusion type: %s" % str(cfg_unexpanded["type"])) else: @@ -175,7 +167,7 @@ def __init__(self, cfg: RAW_CFG, *args, **kwargs) -> None: :param args: :param kwargs: """ - self._unready_attributes: Set[str] = set() + self._unready_attributes: set[str] = set() self._raw_cfg: RAW_CFG = cfg self.ready: bool = False @@ -347,7 +339,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: else: self.register_metadata(self.get_obj_label(), "abstract", cast(str, local_abstract)) if self.METADATA_KEYWORDS: - local_keyword_set = set(cast(List[str], cfg.get("keywords", []))) + local_keyword_set = set(cast(list[str], cfg.get("keywords", []))) self.register_metadata(self.get_obj_label(), FLD_KEYWORDS, ",".join(local_keyword_set)) if inherit_from: keyword_set = 
inherit_from.keywords @@ -385,7 +377,7 @@ def parse_metadata(self, cfg: CFG_DICT) -> None: if position: self.register_metadata(self.get_obj_label(), FLD_CONTACT_POSITION, position) if self.METADATA_DEFAULT_BANDS: - band_map = cast(dict[str, List[str]], cfg) + band_map = cast(dict[str, list[str]], cfg) for k, v in band_map.items(): if len(v): self.register_metadata(self.get_obj_label(), k, v[0]) @@ -535,9 +527,9 @@ class OWSIndexedConfigEntry(OWSConfigEntry): """ A Config Entry object that can be looked up by name (i.e. so it can be inherited from) """ - INDEX_KEYS: List[str] = [] + INDEX_KEYS: list[str] = [] - def __init__(self, cfg: RAW_CFG, keyvals: Mapping[str, Any], *args, **kwargs) -> None: + def __init__(self, cfg: RAW_CFG, keyvals: dict[str, str], *args, **kwargs) -> None: """ Validate and store keyvals for indexed lookup. @@ -555,8 +547,8 @@ def __init__(self, cfg: RAW_CFG, keyvals: Mapping[str, Any], *args, **kwargs) -> @classmethod def lookup_impl(cls, cfg: "datacube_ows.ows_configuration.OWSConfig", - keyvals: Mapping[str, Any], - subs: Optional[Mapping[str, Any]] = None) -> "OWSIndexedConfigEntry": + keyvals: dict[str, str], + subs: CFG_DICT | None = None) -> "OWSIndexedConfigEntry": """ Lookup a config entry of this type by identifying label(s) @@ -577,8 +569,8 @@ class OWSExtensibleConfigEntry(OWSIndexedConfigEntry): def __init__(self, cfg: RAW_CFG, keyvals: dict[str, str], global_cfg: "datacube_ows.ows_configuration.OWSConfig", *args, - keyval_subs: CFG_DICT | None = None, - keyval_defaults: CFG_DICT | None = None, + keyval_subs: dict[str, Any] | None = None, + keyval_defaults: dict[str, str] | None = None, expanded: bool = False, **kwargs) -> None: """ @@ -599,8 +591,8 @@ def __init__(self, @classmethod def expand_inherit(cls, cfg: CFG_DICT, global_cfg: "datacube_ows.ows_configuration.OWSConfig", - keyval_subs: CFG_DICT | None = None, - keyval_defaults: CFG_DICT | None = None) -> RAW_CFG: + keyval_subs: dict[str, Any] | None = None, + 
keyval_defaults: dict[str, str] | None = None) -> RAW_CFG: """ Expand inherited config, and apply overrides. @@ -613,7 +605,7 @@ def expand_inherit(cls, if "inherits" in cfg: lookup = True # Precludes e.g. defaulting style lookup to current layer. - lookup_keys: CFG_DICT = {} + lookup_keys: dict[str, str] = {} inherits = cast(dict[str, str], cfg["inherits"]) for k in cls.INDEX_KEYS: if k not in inherits and keyval_defaults is not None and k not in keyval_defaults: @@ -622,7 +614,7 @@ def expand_inherit(cls, if k in inherits: lookup_keys[k] = inherits[k] elif keyval_defaults and k in keyval_defaults: - lookup_keys[k] = keyval_defaults[k] + lookup_keys[k] = str(keyval_defaults[k]) if lookup and lookup_keys: parent = cls.lookup_impl(global_cfg, keyvals=lookup_keys, subs=keyval_subs) # pylint: disable=protected-access @@ -751,7 +743,7 @@ def __init__(self, flag_band: FlagBand, """ super().__init__({}) self.layer = layer - self.bands: Set[str] = set() + self.bands: set[str] = set() self.bands.add(str(flag_band.canonical_band_name)) self.flag_bands = {flag_band.pq_band: flag_band} self.product_names = tuple(flag_band.pq_names) @@ -811,7 +803,7 @@ def make_ready(self, dc: Datacube, *args, **kwargs) -> None: @classmethod def build_list_from_masks(cls, masks: Iterable["datacube_ows.styles.base.StyleMask"], - layer: "datacube_ows.ows_configuration.OWSNamedLayer") -> List["FlagProductBands"]: + layer: "datacube_ows.ows_configuration.OWSNamedLayer") -> list["FlagProductBands"]: """ Class method to instantiate a list of FlagProductBands from a list of style masks. 
@@ -858,6 +850,9 @@ def build_list_from_flagbands(cls, flagbands: Iterable[OWSFlagBand], return flag_products +FlagSpec = dict[str, bool | str] + + class AbstractMaskRule(OWSConfigEntry): def __init__(self, band: str, cfg: CFG_DICT, mapper: Callable[[str], str] = lambda x: x) -> None: super().__init__(cfg) @@ -870,29 +865,26 @@ def context(self) -> str: VALUES_LABEL = "values" def parse_rule_spec(self, cfg: CFG_DICT) -> None: - self.flags: list[CFG_DICT] | CFG_DICT | None = None + self.flags: list[FlagSpec] | FlagSpec | None = None self.or_flags: bool | list[bool] = False self.values: list[list[int]] | list[int] | None = None self.invert: bool | list[bool] = bool(cfg.get("invert", False)) if "flags" in cfg: - flags = cast(CFG_DICT, cfg["flags"]) + flags = cast(FlagSpec, cfg["flags"]) if "or" in flags and "and" in flags: raise ConfigException( f"ValueMap rule in {self.context} combines 'and' and 'or' rules") elif "or" in flags: self.or_flags = True - flags = cast(CFG_DICT, flags["or"]) + flags = cast(FlagSpec, flags["or"]) elif "and" in flags: - flags = cast(CFG_DICT, flags["and"]) + flags = cast(FlagSpec, flags["and"]) self.flags = flags else: self.flags = None if "values" in cfg: val: Any = cfg["values"] - elif "enum" in cfg: - val = cfg["enum"] - _LOG.warning("enum in pq_masks is deprecated and will be removed in a future release. 
Refer to the documentation for the new syntax.") else: val = None if val is None: @@ -901,7 +893,7 @@ def parse_rule_spec(self, cfg: CFG_DICT) -> None: if isinstance(val, int): self.values = [cast(int, val)] else: - self.values = cast(List[int], val) + self.values = cast(list[int], val) if not self.flags and not self.values: raise ConfigException( @@ -919,7 +911,7 @@ def create_mask(self, data: DataArray) -> DataArray | None: """ if self.values: mask: DataArray | None = None - for v in cast(List[int], self.values): + for v in cast(list[int], self.values): vmask = data == v if mask is None: mask = vmask diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index 5c9cfda45..1a57a8225 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -70,7 +70,7 @@ def sel(self, stv: Table) -> list[ClauseElement]: return [cast(ClauseElement, count(stv.c.id))] if self == self.EXTENT: return [text("ST_AsGeoJSON(ST_Union(spatial_extent))")] - raise Exception("Invalid selection option") + raise AssertionError("Invalid selection option") DateOrDateTime = datetime.datetime | datetime.date diff --git a/datacube_ows/styles/base.py b/datacube_ows/styles/base.py index 950114486..9d7073132 100644 --- a/datacube_ows/styles/base.py +++ b/datacube_ows/styles/base.py @@ -90,7 +90,7 @@ def get_obj_label(self) -> str: if self.style == self.style_or_mdh: min_count: int = 1 else: - min_count = self.style_or_mdh.min_count + min_count = cast(int, self.style_or_mdh.min_count) return f"{style_label}.legend.{min_count}" @@ -205,6 +205,9 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer", if not defer_multi_date: self.parse_multi_date(raw_cfg) + self.min_count: int = 1 + self.max_count: int = 1 + # Over-ridden methods def global_config(self) -> "datacube_ows.ows_configuration.OWSConfig": """"Global config object""" diff --git a/datacube_ows/styles/colormap.py b/datacube_ows/styles/colormap.py index 4e4c09b02..7700ecc3b 100644 --- 
a/datacube_ows/styles/colormap.py +++ b/datacube_ows/styles/colormap.py @@ -6,7 +6,7 @@ import io import logging from datetime import datetime -from typing import Callable, List, MutableMapping, Optional, Union, cast +from typing import Callable, MutableMapping, Type, Union, cast import numpy import xarray @@ -16,9 +16,11 @@ from matplotlib import pyplot as plt from xarray import DataArray, Dataset -from datacube_ows.config_utils import (CFG_DICT, AbstractMaskRule, - ConfigException, OWSMetadataConfig) +from datacube_ows.config_utils import CFG_DICT, AbstractMaskRule, ConfigException, OWSMetadataConfig, FlagSpec from datacube_ows.styles.base import StyleDefBase +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSNamedLayer _LOG = logging.getLogger(__name__) @@ -29,7 +31,7 @@ class AbstractValueMapRule(AbstractMaskRule): Construct a ValueMap rule-set with ValueMapRule.value_map_from_config """ - def __init__(self, style_def: "ColorMapStyleDef", band: str, cfg: CFG_DICT) -> None: + def __init__(self, style_def: Union["ColorMapStyleDef", "ColorMapStyleDef.MultiDateHandler"], band: str, cfg: CFG_DICT) -> None: """ Construct a Value Map Rule @@ -44,7 +46,7 @@ def __init__(self, style_def: "ColorMapStyleDef", band: str, cfg: CFG_DICT) -> N self.title = cast(str, cfg["title"]) self.abstract = cast(str, cfg.get("abstract")) if self.title and self.abstract: - self.label: Optional[str] = f"{self.title} - {self.abstract}" + self.label: str | None = f"{self.title} - {self.abstract}" elif self.title: self.label = self.title elif self.abstract: @@ -63,13 +65,13 @@ def parse_color(self, cfg: CFG_DICT): if cfg.get("mask"): self.alpha = 0.0 else: - self.alpha = float(cast(Union[float, int, str], cfg.get("alpha", 1.0))) + self.alpha = float(cast(float | int | str, cfg.get("alpha", 1.0))) @classmethod def value_map_from_config(cls, style_or_mdh: Union["ColorMapStyleDef", "ColorMapStyleDef.MultiDateHandler"], cfg: CFG_DICT - ) -> 
MutableMapping[str, List["AbstractValueMapRule"]]: + ) -> dict[str, list["AbstractValueMapRule"]]: """ Create a multi-date value map rule set from a config specification @@ -79,20 +81,20 @@ def value_map_from_config(cls, :return: A value map ruleset dictionary. """ if isinstance(style_or_mdh, ColorMapStyleDef): - typ = ValueMapRule + typ: Type[AbstractValueMapRule] = ValueMapRule else: mdh = cast(ColorMapStyleDef.MultiDateHandler, style_or_mdh) if mdh.aggregator: - style_or_mdh = mdh.style + style_or_mdh = cast(ColorMapStyleDef, mdh.style) typ = ValueMapRule else: if mdh.min_count != mdh.max_count: raise ConfigException( "MultiDate value map only supported on multi-date handlers with min_count and max_count equal.") typ = MultiDateValueMapRule - vmap: MutableMapping[str, List["AbstractValueMapRule"]] = {} + vmap: dict[str, list["AbstractValueMapRule"]] = {} for band_name, rules in cfg.items(): - band_rules = [typ(style_or_mdh, band_name, rule) for rule in cast(List[CFG_DICT], rules)] + band_rules = [typ(style_or_mdh, band_name, rule) for rule in cast(list[CFG_DICT], rules)] vmap[band_name] = band_rules return vmap @@ -131,21 +133,21 @@ def __init__(self, mdh: "ColorMapStyleDef.MultiDateHandler", band: str, :param cfg: The rule specification """ self.mdh = mdh - self.invert: List[bool] = [] - self.flags: Optional[List[CFG_DICT]] = [] - self.or_flags: Optional[List[bool]] = [] - self.values: Optional[List[List[int]]] = [] - super().__init__(style_def=mdh.style, band=band, cfg=cfg) + self.invert: list[bool] = [] + self.flags: list[FlagSpec] = [] + self.or_flags: list[bool] = [] + self.values: list[list[int]] = [] + super().__init__(style_def=cast(ColorMapStyleDef.MultiDateHandler, mdh.style), band=band, cfg=cfg) def parse_rule_spec(self, cfg: CFG_DICT): if "invert" in cfg: - self.invert = [bool(b) for b in cfg["invert"]] + self.invert = [bool(b) for b in cast(list, cfg["invert"])] else: self.invert = [False] * self.mdh.max_count if len(self.invert) != 
self.mdh.max_count: raise ConfigException(f"Invert entry has wrong number of rule sets for date count") if "flags" in cfg: - date_flags = cast(CFG_DICT, cfg["flags"]) + date_flags = cast(list[CFG_DICT], cfg["flags"]) if len(date_flags) != self.mdh.max_count: raise ConfigException(f"Flags entry has wrong number of rule sets for date count") for flags in date_flags: @@ -154,24 +156,27 @@ def parse_rule_spec(self, cfg: CFG_DICT): raise ConfigException(f"MultiDateValueMap rule in {self.mdh.style.name} of layer {self.mdh.style.product.name} combines 'and' and 'or' rules") elif "or" in flags: or_flag = True - flags = cast(CFG_DICT, flags["or"]) + sflags = cast(FlagSpec, flags["or"]) elif "and" in flags: - flags = cast(CFG_DICT, flags["and"]) - self.flags.append(flags) + sflags = cast(FlagSpec, flags["and"]) + else: + sflags = cast(FlagSpec, flags) + self.flags.append(sflags) self.or_flags.append(or_flag) else: - self.flags = None - self.or_flags = None + self.flags = [] + self.or_flags = [] + if "values" in cfg: - self.values = cast(List[List[int]], list(cfg["values"])) + self.values = list(cast(list[list[int]], cfg["values"])) else: - self.values = None + self.values = [] if not self.flags and not self.values: raise ConfigException(f"Multi-Date Value map rule in {self.context} must have a non-empty 'flags' or 'values' section.") if self.flags and self.values: raise ConfigException(f"Multi-Date Value map rule in {self.context} has both a 'flags' and a 'values' section - choose one.") - def create_mask(self, data: DataArray) -> DataArray: + def create_mask(self, data: DataArray) -> DataArray | None: """ Create a mask from raw flag band data. 
@@ -179,14 +184,14 @@ def create_mask(self, data: DataArray) -> DataArray: :return: A boolean dateless DataArray, True where the data matches this rule """ date_slices = (data.sel(time=dt) for dt in data.coords["time"].values) - mask: Optional[DataArray] = None + mask: DataArray | None = None if self.values: for d_slice, vals, invert in zip(date_slices, self.values, self.invert): - d_mask: Optional[DataArray] = None + d_mask: DataArray | None = None if len(vals) == 0: d_mask = d_slice == d_slice else: - for v in cast(List[int], vals): + for v in cast(list[int], vals): vmask = d_slice == v if d_mask is None: d_mask = vmask @@ -196,27 +201,26 @@ def create_mask(self, data: DataArray) -> DataArray: d_mask = ~d_mask # pylint: disable=invalid-unary-operand-type if mask is None: mask = d_mask - else: + elif d_mask is not None: mask &= d_mask else: for d_slice, flags, or_flags, invert in zip(date_slices, self.flags, self.or_flags, self.invert): - d_mask: Optional[DataArray] = None + d_mask = None if not flags: d_mask = d_slice == d_slice elif or_flags: - for f in cast(CFG_DICT, flags).items(): - f = {f[0]: f[1]} + for f in ({k: v} for k, v in cast(CFG_DICT, flags).items()): if d_mask is None: d_mask = make_mask(d_slice, **f) else: d_mask |= make_mask(d_slice, **f) else: d_mask = make_mask(d_slice, **cast(CFG_DICT, flags)) - if invert: + if invert and d_mask is not None: d_mask = ~d_mask # pylint: disable=invalid-unary-operand-type if mask is None: mask = d_mask - else: + elif d_mask is not None: mask &= d_mask return mask @@ -227,7 +231,7 @@ def convert_to_uint8(fval): return clipped -def apply_value_map(value_map: MutableMapping[str, List[AbstractValueMapRule]], +def apply_value_map(value_map: MutableMapping[str, list[AbstractValueMapRule]], data: Dataset, band_mapper: Callable[[str], str]) -> Dataset: imgdata = Dataset(coords={k: v for k, v in data.coords.items() if k != "time"}) @@ -244,7 +248,7 @@ def apply_value_map(value_map: MutableMapping[str, 
List[AbstractValueMapRule]], bdata = ColorMapStyleDef.reint(bdata) for rule in reversed(rules): mask = rule.create_mask(bdata) - if mask.data.any(): + if mask is not None and mask.data.any(): for channel in ("red", "green", "blue", "alpha"): if channel == "alpha": val = convert_to_uint8(rule.alpha) @@ -268,18 +272,18 @@ class ColorMapLegendBase(StyleDefBase.Legend, OWSMetadataConfig): def __init__(self, style_or_mdh: Union["StyleDefBase", "StyleDefBase.Legend"], cfg: CFG_DICT) -> None: super().__init__(style_or_mdh, cfg) raw_cfg = cast(CFG_DICT, self._raw_cfg) - self.ncols = int(raw_cfg.get("ncols", 1)) + self.ncols = cast(int, raw_cfg.get("ncols", 1)) if self.ncols < 1: raise ConfigException("ncols must be a positive integer") - self.patches: List[PatchTemplate] = [] + self.patches: list[PatchTemplate] = [] - def register_value_map(self, value_map: MutableMapping[str, List["AbstractValueMapRule"]]) -> None: + def register_value_map(self, value_map: MutableMapping[str, list["AbstractValueMapRule"]]) -> None: for band in value_map.keys(): for idx, rule in reversed(list(enumerate(value_map[band]))): # only include values that are not transparent (and that have a non-blank title or abstract) if rule.alpha > 0.001 and rule.label: self.patches.append(PatchTemplate(idx, rule)) - self.parse_metadata(self._raw_cfg) + self.parse_metadata(cast(CFG_DICT, self._raw_cfg)) def render(self, bytesio: io.BytesIO) -> None: patches = [ @@ -309,11 +313,10 @@ def patch_label(self, idx: int): # For MetadataConfig @property - def default_title(self) -> Optional[str]: + def default_title(self) -> str | None: return "" - class ColorMapStyleDef(StyleDefBase): """ Style subclass for value-map styles @@ -321,7 +324,7 @@ class ColorMapStyleDef(StyleDefBase): auto_legend = True def __init__(self, - product: "datacube_ows.ows_configuration.OWSNamedLayer", + product: "OWSNamedLayer", style_cfg: CFG_DICT, stand_alone: bool = False, user_defined: bool = False) -> None: @@ -397,13 +400,13 @@ def 
__init__(self, style: "ColorMapStyleDef", cfg: CFG_DICT) -> None: :param cfg: The multidate handler configuration """ super().__init__(style, cfg) - self._value_map: Optional[MutableMapping[str, AbstractValueMapRule]] = None + self._value_map: dict[str, list[AbstractValueMapRule]] | None = None + tcfg = cast(CFG_DICT, self._raw_cfg) if self.animate: - if "value_map" in self._raw_cfg: + if "value_map" in tcfg: raise ConfigException("Multidate value maps not supported for animation handlers") else: - self._value_map = AbstractValueMapRule.value_map_from_config(self, - cast(CFG_DICT, self._raw_cfg["value_map"])) + self._value_map = AbstractValueMapRule.value_map_from_config(self, cast(CFG_DICT, tcfg["value_map"])) @property def value_map(self): diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 4ea6d52e6..73faa2f9d 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -25,7 +25,7 @@ class ComponentStyleDef(StyleDefBase): Style Subclass that allows the behaviour of each component (red, green, blue, alpha) to be specified independently. 
""" - def __init__(self, product: OWSNamedLayer, + def __init__(self, product: "OWSNamedLayer", style_cfg: CFG_DICT, stand_alone: bool = False, defer_multi_date: bool = False, diff --git a/datacube_ows/styles/hybrid.py b/datacube_ows/styles/hybrid.py index f13ff53da..d6dddac31 100644 --- a/datacube_ows/styles/hybrid.py +++ b/datacube_ows/styles/hybrid.py @@ -3,14 +3,17 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from typing import Optional, Union, cast +from typing import cast from xarray import DataArray, Dataset from datacube_ows.config_utils import CFG_DICT, ConfigException from datacube_ows.styles.base import StyleDefBase -from datacube_ows.styles.component import ComponentStyleDef +from datacube_ows.styles.component import ComponentStyleDef, LINEAR_COMP_DICT from datacube_ows.styles.ramp import ColorRampDef +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSNamedLayer class HybridStyleDef(ColorRampDef, ComponentStyleDef): @@ -22,7 +25,7 @@ class HybridStyleDef(ColorRampDef, ComponentStyleDef): auto_legend = False def __init__(self, - product: "datacube_ows.ows_configuration.OWSNamedLayer", + product: "OWSNamedLayer", style_cfg: CFG_DICT, defer_multi_date: bool = False, stand_alone: bool = False, @@ -35,11 +38,11 @@ def __init__(self, stand_alone=stand_alone, user_defined=user_defined) style_cfg = cast(CFG_DICT, self._raw_cfg) - self.component_ratio = float(cast(Union[float, str], style_cfg["component_ratio"])) + self.component_ratio = float(cast(float | str, style_cfg["component_ratio"])) if self.component_ratio < 0.0 or self.component_ratio > 1.0: raise ConfigException("Component ratio must be a floating point number between 0 and 1") - def transform_single_date_data(self, data: "xarray.Dataset") -> "xarray.Dataset": + def transform_single_date_data(self, data: Dataset) -> Dataset: """ Apply style to raw data to make an RGBA image xarray (single time slice only) @@ -57,8 +60,8 
@@ def transform_single_date_data(self, data: "xarray.Dataset") -> "xarray.Dataset" rampdata = DataArray(self.color_ramp.get_value(d, band), coords=d.coords, dims=d.dims) - component_band_data: Optional[DataArray] = None - for c_band, c_intensity in self.rgb_components[band].items(): + component_band_data: DataArray | None = None + for c_band, c_intensity in cast(LINEAR_COMP_DICT, self.rgb_components[band]).items(): if callable(c_intensity): imgband_component_data = cast(DataArray, c_intensity(data[c_band], c_band, band)) else: diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index ec2413767..aaf691ccb 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -174,7 +174,7 @@ def __init__(self, style: StyleDefBase, """ self.style = style if "color_ramp" in ramp_cfg: - raw_scaled_ramp = ramp_cfg["color_ramp"] + raw_scaled_ramp = cast(list[CFG_DICT], ramp_cfg["color_ramp"]) else: rmin, rmax = cast(list[float], ramp_cfg["range"]) unscaled_ramp = UNSCALED_DEFAULT_RAMP @@ -499,7 +499,7 @@ class ColorRampDef(StyleDefBase): auto_legend = True def __init__(self, - product: OWSNamedLayer, + product: "OWSNamedLayer", style_cfg: CFG_DICT, stand_alone: bool = False, defer_multi_date: bool = False, @@ -588,6 +588,8 @@ def transform_data(self, data: Dataset) -> Dataset: xformed_data = cast("ColorRampDef", self.style).apply_index(data) agg = cast(FunctionWrapper, self.aggregator)(xformed_data) return self.color_ramp.apply(agg) + class Legend(RampLegendBase): + pass # Register ColorRampDef as Style subclass. 
diff --git a/tests/conftest.py b/tests/conftest.py index b7ae3a85f..fce5023f4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1055,7 +1055,7 @@ def configs_for_combined_fc_wofs(): "pq_masks": [ { "band": "water", - "enum": 1, + "values": [1], } ] }, @@ -1069,7 +1069,7 @@ def configs_for_combined_fc_wofs(): "pq_masks": [ { "band": "water", - "enum": 1, + "values": [1], "invert": True, } ] @@ -1085,7 +1085,7 @@ def configs_for_combined_fc_wofs(): { # Mask out nodata pixels. "band": "water", - "enum": 1, + "values": [1], "invert": True, }, { diff --git a/tests/test_legend_generator.py b/tests/test_legend_generator.py index 663324b36..1b48ae623 100644 --- a/tests/test_legend_generator.py +++ b/tests/test_legend_generator.py @@ -68,7 +68,7 @@ def test_parse_colorramp_defaults(): assert legend.tick_labels == ["0.0", "1.0"] assert legend.width == 4.0 assert legend.height == 1.25 - assert legend.strip_location == [0.05, 0.5, 0.9, 0.15] + assert legend.strip_location == (0.05, 0.5, 0.9, 0.15) def test_parse_colorramp_legend_beginend(): From bfb517a6692797abcdfde61a432c39741d3f2c4b Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Mon, 22 Apr 2024 17:17:19 +1000 Subject: [PATCH 10/29] More typehint cleanup. 
--- datacube_ows/ows_configuration.py | 4 ++-- datacube_ows/protocol_versions.py | 10 +++++----- datacube_ows/wms_utils.py | 7 ++++--- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index ef54ed79e..fc786c1eb 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -368,7 +368,7 @@ def search_times(self, t, geobox=None): return times - def dataset_groupby(self, product_names: Optional[Sequence[str]] = None, is_mosaic=False): + def dataset_groupby(self, product_names: list[str] | None = None, is_mosaic=False): if self.is_subday(): return group_by_begin_datetime(product_names, truncate_dates=False) elif is_mosaic: @@ -421,7 +421,7 @@ def __init__(self, cfg, global_cfg, parent_layer=None, **kwargs): self.time_resolution = TimeRes.parse(cfg.get("time_resolution")) if not self.time_resolution: raise ConfigException(f"Invalid time resolution value {cfg['time_resolution']} in named layer {self.name}") - self.mosaic_date_func: Optional[FunctionWrapper] = None + self.mosaic_date_func: FunctionWrapper | None = None if "mosaic_date_func" in cfg: self.mosaic_date_func = FunctionWrapper(self, cfg["mosaic_date_func"]) if self.mosaic_date_func and not self.time_resolution.allow_mosaic(): diff --git a/datacube_ows/protocol_versions.py b/datacube_ows/protocol_versions.py index 014c2fa28..4dd09ad52 100644 --- a/datacube_ows/protocol_versions.py +++ b/datacube_ows/protocol_versions.py @@ -21,7 +21,7 @@ class SupportedSvcVersion: - def __init__(self, service: str, version: str, router, exception_class: OGCException) -> None: + def __init__(self, service: str, version: str, router, exception_class: type[OGCException]) -> None: self.service = service.lower() self.service_upper = service.upper() self.version = version @@ -32,7 +32,7 @@ def __init__(self, service: str, version: str, router, exception_class: OGCExcep class SupportedSvc: - def __init__(self, versions: 
Sequence[SupportedSvcVersion], default_exception_class: Optional[OGCException] = None): + def __init__(self, versions: Sequence[SupportedSvcVersion], default_exception_class: type[OGCException] | None = None): self.versions = sorted(versions, key=lambda x: x.version_parts) assert len(self.versions) > 0 self.service = self.versions[0].service @@ -47,7 +47,7 @@ def __init__(self, versions: Sequence[SupportedSvcVersion], default_exception_cl else: self.default_exception_class = self.versions[0].exception_class - def _clean_version_parts(self, unclean: Iterable[str]) -> Sequence[int]: + def _clean_version_parts(self, unclean: list[str]) -> list[int]: clean = [] for part in unclean: try: @@ -65,8 +65,8 @@ def _clean_version_parts(self, unclean: Iterable[str]) -> Sequence[int]: def negotiated_version(self, request_version: str) -> SupportedSvcVersion: if not request_version: return self.versions[-1] - parts: List[str] = list(request_version.split(".")) - rv_parts: List[int] = self._clean_version_parts(parts) + parts: list[str] = list(request_version.split(".")) + rv_parts: list[int] = self._clean_version_parts(parts) while len(rv_parts) < 3: rv_parts.append(0) for v in reversed(self.versions): diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index 168631bcd..3bd0216bc 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -4,7 +4,8 @@ # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 import math -from datetime import datetime +from datetime import datetime, date +from typing import cast import numpy import regex as re @@ -35,7 +36,7 @@ } -def _bounding_pts(minx, miny, maxx, maxy, src_crs, dst_crs=None): +def _bounding_pts(minx: int, miny: int, maxx: int, maxy: int, src_crs, dst_crs=None): # pylint: disable=too-many-locals p1 = geom.point(minx, maxy, src_crs) p2 = geom.point(minx, miny, src_crs) @@ -211,7 +212,7 @@ def parse_time_item(item: str, product: OWSNamedLayer) -> datetime: try: time = 
parse(times[0]) if not product.time_resolution.is_subday(): - time = time.date() + time = time.date() # type: ignore[assignment] except ValueError: raise WMSException( "Time dimension value '%s' not valid for this layer" % times[0], From a3c880c4ed6e38cbaf552d2104355530356f668b Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 23 Apr 2024 13:12:55 +1000 Subject: [PATCH 11/29] First pass of typehint cleanup complete - no errors but many untyped functions. --- datacube_ows/config_utils.py | 10 +++--- datacube_ows/loading.py | 57 +++++++++++++++++++----------- datacube_ows/mv_index.py | 22 +++++++++--- integration_tests/test_mv_index.py | 5 ++- tests/test_data.py | 4 +-- 5 files changed, 63 insertions(+), 35 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 1537eb3fa..4b395a37d 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -29,7 +29,7 @@ _LOG = logging.getLogger(__name__) -RAW_CFG = None | str | int | float | list["RAW_CFG"] | dict[str, "RAW_CFG"] +RAW_CFG = None | str | int | float | bool | list["RAW_CFG"] | dict[str, "RAW_CFG"] CFG_DICT = dict[str, RAW_CFG] @@ -79,7 +79,7 @@ def cfg_expand(cfg_unexpanded: RAW_CFG, except Exception: json_obj = None if json_obj is None: - raise ConfigException("Could not find json file %s" % raw_path) + raise ConfigException(f"Could not find json file {raw_path}") return cfg_expand(json_obj, cwd=cwd, inclusions=ninclusions) elif cfg_unexpanded["type"] == "python": # Python Expansion @@ -666,9 +666,9 @@ def __init__(self, cfg: CFG_DICT, product_cfg: "datacube_ows.ows_configuration.O self.pq_fuse_func: Optional[FunctionWrapper] = FunctionWrapper(self.product, cast(Mapping[str, Any], cfg["fuse_func"])) else: self.pq_fuse_func = None - self.pq_ignore_time = cfg.get("ignore_time", False) + self.pq_ignore_time = bool(cfg.get("ignore_time", False)) self.ignore_info_flags = cast(list[str], cfg.get("ignore_info_flags", [])) - self.pq_manual_merge = 
cfg.get("manual_merge", False) + self.pq_manual_merge = bool(cfg.get("manual_merge", False)) self.declare_unready("pq_products") self.declare_unready("flags_def") self.declare_unready("info_mask") @@ -750,7 +750,7 @@ def __init__(self, flag_band: FlagBand, self.ignore_time = flag_band.pq_ignore_time self.declare_unready("products") self.declare_unready("low_res_products") - self.manual_merge = flag_band.pq_manual_merge + self.manual_merge = bool(flag_band.pq_manual_merge) self.fuse_func = flag_band.pq_fuse_func # pyre-ignore[16] self.main_product = self.products_match(layer.product_names) diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py index 1d68200b8..c5375d362 100644 --- a/datacube_ows/loading.py +++ b/datacube_ows/loading.py @@ -2,12 +2,14 @@ import datetime import logging -from typing import Iterable +from typing import Iterable, cast, Mapping +from uuid import UUID import datacube import numpy import xarray -from rasterio.enums import Resampling + +from sqlalchemy.engine import Row from odc.geo.geom import Geometry from odc.geo.geobox import GeoBox @@ -26,12 +28,12 @@ class ProductBandQuery: def __init__(self, products: list[datacube.model.Product], - bands: list[datacube.model.Measurement], + bands: Iterable[str], main: bool = False, manual_merge: bool = False, ignore_time: bool = False, fuse_func: datacube.api.core.FuserFunction | None = None ): self.products = products - self.bands = bands + self.bands = set(bands) self.manual_merge = manual_merge self.fuse_func = fuse_func self.ignore_time = ignore_time @@ -66,7 +68,7 @@ def style_queries(cls, style: StyleDef, resource_limited: bool = False) -> list[ pq_products = fp.products queries.append(cls( pq_products, - tuple(fp.bands), + list(fp.bands), manual_merge=fp.manual_merge, ignore_time=fp.ignore_time, fuse_func=fp.fuse_func @@ -76,9 +78,9 @@ def style_queries(cls, style: StyleDef, resource_limited: bool = False) -> list[ @classmethod def full_layer_queries(cls, layer: OWSNamedLayer, - 
main_bands: list[datacube.model.Measurement] | None = None) -> list["ProductBandQuery"]: + main_bands: list[str] | None = None) -> list["ProductBandQuery"]: if main_bands: - needed_bands = main_bands + needed_bands: Iterable[str] = main_bands else: needed_bands = set(layer.band_idx.band_cfg.keys()) queries = [ @@ -95,7 +97,7 @@ def full_layer_queries(cls, pq_products = fpb.products queries.append(cls( pq_products, - tuple(fpb.bands), + list(fpb.bands), manual_merge=fpb.manual_merge, ignore_time=fpb.ignore_time, fuse_func=fpb.fuse_func @@ -104,7 +106,7 @@ def full_layer_queries(cls, @classmethod def simple_layer_query(cls, layer: OWSNamedLayer, - bands: list[datacube.model.Measurement], + bands: Iterable[str], manual_merge: bool = False, fuse_func: datacube.api.core.FuserFunction | None = None, resource_limited: bool = False) -> "ProductBandQuery": @@ -114,6 +116,7 @@ def simple_layer_query(cls, layer: OWSNamedLayer, main_products = layer.products return cls(main_products, bands, manual_merge=manual_merge, main=True, fuse_func=fuse_func) +PerPBQReturnType = xarray.DataArray | Iterable[UUID] class DataStacker: @log_call @@ -159,15 +162,21 @@ def n_datasets(self, index: datacube.index.Index, all_time: bool = False, point: Geometry | None = None) -> int: - return self.datasets(index, + return cast(int, self.datasets(index, all_time=all_time, point=point, - mode=MVSelectOpts.COUNT) + mode=MVSelectOpts.COUNT)) def datasets(self, index: datacube.index.Index, all_flag_bands: bool = False, all_time: bool = False, point: Geometry | None = None, - mode: MVSelectOpts = MVSelectOpts.DATASETS) -> int | Iterable[datacube.model.Dataset]: + mode: MVSelectOpts = MVSelectOpts.DATASETS) -> (int + | Iterable[Row] + | Iterable[UUID] + | xarray.DataArray + | Geometry + | None + | Mapping[ProductBandQuery, PerPBQReturnType]): if mode == MVSelectOpts.EXTENT or all_time: # Not returning datasets - use main product only queries = [ @@ -194,7 +203,7 @@ def datasets(self, index: 
datacube.index.Index, times = None else: times = self._times - results = [] + results: list[tuple[ProductBandQuery, PerPBQReturnType]] = [] for query in queries: if query.ignore_time: qry_times = None @@ -206,16 +215,24 @@ def datasets(self, index: datacube.index.Index, geom=geom, products=query.products) if mode == MVSelectOpts.DATASETS: - result = datacube.Datacube.group_datasets(result, self.group_by) + grpd_result = datacube.Datacube.group_datasets( + cast(Iterable[datacube.model.Dataset], result), + self.group_by + ) if all_time: - return result - results.append((query, result)) + return grpd_result + results.append((query, grpd_result)) elif mode == MVSelectOpts.IDS: + result_ids = cast(Iterable[UUID], result) if all_time: - return result - results.append((query, result)) - else: - return result + return result_ids + results.append((query, result_ids)) + elif mode == MVSelectOpts.ALL: + return cast(Iterable[Row], result) + elif mode == MVSelectOpts.COUNT: + return cast(int, result) + else: # MVSelectOpts.EXTENT + return cast(Geometry | None, result) return OrderedDict(results) def create_nodata_filled_flag_bands(self, data, pbq): diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index 1a57a8225..cd9ce2a14 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -6,7 +6,9 @@ import datetime import json from enum import Enum -from typing import Any, Iterable, Optional, Tuple, Union, cast +from types import UnionType +from typing import Any, Iterable, Type, TypeVar, cast +from uuid import UUID as UUID_ import pytz from geoalchemy2 import Geometry @@ -19,6 +21,8 @@ from datacube.index import Index from datacube.model import Product, Dataset + +from sqlalchemy.engine import Row from sqlalchemy.engine.base import Engine from sqlalchemy.sql.elements import ClauseElement @@ -59,7 +63,6 @@ class MVSelectOpts(Enum): COUNT = 2 EXTENT = 3 DATASETS = 4 - INVALID = 9999 def sel(self, stv: Table) -> list[ClauseElement]: if self == self.ALL: @@ 
-73,16 +76,25 @@ def sel(self, stv: Table) -> list[ClauseElement]: raise AssertionError("Invalid selection option") +selection_return_types: dict[MVSelectOpts, Type | UnionType] = { + MVSelectOpts.ALL: Iterable[Row], + MVSelectOpts.IDS: Iterable[UUID_], + MVSelectOpts.DATASETS: Iterable[Dataset], + MVSelectOpts.COUNT: int, + MVSelectOpts.EXTENT: ODCGeom | None, +} + + +SelectOut = Iterable[Row] | Iterable[UUID_] | Iterable[Dataset] | int | ODCGeom | None DateOrDateTime = datetime.datetime | datetime.date TimeSearchTerm = tuple[datetime.datetime, datetime.datetime] | tuple[datetime.date, datetime.date] | DateOrDateTime -MVSearchResult = Iterable[Iterable[Any]] | Iterable[str] | Iterable[Dataset] | int | None | ODCGeom def mv_search(index: Index, sel: MVSelectOpts = MVSelectOpts.IDS, times: Iterable[TimeSearchTerm] | None = None, geom: ODCGeom | None = None, - products: Iterable[Product] | None = None) -> MVSearchResult: + products: Iterable[Product] | None = None) -> SelectOut: """ Perform a dataset query via the space_time_view @@ -147,7 +159,7 @@ def mv_search(index: Index, elif sel in (MVSelectOpts.COUNT, MVSelectOpts.EXTENT): for r in conn.execute(s): if sel == MVSelectOpts.COUNT: - return r[0] + return cast(int, r[0]) else: # MVSelectOpts.EXTENT geojson = r[0] if geojson is None: diff --git a/integration_tests/test_mv_index.py b/integration_tests/test_mv_index.py index 02bf0b7da..99f1d640e 100644 --- a/integration_tests/test_mv_index.py +++ b/integration_tests/test_mv_index.py @@ -39,9 +39,8 @@ def test_no_products(): def test_bad_set_opt(): cfg = get_config() lyr = list(cfg.product_index.values())[0] - with cube() as dc: - with pytest.raises(AssertionError) as e: - sel = mv_search(dc.index, MVSelectOpts.INVALID, products=lyr.products) + with pytest.raises(ValueError) as e: + sel = MVSelectOpts("INVALID") class MockGeobox: diff --git a/tests/test_data.py b/tests/test_data.py index 328e4ea68..290e59cf9 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ 
-268,8 +268,8 @@ def test_pbq_ctor_full(product_layer): # noqa: F811 assert "Query bands {" in str(pbqs[0]) assert "} from products [FakeODCProduct(test_odc_product)]" in str(pbqs[0]) assert str(pbqs[1]) in ( - "Query bands ('wongle', 'pq') from products [FakeODCProduct(test_masking_product)]", - "Query bands ('pq', 'wongle') from products [FakeODCProduct(test_masking_product)]", + "Query bands {'wongle', 'pq'} from products [FakeODCProduct(test_masking_product)]", + "Query bands {'pq', 'wongle'} from products [FakeODCProduct(test_masking_product)]", ) From 77d4baf0fab746a7660da44ccee188a607b7873d Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 23 Apr 2024 16:42:05 +1000 Subject: [PATCH 12/29] data.py and loading.py now fully type-hinted. --- datacube_ows/data.py | 163 +++++++++++++++++------------- datacube_ows/loading.py | 48 ++++++--- datacube_ows/ows_configuration.py | 2 +- 3 files changed, 126 insertions(+), 87 deletions(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 54f69920f..401e1d117 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -8,11 +8,13 @@ import re from datetime import date, datetime, timedelta from itertools import chain +from typing import Iterable, cast, Any, Mapping import numpy import numpy.ma import pytz import xarray +from datacube.model import Dataset from datacube.utils.masking import mask_to_dict from flask import render_template from odc.geo import geom @@ -22,14 +24,15 @@ from rasterio.io import MemoryFile from datacube_ows.cube_pool import cube -from datacube_ows.loading import DataStacker +from datacube_ows.loading import DataStacker, ProductBandQuery from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ogc_utils import (dataset_center_time, solar_date, tz_for_geometry, xarray_image_as_png) -from datacube_ows.config_utils import ConfigException -from datacube_ows.ows_configuration import get_config +from datacube_ows.config_utils 
import ConfigException, RAW_CFG, CFG_DICT +from datacube_ows.ows_configuration import get_config, OWSNamedLayer, OWSConfig +from datacube_ows.styles import StyleDef from datacube_ows.query_profiler import QueryProfiler from datacube_ows.resource_limits import ResourceLimited from datacube_ows.utils import default_to_utc, log_call @@ -38,22 +41,15 @@ _LOG = logging.getLogger(__name__) +FlaskResponse = tuple[str | bytes, int, dict[str, str]] -def datasets_in_xarray(xa): - if xa is None: - return 0 - return sum(len(xa.values[i]) for i in range(0, len(xa.values))) - -def bbox_to_geom(bbox, crs): - return geom.box(bbox.left, bbox.bottom, bbox.right, bbox.top, crs) - - -def user_date_sorter(layer, odc_dates, geom, user_dates): +def user_date_sorter(layer: OWSNamedLayer, odc_dates: list[datetime], + geometry: geom.Geometry, user_dates: list[datetime]) -> xarray.DataArray: # TODO: Make more elegant. Just a little bit elegant would do. result = [] if layer.time_resolution.is_solar(): - tz = tz_for_geometry(geom) + tz = tz_for_geometry(geometry) else: tz = None @@ -97,8 +93,9 @@ def check_date(time_res, user_date, odc_date): class EmptyResponse(Exception): pass + @log_call -def get_map(args): +def get_map(args: dict[str, str]) -> FlaskResponse: # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals # Parse GET parameters params = GetMapParameters(args) @@ -131,22 +128,23 @@ def get_map(args): stacker.resource_limited = True qprof["resource_limited"] = str(e) if qprof.active: - q_ds_dict = stacker.datasets(dc.index, mode=MVSelectOpts.DATASETS) + q_ds_dict = cast(dict[ProductBandQuery, xarray.DataArray], + stacker.datasets(dc.index, mode=MVSelectOpts.DATASETS)) qprof["datasets"] = [] - for q, dss in q_ds_dict.items(): - query_res = {} + for q, dsxr in q_ds_dict.items(): + query_res: dict[str, Any] = {} query_res["query"] = str(q) query_res["datasets"] = [ [ f"{ds.id} ({ds.type.name})" for ds in tdss ] - for tdss in dss.values + for 
tdss in dsxr.values ] qprof["datasets"].append(query_res) if stacker.resource_limited and not params.product.low_res_product_names: qprof.start_event("extent-in-query") - extent = stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT) + extent = cast(geom.Geometry | None, stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT)) qprof.end_event("extent-in-query") if extent is None: qprof["write_action"] = "No extent: Write Empty" @@ -169,7 +167,7 @@ def get_map(args): qprof["n_summary_datasets"] = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT) qprof.end_event("count-summary-datasets") qprof.start_event("fetch-datasets") - datasets = stacker.datasets(dc.index) + datasets = cast(dict[ProductBandQuery, xarray.DataArray], stacker.datasets(dc.index)) for flagband, dss in datasets.items(): if not dss.any(): _LOG.warning("Flag band %s returned no data", str(flagband)) @@ -181,20 +179,24 @@ def get_map(args): qprof.start_event("load-data") data = stacker.data(datasets) qprof.end_event("load-data") + if not data: + qprof["write_action"] = "No Data: Write Empty" + raise EmptyResponse() _LOG.debug("load stop %s %s", datetime.now().time(), args["requestid"]) qprof.start_event("build-masks") td_masks = [] for npdt in data.time.values: td = data.sel(time=npdt) - td_ext_mask = None + td_ext_mask_man: numpy.ndarray | None = None + td_ext_mask: xarray.DataArray | None = None band = "" for band in params.style.needed_bands: if band not in params.style.flag_bands: if params.product.data_manual_merge: - if td_ext_mask is None: - td_ext_mask = ~numpy.isnan(td[band]) + if td_ext_mask_man is None: + td_ext_mask_man = ~numpy.isnan(td[band]) else: - td_ext_mask &= ~numpy.isnan(td[band]) + td_ext_mask_man &= ~numpy.isnan(td[band]) else: for f in params.product.extent_mask_func: if td_ext_mask is None: @@ -202,7 +204,7 @@ def get_map(args): else: td_ext_mask &= f(td, band) if params.product.data_manual_merge: - td_ext_mask = xarray.DataArray(td_ext_mask) + td_ext_mask = 
xarray.DataArray(td_ext_mask_man) if td_ext_mask is None: td_ext_mask = xarray.DataArray( ~numpy.zeros( @@ -214,21 +216,17 @@ def get_map(args): td_masks.append(td_ext_mask) extent_mask = xarray.concat(td_masks, dim=data.time) qprof.end_event("build-masks") - if not data: - qprof["write_action"] = "No Data: Write Empty" - raise EmptyResponse() - else: - qprof["write_action"] = "Write Data" - if mdh and mdh.preserve_user_date_order: - sorter = user_date_sorter( - params.product, - data.time.values, - params.geobox.geographic_extent, - params.times) - data = data.sortby(sorter) - extent_mask = extent_mask.sortby(sorter) - - body = _write_png(data, params.style, extent_mask, qprof) + qprof["write_action"] = "Write Data" + if mdh and mdh.preserve_user_date_order: + sorter = user_date_sorter( + params.product, + data.time.values, + params.geobox.geographic_extent, + params.times) + data = data.sortby(sorter) + extent_mask = extent_mask.sortby(sorter) + + body = _write_png(data, params.style, extent_mask, qprof) except EmptyResponse: qprof.start_event("write") body = _write_empty(params.geobox) @@ -240,9 +238,10 @@ def get_map(args): return png_response(body, extra_headers=params.product.resource_limits.wms_cache_rules.cache_headers(n_datasets)) -def png_response(body, cfg=None, extra_headers=None): +def png_response(body: bytes, cfg: OWSConfig | None = None, extra_headers: dict[str, str] | None = None) -> FlaskResponse: if not cfg: cfg = get_config() + assert cfg is not None # For type checker if extra_headers is None: extra_headers = {} headers = {"Content-Type": "image/png"} @@ -252,7 +251,8 @@ def png_response(body, cfg=None, extra_headers=None): @log_call -def _write_png(data, style, extent_mask, qprof): +def _write_png(data: xarray.Dataset, style: StyleDef, extent_mask: xarray.DataArray, + qprof: QueryProfiler) -> bytes: qprof.start_event("combine-masks") mask = style.to_mask(data, extent_mask) qprof.end_event("combine-masks") @@ -272,7 +272,7 @@ def 
_write_png(data, style, extent_mask, qprof): @log_call -def _write_empty(geobox): +def _write_empty(geobox: GeoBox) -> bytes: with MemoryFile() as memfile: with memfile.open(driver='PNG', width=geobox.width, @@ -285,7 +285,7 @@ def _write_empty(geobox): return memfile.read() -def get_coordlist(geo, layer_name): +def get_coordlist(geo: geom.Geometry, layer_name: str) -> list[tuple[float | int, float | int]]: if geo.type == 'Polygon': coordinates_list = [geo.json["coordinates"]] elif geo.type == 'MultiPolygon': @@ -308,7 +308,7 @@ def get_coordlist(geo, layer_name): @log_call -def _write_polygon(geobox, polygon, zoom_fill, layer): +def _write_polygon(geobox: GeoBox, polygon: geom.Geometry, zoom_fill: list[int], layer: OWSNamedLayer) -> bytes: geobox_ext = geobox.extent if geobox_ext.within(polygon): data = numpy.full([geobox.height, geobox.width], fill_value=1, dtype="uint8") @@ -334,7 +334,9 @@ def _write_polygon(geobox, polygon, zoom_fill, layer): @log_call -def get_s3_browser_uris(datasets, pt=None, s3url="", s3bucket=""): +def get_s3_browser_uris(datasets: dict[ProductBandQuery, xarray.DataArray], + pt: geom.Geometry | None = None, + s3url: str = "", s3bucket: str = "") -> set[str]: uris = [] last_crs = None for pbq, dss in datasets.items(): @@ -357,7 +359,7 @@ def get_s3_browser_uris(datasets, pt=None, s3url="", s3bucket=""): regex = re.compile(r"s3:\/\/(?P[a-zA-Z0-9_\-\.]+)\/(?P[\S]+)/[a-zA-Z0-9_\-\.]+.yaml") # convert to browsable link - def convert(uri): + def convert(uri: str) -> str: uri_format = "http://{bucket}.s3-website-ap-southeast-2.amazonaws.com/?prefix={prefix}" uri_format_prod = str(s3url) + "/?prefix={prefix}" result = regex.match(uri) @@ -377,8 +379,8 @@ def convert(uri): @log_call -def _make_band_dict(prod_cfg, pixel_dataset): - band_dict = {} +def _make_band_dict(prod_cfg: OWSNamedLayer, pixel_dataset: xarray.Dataset) -> dict[str, dict[str, bool | str] | str]: + band_dict: dict[str, dict[str, bool | str] | str] = {} for k, v in 
pixel_dataset.data_vars.items(): band_val = pixel_dataset[k].item() flag_def = pixel_dataset[k].attrs.get("flags_definition") @@ -388,7 +390,7 @@ def _make_band_dict(prod_cfg, pixel_dataset): except TypeError as te: logging.warning('Working around for float bands') flag_dict = mask_to_dict(flag_def, int(band_val)) - ret_val = {} + ret_val: dict[str, bool | str] = {} for flag, val in flag_dict.items(): if not val: continue @@ -400,6 +402,7 @@ def _make_band_dict(prod_cfg, pixel_dataset): else: try: band_lbl = prod_cfg.band_idx.band_label(k) + assert k is not None # for type checker if band_val == pixel_dataset[k].nodata or numpy.isnan(band_val): band_dict[band_lbl] = "n/a" else: @@ -410,7 +413,7 @@ def _make_band_dict(prod_cfg, pixel_dataset): @log_call -def _make_derived_band_dict(pixel_dataset, style_index): +def _make_derived_band_dict(pixel_dataset: xarray.Dataset, style_index: dict[str, StyleDef]) -> dict[str, int | float]: """Creates a dict of values for bands derived by styles. This only works for styles with an `index_function` defined. @@ -431,24 +434,25 @@ def _make_derived_band_dict(pixel_dataset, style_index): return derived_band_dict -def geobox_is_point(geobox): - # TODO: Not 100% sure why this function is needed. 
+def geobox_is_point(geobox: GeoBox) -> bool: return geobox.height == 1 and geobox.width == 1 @log_call -def feature_info(args): +def feature_info(args: dict[str, str]) -> FlaskResponse: # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals # Parse GET parameters params = GetFeatureInfoParameters(args) - feature_json = {} + feature_json: CFG_DICT = {} geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j) # shrink geobox to point # Prepare to extract feature info if geobox_is_point(params.geobox): + # request geobox is already 1x1 geo_point_geobox = params.geobox else: + # Make a 1x1 pixel geobox geo_point_geobox = GeoBox.from_geopolygon( geo_point, params.geobox.resolution, crs=params.geobox.crs) tz = tz_for_geometry(geo_point_geobox.geographic_extent) @@ -458,7 +462,7 @@ def feature_info(args): with cube() as dc: if not dc: raise WMSException("Database connectivity failure") - all_time_datasets = stacker.datasets(dc.index, all_time=True, point=geo_point) + all_time_datasets = cast(xarray.DataArray, stacker.datasets(dc.index, all_time=True, point=geo_point)) # Taking the data as a single point so our indexes into the data should be 0,0 h_coord = cfg.published_CRSs[params.crsid]["horizontal_coord"] @@ -473,8 +477,11 @@ def feature_info(args): # Group datasets by time, load only datasets that match the idx_date global_info_written = False feature_json["data"] = [] - fi_date_index = {} - time_datasets = stacker.datasets(dc.index, all_flag_bands=True, point=geo_point) + fi_date_index: dict[datetime, RAW_CFG] = {} + time_datasets = cast( + dict[ProductBandQuery, xarray.DataArray], + stacker.datasets(dc.index, all_flag_bands=True, point=geo_point) + ) data = stacker.data(time_datasets, skip_corrections=True) if data is not None: for dt in data.time.values: @@ -499,13 +506,14 @@ def feature_info(args): # Capture lat/long coordinates feature_json["lon"], feature_json["lat"] = ptg.coords[0] - date_info = {} + 
date_info: CFG_DICT = {} - ds = None + ds: Dataset | None = None for pbq, dss in time_datasets.items(): if pbq.main: ds = dss.sel(time=dt).values.tolist()[0] break + assert ds is not None if params.product.multi_product: if "platform" in ds.metadata_doc: date_info["source_product"] = "%s (%s)" % (ds.type.name, ds.metadata_doc["platform"]["code"]) @@ -513,29 +521,31 @@ def feature_info(args): date_info["source_product"] = ds.type.name # Extract data pixel - pixel_ds = td.isel(**isel_kwargs) + pixel_ds: xarray.Dataset = td.isel(**isel_kwargs) # type: ignore[arg-type] # Get accurate timestamp from dataset + assert ds.time is not None # For type checker if params.product.time_resolution.is_summary(): date_info["time"] = ds.time.begin.strftime("%Y-%m-%d") else: date_info["time"] = dataset_center_time(ds).strftime("%Y-%m-%d %H:%M:%S %Z") # Collect raw band values for pixel and derived bands from styles - date_info["bands"] = _make_band_dict(params.product, pixel_ds) - derived_band_dict = _make_derived_band_dict(pixel_ds, params.product.style_index) + date_info["bands"] = cast(RAW_CFG, _make_band_dict(params.product, pixel_ds)) + derived_band_dict = cast(RAW_CFG, _make_derived_band_dict(pixel_ds, params.product.style_index)) if derived_band_dict: date_info["band_derived"] = derived_band_dict # Add any custom-defined fields. 
for k, f in params.product.feature_info_custom_includes.items(): date_info[k] = f(date_info["bands"]) - feature_json["data"].append(date_info) - fi_date_index[dt] = feature_json["data"][-1] + cast(list[RAW_CFG], feature_json["data"]).append(date_info) + fi_date_index[dt] = cast(dict[str, list[RAW_CFG]], feature_json)["data"][-1] feature_json["data_available_for_dates"] = [] pt_native = None for d in all_time_datasets.coords["time"].values: dt_datasets = all_time_datasets.sel(time=d) for ds in dt_datasets.values.item(): + assert ds is not None # For type checker if pt_native is None: pt_native = geo_point.to_crs(ds.crs) elif pt_native.crs != ds.crs: @@ -544,18 +554,21 @@ def feature_info(args): # tolist() converts a numpy datetime64 to a python datatime dt = Timestamp(stacker.group_by.group_by_func(ds)).to_pydatetime() if params.product.time_resolution.is_subday(): - feature_json["data_available_for_dates"].append(dt.isoformat()) + cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.isoformat()) else: - feature_json["data_available_for_dates"].append(dt.strftime("%Y-%m-%d")) + cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.strftime("%Y-%m-%d")) break if time_datasets: - feature_json["data_links"] = sorted(get_s3_browser_uris(time_datasets, pt_native, s3_url, s3_bucket)) + feature_json["data_links"] = cast( + RAW_CFG, + sorted(get_s3_browser_uris(time_datasets, pt_native, s3_url, s3_bucket))) else: feature_json["data_links"] = [] if params.product.feature_info_include_utc_dates: - unsorted_dates = [] + unsorted_dates: list[str] = [] for tds in all_time_datasets: for ds in tds.values.item(): + assert ds is not None and ds.time is not None # for type checker if params.product.time_resolution.is_solar(): unsorted_dates.append(ds.center_time.strftime("%Y-%m-%d")) elif params.product.time_resolution.is_subday(): @@ -566,7 +579,7 @@ def feature_info(args): d.center_time.strftime("%Y-%m-%d") for d in all_time_datasets) # --- End 
code section requiring datacube. - result = { + result: CFG_DICT = { "type": "FeatureCollection", "features": [ { @@ -585,12 +598,16 @@ def feature_info(args): return json_response(result, cfg) -def json_response(result, cfg=None): +def json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: if not cfg: cfg = get_config() + assert cfg is not None # for type checker return json.dumps(result), 200, cfg.response_headers({"Content-Type": "application/json"}) -def html_json_response(result, cfg): +def html_json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: + if not cfg: + cfg = get_config() + assert cfg is not None # for type checker html_content = render_template("html_feature_info.html", result=result) return html_content, 200, cfg.response_headers({"Content-Type": "text/html"}) diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py index c5375d362..9503b7ea5 100644 --- a/datacube_ows/loading.py +++ b/datacube_ows/loading.py @@ -176,7 +176,7 @@ def datasets(self, index: datacube.index.Index, | xarray.DataArray | Geometry | None - | Mapping[ProductBandQuery, PerPBQReturnType]): + | dict[ProductBandQuery, PerPBQReturnType]): if mode == MVSelectOpts.EXTENT or all_time: # Not returning datasets - use main product only queries = [ @@ -235,17 +235,17 @@ def datasets(self, index: datacube.index.Index, return cast(Geometry | None, result) return OrderedDict(results) - def create_nodata_filled_flag_bands(self, data, pbq): + def create_nodata_filled_flag_bands(self, data: xarray.Dataset, pbq: ProductBandQuery) -> xarray.Dataset: var = None for var in data.data_vars.variables.keys(): break if var is None: raise WMSException("Cannot add default flag data as there is no non-flag data available") - template = getattr(data, var) + template = cast(xarray.DataArray, getattr(data, cast(str, var))) data_new_bands = {} for band in pbq.bands: default_value = pbq.products[0].measurements[band].nodata - new_data = 
numpy.ndarray(template.shape, dtype="uint8") + new_data: numpy.ndarray = numpy.ndarray(template.shape, dtype="uint8") new_data.fill(default_value) qry_result = template.copy(data=new_data) data_new_bands[band] = qry_result @@ -255,10 +255,12 @@ def create_nodata_filled_flag_bands(self, data, pbq): return data @log_call - def data(self, datasets_by_query, skip_corrections=False): + def data(self, + datasets_by_query: dict[ProductBandQuery, xarray.DataArray], + skip_corrections=False) -> xarray.Dataset | None: # pylint: disable=too-many-locals, consider-using-enumerate # datasets is an XArray DataArray of datasets grouped by time. - data = None + data: xarray.Dataset | None = None for pbq, datasets in datasets_by_query.items(): if data is not None and len(data.time) == 0: # No data, so no need for masking data. @@ -269,6 +271,8 @@ def data(self, datasets_by_query, skip_corrections=False): qry_result = self.manual_data_stack(datasets, measurements, pbq.bands, skip_corrections, fuse_func=fuse_func) else: qry_result = self.read_data(datasets, measurements, self._geobox, resampling=self._resampling, fuse_func=fuse_func) + if qry_result is None: + continue if data is None: data = qry_result continue @@ -301,18 +305,24 @@ def data(self, datasets_by_query, skip_corrections=False): # Time-aware mask product has no data, but main product does. 
data = self.create_nodata_filled_flag_bands(data, pbq) continue + assert data is not None qry_result.coords["time"] = data.coords["time"] - data = xarray.combine_by_coords([data, qry_result], join="exact") + data = cast(xarray.Dataset, xarray.combine_by_coords([data, qry_result], join="exact")) return data @log_call - def manual_data_stack(self, datasets, measurements, bands, skip_corrections, fuse_func): + def manual_data_stack(self, + datasets: xarray.DataArray, + measurements: Mapping[str, datacube.model.Measurement], + bands: set[str], + skip_corrections: bool, + fuse_func: datacube.api.core.FuserFunction | None) -> xarray.Dataset | None: # pylint: disable=too-many-locals, too-many-branches # manual merge if self.style: - flag_bands = set(filter(lambda b: b in self.style.flag_bands, bands)) - non_flag_bands = set(filter(lambda b: b not in self.style.flag_bands, bands)) + flag_bands: Iterable[str] = set(filter(lambda b: b in self.style.flag_bands, bands)) # type: ignore[arg-type] + non_flag_bands: Iterable[str] = set(filter(lambda b: b not in self.style.flag_bands, bands)) #type: ignore[arg-type] else: non_flag_bands = bands flag_bands = set() @@ -354,7 +364,13 @@ def manual_data_stack(self, datasets, measurements, bands, skip_corrections, fus # Read data for given datasets and measurements per the output_geobox # TODO: Make skip_broken passed in via config @log_call - def read_data(self, datasets, measurements, geobox, skip_broken = True, resampling="nearest", fuse_func=None): + def read_data(self, + datasets: xarray.DataArray, + measurements: Mapping[str, datacube.model.Measurement], + geobox: GeoBox, + skip_broken: bool = True, + resampling: Resampling = "nearest", + fuse_func: datacube.api.core.FuserFunction | None = None) -> xarray.Dataset: CredentialManager.check_cred() try: return datacube.Datacube.load_data( @@ -368,10 +384,16 @@ def read_data(self, datasets, measurements, geobox, skip_broken = True, resampli except Exception as e: _LOG.error("Error (%s) 
in load_data: %s", e.__class__.__name__, str(e)) raise - # Read data for single datasets and measurements per the output_geobox + # TODO: Make skip_broken passed in via config @log_call - def read_data_for_single_dataset(self, dataset, measurements, geobox, skip_broken = True, resampling="nearest", fuse_func=None): + def read_data_for_single_dataset(self, + dataset: datacube.model.Dataset, + measurements: Mapping[str, datacube.model.Measurement], + geobox: GeoBox, + skip_broken: bool = True, + resampling: Resampling = "nearest", + fuse_func: datacube.api.core.FuserFunction | None = None) -> xarray.Dataset: datasets = [dataset] dc_datasets = datacube.Datacube.group_datasets(datasets, self._product.time_resolution.dataset_groupby()) CredentialManager.check_cred() diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index fc786c1eb..15bbd982e 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -158,7 +158,7 @@ def locale_band(self, name_alias): return b raise ConfigException(f"Unknown band: {name_alias} in layer {self.product.name}") - def band_label(self, name_alias): + def band_label(self, name_alias) -> str | None: canonical_name = self.band(name_alias) return self.read_local_metadata(canonical_name) From a0ee3a9981d8178d942e8be226babe1b38356945 Mon Sep 17 00:00:00 2001 From: Robbi Bishop-Taylor Date: Tue, 23 Apr 2024 16:41:25 +1000 Subject: [PATCH 13/29] Fix data links for products indexed with STAC metadata (#1011) (cherry picked from commit 9a4d8fd2685814bb49180e6d5fe238a5306a94fb) --- datacube_ows/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 401e1d117..c5bbb372e 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -356,7 +356,7 @@ def get_s3_browser_uris(datasets: dict[ProductBandQuery, xarray.DataArray], uris = list(chain.from_iterable(uris)) unique_uris = set(uris) - regex = 
re.compile(r"s3:\/\/(?P[a-zA-Z0-9_\-\.]+)\/(?P[\S]+)/[a-zA-Z0-9_\-\.]+.yaml") + regex = re.compile(r"s3:\/\/(?P[a-zA-Z0-9_\-\.]+)\/(?P[\S]+)/[a-zA-Z0-9_\-\.]+.(yaml|json)") # convert to browsable link def convert(uri: str) -> str: From f5ec62d018fa925cbd15b0bf9d9ab2f64dadd2d1 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 15:46:49 +1000 Subject: [PATCH 14/29] Typehints throughout config. --- datacube_ows/config_utils.py | 42 +-- datacube_ows/data.py | 6 +- datacube_ows/ogc_utils.py | 2 +- datacube_ows/ows_configuration.py | 515 ++++++++++++++++-------------- datacube_ows/styles/component.py | 4 +- datacube_ows/tile_matrix_sets.py | 64 ++-- 6 files changed, 355 insertions(+), 278 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 4b395a37d..6fbc3cae9 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -24,6 +24,7 @@ TYPE_CHECKING = False if TYPE_CHECKING: import datacube_ows.ows_configuration.OWSConfig + import datacube_ows.ows_configuration.AttributionCfg import datacube_ows.ows_configuration.OWSNamedLayer import datacube_ows.styles.base.StyleMask @@ -39,8 +40,8 @@ # inclusions defaulting to an empty list is dangerous, but note that it is never modified. # If modification of inclusions is a required, a copy (ninclusions) is made and modified instead. # pylint: disable=dangerous-default-value -def cfg_expand(cfg_unexpanded: RAW_CFG, - cwd: str | None = None, inclusions: list[str] = []) -> RAW_CFG: +def cfg_expand(cfg_unexpanded: CFG_DICT, + cwd: str | None = None, inclusions: list[str] = []) -> CFG_DICT: """ Recursively expand config inclusions. 
@@ -87,7 +88,10 @@ def cfg_expand(cfg_unexpanded: RAW_CFG, else: raise ConfigException("Unsupported inclusion type: %s" % str(cfg_unexpanded["type"])) else: - return {k: cfg_expand(v, cwd=cwd, inclusions=inclusions) for k, v in cfg_unexpanded.items()} + return { + k: cfg_expand(cast(CFG_DICT, v), cwd=cwd, inclusions=inclusions) + for k, v in cfg_unexpanded.items() + } elif isinstance(cfg_unexpanded, Sequence) and not isinstance(cfg_unexpanded, str): return [cfg_expand(elem, cwd=cwd, inclusions=inclusions) for elem in cfg_unexpanded] else: @@ -125,7 +129,7 @@ def load_json_obj(path: str) -> RAW_CFG: return json.load(json_file) -def import_python_obj(path: str) -> RAW_CFG: +def import_python_obj(path: str) -> CFG_DICT: """Imports a python dictionary by fully-qualified path :param: A fully qualified python path. @@ -137,7 +141,7 @@ def import_python_obj(path: str) -> RAW_CFG: obj = getattr(mod, obj_name) except (ImportError, ValueError, ModuleNotFoundError, AttributeError): raise ConfigException(f"Could not import python object: {path}") - return cast(RAW_CFG, obj) + return cast(CFG_DICT, obj) class ConfigException(Exception): @@ -284,7 +288,7 @@ class OWSMetadataConfig(OWSConfigEntry): # Inaccessible attributes to allow type checking abstract: str = "" - attribution: dict[str, str] = {} + attribution: Optional["datacube_ows.ows_configuration.AttributionCfg"] = None def get_obj_label(self) -> str: """Return the metadata path prefix for this object.""" @@ -406,7 +410,7 @@ def keywords(self) -> set[str]: return self._keywords @classmethod - def set_msg_src(cls, src: Catalog) -> None: + def set_msg_src(cls, src: Catalog | None) -> None: """ Allow all OWSMetadatConfig subclasses to share a common message catalog. 
:param src: A Message Catalog object @@ -663,7 +667,7 @@ def __init__(self, cfg: CFG_DICT, product_cfg: "datacube_ows.ows_configuration.O self.pq_band = str(cfg["band"]) self.canonical_band_name = self.pq_band # Update for aliasing on make_ready if "fuse_func" in cfg: - self.pq_fuse_func: Optional[FunctionWrapper] = FunctionWrapper(self.product, cast(Mapping[str, Any], cfg["fuse_func"])) + self.pq_fuse_func: Optional[FunctionWrapper] = FunctionWrapper(self.product, cast(CFG_DICT, cfg["fuse_func"])) else: self.pq_fuse_func = None self.pq_ignore_time = bool(cfg.get("ignore_time", False)) @@ -939,8 +943,8 @@ class FunctionWrapper: """ def __init__(self, - product_or_style_cfg: OWSExtensibleConfigEntry, - func_cfg: F | Mapping[str, Any], + product_or_style_cfg: OWSExtensibleConfigEntry | None, + func_cfg: str | CFG_DICT | F, stand_alone: bool = False) -> None: """ @@ -955,10 +959,10 @@ def __init__(self, if not stand_alone: raise ConfigException( "Directly including callable objects in configuration is no longer supported. Please reference callables by fully qualified name.") - self._func = func_cfg - self._args = [] - self._kwargs = {} - self.band_mapper = None + self._func: Callable = func_cfg + self._args: list[RAW_CFG] = [] + self._kwargs: CFG_DICT = {} + self.band_mapper: Callable[[str], str] | None = None self.pass_layer_cfg = False elif isinstance(func_cfg, str): self._func = get_function(func_cfg) @@ -973,10 +977,10 @@ def __init__(self, raise ConfigException( "Directly including callable objects in configuration is no longer supported. 
Please reference callables by fully qualified name.") else: - self._func = get_function(func_cfg["function"]) - self._args = func_cfg.get("args", []) - self._kwargs = func_cfg.get("kwargs", {}).copy() - self.pass_layer_cfg = func_cfg.get("pass_layer_cfg", False) + self._func = get_function(cast(str, func_cfg["function"])) + self._args = cast(list[RAW_CFG], func_cfg.get("args", [])) + self._kwargs = cast(CFG_DICT, func_cfg.get("kwargs", {})).copy() + self.pass_layer_cfg = bool(func_cfg.get("pass_layer_cfg", False)) if "pass_product_cfg" in func_cfg: _LOG.warning("WARNING: pass_product_cfg in function wrapper definitions has been renamed " "'mapped_bands'. Please update your config accordingly") @@ -1005,7 +1009,7 @@ def __call__(self, *args, **kwargs) -> Any: else: calling_args = self._args if kwargs and self._kwargs: - calling_kwargs = self._kwargs.copy() + calling_kwargs: dict[str, Any] = self._kwargs.copy() calling_kwargs.update(kwargs) elif kwargs: calling_kwargs = kwargs.copy() diff --git a/datacube_ows/data.py b/datacube_ows/data.py index c5bbb372e..42c9b2ae8 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -8,7 +8,7 @@ import re from datetime import date, datetime, timedelta from itertools import chain -from typing import Iterable, cast, Any, Mapping +from typing import cast, Any import numpy import numpy.ma @@ -465,8 +465,8 @@ def feature_info(args: dict[str, str]) -> FlaskResponse: all_time_datasets = cast(xarray.DataArray, stacker.datasets(dc.index, all_time=True, point=geo_point)) # Taking the data as a single point so our indexes into the data should be 0,0 - h_coord = cfg.published_CRSs[params.crsid]["horizontal_coord"] - v_coord = cfg.published_CRSs[params.crsid]["vertical_coord"] + h_coord = cast(str, cfg.published_CRSs[params.crsid]["horizontal_coord"]) + v_coord = cast(str, cfg.published_CRSs[params.crsid]["vertical_coord"]) s3_bucket = cfg.s3_bucket s3_url = cfg.s3_url isel_kwargs = { diff --git a/datacube_ows/ogc_utils.py 
b/datacube_ows/ogc_utils.py index 1eb3aca00..cd16809e7 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -198,7 +198,7 @@ def tz_for_geometry(geom: Geometry) -> datetime.tzinfo: return datetime.timezone(datetime.timedelta(hours=offset)) -def resp_headers(d: Mapping[str, str]) -> Mapping[str, str]: +def resp_headers(d: dict[str, str]) -> dict[str, str]: """ Take a dictionary of http response headers and all required response headers from the configuration. diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index 15bbd982e..a620540d2 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -19,21 +19,26 @@ from collections.abc import Mapping from enum import Enum from importlib import import_module -from typing import Optional, Sequence +from typing import Callable, Optional, cast, Union, Any, Iterable import numpy from babel.messages.catalog import Catalog from babel.messages.pofile import read_po from odc.geo import CRS +from odc.geo.geobox import GeoBox from ows import Version from slugify import slugify +from datacube import Datacube +from datacube.model import Measurement +from datacube.api.query import GroupBy + from datacube_ows.config_utils import (FlagProductBands, OWSConfigEntry, OWSEntryNotFound, OWSExtensibleConfigEntry, OWSFlagBand, OWSMetadataConfig, cfg_expand, get_file_loc, import_python_obj, - load_json_obj, ConfigException, FunctionWrapper) + load_json_obj, ConfigException, FunctionWrapper, CFG_DICT, RAW_CFG, F) from datacube_ows.cube_pool import ODCInitException, cube, get_cube from datacube_ows.ogc_utils import (create_geobox, local_solar_date_range) from datacube_ows.resource_limits import (OWSResourceManagementRules, @@ -46,10 +51,10 @@ _LOG = logging.getLogger(__name__) -def read_config(path=None): +def read_config(path: str | None = None) -> CFG_DICT: cwd = None if path: - cfg_env = path + cfg_env: str | None = path else: cfg_env = 
os.environ.get("DATACUBE_OWS_CFG") if not cfg_env: @@ -74,27 +79,27 @@ class BandIndex(OWSMetadataConfig): METADATA_TITLE = False METADATA_ABSTRACT = False - def __init__(self, layer, band_cfg): + def __init__(self, layer: "OWSNamedLayer", band_cfg: CFG_DICT): if band_cfg is None: band_cfg = {} super().__init__(band_cfg) - self.band_cfg = band_cfg + self.band_cfg = cast(dict[str, list[str]], band_cfg) self.product = layer self.product_name = layer.name self.parse_metadata(band_cfg) - self._idx = {} + self._idx: dict[str, str] = {} self.add_aliases(self.band_cfg) self.declare_unready("_nodata_vals") self.declare_unready("measurements") self.declare_unready("_dtypes") - def global_config(self): + def global_config(self) -> "OWSConfig": return self.product.global_config() - def get_obj_label(self): + def get_obj_label(self) -> str: return self.product.get_obj_label() + ".bands" - def add_aliases(self, cfg): + def add_aliases(self, cfg: dict[str, list[str]]) -> None: for b, aliases in cfg.items(): if b in self._idx: raise ConfigException(f"Duplicate band name/alias: {b} in layer {self.product_name}") @@ -104,17 +109,19 @@ def add_aliases(self, cfg): raise ConfigException(f"Duplicate band name/alias: {a} in layer {self.product_name}") self._idx[a] = b - def make_ready(self, dc, *args, **kwargs): - def floatify_nans(inp): + def make_ready(self, dc: Datacube, *args, **kwargs) -> None: + def floatify_nans(inp: float | int | str) -> float | int: if isinstance(inp, str) and inp == "nan": return float(inp) + elif isinstance(inp, str): + raise ValueError("Invalid nodata value: {inp}") else: return inp default_to_all = not bool(self._raw_cfg) # pylint: disable=attribute-defined-outside-init - self.measurements = {} - self._nodata_vals = {} - self._dtypes = {} + self.measurements: dict[str, Measurement] = {} + self._nodata_vals: dict[str, int | float] = {} + self._dtypes: dict[str, numpy.dtype] = {} first_product = True for product in self.product.products: if first_product and 
default_to_all: @@ -123,7 +130,9 @@ def floatify_nans(inp): self.band_cfg[b] = [b] self.add_aliases(self.band_cfg) try: - prod_measurements = product.lookup_measurements(list(self.band_cfg.keys())) + prod_measurements = cast( + dict[str, Measurement], product.lookup_measurements(list(self.band_cfg.keys())) + ) if first_product: self.measurements = prod_measurements self._nodata_vals = {name: floatify_nans(model.nodata) for name, model in self.measurements.items()} @@ -143,12 +152,12 @@ def floatify_nans(inp): first_product = False super().make_ready(dc, *args, **kwargs) - def band(self, name_alias): + def band(self, name_alias: str) -> str: if name_alias in self._idx: return self._idx[name_alias] raise ConfigException(f"Unknown band name/alias: {name_alias} in layer {self.product.name}") - def locale_band(self, name_alias): + def locale_band(self, name_alias: str) -> str: try: return self.band(name_alias) except ConfigException: @@ -158,55 +167,55 @@ def locale_band(self, name_alias): return b raise ConfigException(f"Unknown band: {name_alias} in layer {self.product.name}") - def band_label(self, name_alias) -> str | None: + def band_label(self, name_alias) -> str: canonical_name = self.band(name_alias) - return self.read_local_metadata(canonical_name) + return cast(str, self.read_local_metadata(canonical_name)) - def nodata_val(self, name_alias): + def nodata_val(self, name_alias: str) -> float | int: name = self.band(name_alias) return self._nodata_vals[name] - def dtype_val(self, name_alias): + def dtype_val(self, name_alias: str) -> numpy.dtype: name = self.band(name_alias) return self._dtypes[name] - def dtype_size(self, name_alias): + def dtype_size(self, name_alias: str) -> int: return self.dtype_val(name_alias).itemsize - def band_labels(self): + def band_labels(self) -> list[str]: return [self.band_label(b) for b in self.band_cfg] - def band_nodata_vals(self): + def band_nodata_vals(self) -> list[int | float]: return [self.nodata_val(b) for b in 
self.band_cfg if b in self.band_cfg] class AttributionCfg(OWSConfigEntry): - def __init__(self, cfg, owner): + def __init__(self, cfg: CFG_DICT, owner: Union["OWSConfig", "OWSLayer"]): super().__init__(cfg) self.owner = owner - self.url = cfg.get("url") - logo = cfg.get("logo") + self.url = cast(str | None, cfg.get("url")) + logo = cast(dict[str, str] | None, cfg.get("logo")) if not self.title and not self.url and not logo: raise ConfigException("At least one of title, url and logo is required in an attribution definition") if not logo: - self.logo_width = None - self.logo_height = None - self.logo_url = None - self.logo_fmt = None + self.logo_width: int | None = None + self.logo_height: int | None = None + self.logo_url: str | None = None + self.logo_fmt: str | None = None else: - self.logo_width = logo.get("width") - self.logo_height = logo.get("height") - self.logo_url = logo.get("url") - self.logo_fmt = logo.get("format") + self.logo_width = cast(int | None, logo.get("width")) + self.logo_height = cast(int | None, logo.get("height")) + self.logo_url = cast(str | None, logo.get("url")) + self.logo_fmt = cast(str | None, logo.get("format")) if not self.logo_url or not self.logo_fmt: raise ConfigException("url and format must both be specified in an attribution logo.") @property - def title(self): - return self.owner.attribution_title + def title(self) -> str: + return self.owner.attribution_title # ???? 
@classmethod - def parse(cls, cfg, owner): + def parse(cls, cfg: CFG_DICT | None, owner: Union["OWSConfig", "OWSLayer"]) -> Optional["AttributionCfg"]: if not cfg: return None else: @@ -215,13 +224,13 @@ def parse(cls, cfg, owner): class SuppURL(OWSConfigEntry): @classmethod - def parse_list(cls, cfg): + def parse_list(cls, cfg: list[dict[str, str]] | None) -> list["SuppURL"]: if not cfg: return [] return [cls(u) for u in cfg] - def __init__(self, cfg): - super().__init__(cfg) + def __init__(self, cfg: dict[str, str]): + super().__init__(cast(RAW_CFG, cfg)) self.url = cfg["url"] self.format = cfg["format"] @@ -231,37 +240,40 @@ class OWSLayer(OWSMetadataConfig): METADATA_ATTRIBUTION = True named = False - def __init__(self, cfg, object_label, parent_layer=None, **kwargs): + def __init__(self, cfg: CFG_DICT, object_label: str, parent_layer: Optional["OWSLayer"]=None, **kwargs): super().__init__(cfg, **kwargs) self.object_label = object_label - self.global_cfg = kwargs["global_cfg"] + self.global_cfg: "OWSConfig" = kwargs["global_cfg"] self.parent_layer = parent_layer self.parse_metadata(cfg) # Inherit or override attribution if "attribution" in cfg: - self.attribution = AttributionCfg.parse(cfg.get("attribution"), self) + self.attribution = AttributionCfg.parse( # type: ignore[assignment] + cast(CFG_DICT | None, cfg.get("attribution")), + self + ) elif parent_layer: - self.attribution = self.parent_layer.attribution + self.attribution = cast(OWSLayer, self.parent_layer).attribution else: self.attribution = self.global_cfg.attribution - def global_config(self): + def global_config(self) -> "OWSConfig": return self.global_cfg - def can_inherit_from(self): + def can_inherit_from(self) -> Union["OWSConfig", "OWSLayer"]: if self.parent_layer: return self.parent_layer else: return self.global_cfg - def get_obj_label(self): + def get_obj_label(self) -> str: return self.object_label - def layer_count(self): + def layer_count(self) -> int: return 0 - def 
unready_layer_count(self): + def unready_layer_count(self) -> int: return 0 def __str__(self): @@ -269,7 +281,9 @@ def __str__(self): class OWSFolder(OWSLayer): - def __init__(self, cfg, global_cfg, parent_layer=None, sibling=0, **kwargs): + def __init__(self, cfg: CFG_DICT, global_cfg: "OWSConfig", + parent_layer: Optional["OWSFolder"] = None, + sibling: int = 0, **kwargs): if "label" in cfg: obj_lbl = f"folder.{cfg['label']}" elif parent_layer: @@ -280,13 +294,13 @@ def __init__(self, cfg, global_cfg, parent_layer=None, sibling=0, **kwargs): raise ConfigException(f"Duplicate folder label: {obj_lbl}") super().__init__(cfg, parent_layer=parent_layer, object_label=obj_lbl, global_cfg=global_cfg, **kwargs) self.slug_name = slugify(self.title, separator="_") - self.unready_layers = [] - self.child_layers = [] + self.unready_layers: list[OWSLayer] = [] + self.child_layers: list[OWSLayer] = [] if "layers" not in cfg: raise ConfigException("No layers section in folder layer %s" % self.title) child = 0 - for lyr_cfg in cfg["layers"]: - if isinstance(lyr_cfg, Mapping): + for lyr_cfg in cast(list[RAW_CFG], cfg["layers"]): + if isinstance(lyr_cfg, dict): try: lyr = parse_ows_layer(lyr_cfg, global_cfg=global_cfg, parent_layer=self, sibling=child) self.unready_layers.append(lyr) @@ -299,13 +313,13 @@ def __init__(self, cfg, global_cfg, parent_layer=None, sibling=0, **kwargs): _LOG.error("Non-dictionary where dictionary expected - check for trailing comma? 
%s...", repr(lyr_cfg)[0:50]) global_cfg.folder_index[obj_lbl] = self - def unready_layer_count(self): + def unready_layer_count(self) -> int: return sum([l.layer_count() for l in self.unready_layers]) - def layer_count(self): + def layer_count(self) -> int: return sum([l.layer_count() for l in self.child_layers]) - def make_ready(self, dc, *args, **kwargs): + def make_ready(self, dc: Datacube, *args, **kwargs) -> None: still_unready = [] for lyr in self.unready_layers: try: @@ -336,7 +350,7 @@ def allow_mosaic(self) -> bool: return not self.is_subday() @classmethod - def parse(cls, cfg: Optional[str]) -> Optional["TimeRes"]: + def parse(cls, cfg: str | None) -> Optional["TimeRes"]: if cfg is None: cfg = "solar" elif cfg == "raw": @@ -350,11 +364,13 @@ def parse(cls, cfg: Optional[str]) -> Optional["TimeRes"]: except ValueError: return None - def search_times(self, t, geobox=None): + def search_times(self, + t: datetime.datetime, + geobox: GeoBox | None = None) -> datetime.datetime | tuple[datetime.datetime, datetime.datetime]: if self.is_solar(): if geobox is None: raise ValueError("Solar time resolution search_times requires a geobox.") - times = local_solar_date_range(geobox, t) + times: datetime.datetime | tuple[datetime.datetime, datetime.datetime] = local_solar_date_range(geobox, t) elif self.is_subday(): # For subday products, return a single start datetime instead of a range. # mv_index will expand this to a one-second search range. 
@@ -368,7 +384,7 @@ def search_times(self, t, geobox=None): return times - def dataset_groupby(self, product_names: list[str] | None = None, is_mosaic=False): + def dataset_groupby(self, product_names: list[str] | None = None, is_mosaic=False) -> GroupBy: if self.is_subday(): return group_by_begin_datetime(product_names, truncate_dates=False) elif is_mosaic: @@ -386,13 +402,13 @@ class OWSNamedLayer(OWSExtensibleConfigEntry, OWSLayer): INDEX_KEYS = ["layer"] named = True - def __init__(self, cfg, global_cfg, parent_layer=None, **kwargs): - name = cfg["name"] + def __init__(self, cfg: CFG_DICT, global_cfg: "OWSConfig", parent_layer: OWSFolder | None = None, **kwargs): + name = cast(str, cfg["name"]) super().__init__(cfg, object_label=f"layer.{name}", global_cfg=global_cfg, parent_layer=parent_layer, keyvals={"layer": name}, **kwargs) self.name = name - cfg = self._raw_cfg + cfg = cast(CFG_DICT, self._raw_cfg) self.hide = False try: self.parse_product_names(cfg) @@ -415,45 +431,60 @@ def __init__(self, cfg, global_cfg, parent_layer=None, **kwargs): self.declare_unready("definition") if global_cfg.user_band_math_extension: - self.user_band_math = cfg.get("user_band_math", False) + self.user_band_math = bool(cfg.get("user_band_math", False)) else: self.user_band_math = False - self.time_resolution = TimeRes.parse(cfg.get("time_resolution")) - if not self.time_resolution: + tr = TimeRes.parse(cast(str | None, cfg.get("time_resolution"))) + if not tr: raise ConfigException(f"Invalid time resolution value {cfg['time_resolution']} in named layer {self.name}") + else: + self.time_resolution: TimeRes = tr self.mosaic_date_func: FunctionWrapper | None = None if "mosaic_date_func" in cfg: - self.mosaic_date_func = FunctionWrapper(self, cfg["mosaic_date_func"]) + self.mosaic_date_func = FunctionWrapper(self, cast(CFG_DICT, cfg["mosaic_date_func"])) if self.mosaic_date_func and not self.time_resolution.allow_mosaic(): raise ConfigException(f"Mosaic date function not supported 
for {self.time_resolution} time resolution.") - self.default_time_rule = cfg.get("default_time", DEF_TIME_LATEST) - if self.default_time_rule not in (DEF_TIME_LATEST, DEF_TIME_EARLIEST): + dtr: str = cast(str, cfg.get("default_time", DEF_TIME_LATEST)) + if dtr in (DEF_TIME_LATEST, DEF_TIME_EARLIEST): + self.default_time_rule: str | datetime.datetime | datetime.date = dtr + else: try: - self.default_time_rule = datetime.date.fromisoformat(self.default_time_rule) + if self.time_resolution.is_subday(): + self.default_time_rule = datetime.datetime.fromisoformat(dtr) + else: + self.default_time_rule = datetime.date.fromisoformat(dtr) except ValueError: raise ConfigException( - f"Invalid default_time value in named layer {self.name} ({self.default_time_rule})" + f"Invalid default_time value in named layer {self.name} ({dtr})" ) - self.time_axis = cfg.get("time_axis") + self.time_axis = cast(CFG_DICT | None, cfg.get("time_axis")) if self.time_axis: + if self.time_resolution.is_subday(): + raise ConfigException(f"Regular time axis is not supported for sub-day time resolutions") self.regular_time_axis = True if "time_interval" not in self.time_axis: raise ConfigException("No time_interval supplied in time_axis") - self.time_axis_interval = self.time_axis["time_interval"] - if not isinstance(self.time_axis_interval, int): + time_axis_interval = self.time_axis["time_interval"] + if isinstance(time_axis_interval, int): + self.time_axis_interval: int = time_axis_interval + else: raise ConfigException("time_interval must be an integer") if self.time_axis_interval <= 0: raise ConfigException("time_interval must be greater than zero") - self.time_axis_start = self.time_axis.get("start_date") - self.time_axis_end = self.time_axis.get("end_date") - if self.time_axis_start is not None: + time_axis_start = cast(str | None, self.time_axis.get("start_date")) + time_axis_end = cast(str | None, self.time_axis.get("end_date")) + if time_axis_start is None: + self.time_axis_start: 
datetime.date | None = None + else: try: - self.time_axis_start = datetime.date.fromisoformat(self.time_axis_start) + self.time_axis_start = datetime.date.fromisoformat(time_axis_start) except ValueError: raise ConfigException("time_axis start_date is not a valid ISO format date string") - if self.time_axis_end is not None: + if time_axis_end is None: + self.time_axis_end: datetime.date | None = None + else: try: - self.time_axis_end = datetime.date.fromisoformat(self.time_axis_end) + self.time_axis_end = datetime.date.fromisoformat(time_axis_end) except ValueError: raise ConfigException("time_axis end_date is not a valid ISO format date string") if (self.time_axis_end is not None @@ -472,40 +503,42 @@ def __init__(self, cfg, global_cfg, parent_layer=None, **kwargs): self.declare_unready("_ranges") self.declare_unready("bboxes") # TODO: sub-ranges - self.band_idx = BandIndex(self, cfg.get("bands")) + self.band_idx: BandIndex = BandIndex(self, cast(CFG_DICT, cfg.get("bands"))) self.cfg_native_resolution = cfg.get("native_resolution") self.cfg_native_crs = cfg.get("native_crs") self.declare_unready("resolution_x") self.declare_unready("resolution_y") - self.resource_limits = OWSResourceManagementRules(self.global_cfg, cfg.get("resource_limits", {}), f"Layer {self.name}") + self.resource_limits = OWSResourceManagementRules(self.global_cfg, + cast(CFG_DICT, cfg.get("resource_limits", {})), + f"Layer {self.name}") try: - self.parse_flags(cfg.get("flags", {})) + self.parse_flags(cast(CFG_DICT, cfg.get("flags", {}))) self.declare_unready("all_flag_band_names") except KeyError as e: raise ConfigException(f"Missing required config ({str(e)}) in flags section for layer {self.name}") try: - self.parse_image_processing(cfg["image_processing"]) + self.parse_image_processing(cast(CFG_DICT, cfg["image_processing"])) except KeyError as e: raise ConfigException(f"Missing required config ({str(e)}) in image processing section for layer {self.name}") - self.identifiers = 
cfg.get("identifiers", {}) + self.identifiers = cast(dict[str, str], cfg.get("identifiers", {})) for auth in self.identifiers.keys(): if auth not in self.global_cfg.authorities: raise ConfigException(f"Identifier with non-declared authority: {auth} in layer {self.name}") - self.parse_urls(cfg.get("urls", {})) - self.parse_feature_info(cfg.get("feature_info", {})) + self.parse_urls(cast(CFG_DICT, cfg.get("urls", {}))) + self.parse_feature_info(cast(CFG_DICT, cfg.get("feature_info", {}))) self.feature_info_include_utc_dates = cfg.get("feature_info_url_dates", False) if "patch_url_function" in cfg: - self.patch_url = FunctionWrapper(self, cfg["patch_url_function"]) + self.patch_url: FunctionWrapper | None = FunctionWrapper(self, cast(CFG_DICT, cfg["patch_url_function"])) else: self.patch_url = None try: - self.parse_styling(cfg["styling"]) + self.parse_styling(cast(CFG_DICT, cfg["styling"])) except KeyError as e: raise ConfigException(f"Missing required config item {e} in styling section for layer {self.name}") if self.global_cfg.wcs: try: - self.parse_wcs(cfg.get("wcs", {})) + self.parse_wcs(cast(CFG_DICT | bool, cfg.get("wcs", {}))) except KeyError as e: raise ConfigException(f"Missing required config item {e} in wcs section for layer {self.name}") @@ -522,14 +555,14 @@ def __init__(self, cfg, global_cfg, parent_layer=None, **kwargs): raise ConfigException(f"Duplicate layer name: {self.name}") self.global_cfg.product_index[self.name] = self - def time_axis_representation(self): + def time_axis_representation(self) -> str: if self.regular_time_axis: start, end = self.time_range(self.ranges) return f"{start.isoformat()}/{end.isoformat()}/P{self.time_axis_interval}D" return "" # pylint: disable=attribute-defined-outside-init - def make_ready(self, dc, *args, **kwargs): + def make_ready(self, dc: Datacube, *args, **kwargs): self.products = [] self.low_res_products = [] for i, prod_name in enumerate(self.product_names): @@ -551,7 +584,7 @@ def make_ready(self, dc, *args, 
**kwargs): self.force_range_update(dc) self.band_idx.make_ready(dc) self.resource_limits.make_ready(dc) - self.all_flag_band_names = set() + self.all_flag_band_names: set[str] = set() for fb in self.flag_bands.values(): fb.make_ready(dc) if fb.pq_band in self.all_flag_band_names: @@ -572,38 +605,45 @@ def make_ready(self, dc, *args, **kwargs): super().make_ready(dc, *args, **kwargs) # pylint: disable=attribute-defined-outside-init - def parse_image_processing(self, cfg): + def parse_image_processing(self, cfg: CFG_DICT): emf_cfg = cfg["extent_mask_func"] - if isinstance(emf_cfg, Mapping) or isinstance(emf_cfg, str): - self.extent_mask_func = [FunctionWrapper(self, emf_cfg)] + if isinstance(emf_cfg, dict) or isinstance(emf_cfg, str): + self.extent_mask_func = [FunctionWrapper(self, emf_cfg)] # type:ignore[type-var] else: - self.extent_mask_func = list([FunctionWrapper(self, emf) for emf in emf_cfg]) + self.extent_mask_func = [ + FunctionWrapper(self, emf) for emf in cast(list[CFG_DICT | str], emf_cfg) # type: ignore[type-var] + ] self.raw_afb = cfg.get("always_fetch_bands", []) self.declare_unready("always_fetch_bands") - self.solar_correction = cfg.get("apply_solar_corrections", False) - self.data_manual_merge = cfg.get("manual_merge", False) + self.solar_correction = bool(cfg.get("apply_solar_corrections", False)) + self.data_manual_merge = bool(cfg.get("manual_merge", False)) if self.solar_correction and not self.data_manual_merge: raise ConfigException("Solar correction requires manual_merge.") if self.data_manual_merge and not self.solar_correction and not self.multi_product: _LOG.warning("Manual merge is only recommended where solar correction is required and for multi-product layers.") if cfg.get("fuse_func"): - self.fuse_func = FunctionWrapper(self, cfg["fuse_func"]) + self.fuse_func: FunctionWrapper | None = FunctionWrapper( + self, + cast(str | CFG_DICT, cfg["fuse_func"]) + ) # type:ignore[type-var] else: self.fuse_func = None # pylint: 
disable=attribute-defined-outside-init - def ready_image_processing(self, dc): - self.always_fetch_bands = list([self.band_idx.band(b) for b in self.raw_afb]) + def ready_image_processing(self, dc: Datacube) -> None: + self.always_fetch_bands = list([self.band_idx.band(b) for b in cast(list[str], self.raw_afb)]) # pylint: disable=attribute-defined-outside-init - def parse_feature_info(self, cfg): - self.feature_info_include_utc_dates = cfg.get("include_utc_dates", False) - custom = cfg.get("include_custom", {}) - self.feature_info_custom_includes = {k: FunctionWrapper(self, v) for k, v in custom.items()} + def parse_feature_info(self, cfg: CFG_DICT): + self.feature_info_include_utc_dates = bool(cfg.get("include_utc_dates", False)) + custom = cast(dict[str, CFG_DICT | str], cfg.get("include_custom", {})) + self.feature_info_custom_includes = { + k: FunctionWrapper(self, v) for k, v in custom.items() # type:ignore[type-var] + } # pylint: disable=attribute-defined-outside-init - def parse_flags(self, cfg): + def parse_flags(self, cfg: CFG_DICT): self.flag_bands = {} if cfg: if isinstance(cfg, dict): @@ -614,7 +654,7 @@ def parse_flags(self, cfg): for fb_cfg in cfg: fb = OWSFlagBand(fb_cfg, self) self.flag_bands[fb.pq_band] = fb - pq_names_to_lowres_names = {} + pq_names_to_lowres_names: dict[list[str], list[str]] = {} for fb in self.flag_bands.values(): pns = fb.pq_names lrpns = fb.pq_low_res_names @@ -625,36 +665,36 @@ def parse_flags(self, cfg): self.allflag_productbands = FlagProductBands.build_list_from_flagbands(self.flag_bands.values(), self) # pylint: disable=attribute-defined-outside-init - def parse_urls(self, cfg): - self.feature_list_urls = SuppURL.parse_list(cfg.get("features", [])) - self.data_urls = SuppURL.parse_list(cfg.get("data", [])) + def parse_urls(self, cfg: CFG_DICT): + self.feature_list_urls = SuppURL.parse_list(cast(list[dict[str, str]], cfg.get("features", []))) + self.data_urls = SuppURL.parse_list(cast(list[dict[str, str]], cfg.get("data", 
[]))) # pylint: disable=attribute-defined-outside-init - def parse_styling(self, cfg): + def parse_styling(self, cfg: CFG_DICT): self.styles = [] self.style_index = {} - for scfg in cfg["styles"]: + for scfg in cast(list[CFG_DICT], cfg["styles"]): style = StyleDef(self, scfg) self.styles.append(style) self.style_index[style.name] = style if "default_style" in cfg: if cfg["default_style"] not in self.style_index: raise ConfigException(f"Default style {cfg['default_style']} is not in the 'styles' for layer {self.name}") - self.default_style = self.style_index[cfg["default_style"]] + self.default_style = self.style_index[cast(str, cfg["default_style"])] else: self.default_style = self.styles[0] # pylint: disable=attribute-defined-outside-init - def parse_wcs(self, cfg): + def parse_wcs(self, cfg: CFG_DICT | bool): if cfg == False: self.wcs = False elif not self.global_cfg.wcs: self.wcs = False else: - self.wcs = not cfg.get("disable", False) + self.wcs = not cast(CFG_DICT, cfg).get("disable", False) if not self.wcs: return - + assert isinstance(cfg, dict) if "native_resolution" in cfg: if not self.cfg_native_resolution: _LOG.warning( @@ -761,7 +801,7 @@ def ready_native_specs(self): self.name, repr(self.cfg_native_resolution), self.resolution_x, self.resolution_y) # pylint: disable=attribute-defined-outside-init - def ready_wcs(self, dc): + def ready_wcs(self, dc: Datacube): if self.global_cfg.wcs and self.wcs: # Prepare Rectified Grids @@ -830,22 +870,22 @@ def ready_wcs(self, dc): ) } - def parse_product_names(self, cfg): + def parse_product_names(self, cfg: CFG_DICT): raise NotImplementedError() - def parse_pq_names(self, cfg): + def parse_pq_names(self, cfg: CFG_DICT): raise NotImplementedError() - def force_range_update(self, ext_dc=None): + def force_range_update(self, ext_dc: Datacube | None = None) -> None: if ext_dc: - dc = ext_dc + dc: Datacube | None = ext_dc else: dc = get_cube() + assert dc is not None self.hide = False - self._ranges = None try: from 
datacube_ows.product_ranges import get_ranges - self._ranges = get_ranges(dc, self) + self._ranges: dict[str, Any] = get_ranges(dc, self) if self._ranges is None: raise Exception("Null product range") self.bboxes = self.extract_bboxes() @@ -871,7 +911,7 @@ def force_range_update(self, ext_dc=None): self.hide = True self.bboxes = {} - def time_range(self, ranges=None): + def time_range(self, ranges: dict[str, Any] | None = None): if ranges is None: ranges = self.ranges if self.regular_time_axis and self.time_axis_start: @@ -885,12 +925,12 @@ def time_range(self, ranges=None): return (start, end) @property - def ranges(self): + def ranges(self) -> dict[str, Any]: if self.dynamic: self.force_range_update() return self._ranges - def extract_bboxes(self): + def extract_bboxes(self) -> dict[str, Any]: if self._ranges is None: return {} bboxes = {} @@ -907,7 +947,7 @@ def extract_bboxes(self): } return bboxes - def layer_count(self): + def layer_count(self) -> int: return 1 def search_times(self, t, geobox=None): @@ -920,14 +960,14 @@ def search_times(self, t, geobox=None): ) return self.time_resolution.search_times(t, geobox) - def dataset_groupby(self): + def dataset_groupby(self) -> GroupBy: return self.time_resolution.dataset_groupby(is_mosaic=self.mosaic_date_func is not None) def __str__(self): return "Named OWSLayer: %s" % self.name @classmethod - def lookup_impl(cls, cfg, keyvals, subs=None): + def lookup_impl(cls, cfg: "OWSConfig", keyvals: dict[str, str], subs: CFG_DICT | None = None): try: return cfg.product_index[keyvals["layer"]] except KeyError: @@ -937,13 +977,13 @@ def lookup_impl(cls, cfg, keyvals, subs=None): class OWSProductLayer(OWSNamedLayer): multi_product = False - def parse_product_names(self, cfg): - self.product_name = cfg["product_name"] - self.product_names = (self.product_name,) + def parse_product_names(self, cfg: CFG_DICT): + self.product_name = cast(str, cfg["product_name"]) + self.product_names: tuple[str, ...] 
= (self.product_name,) - self.low_res_product_name = cfg.get("low_res_product_name") + self.low_res_product_name = cast(str, cfg.get("low_res_product_name")) if self.low_res_product_name: - self.low_res_product_names = (self.low_res_product_name,) + self.low_res_product_names: tuple[str, ...] = (self.low_res_product_name,) else: self.low_res_product_names = tuple() if "product_names" in cfg: @@ -951,18 +991,18 @@ def parse_product_names(self, cfg): if "low_res_product_names" in cfg: raise ConfigException(f"'low_res_product_names' entry in non-multi-product layer {self.name} - use 'low_res_product_name' only") - def parse_pq_names(self, cfg): + def parse_pq_names(self, cfg: CFG_DICT): main_product = False if "dataset" in cfg: raise ConfigException(f"The 'dataset' entry in the flags section is no longer supported. Please refer to the documentation for the correct format (layer {self.name})") if "product" in cfg: - pq_names = (cfg["product"],) + pq_names: tuple[str, ...] = (cast(str, cfg["product"]),) else: pq_names = (self.product_name,) main_product = (pq_names[0] == self.product_name) if "low_res_product" in cfg: - pq_low_res_names = (cfg.get("low_res_product"),) + pq_low_res_names: tuple[str, ...] 
= (cast(str, cfg.get("low_res_product")),) elif main_product: pq_low_res_names = self.low_res_product_names else: @@ -982,12 +1022,12 @@ def parse_pq_names(self, cfg): class OWSMultiProductLayer(OWSNamedLayer): multi_product = True - def parse_product_names(self, cfg): - self.product_names = tuple(cfg["product_names"]) + def parse_product_names(self, cfg: CFG_DICT): + self.product_names = tuple(cast(list[str], cfg["product_names"])) self.product_name = self.product_names[0] - self.low_res_product_names = tuple(cfg.get("low_res_product_names", [])) + self.low_res_product_names = tuple(cast(list[str], cfg.get("low_res_product_names", []))) if self.low_res_product_names: - self.low_res_product_name = self.low_res_product_names[0] + self.low_res_product_name: str | None = self.low_res_product_names[0] else: self.low_res_product_name = None if "product_name" in cfg: @@ -995,19 +1035,19 @@ def parse_product_names(self, cfg): if "low_res_product_name" in cfg: raise ConfigException(f"'low_res_product_name' entry in multi-product layer {self.name} - use 'low_res_product_names' only") - def parse_pq_names(self, cfg): + def parse_pq_names(self, cfg: CFG_DICT): main_products = False if "datasets" in cfg: raise ConfigException(f"The 'datasets' entry in the flags section is no longer supported. 
Please refer to the documentation for the correct format (layer {self.name})") if "products" in cfg: - pq_names = tuple(cfg["products"]) + pq_names = tuple(cast(list[str], cfg["products"])) main_products = pq_names == self.product_names else: main_products = True pq_names = self.product_names if "low_res_products" in cfg: - pq_low_res_names = tuple(cfg["low_res_products"]) + pq_low_res_names = tuple(cast(list[str], cfg["low_res_products"])) else: pq_low_res_names = self.low_res_product_names if "product" in cfg: @@ -1020,11 +1060,16 @@ def parse_pq_names(self, cfg): "main_products": main_products, } - def dataset_groupby(self): - return self.time_resolution.dataset_groupby(self.product_names, is_mosaic=self.mosaic_date_func is not None) + def dataset_groupby(self) -> GroupBy: + return self.time_resolution.dataset_groupby( + list(self.product_names), + is_mosaic=self.mosaic_date_func is not None) -def parse_ows_layer(cfg, global_cfg, parent_layer=None, sibling=0): +def parse_ows_layer(cfg: CFG_DICT, + global_cfg: "OWSConfig", + parent_layer: OWSFolder | None = None, + sibling: int = 0) -> OWSLayer: if cfg.get("name", None): if cfg.get("multi_product", False): return OWSMultiProductLayer(cfg, global_cfg, parent_layer) @@ -1036,23 +1081,22 @@ def parse_ows_layer(cfg, global_cfg, parent_layer=None, sibling=0): class WCSFormat: @staticmethod - def from_cfg(cfg): - renderers = [] + def from_cfg(cfg: dict[str, CFG_DICT]) -> list["WCSFormat"]: + renderers: list[WCSFormat] = [] for name, fmt in cfg.items(): if "renderers" in fmt: renderers.append( WCSFormat( name, - fmt["mime"], - fmt["extension"], - fmt["renderers"], - fmt.get("multi-time", False) + cast(str, fmt["mime"]), + cast(str, fmt["extension"]), + cast(dict[int, CFG_DICT], fmt["renderers"]), + bool(fmt.get("multi-time", False)) ) ) return renderers - def __init__(self, name, mime, extension, renderers, - multi_time): + def __init__(self, name: str, mime: str, extension: str, renderers: dict[int, CFG_DICT], 
multi_time: bool): self.name = name self.mime = mime self.extension = extension @@ -1075,14 +1119,14 @@ def renderer(self, version): class ContactInfo(OWSConfigEntry): - def __init__(self, cfg, global_cfg): + def __init__(self, cfg: CFG_DICT, global_cfg: "OWSConfig"): super().__init__(cfg) self.global_cfg = global_cfg self.person = cfg.get("person") class Address(OWSConfigEntry): - def __init__(self, cfg): - super().__init__(cfg) + def __init__(self, cfg: dict[str, str]): + super().__init__(cast(CFG_DICT, cfg)) self.type = cfg.get("type") self.address = cfg.get("address") self.city = cfg.get("city") @@ -1091,27 +1135,27 @@ def __init__(self, cfg): self.country = cfg.get("country") @classmethod - def parse(cls, cfg): + def parse(cls, cfg: dict[str, str] | None) -> Optional["Address"]: if not cfg: return None else: return cls(cfg) - self.address = Address.parse(cfg.get("address")) - self.telephone = cfg.get("telephone") - self.fax = cfg.get("fax") - self.email = cfg.get("email") + self.address = Address.parse(cast(dict[str, str] | None, cfg.get("address"))) + self.telephone = cast(str | None, cfg.get("telephone")) + self.fax = cast(str | None, cfg.get("fax")) + self.email = cast(str | None, cfg.get("email")) @property - def organisation(self): + def organisation(self) -> str | None: return self.global_cfg.contact_org @property - def position(self): + def position(self) -> str | None: return self.global_cfg.contact_position @classmethod - def parse(cls, cfg, global_cfg): + def parse(cls, cfg, global_cfg) -> Optional["ContactInfo"]: if cfg: return cls(cfg, global_cfg) else: @@ -1119,7 +1163,7 @@ def parse(cls, cfg, global_cfg): class OWSConfig(OWSMetadataConfig): - _instance = None + _instance: Optional["OWSConfig"] = None initialised = False def __new__(cls, *args, **kwargs): @@ -1138,14 +1182,15 @@ def default_abstract(self) -> Optional[str]: return "" @property - def active_products(self): + def active_products(self) -> Iterable[OWSNamedLayer]: return filter(lambda 
x: not x.hide, self.product_index.values()) @property - def active_product_index(self): + def active_product_index(self) -> dict[str, OWSNamedLayer]: return {prod.name: prod for prod in self.active_products} - def __init__(self, refresh=False, cfg=None, ignore_msgfile=False, called_from_update_ranges=False): + def __init__(self, refresh=False, cfg: CFG_DICT | None = None, + ignore_msgfile=False, called_from_update_ranges=False): self.called_from_update_ranges = called_from_update_ranges if not self.initialised or refresh: self.msgfile = None @@ -1153,20 +1198,20 @@ def __init__(self, refresh=False, cfg=None, ignore_msgfile=False, called_from_up cfg = read_config() super().__init__(cfg) try: - self.parse_global(cfg["global"], ignore_msgfile) + self.parse_global(cast(CFG_DICT, cfg["global"]), ignore_msgfile) except KeyError as e: raise ConfigException( "Missing required config entry in 'global' section: %s" % str(e) ) if self.wms or self.wmts: - self.parse_wms(cfg.get("wms", {})) + self.parse_wms(cast(CFG_DICT, cfg.get("wms", {}))) else: self.parse_wms({}) if self.wcs: try: - self.parse_wcs(cfg.get("wcs")) + self.parse_wcs(cast(CFG_DICT, cfg.get("wcs"))) except KeyError as e: raise ConfigException( "Missing required config entry in 'wcs' section (with WCS enabled): %s" % str(e) @@ -1174,24 +1219,24 @@ def __init__(self, refresh=False, cfg=None, ignore_msgfile=False, called_from_up else: self.parse_wcs(None) try: - self.parse_layers(cfg["layers"]) + self.parse_layers(cast(list[CFG_DICT], cfg["layers"])) except KeyError as e: raise ConfigException("Missing required config entry in 'layers' section") try: if self.wmts: - self.parse_wmts(cfg.get("wmts", {})) + self.parse_wmts(cast(CFG_DICT, cfg.get("wmts", {}))) else: self.parse_wmts({}) except KeyError as e: raise ConfigException( "Missing required config entry in 'wmts' section (with WCS enabled): %s" % str(e) ) - self.catalog = None + self.catalog: Catalog | None = None self.initialised = True #pylint: 
disable=attribute-defined-outside-init - def make_ready(self, dc, *args, **kwargs): + def make_ready(self, dc: Datacube, *args, **kwargs): if self.msg_file_name: try: with open(self.msg_file_name, "rb") as fp: @@ -1200,24 +1245,30 @@ def make_ready(self, dc, *args, **kwargs): _LOG.warning("Message file %s does not exist - using metadata from config file", self.msg_file_name) else: self.set_msg_src(None) - self.native_product_index = {} + self.native_product_index: dict[str, OWSNamedLayer] = {} self.root_layer_folder.make_ready(dc, *args, **kwargs) super().make_ready(dc, *args, **kwargs) - def export_metadata(self): + def export_metadata(self) -> Catalog: if self.catalog is None: now = datetime.datetime.now() - self.catalog = Catalog(locale=self.default_locale, - domain=self.message_domain, - header_comment=f"""# Translations for datacube-ows metadata instance: + header: str = f"""# Translations for datacube-ows metadata instance: # {self.title} # -# {self.contact_info.organisation} {now.isoformat()} -#""", +""" + if self.contact_info: + header += """# {self.contact_info.organisation} {now.isoformat()} +#""" + else: + header += """# {now.isoformat()} + #""" + self.catalog = Catalog(locale=self.default_locale, + domain=self.message_domain, + header_comment=header, project=self.title, version=f"{now.isoformat()}", - copyright_holder=self.contact_info.organisation, - msgid_bugs_address=self.contact_info.email, + copyright_holder=self.contact_info.organisation if self.contact_info else None, + msgid_bugs_address=self.contact_info.email if self.contact_info else None, creation_date=now, revision_date=now, fuzzy=False) @@ -1233,31 +1284,32 @@ def export_metadata(self): self.catalog.add(id=k, string=v, auto_comments=[v]) return self.catalog - def parse_global(self, cfg, ignore_msgfile): - self._response_headers = cfg.get("response_headers", {}) - self.wms = cfg.get("services", {}).get("wms", True) - self.wmts = cfg.get("services", {}).get("wmts", True) - self.wcs = 
cfg.get("services", {}).get("wcs", False) + def parse_global(self, cfg: CFG_DICT, ignore_msgfile: bool): + self._response_headers = cast(dict[str, str], cfg.get("response_headers", {})) + services = cast(dict[str, bool], cfg.get("services", {})) + self.wms = services.get("wms", True) + self.wmts = services.get("wmts", True) + self.wcs = services.get("wcs", False) if not self.wms and not self.wmts and not self.wcs: raise ConfigException("At least one service must be active.") - self.locales = cfg.get("supported_languages", ["en"]) + self.locales = cast(list[str], cfg.get("supported_languages", ["en"])) if len(self.locales) < 1: raise ConfigException("You must support at least one language.") self.default_locale = self.locales[0] - self.message_domain = cfg.get("message_domain", "ows_cfg") + self.message_domain = cast(str, cfg.get("message_domain", "ows_cfg")) self.translations_dir = cfg.get("translations_directory") self.internationalised = self.translations_dir and len(self.locales) > 1 if self.internationalised: _LOG.info("Internationalisation enabled.") if ignore_msgfile: - self.msg_file_name = None + self.msg_file_name: str | None = None else: - self.msg_file_name = cfg.get("message_file") + self.msg_file_name = cast(str | None, cfg.get("message_file")) self.parse_metadata(cfg) self.allowed_urls = cfg["allowed_urls"] self.info_url = cfg["info_url"] self.contact_info = ContactInfo.parse(cfg.get("contact_info"), self) - self.attribution = AttributionCfg.parse(cfg.get("attribution"), self) + self.attribution = AttributionCfg.parse(cast(CFG_DICT | None, cfg.get("attribution")), self) def make_gml_name(name): if name.startswith("EPSG:"): @@ -1265,11 +1317,11 @@ def make_gml_name(name): else: return name - self.published_CRSs = {} - self.internal_CRSs = {} - CRS_aliases = {} - geographic_CRSs = [] - for crs_str, crsdef in cfg["published_CRSs"].items(): + self.published_CRSs: dict[str, CFG_DICT] = {} + self.internal_CRSs: dict[str, CFG_DICT] = {} + CRS_aliases: 
dict[str, CFG_DICT] = {} + geographic_CRSs: list[str] = [] + for crs_str, crsdef in cast(dict[str, CFG_DICT], cfg["published_CRSs"]).items(): if "alias" in crsdef: CRS_aliases[crs_str] = crsdef continue @@ -1302,7 +1354,7 @@ def make_gml_name(name): self.default_geographic_CRS = geographic_CRSs[0] for alias, alias_def in CRS_aliases.items(): - target_crs = alias_def["alias"] + target_crs = cast(str, alias_def["alias"]) if target_crs not in self.published_CRSs: _LOG.warning("CRS %s defined as alias for %s, which is not a published CRS - skipping", alias, target_crs) @@ -1312,15 +1364,15 @@ def make_gml_name(name): self.published_CRSs[alias]["gml_name"] = make_gml_name(alias) self.published_CRSs[alias]["alias_of"] = target_crs - def parse_wms(self, cfg): + def parse_wms(self, cfg: CFG_DICT): if not self.wms and not self.wmts: cfg = {} - self.s3_bucket = cfg.get("s3_bucket", "") - self.s3_url = cfg.get("s3_url", "") - self.s3_aws_zone = cfg.get("s3_aws_zone", "") + self.s3_bucket = cast(str, cfg.get("s3_bucket", "")) + self.s3_url = cast(str, cfg.get("s3_url", "")) + self.s3_aws_zone = cast(str, cfg.get("s3_aws_zone", "")) try: - self.wms_max_width = int(cfg.get("max_width", 256)) - self.wms_max_height = int(cfg.get("max_height", 256)) + self.wms_max_width = int(cast(str | int, cfg.get("max_width", 256))) + self.wms_max_height = int(cast(str | int, cfg.get("max_height", 256))) except ValueError: raise ConfigException( f"max_width and max_height in wms section must be integers: {cfg.get('max_width', 256)},{cfg.get('max_height', 256)}" @@ -1329,17 +1381,17 @@ def parse_wms(self, cfg): raise ConfigException( f"max_width and max_height in wms section must be positive integers: {cfg.get('max_width', 256)},{cfg.get('max_height', 256)}" ) - self.authorities = cfg.get("authorities", {}) + self.authorities = cast(dict[str, str], cfg.get("authorities", {})) self.user_band_math_extension = cfg.get("user_band_math_extension", False) self.wms_cap_cache_age = parse_cache_age(cfg, 
"caps_cache_maxage", "wms") if "attribution" in cfg: _LOG.warning("Attribution entry in top level 'wms' section will be ignored. Attribution should be moved to the 'global' section") - def parse_wcs(self, cfg): + def parse_wcs(self, cfg: CFG_DICT | None): if self.wcs: if not isinstance(cfg, Mapping): raise ConfigException("WCS section missing (and WCS is enabled)") - self.wcs_formats = WCSFormat.from_cfg(cfg["formats"]) + self.wcs_formats = WCSFormat.from_cfg(cast(dict[str, CFG_DICT], cfg["formats"])) self.wcs_formats_by_name = { fmt.name: fmt for fmt in self.wcs_formats @@ -1366,12 +1418,12 @@ def parse_wcs(self, cfg): self.wcs_cap_cache_age = 0 self.wcs_default_descov_age = 0 - def parse_wmts(self, cfg): - tms_cfgs = TileMatrixSet.default_tm_sets.copy() + def parse_wmts(self, cfg: CFG_DICT): + tms_cfgs = cast(dict[str, CFG_DICT], TileMatrixSet.default_tm_sets.copy()) if "tile_matrix_sets" in cfg: - for identifier, tms in cfg["tile_matrix_sets"].items(): + for identifier, tms in cast(dict[str, CFG_DICT], cfg["tile_matrix_sets"]).items(): tms_cfgs[identifier] = tms - self.tile_matrix_sets = {} + self.tile_matrix_sets: dict[str, TileMatrixSet] = {} for identifier, tms in tms_cfgs.items(): if len(identifier.split()) != 1: raise ConfigException(f"Invalid identifier: {identifier}") @@ -1379,24 +1431,24 @@ def parse_wmts(self, cfg): raise ConfigException(f"Tile matrix set identifiers must be unique: {identifier}") self.tile_matrix_sets[identifier] = TileMatrixSet(identifier, tms, self) - def parse_layers(self, cfg): - self.folder_index = {} - self.product_index = {} + def parse_layers(self, cfg: list[CFG_DICT]): + self.folder_index: dict[str, OWSFolder] = {} + self.product_index: dict[str, OWSNamedLayer] = {} self.declare_unready("native_product_index") - self.root_layer_folder = OWSFolder({ + self.root_layer_folder = OWSFolder(cast(CFG_DICT, { "title": "Root Folder (hidden)", "label": "ows_root_hidden", "layers": cfg - }, global_cfg=self, parent_layer=None) + }), 
global_cfg=self, parent_layer=None) @property - def layers(self): + def layers(self) -> list[OWSLayer]: return self.root_layer_folder.child_layers - def alias_bboxes(self, bboxes): - out = {} + def alias_bboxes(self, bboxes: CFG_DICT) -> CFG_DICT: + out: CFG_DICT = {} for crsid, crsdef in self.published_CRSs.items(): - a_crsid = crsdef["alias_of"] + a_crsid = cast(str, crsdef["alias_of"]) if a_crsid: if a_crsid in bboxes: out[crsid] = bboxes[a_crsid] @@ -1405,7 +1457,7 @@ def alias_bboxes(self, bboxes): out[crsid] = bboxes[crsid] return out - def crs(self, crsid): + def crs(self, crsid: str) -> CRS: if crsid not in self.published_CRSs: raise ConfigException(f"CRS {crsid} is not published") crs_def = self.published_CRSs[crsid] @@ -1416,17 +1468,18 @@ def crs(self, crsid): use_crs = crsid return CRS(use_crs) - def response_headers(self, d): + def response_headers(self, d: dict[str, str]) -> dict[str, str]: hdrs = self._response_headers.copy() hdrs.update(d) return hdrs -def get_config(refresh=False, called_from_update_ranges=False): +def get_config(refresh=False, called_from_update_ranges=False) -> OWSConfig: cfg = OWSConfig(refresh=refresh, called_from_update_ranges=called_from_update_ranges) if not cfg.ready: try: with cube() as dc: + assert dc is not None # For type checker cfg.make_ready(dc) except ODCInitException: pass diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 73faa2f9d..48774302d 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -39,14 +39,14 @@ def __init__(self, product: "OWSNamedLayer", self.raw_rgb_components: dict[str, Callable | LINEAR_COMP_DICT] = {} raw_components = cast(dict[str, Callable | LINEAR_COMP_DICT], style_cfg["components"]) for imgband in ["red", "green", "blue", "alpha"]: - components = raw_components.get(imgband) + components = cast(Callable | LINEAR_COMP_DICT | CFG_DICT | None, raw_components.get(imgband)) if components is None: if imgband == "alpha": 
continue else: raise ConfigException(f"No components defined for {imgband} band in style {self.name}, layer {product.name}") elif callable(components) or "function" in components: - self.raw_rgb_components[imgband] = FunctionWrapper(self.product, components, + self.raw_rgb_components[imgband] = FunctionWrapper(self.product, cast(CFG_DICT | Callable, components), stand_alone=self.stand_alone) if not self.stand_alone: if "additional_bands" not in style_cfg: diff --git a/datacube_ows/tile_matrix_sets.py b/datacube_ows/tile_matrix_sets.py index a6707e0f9..99870d098 100644 --- a/datacube_ows/tile_matrix_sets.py +++ b/datacube_ows/tile_matrix_sets.py @@ -3,7 +3,12 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from datacube_ows.config_utils import OWSConfigEntry, ConfigException +from typing import cast, Type +from datacube_ows.config_utils import OWSConfigEntry, ConfigException, CFG_DICT, RAW_CFG + +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSConfig # Scale denominators for WebMercator QuadTree Scale Set, starting from zoom level 0. # Currently goes to zoom level 14, where the pixel size at the equator is ~10m (i.e. 
Sentinel2 resolution) @@ -29,7 +34,7 @@ ] -def validate_2d_array(array, ident, label, typ): +def validate_2d_array(array: list, ident: str, label: str, typ: Type): try: if len(array) != 2: raise ConfigException(f"In tile matrix set {ident}, {label} must have two values: f{array}") @@ -38,55 +43,70 @@ def validate_2d_array(array, ident, label, typ): raise ConfigException(f"In tile matrix set {ident}, {label} must be a list of two values: f{array}") -def validate_array_typ(array, ident, label, typ): +def validate_array_typ(array: list, ident: str, label: str, typ: Type): for elem in array: if not isinstance(elem, typ): raise ConfigException(f"In tile matrix set {ident}, {label} has non-{typ.__name__} value of type {elem.__class__.__name__}: {elem}") class TileMatrixSet(OWSConfigEntry): - default_tm_sets = { + default_tm_sets: CFG_DICT = { "WholeWorld_WebMercator": { "crs": "EPSG:3857", - "matrix_origin": (-20037508.3427892, 20037508.3427892), - "tile_size": (256, 256), - "scale_set": webmerc_scale_set, + "matrix_origin": [-20037508.3427892, 20037508.3427892], + "tile_size": [256, 256], + "scale_set": cast(RAW_CFG, webmerc_scale_set), "wkss": "urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible", }, } - def __init__(self, identifier, cfg, global_cfg): + def __init__(self, identifier: str, cfg: CFG_DICT, global_cfg: "OWSConfig"): super().__init__(cfg) self.global_cfg = global_cfg self.identifier = identifier - self.crs_name = cfg["crs"] + self.crs_name = cast(str, cfg["crs"]) if self.crs_name not in self.global_cfg.published_CRSs: raise ConfigException(f"Tile matrix set {identifier} has unpublished CRS: {self.crs_name}") - self.matrix_origin = cfg["matrix_origin"] - validate_2d_array(self.matrix_origin, identifier, "Matrix origin", float) - self.tile_size = cfg["tile_size"] - validate_2d_array(self.tile_size, identifier, "Tile size", int) - self.scale_set = cfg["scale_set"] + matrix_origin = cast(list, cfg["matrix_origin"]) + validate_2d_array(matrix_origin, identifier, 
"Matrix origin", float) + self.matrix_origin = cast(list[float], matrix_origin) + if len(self.matrix_origin) != 2: + raise ConfigException(f"The origin coordinates of tile matrix set {identifier} must have 2 dimensions") + tile_size = cast(list, cfg["tile_size"]) + validate_2d_array(tile_size, identifier, "Tile size", int) + if len(tile_size) != 2: + raise ConfigException(f"The tile size of tile matrix set {identifier} must have 2 dimensions") + self.tile_size = cast(list[int], tile_size) + scale_set = cast(list, cfg["scale_set"]) try: - validate_array_typ(self.scale_set, identifier, "Scale set", float) + validate_array_typ(scale_set, identifier, "Scale set", float) except TypeError: raise ConfigException(f"In tile matrix set {identifier}, scale_set is not a list") + self.scale_set = cast(list[float], scale_set) if len(self.scale_set) < 1: raise ConfigException(f"Tile matrix set {identifier} has no scale denominators in scale_set") self.force_raw_crs_name = bool(cfg.get("force_raw_crs_name", False)) - self.wkss = cfg.get("wkss") - self.initial_matrix_exponents = cfg.get("matrix_exponent_initial_offsets", (0, 0)) - validate_2d_array(self.initial_matrix_exponents, identifier, "Initial matrix exponents", int) - self.unit_coefficients = cfg.get("unit_coefficients", (1.0, -1.0)) - validate_2d_array(self.unit_coefficients, identifier, "Unit coefficients", float) + self.wkss = cast(str | None, cfg.get("wkss")) + initial_matrix_exponents = cast(list, cfg.get("matrix_exponent_initial_offsets", [0, 0])) + validate_2d_array(initial_matrix_exponents, identifier, "Initial matrix exponents", int) + if len(initial_matrix_exponents) != 2: + raise ConfigException( + f"The initial matrix exponents of tile matrix set {identifier} must have 2 dimensions") + self.initial_matrix_exponents = cast(list[int], initial_matrix_exponents) + unit_coefficients = cast(list, cfg.get("unit_coefficients", [1.0, -1.0])) + validate_2d_array(unit_coefficients, identifier, "Unit coefficients", float) + 
if len(unit_coefficients) != 2: + raise ConfigException( + f"The unit coefficients of tile matrix set {identifier} must have 2 dimensions") + self.unit_coefficients = cast(list[float], unit_coefficients) @property - def crs_cfg(self): + def crs_cfg(self) -> CFG_DICT: return self.global_cfg.published_CRSs[self.crs_name] @property - def crs_display(self): + def crs_display(self) -> str: if self.force_raw_crs_name: return self.crs_name if self.crs_name[:5] == "EPSG:": From 33cd19537ad31d012b11b5ed05a056abbbd596a9 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 16:48:40 +1000 Subject: [PATCH 15/29] Remove unused imports. --- datacube_ows/config_utils.py | 2 +- datacube_ows/mv_index.py | 2 +- datacube_ows/ogc_utils.py | 2 +- datacube_ows/ows_configuration.py | 12 +++++------- datacube_ows/protocol_versions.py | 2 +- datacube_ows/styles/ramp.py | 2 +- datacube_ows/wms_utils.py | 3 +-- 7 files changed, 11 insertions(+), 14 deletions(-) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index 6fbc3cae9..b478da912 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -8,7 +8,7 @@ import os from importlib import import_module from itertools import chain -from typing import (Any, Callable, Iterable, Mapping, Optional, Sequence, TypeVar, cast) +from typing import Any, Callable, Iterable, Optional, Sequence, TypeVar, cast from urllib.parse import urlparse import fsspec diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index cd9ce2a14..bbaa903d4 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -7,7 +7,7 @@ import json from enum import Enum from types import UnionType -from typing import Any, Iterable, Type, TypeVar, cast +from typing import Iterable, Type, cast from uuid import UUID as UUID_ import pytz diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index cd16809e7..7fe394fe1 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -6,7 
+6,7 @@ import datetime import logging from io import BytesIO -from typing import (Any, Mapping, Optional, Sequence, cast) +from typing import Any, Optional, Sequence, cast from urllib.parse import urlparse import numpy diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index a620540d2..b41741bff 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -19,7 +19,7 @@ from collections.abc import Mapping from enum import Enum from importlib import import_module -from typing import Callable, Optional, cast, Union, Any, Iterable +from typing import Optional, cast, Union, Any, Iterable import numpy from babel.messages.catalog import Catalog @@ -38,15 +38,13 @@ OWSExtensibleConfigEntry, OWSFlagBand, OWSMetadataConfig, cfg_expand, get_file_loc, import_python_obj, - load_json_obj, ConfigException, FunctionWrapper, CFG_DICT, RAW_CFG, F) + load_json_obj, ConfigException, FunctionWrapper, CFG_DICT, RAW_CFG) from datacube_ows.cube_pool import ODCInitException, cube, get_cube -from datacube_ows.ogc_utils import (create_geobox, local_solar_date_range) -from datacube_ows.resource_limits import (OWSResourceManagementRules, - parse_cache_age) +from datacube_ows.ogc_utils import create_geobox, local_solar_date_range +from datacube_ows.resource_limits import OWSResourceManagementRules, parse_cache_age from datacube_ows.styles import StyleDef from datacube_ows.tile_matrix_sets import TileMatrixSet -from datacube_ows.utils import (group_by_begin_datetime, group_by_mosaic, - group_by_solar) +from datacube_ows.utils import group_by_begin_datetime, group_by_mosaic, group_by_solar _LOG = logging.getLogger(__name__) diff --git a/datacube_ows/protocol_versions.py b/datacube_ows/protocol_versions.py index 4dd09ad52..62f1b6c0a 100644 --- a/datacube_ows/protocol_versions.py +++ b/datacube_ows/protocol_versions.py @@ -5,7 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 import re -from typing import Callable, Iterable, List, Mapping, 
Optional, Sequence, Tuple +from typing import Callable, Mapping, Sequence, Tuple from datacube_ows.ogc_exceptions import (OGCException, WCS1Exception, WCS2Exception, WMSException, diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index aaf691ccb..e8c2966b9 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -19,7 +19,7 @@ from numpy import ubyte from xarray import Dataset, DataArray -from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper, RAW_CFG +from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper from datacube_ows.styles.base import StyleDefBase from datacube_ows.styles.expression import Expression diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index 3bd0216bc..9e73de0eb 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -4,8 +4,7 @@ # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 import math -from datetime import datetime, date -from typing import cast +from datetime import datetime import numpy import regex as re From 2b585f115bdf388126c7c1ef57f82fb535cbd03f Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 17:18:32 +1000 Subject: [PATCH 16/29] Split GetFeatureInfo methods out of data.py and spit ogc_utils.py, --- datacube_ows/data.py | 332 +---------------------------- datacube_ows/feature_info.py | 294 +++++++++++++++++++++++++ datacube_ows/http_utils.py | 127 +++++++++++ datacube_ows/legend_generator.py | 2 +- datacube_ows/ogc.py | 4 +- datacube_ows/ogc_exceptions.py | 2 +- datacube_ows/ogc_utils.py | 283 +----------------------- datacube_ows/ows_configuration.py | 3 +- datacube_ows/resource_limits.py | 4 +- datacube_ows/time_utils.py | 202 ++++++++++++++++++ datacube_ows/wcs1.py | 3 +- datacube_ows/wcs2.py | 4 +- datacube_ows/wms.py | 5 +- datacube_ows/wmts.py | 5 +- integration_tests/test_mv_index.py | 2 +- tests/test_data.py | 
13 +- tests/test_ogc_utils.py | 34 +-- 17 files changed, 672 insertions(+), 647 deletions(-) create mode 100644 datacube_ows/feature_info.py create mode 100644 datacube_ows/http_utils.py create mode 100644 datacube_ows/time_utils.py diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 42c9b2ae8..9f7c08d06 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -3,20 +3,14 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -import json import logging -import re from datetime import date, datetime, timedelta -from itertools import chain from typing import cast, Any import numpy import numpy.ma import pytz import xarray -from datacube.model import Dataset -from datacube.utils.masking import mask_to_dict -from flask import render_template from odc.geo import geom from odc.geo.geobox import GeoBox from pandas import Timestamp @@ -27,22 +21,18 @@ from datacube_ows.loading import DataStacker, ProductBandQuery from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import (dataset_center_time, - solar_date, tz_for_geometry, - xarray_image_as_png) -from datacube_ows.config_utils import ConfigException, RAW_CFG, CFG_DICT -from datacube_ows.ows_configuration import get_config, OWSNamedLayer, OWSConfig +from datacube_ows.ogc_utils import (xarray_image_as_png) +from datacube_ows.http_utils import FlaskResponse, json_response, png_response +from datacube_ows.time_utils import solar_date, tz_for_geometry +from datacube_ows.ows_configuration import OWSNamedLayer from datacube_ows.styles import StyleDef from datacube_ows.query_profiler import QueryProfiler from datacube_ows.resource_limits import ResourceLimited from datacube_ows.utils import default_to_utc, log_call -from datacube_ows.wms_utils import (GetFeatureInfoParameters, GetMapParameters, - img_coords_to_geopoint) +from datacube_ows.wms_utils import (GetMapParameters) _LOG = 
logging.getLogger(__name__) -FlaskResponse = tuple[str | bytes, int, dict[str, str]] - def user_date_sorter(layer: OWSNamedLayer, odc_dates: list[datetime], geometry: geom.Geometry, user_dates: list[datetime]) -> xarray.DataArray: @@ -238,18 +228,6 @@ def get_map(args: dict[str, str]) -> FlaskResponse: return png_response(body, extra_headers=params.product.resource_limits.wms_cache_rules.cache_headers(n_datasets)) -def png_response(body: bytes, cfg: OWSConfig | None = None, extra_headers: dict[str, str] | None = None) -> FlaskResponse: - if not cfg: - cfg = get_config() - assert cfg is not None # For type checker - if extra_headers is None: - extra_headers = {} - headers = {"Content-Type": "image/png"} - headers.update(extra_headers) - headers = cfg.response_headers(headers) - return body, 200, cfg.response_headers(headers) - - @log_call def _write_png(data: xarray.Dataset, style: StyleDef, extent_mask: xarray.DataArray, qprof: QueryProfiler) -> bytes: @@ -285,28 +263,6 @@ def _write_empty(geobox: GeoBox) -> bytes: return memfile.read() -def get_coordlist(geo: geom.Geometry, layer_name: str) -> list[tuple[float | int, float | int]]: - if geo.type == 'Polygon': - coordinates_list = [geo.json["coordinates"]] - elif geo.type == 'MultiPolygon': - coordinates_list = geo.json["coordinates"] - elif geo.type == 'GeometryCollection': - coordinates_list = [] - for geom in geo.json["geometries"]: - if geom["type"] == "Polygon": - coordinates_list.append(geom["coordinates"]) - elif geom["type"] == "MultiPolygon": - coordinates_list.extend(geom["coordinates"]) - else: - _LOG.warning( - "Extent contains non-polygon GeometryType (%s in GeometryCollection - ignoring), layer: %s", - geom["type"], - layer_name) - else: - raise Exception("Unexpected extent/geobox polygon geometry type: %s in layer %s" % (geo.type, layer_name)) - return coordinates_list - - @log_call def _write_polygon(geobox: GeoBox, polygon: geom.Geometry, zoom_fill: list[int], layer: OWSNamedLayer) -> bytes: 
geobox_ext = geobox.extent @@ -333,281 +289,3 @@ def _write_polygon(geobox: GeoBox, polygon: geom.Geometry, zoom_fill: list[int], return memfile.read() -@log_call -def get_s3_browser_uris(datasets: dict[ProductBandQuery, xarray.DataArray], - pt: geom.Geometry | None = None, - s3url: str = "", s3bucket: str = "") -> set[str]: - uris = [] - last_crs = None - for pbq, dss in datasets.items(): - if pbq.main: - for tds in dss: - for ds in tds.values.item(): - if pt and ds.extent: - if ds.crs != last_crs: - pt_native = pt.to_crs(ds.crs) - last_crs = ds.crs - if ds.extent.contains(pt_native): - uris.append(ds.uris) - else: - uris.append(ds.uris) - break - - uris = list(chain.from_iterable(uris)) - unique_uris = set(uris) - - regex = re.compile(r"s3:\/\/(?P[a-zA-Z0-9_\-\.]+)\/(?P[\S]+)/[a-zA-Z0-9_\-\.]+.(yaml|json)") - - # convert to browsable link - def convert(uri: str) -> str: - uri_format = "http://{bucket}.s3-website-ap-southeast-2.amazonaws.com/?prefix={prefix}" - uri_format_prod = str(s3url) + "/?prefix={prefix}" - result = regex.match(uri) - if result is not None: - if result.group("bucket") == str(s3bucket): - new_uri = uri_format_prod.format(prefix=result.group("prefix")) - else: - new_uri = uri_format.format(bucket=result.group("bucket"), - prefix=result.group("prefix")) - else: - new_uri = uri - return new_uri - - formatted = {convert(uri) for uri in unique_uris} - - return formatted - - -@log_call -def _make_band_dict(prod_cfg: OWSNamedLayer, pixel_dataset: xarray.Dataset) -> dict[str, dict[str, bool | str] | str]: - band_dict: dict[str, dict[str, bool | str] | str] = {} - for k, v in pixel_dataset.data_vars.items(): - band_val = pixel_dataset[k].item() - flag_def = pixel_dataset[k].attrs.get("flags_definition") - if flag_def: - try: - flag_dict = mask_to_dict(flag_def, band_val) - except TypeError as te: - logging.warning('Working around for float bands') - flag_dict = mask_to_dict(flag_def, int(band_val)) - ret_val: dict[str, bool | str] = {} - for flag, val 
in flag_dict.items(): - if not val: - continue - if val == True: - ret_val[flag_def[flag].get('description', flag)] = True - else: - ret_val[flag_def[flag].get('description', flag)] = val - band_dict[k] = ret_val - else: - try: - band_lbl = prod_cfg.band_idx.band_label(k) - assert k is not None # for type checker - if band_val == pixel_dataset[k].nodata or numpy.isnan(band_val): - band_dict[band_lbl] = "n/a" - else: - band_dict[band_lbl] = band_val - except ConfigException: - pass - return band_dict - - -@log_call -def _make_derived_band_dict(pixel_dataset: xarray.Dataset, style_index: dict[str, StyleDef]) -> dict[str, int | float]: - """Creates a dict of values for bands derived by styles. - This only works for styles with an `index_function` defined. - - :param xarray.Dataset pixel_dataset: A 1x1 pixel dataset containing band arrays - :param dict(str, StyleCfg) style_index: dict of style configuration dicts - :return: dict of style names to derived value - """ - derived_band_dict = {} - for style_name, style in style_index.items(): - if not style.include_in_feature_info: - continue - - if any(pixel_dataset[band] == pixel_dataset[band].nodata for band in style.needed_bands): - continue - - value = style.index_function(pixel_dataset).item() - derived_band_dict[style_name] = value if not numpy.isnan(value) else "n/a" - return derived_band_dict - - -def geobox_is_point(geobox: GeoBox) -> bool: - return geobox.height == 1 and geobox.width == 1 - - -@log_call -def feature_info(args: dict[str, str]) -> FlaskResponse: - # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals - # Parse GET parameters - params = GetFeatureInfoParameters(args) - feature_json: CFG_DICT = {} - - geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j) - # shrink geobox to point - # Prepare to extract feature info - if geobox_is_point(params.geobox): - # request geobox is already 1x1 - geo_point_geobox = params.geobox - else: - # Make a 
1x1 pixel geobox - geo_point_geobox = GeoBox.from_geopolygon( - geo_point, params.geobox.resolution, crs=params.geobox.crs) - tz = tz_for_geometry(geo_point_geobox.geographic_extent) - stacker = DataStacker(params.product, geo_point_geobox, params.times) - # --- Begin code section requiring datacube. - cfg = get_config() - with cube() as dc: - if not dc: - raise WMSException("Database connectivity failure") - all_time_datasets = cast(xarray.DataArray, stacker.datasets(dc.index, all_time=True, point=geo_point)) - - # Taking the data as a single point so our indexes into the data should be 0,0 - h_coord = cast(str, cfg.published_CRSs[params.crsid]["horizontal_coord"]) - v_coord = cast(str, cfg.published_CRSs[params.crsid]["vertical_coord"]) - s3_bucket = cfg.s3_bucket - s3_url = cfg.s3_url - isel_kwargs = { - h_coord: 0, - v_coord: 0 - } - if any(all_time_datasets): - # Group datasets by time, load only datasets that match the idx_date - global_info_written = False - feature_json["data"] = [] - fi_date_index: dict[datetime, RAW_CFG] = {} - time_datasets = cast( - dict[ProductBandQuery, xarray.DataArray], - stacker.datasets(dc.index, all_flag_bands=True, point=geo_point) - ) - data = stacker.data(time_datasets, skip_corrections=True) - if data is not None: - for dt in data.time.values: - td = data.sel(time=dt) - # Global data that should apply to all dates, but needs some data to extract - if not global_info_written: - global_info_written = True - # Non-geographic coordinate systems need to be projected onto a geographic - # coordinate system. Why not use EPSG:4326? 
- # Extract coordinates in CRS - data_x = getattr(td, h_coord) - data_y = getattr(td, v_coord) - - x = data_x[isel_kwargs[h_coord]].item() - y = data_y[isel_kwargs[v_coord]].item() - pt = geom.point(x, y, params.crs) - - # Project to EPSG:4326 - crs_geo = geom.CRS("EPSG:4326") - ptg = pt.to_crs(crs_geo) - - # Capture lat/long coordinates - feature_json["lon"], feature_json["lat"] = ptg.coords[0] - - date_info: CFG_DICT = {} - - ds: Dataset | None = None - for pbq, dss in time_datasets.items(): - if pbq.main: - ds = dss.sel(time=dt).values.tolist()[0] - break - assert ds is not None - if params.product.multi_product: - if "platform" in ds.metadata_doc: - date_info["source_product"] = "%s (%s)" % (ds.type.name, ds.metadata_doc["platform"]["code"]) - else: - date_info["source_product"] = ds.type.name - - # Extract data pixel - pixel_ds: xarray.Dataset = td.isel(**isel_kwargs) # type: ignore[arg-type] - - # Get accurate timestamp from dataset - assert ds.time is not None # For type checker - if params.product.time_resolution.is_summary(): - date_info["time"] = ds.time.begin.strftime("%Y-%m-%d") - else: - date_info["time"] = dataset_center_time(ds).strftime("%Y-%m-%d %H:%M:%S %Z") - # Collect raw band values for pixel and derived bands from styles - date_info["bands"] = cast(RAW_CFG, _make_band_dict(params.product, pixel_ds)) - derived_band_dict = cast(RAW_CFG, _make_derived_band_dict(pixel_ds, params.product.style_index)) - if derived_band_dict: - date_info["band_derived"] = derived_band_dict - # Add any custom-defined fields. 
- for k, f in params.product.feature_info_custom_includes.items(): - date_info[k] = f(date_info["bands"]) - - cast(list[RAW_CFG], feature_json["data"]).append(date_info) - fi_date_index[dt] = cast(dict[str, list[RAW_CFG]], feature_json)["data"][-1] - feature_json["data_available_for_dates"] = [] - pt_native = None - for d in all_time_datasets.coords["time"].values: - dt_datasets = all_time_datasets.sel(time=d) - for ds in dt_datasets.values.item(): - assert ds is not None # For type checker - if pt_native is None: - pt_native = geo_point.to_crs(ds.crs) - elif pt_native.crs != ds.crs: - pt_native = geo_point.to_crs(ds.crs) - if ds.extent and ds.extent.contains(pt_native): - # tolist() converts a numpy datetime64 to a python datatime - dt = Timestamp(stacker.group_by.group_by_func(ds)).to_pydatetime() - if params.product.time_resolution.is_subday(): - cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.isoformat()) - else: - cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.strftime("%Y-%m-%d")) - break - if time_datasets: - feature_json["data_links"] = cast( - RAW_CFG, - sorted(get_s3_browser_uris(time_datasets, pt_native, s3_url, s3_bucket))) - else: - feature_json["data_links"] = [] - if params.product.feature_info_include_utc_dates: - unsorted_dates: list[str] = [] - for tds in all_time_datasets: - for ds in tds.values.item(): - assert ds is not None and ds.time is not None # for type checker - if params.product.time_resolution.is_solar(): - unsorted_dates.append(ds.center_time.strftime("%Y-%m-%d")) - elif params.product.time_resolution.is_subday(): - unsorted_dates.append(ds.time.begin.isoformat()) - else: - unsorted_dates.append(ds.time.begin.strftime("%Y-%m-%d")) - feature_json["data_available_for_utc_dates"] = sorted( - d.center_time.strftime("%Y-%m-%d") for d in all_time_datasets) - # --- End code section requiring datacube. 
- - result: CFG_DICT = { - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "properties": feature_json, - "geometry": { - "type": "Point", - "coordinates": geo_point.coords[0] - } - } - ] - } - if params.format == "text/html": - return html_json_response(result, cfg) - else: - return json_response(result, cfg) - - -def json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: - if not cfg: - cfg = get_config() - assert cfg is not None # for type checker - return json.dumps(result), 200, cfg.response_headers({"Content-Type": "application/json"}) - - -def html_json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: - if not cfg: - cfg = get_config() - assert cfg is not None # for type checker - html_content = render_template("html_feature_info.html", result=result) - return html_content, 200, cfg.response_headers({"Content-Type": "text/html"}) diff --git a/datacube_ows/feature_info.py b/datacube_ows/feature_info.py new file mode 100644 index 000000000..26613fdcc --- /dev/null +++ b/datacube_ows/feature_info.py @@ -0,0 +1,294 @@ +# This file is part of datacube-ows, part of the Open Data Cube project. +# See https://opendatacube.org for more information. 
+# +# Copyright (c) 2017-2023 OWS Contributors +# SPDX-License-Identifier: Apache-2.0 +import logging +import re +from datetime import datetime +from itertools import chain +from typing import cast + +import numpy +import xarray +from datacube.model import Dataset +from datacube.utils.masking import mask_to_dict +from odc.geo import geom +from odc.geo.geobox import GeoBox +from pandas import Timestamp + +from datacube_ows.config_utils import ConfigException, CFG_DICT, RAW_CFG +from datacube_ows.cube_pool import cube +from datacube_ows.loading import ProductBandQuery, DataStacker +from datacube_ows.ogc_exceptions import WMSException +from datacube_ows.http_utils import FlaskResponse, json_response, html_json_response +from datacube_ows.time_utils import dataset_center_time, tz_for_geometry +from datacube_ows.ows_configuration import OWSNamedLayer, get_config +from datacube_ows.styles import StyleDef +from datacube_ows.utils import log_call +from datacube_ows.wms_utils import GetFeatureInfoParameters, img_coords_to_geopoint + + +@log_call +def get_s3_browser_uris(datasets: dict[ProductBandQuery, xarray.DataArray], + pt: geom.Geometry | None = None, + s3url: str = "", s3bucket: str = "") -> set[str]: + uris = [] + last_crs = None + for pbq, dss in datasets.items(): + if pbq.main: + for tds in dss: + for ds in tds.values.item(): + if pt and ds.extent: + if ds.crs != last_crs: + pt_native = pt.to_crs(ds.crs) + last_crs = ds.crs + if ds.extent.contains(pt_native): + uris.append(ds.uris) + else: + uris.append(ds.uris) + break + + uris = list(chain.from_iterable(uris)) + unique_uris = set(uris) + + regex = re.compile(r"s3:\/\/(?P[a-zA-Z0-9_\-\.]+)\/(?P[\S]+)/[a-zA-Z0-9_\-\.]+.(yaml|json)") + + # convert to browsable link + def convert(uri: str) -> str: + uri_format = "http://{bucket}.s3-website-ap-southeast-2.amazonaws.com/?prefix={prefix}" + uri_format_prod = str(s3url) + "/?prefix={prefix}" + result = regex.match(uri) + if result is not None: + if result.group("bucket") 
== str(s3bucket): + new_uri = uri_format_prod.format(prefix=result.group("prefix")) + else: + new_uri = uri_format.format(bucket=result.group("bucket"), + prefix=result.group("prefix")) + else: + new_uri = uri + return new_uri + + formatted = {convert(uri) for uri in unique_uris} + + return formatted + + +@log_call +def _make_band_dict(prod_cfg: OWSNamedLayer, pixel_dataset: xarray.Dataset) -> dict[str, dict[str, bool | str] | str]: + band_dict: dict[str, dict[str, bool | str] | str] = {} + for k, v in pixel_dataset.data_vars.items(): + band_val = pixel_dataset[k].item() + flag_def = pixel_dataset[k].attrs.get("flags_definition") + if flag_def: + try: + flag_dict = mask_to_dict(flag_def, band_val) + except TypeError as te: + logging.warning('Working around for float bands') + flag_dict = mask_to_dict(flag_def, int(band_val)) + ret_val: dict[str, bool | str] = {} + for flag, val in flag_dict.items(): + if not val: + continue + if val == True: + ret_val[flag_def[flag].get('description', flag)] = True + else: + ret_val[flag_def[flag].get('description', flag)] = val + band_dict[k] = ret_val + else: + try: + band_lbl = prod_cfg.band_idx.band_label(k) + assert k is not None # for type checker + if band_val == pixel_dataset[k].nodata or numpy.isnan(band_val): + band_dict[band_lbl] = "n/a" + else: + band_dict[band_lbl] = band_val + except ConfigException: + pass + return band_dict + + +@log_call +def _make_derived_band_dict(pixel_dataset: xarray.Dataset, style_index: dict[str, StyleDef]) -> dict[str, int | float]: + """Creates a dict of values for bands derived by styles. + This only works for styles with an `index_function` defined. 
+ + :param xarray.Dataset pixel_dataset: A 1x1 pixel dataset containing band arrays + :param dict(str, StyleCfg) style_index: dict of style configuration dicts + :return: dict of style names to derived value + """ + derived_band_dict = {} + for style_name, style in style_index.items(): + if not style.include_in_feature_info: + continue + + if any(pixel_dataset[band] == pixel_dataset[band].nodata for band in style.needed_bands): + continue + + value = style.index_function(pixel_dataset).item() + derived_band_dict[style_name] = value if not numpy.isnan(value) else "n/a" + return derived_band_dict + + +def geobox_is_point(geobox: GeoBox) -> bool: + return geobox.height == 1 and geobox.width == 1 + + +@log_call +def feature_info(args: dict[str, str]) -> FlaskResponse: + # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + # Parse GET parameters + params = GetFeatureInfoParameters(args) + feature_json: CFG_DICT = {} + + geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j) + # shrink geobox to point + # Prepare to extract feature info + if geobox_is_point(params.geobox): + # request geobox is already 1x1 + geo_point_geobox = params.geobox + else: + # Make a 1x1 pixel geobox + geo_point_geobox = GeoBox.from_geopolygon( + geo_point, params.geobox.resolution, crs=params.geobox.crs) + tz = tz_for_geometry(geo_point_geobox.geographic_extent) + stacker = DataStacker(params.product, geo_point_geobox, params.times) + # --- Begin code section requiring datacube. 
+ cfg = get_config() + with cube() as dc: + if not dc: + raise WMSException("Database connectivity failure") + all_time_datasets = cast(xarray.DataArray, stacker.datasets(dc.index, all_time=True, point=geo_point)) + + # Taking the data as a single point so our indexes into the data should be 0,0 + h_coord = cast(str, cfg.published_CRSs[params.crsid]["horizontal_coord"]) + v_coord = cast(str, cfg.published_CRSs[params.crsid]["vertical_coord"]) + s3_bucket = cfg.s3_bucket + s3_url = cfg.s3_url + isel_kwargs = { + h_coord: 0, + v_coord: 0 + } + if any(all_time_datasets): + # Group datasets by time, load only datasets that match the idx_date + global_info_written = False + feature_json["data"] = [] + fi_date_index: dict[datetime, RAW_CFG] = {} + time_datasets = cast( + dict[ProductBandQuery, xarray.DataArray], + stacker.datasets(dc.index, all_flag_bands=True, point=geo_point) + ) + data = stacker.data(time_datasets, skip_corrections=True) + if data is not None: + for dt in data.time.values: + td = data.sel(time=dt) + # Global data that should apply to all dates, but needs some data to extract + if not global_info_written: + global_info_written = True + # Non-geographic coordinate systems need to be projected onto a geographic + # coordinate system. Why not use EPSG:4326? 
+ # Extract coordinates in CRS + data_x = getattr(td, h_coord) + data_y = getattr(td, v_coord) + + x = data_x[isel_kwargs[h_coord]].item() + y = data_y[isel_kwargs[v_coord]].item() + pt = geom.point(x, y, params.crs) + + # Project to EPSG:4326 + crs_geo = geom.CRS("EPSG:4326") + ptg = pt.to_crs(crs_geo) + + # Capture lat/long coordinates + feature_json["lon"], feature_json["lat"] = ptg.coords[0] + + date_info: CFG_DICT = {} + + ds: Dataset | None = None + for pbq, dss in time_datasets.items(): + if pbq.main: + ds = dss.sel(time=dt).values.tolist()[0] + break + assert ds is not None + if params.product.multi_product: + if "platform" in ds.metadata_doc: + date_info["source_product"] = "%s (%s)" % (ds.type.name, ds.metadata_doc["platform"]["code"]) + else: + date_info["source_product"] = ds.type.name + + # Extract data pixel + pixel_ds: xarray.Dataset = td.isel(**isel_kwargs) # type: ignore[arg-type] + + # Get accurate timestamp from dataset + assert ds.time is not None # For type checker + if params.product.time_resolution.is_summary(): + date_info["time"] = ds.time.begin.strftime("%Y-%m-%d") + else: + date_info["time"] = dataset_center_time(ds).strftime("%Y-%m-%d %H:%M:%S %Z") + # Collect raw band values for pixel and derived bands from styles + date_info["bands"] = cast(RAW_CFG, _make_band_dict(params.product, pixel_ds)) + derived_band_dict = cast(RAW_CFG, _make_derived_band_dict(pixel_ds, params.product.style_index)) + if derived_band_dict: + date_info["band_derived"] = derived_band_dict + # Add any custom-defined fields. 
+ for k, f in params.product.feature_info_custom_includes.items(): + date_info[k] = f(date_info["bands"]) + + cast(list[RAW_CFG], feature_json["data"]).append(date_info) + fi_date_index[dt] = cast(dict[str, list[RAW_CFG]], feature_json)["data"][-1] + feature_json["data_available_for_dates"] = [] + pt_native = None + for d in all_time_datasets.coords["time"].values: + dt_datasets = all_time_datasets.sel(time=d) + for ds in dt_datasets.values.item(): + assert ds is not None # For type checker + if pt_native is None: + pt_native = geo_point.to_crs(ds.crs) + elif pt_native.crs != ds.crs: + pt_native = geo_point.to_crs(ds.crs) + if ds.extent and ds.extent.contains(pt_native): + # tolist() converts a numpy datetime64 to a python datatime + dt = Timestamp(stacker.group_by.group_by_func(ds)).to_pydatetime() + if params.product.time_resolution.is_subday(): + cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.isoformat()) + else: + cast(list[RAW_CFG], feature_json["data_available_for_dates"]).append(dt.strftime("%Y-%m-%d")) + break + if time_datasets: + feature_json["data_links"] = cast( + RAW_CFG, + sorted(get_s3_browser_uris(time_datasets, pt_native, s3_url, s3_bucket))) + else: + feature_json["data_links"] = [] + if params.product.feature_info_include_utc_dates: + unsorted_dates: list[str] = [] + for tds in all_time_datasets: + for ds in tds.values.item(): + assert ds is not None and ds.time is not None # for type checker + if params.product.time_resolution.is_solar(): + unsorted_dates.append(ds.center_time.strftime("%Y-%m-%d")) + elif params.product.time_resolution.is_subday(): + unsorted_dates.append(ds.time.begin.isoformat()) + else: + unsorted_dates.append(ds.time.begin.strftime("%Y-%m-%d")) + feature_json["data_available_for_utc_dates"] = sorted( + d.center_time.strftime("%Y-%m-%d") for d in all_time_datasets) + # --- End code section requiring datacube. 
+ + result: CFG_DICT = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": feature_json, + "geometry": { + "type": "Point", + "coordinates": geo_point.coords[0] + } + } + ] + } + if params.format == "text/html": + return html_json_response(result, cfg) + else: + return json_response(result, cfg) diff --git a/datacube_ows/http_utils.py b/datacube_ows/http_utils.py new file mode 100644 index 000000000..481576c05 --- /dev/null +++ b/datacube_ows/http_utils.py @@ -0,0 +1,127 @@ +# This file is part of datacube-ows, part of the Open Data Cube project. +# See https://opendatacube.org for more information. +# +# Copyright (c) 2017-2024 OWS Contributors +# SPDX-License-Identifier: Apache-2.0 +import json +from urllib.parse import urlparse + +from flask import Request, request, render_template + +from datacube_ows.config_utils import CFG_DICT +from datacube_ows.ows_configuration import OWSConfig, get_config + +FlaskResponse = tuple[str | bytes, int, dict[str, str]] + + +def resp_headers(d: dict[str, str]) -> dict[str, str]: + """ + Take a dictionary of http response headers and all required response headers from the configuration. + + :param d: + :return: + """ + from datacube_ows.ows_configuration import get_config + return get_config().response_headers(d) + + +def parse_for_base_url(url: str) -> str: + """ + Extract the base URL from a URL + + :param url: A URL + :return: The base URL (path and parameters stripped) + """ + parsed = urlparse(url) + parsed = (parsed.netloc + parsed.path).rstrip("/") + return parsed + + +def get_service_base_url(allowed_urls: list[str] | str, request_url: str) -> str: + """ + Choose the base URL to advertise in XML. + + :param allowed_urls: A list of allowed URLs, or a single allowed URL. + :param request_url: The URL the incoming request came from + :return: Return one of the allowed URLs. 
Either one that seems to match the request, or the first in the list + """ + if isinstance(allowed_urls, str): + return allowed_urls + parsed_request_url = parse_for_base_url(request_url) + parsed_allowed_urls = [parse_for_base_url(u) for u in allowed_urls] + try: + idx: int | None = parsed_allowed_urls.index(parsed_request_url) + except ValueError: + idx = None + url = allowed_urls[idx] if idx is not None else allowed_urls[0] + # template includes tailing /, strip any trail slash here to avoid duplicates + url = url.rstrip("/") + return url + + +def capture_headers(req: Request, + args_dict: dict[str, str | None]) -> dict[str, str | None]: + """ + Capture significant flask metadata into the args dictionary + + :param req: A Flask request + :param args_dict: A Flask args dictionary + :return: + """ + args_dict['referer'] = req.headers.get('Referer', None) + args_dict['origin'] = req.headers.get('Origin', None) + args_dict['requestid'] = req.environ.get("FLASK_REQUEST_ID") + args_dict['host'] = req.headers.get('Host', None) + args_dict['url_root'] = req.url_root + + return args_dict + + +def cache_control_headers(max_age: int) -> dict[str, str]: + if max_age <= 0: + return {"cache-control": "no-cache"} + else: + return {"cache-control": f"max-age={max_age}"} + + +def lower_get_args() -> dict[str, str]: + """ + Return Flask request arguments, with argument names converted to lower case. + + Get parameters in WMS are case-insensitive, and intended to be single use. + Spec does not specify which instance should be used if a parameter is provided more than once. + This function uses the LAST instance. 
+ """ + d = {} + for k in request.args.keys(): + kl = k.lower() + for v in request.args.getlist(k): + d[kl] = v + return d + + +def json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: + if not cfg: + cfg = get_config() + assert cfg is not None # for type checker + return json.dumps(result), 200, cfg.response_headers({"Content-Type": "application/json"}) + + +def html_json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: + if not cfg: + cfg = get_config() + assert cfg is not None # for type checker + html_content = render_template("html_feature_info.html", result=result) + return html_content, 200, cfg.response_headers({"Content-Type": "text/html"}) + + +def png_response(body: bytes, cfg: OWSConfig | None = None, extra_headers: dict[str, str] | None = None) -> FlaskResponse: + if not cfg: + cfg = get_config() + assert cfg is not None # For type checker + if extra_headers is None: + extra_headers = {} + headers = {"Content-Type": "image/png"} + headers.update(extra_headers) + headers = cfg.response_headers(headers) + return body, 200, cfg.response_headers(headers) diff --git a/datacube_ows/legend_generator.py b/datacube_ows/legend_generator.py index e352c9a14..42f5445f0 100644 --- a/datacube_ows/legend_generator.py +++ b/datacube_ows/legend_generator.py @@ -12,7 +12,7 @@ from PIL import Image from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import resp_headers +from datacube_ows.http_utils import resp_headers from datacube_ows.wms_utils import GetLegendGraphicParameters # Do not use X Server backend diff --git a/datacube_ows/ogc.py b/datacube_ows/ogc.py index b4e6bd81d..f662a0f10 100644 --- a/datacube_ows/ogc.py +++ b/datacube_ows/ogc.py @@ -14,8 +14,7 @@ from datacube_ows.cube_pool import cube from datacube_ows.legend_generator import create_legend_for_style from datacube_ows.ogc_exceptions import OGCException, WMSException -from datacube_ows.ogc_utils import (capture_headers, 
get_service_base_url, - lower_get_args, resp_headers) +from datacube_ows.http_utils import resp_headers, get_service_base_url, capture_headers, lower_get_args from datacube_ows.ows_configuration import get_config from datacube_ows.protocol_versions import supported_versions from datacube_ows.startup_utils import * # pylint: disable=wildcard-import,unused-wildcard-import @@ -65,7 +64,6 @@ ) - # Flask Routes diff --git a/datacube_ows/ogc_exceptions.py b/datacube_ows/ogc_exceptions.py index 84376acd6..ea5e15e8c 100644 --- a/datacube_ows/ogc_exceptions.py +++ b/datacube_ows/ogc_exceptions.py @@ -9,7 +9,7 @@ from ows.common.types import OWSException, Version from ows.common.v20.encoders import xml_encode_exception_report -from datacube_ows.ogc_utils import resp_headers +from datacube_ows.http_utils import resp_headers class OGCException(Exception): diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index 7fe394fe1..476462943 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -3,271 +3,18 @@ # # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -import datetime import logging from io import BytesIO -from typing import Any, Optional, Sequence, cast -from urllib.parse import urlparse +from typing import Any, cast import numpy import xarray from affine import Affine -from dateutil.parser import parse -from flask import request, Request from odc.geo.geobox import GeoBox -from odc.geo.geom import CRS, Geometry +from odc.geo.geom import CRS from PIL import Image -from pytz import timezone, utc -from timezonefinder import TimezoneFinder - -from datacube.model import Dataset -from datacube_ows.config_utils import OWSExtensibleConfigEntry _LOG: logging.Logger = logging.getLogger(__name__) -tf = TimezoneFinder(in_memory=True) - - -def dataset_center_time(dataset: Dataset) -> datetime.datetime: - """ - Determine a center_time for the dataset - - Use metadata time if possible as this is what WMS uses to calculate 
its temporal extents - datacube-core center time accessed through the dataset API is calculated and may - not agree with the metadata document. - - :param dataset: An ODC dataset. - :return: A datetime object representing the datasets center time - """ - center_time: datetime.datetime = dataset.center_time - try: - metadata_time: str = dataset.metadata_doc['extent']['center_dt'] - center_time = parse(metadata_time) - except KeyError: - try: - metadata_time = dataset.metadata_doc['properties']['dtr:start_datetime'] - center_time = parse(metadata_time) - except KeyError: - pass - return center_time - - -class NoTimezoneException(Exception): - """Exception, raised internally if no timezone can be found""" - - -def solar_date(dt: datetime.datetime, tz: datetime.tzinfo) -> datetime.date: - """ - Convert a datetime to a new timezone, and evalute as a date. - - :param dt: A datetime in an aribitrary timezone. - :param tz: The timezone to evaluate the date in. - :return: A date object. - """ - return dt.astimezone(tz).date() - - -def local_date(ds: Dataset, tz: datetime.tzinfo | None = None) -> datetime.date: - """ - Calculate the local (solar) date for a dataset. - - :param ds: An ODC dataset object - :param tz: (optional) A timezone object. If not provided, determine the timezone from extent of the dataset. - :return: A date object. 
- """ - dt_utc: datetime.datetime = dataset_center_time(ds) - if not tz: - tz = tz_for_geometry(ds.extent) - return solar_date(dt_utc, tz) - - -def tz_for_dataset(ds: Dataset) -> datetime.tzinfo: - """ - Determine the timezone for a dataset (using it's extent) - - :param ds: An ODC dataset object - :return: A timezone object - """ - return tz_for_geometry(ds.extent) - - -def tz_for_coord(lon: float | int, lat: float | int) -> datetime.tzinfo: - """ - Determine the Timezone for given lat/long coordinates - - :param lon: Longitude, in degress - :param lat: Latitude, in degrees - :return: A timezone object - :raises: NoTimezoneException - """ - try: - tzn: Optional[str] = tf.timezone_at(lng=lon, lat=lat) - except Exception as e: - # Generally shouldn't happen - a common symptom of various geographic and timezone related bugs - _LOG.warning("Timezone detection failed for lat %f, lon %s (%s)", lat, lon, str(e)) - raise - if not tzn: - raise NoTimezoneException("tz find failed.") - return timezone(tzn) - - -def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: - """ - Converts a date to a local solar date datetime range. - - :param geobox: Geometry used to determine the appropriate timezone for local date conversion - :param date: A date object - :return: A tuple of two UTC datetime objects, spanning 1 second shy of 24 hours. - """ - tz: datetime.tzinfo = tz_for_geometry(geobox.geographic_extent) - start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0, tzinfo=tz) - end = datetime.datetime(date.year, date.month, date.day, 23, 59, 59, tzinfo=tz) - return (start.astimezone(utc), end.astimezone(utc)) - - -def month_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: - """ - Take a month from a date and convert to a one month long UTC datetime range encompassing the month. 
- - Ignores timezone effects - suitable for statistical/summary data - - :param date: A date or datetime object to take the month and year from - :return: A tuple of two UTC datetime objects, delimiting an entire calendar month. - """ - start = datetime.datetime(date.year, date.month, 1, 0, 0, 0, tzinfo=utc) - y: int = date.year - m: int = date.month + 1 - if m == 13: - m = 1 - y = y + 1 - end = datetime.datetime(y, m, 1, 0, 0, 0, tzinfo=utc) - datetime.timedelta(days=1) - return start, end - - -def year_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: - """ - Convert a date to a UTC datetime range encompassing the calendar year including the date. - - Ignores timezone effects - suitable for statistical/summary data - - :param date: A date or datetime object to take the year from - :return: A tuple of two UTC datetime objects, delimiting an entire calendar year. - """ - start = datetime.datetime(date.year, 1, 1, 0, 0, 0, tzinfo=utc) - end = datetime.datetime(date.year, 12, 31, 23, 59, 59, tzinfo=utc) - return start, end - - -def day_summary_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: - """ - Convert a date to a UTC datetime range encompassing the calendar date. - - Ignores timezone effects - suitable for statistical/summary data - - :param date: A date or datetime object to take the day, month and year from - :return: A tuple of two UTC datetime objects, delimiting a calendar day. - """ - start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0, tzinfo=utc) - end = datetime.datetime(date.year, date.month, date.day, 23, 59, 59, tzinfo=utc) - return start, end - - -def tz_for_geometry(geom: Geometry) -> datetime.tzinfo: - """ - Determine the timezone from a geometry. Be clever if we can, - otherwise use a minimal timezone based on the longitude. 
- - :param geom: A geometry object - :return: A timezone object - """ - crs_geo = CRS("EPSG:4326") - geo_geom: Geometry = geom.to_crs(crs_geo) - centroid: Geometry = geo_geom.centroid - try: - # 1. Try being smart with the centroid of the geometry - return tz_for_coord(centroid.coords[0][0], centroid.coords[0][1]) - except NoTimezoneException: - pass - for pt in geo_geom.boundary.coords: - try: - # 2. Try being smart all the points in the geometry - return tz_for_coord(pt[0], pt[1]) - except NoTimezoneException: - pass - # 3. Meh, just use longitude - offset = round(centroid.coords[0][0] / 15.0) - return datetime.timezone(datetime.timedelta(hours=offset)) - - -def resp_headers(d: dict[str, str]) -> dict[str, str]: - """ - Take a dictionary of http response headers and all required response headers from the configuration. - - :param d: - :return: - """ - from datacube_ows.ows_configuration import get_config - return get_config().response_headers(d) - - -def parse_for_base_url(url: str) -> str: - """ - Extract the base URL from a URL - - :param url: A URL - :return: The base URL (path and parameters stripped) - """ - parsed = urlparse(url) - parsed = (parsed.netloc + parsed.path).rstrip("/") - return parsed - - -def get_service_base_url(allowed_urls: list[str] | str, request_url: str) -> str: - """ - Choose the base URL to advertise in XML. - - :param allowed_urls: A list of allowed URLs, or a single allowed URL. - :param request_url: The URL the incoming request came from - :return: Return one of the allowed URLs. 
Either one that seems to match the request, or the first in the list - """ - if isinstance(allowed_urls, str): - return allowed_urls - parsed_request_url = parse_for_base_url(request_url) - parsed_allowed_urls = [parse_for_base_url(u) for u in allowed_urls] - try: - idx: Optional[int] = parsed_allowed_urls.index(parsed_request_url) - except ValueError: - idx = None - url = allowed_urls[idx] if idx is not None else allowed_urls[0] - # template includes tailing /, strip any trail slash here to avoid duplicates - url = url.rstrip("/") - return url - - -# Collects additional headers from flask request objects -def capture_headers(req: Request, - args_dict: dict[str, str | None]) -> dict[str, Optional[str]]: - """ - Capture significant flask metadata into the args dictionary - - :param req: A Flask request - :param args_dict: A Flask args dictionary - :return: - """ - args_dict['referer'] = req.headers.get('Referer', None) - args_dict['origin'] = req.headers.get('Origin', None) - args_dict['requestid'] = req.environ.get("FLASK_REQUEST_ID") - args_dict['host'] = req.headers.get('Host', None) - args_dict['url_root'] = req.url_root - - return args_dict - - -def cache_control_headers(max_age: int) -> dict[str, str]: - if max_age <= 0: - return {"cache-control": "no-cache"} - else: - return {"cache-control": f"max-age={max_age}"} - # Extent Mask Functions @@ -348,15 +95,6 @@ def mask_by_nan(data: xarray.Dataset, band: str) -> numpy.ndarray: # Example mosaic date function -def rolling_window_ndays( - available_dates: Sequence[datetime.datetime], - layer_cfg: OWSExtensibleConfigEntry, - ndays: int = 6) -> tuple[datetime.datetime, datetime.datetime]: - idx = -ndays - days = available_dates[idx:] - start, _ = layer_cfg.search_times(days[idx]) - _, end = layer_cfg.search_times(days[-1]) - return (start, end) # Sub-product extractors - Subproducts are currently unsupported @@ -369,22 +107,6 @@ def rolling_window_ndays( # Method for formatting urls, e.g. 
for use in feature_info custom inclusions. -def lower_get_args() -> dict[str, str]: - """ - Return Flask request arguments, with argument names converted to lower case. - - Get parameters in WMS are case-insensitive, and intended to be single use. - Spec does not specify which instance should be used if a parameter is provided more than once. - This function uses the LAST instance. - """ - d = {} - for k in request.args.keys(): - kl = k.lower() - for v in request.args.getlist(k): - d[kl] = v - return d - - def create_geobox( crs: CRS, minx: float | int, miny: float | int, @@ -478,6 +200,7 @@ def xarray_image_as_png(img_data, loop_over=None, animate=False, frame_duration= img_io.seek(0) return img_io.read() + def render_frame(img_data, width, height): """Render to a 3D numpy array an Xarray RGB(A) input diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index b41741bff..02d8b80f8 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -40,7 +40,8 @@ get_file_loc, import_python_obj, load_json_obj, ConfigException, FunctionWrapper, CFG_DICT, RAW_CFG) from datacube_ows.cube_pool import ODCInitException, cube, get_cube -from datacube_ows.ogc_utils import create_geobox, local_solar_date_range +from datacube_ows.ogc_utils import create_geobox +from datacube_ows.time_utils import local_solar_date_range from datacube_ows.resource_limits import OWSResourceManagementRules, parse_cache_age from datacube_ows.styles import StyleDef from datacube_ows.tile_matrix_sets import TileMatrixSet diff --git a/datacube_ows/resource_limits.py b/datacube_ows/resource_limits.py index cd4f756b4..210bbfc2f 100644 --- a/datacube_ows/resource_limits.py +++ b/datacube_ows/resource_limits.py @@ -12,8 +12,8 @@ from odc.geo.geom import CRS, polygon from datacube_ows.config_utils import CFG_DICT, RAW_CFG, OWSConfigEntry, ConfigException -from datacube_ows.ogc_utils import (cache_control_headers, - create_geobox) +from 
datacube_ows.ogc_utils import (create_geobox) +from datacube_ows.http_utils import cache_control_headers TYPE_CHECKING = False if TYPE_CHECKING: diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py new file mode 100644 index 000000000..581713fed --- /dev/null +++ b/datacube_ows/time_utils.py @@ -0,0 +1,202 @@ +# This file is part of datacube-ows, part of the Open Data Cube project. +# See https://opendatacube.org for more information. +# +# Copyright (c) 2017-2023 OWS Contributors +# SPDX-License-Identifier: Apache-2.0 +import logging +import datetime +from typing import Optional, Sequence + +from datacube.model import Dataset +from dateutil.parser import parse +from odc.geo import Geometry, CRS +from odc.geo.geobox import GeoBox +from pytz import timezone, utc +from timezonefinder import TimezoneFinder + +from datacube_ows.config_utils import OWSExtensibleConfigEntry + +_LOG: logging.Logger = logging.getLogger(__name__) +tf = TimezoneFinder(in_memory=True) + + +class NoTimezoneException(Exception): + """Exception, raised internally if no timezone can be found""" + + +def dataset_center_time(dataset: Dataset) -> datetime.datetime: + """ + Determine a center_time for the dataset + + Use metadata time if possible as this is what WMS uses to calculate its temporal extents + datacube-core center time accessed through the dataset API is calculated and may + not agree with the metadata document. + + :param dataset: An ODC dataset. 
+ :return: A datetime object representing the dataset's center time + """ + center_time: datetime.datetime = dataset.center_time + try: + metadata_time: str = dataset.metadata_doc['extent']['center_dt'] + center_time = parse(metadata_time) + except KeyError: + try: + metadata_time = dataset.metadata_doc['properties']['dtr:start_datetime'] + center_time = parse(metadata_time) + except KeyError: + pass + return center_time + + +def solar_date(dt: datetime.datetime, tz: datetime.tzinfo) -> datetime.date: + """ + Convert a datetime to a new timezone, and evaluate as a date. + + :param dt: A datetime in an arbitrary timezone. + :param tz: The timezone to evaluate the date in. + :return: A date object. + """ + return dt.astimezone(tz).date() + + +def local_date(ds: Dataset, tz: datetime.tzinfo | None = None) -> datetime.date: + """ + Calculate the local (solar) date for a dataset. + + :param ds: An ODC dataset object + :param tz: (optional) A timezone object. If not provided, determine the timezone from extent of the dataset. + :return: A date object. 
+ """ + dt_utc: datetime.datetime = dataset_center_time(ds) + if not tz: + tz = tz_for_geometry(ds.extent) + return solar_date(dt_utc, tz) + + +def tz_for_dataset(ds: Dataset) -> datetime.tzinfo: + """ + Determine the timezone for a dataset (using it's extent) + + :param ds: An ODC dataset object + :return: A timezone object + """ + return tz_for_geometry(ds.extent) + + +def tz_for_coord(lon: float | int, lat: float | int) -> datetime.tzinfo: + """ + Determine the Timezone for given lat/long coordinates + + :param lon: Longitude, in degress + :param lat: Latitude, in degrees + :return: A timezone object + :raises: NoTimezoneException + """ + try: + tzn: Optional[str] = tf.timezone_at(lng=lon, lat=lat) + except Exception as e: + # Generally shouldn't happen - a common symptom of various geographic and timezone related bugs + _LOG.warning("Timezone detection failed for lat %f, lon %s (%s)", lat, lon, str(e)) + raise + if not tzn: + raise NoTimezoneException("tz find failed.") + return timezone(tzn) + + +def local_solar_date_range(geobox: GeoBox, date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: + """ + Converts a date to a local solar date datetime range. + + :param geobox: Geometry used to determine the appropriate timezone for local date conversion + :param date: A date object + :return: A tuple of two UTC datetime objects, spanning 1 second shy of 24 hours. + """ + tz: datetime.tzinfo = tz_for_geometry(geobox.geographic_extent) + start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0, tzinfo=tz) + end = datetime.datetime(date.year, date.month, date.day, 23, 59, 59, tzinfo=tz) + return (start.astimezone(utc), end.astimezone(utc)) + + +def month_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: + """ + Take a month from a date and convert to a one month long UTC datetime range encompassing the month. 
+ + Ignores timezone effects - suitable for statistical/summary data + + :param date: A date or datetime object to take the month and year from + :return: A tuple of two UTC datetime objects, delimiting an entire calendar month. + """ + start = datetime.datetime(date.year, date.month, 1, 0, 0, 0, tzinfo=utc) + y: int = date.year + m: int = date.month + 1 + if m == 13: + m = 1 + y = y + 1 + end = datetime.datetime(y, m, 1, 0, 0, 0, tzinfo=utc) - datetime.timedelta(days=1) + return start, end + + +def year_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: + """ + Convert a date to a UTC datetime range encompassing the calendar year including the date. + + Ignores timezone effects - suitable for statistical/summary data + + :param date: A date or datetime object to take the year from + :return: A tuple of two UTC datetime objects, delimiting an entire calendar year. + """ + start = datetime.datetime(date.year, 1, 1, 0, 0, 0, tzinfo=utc) + end = datetime.datetime(date.year, 12, 31, 23, 59, 59, tzinfo=utc) + return start, end + + +def day_summary_date_range(date: datetime.date) -> tuple[datetime.datetime, datetime.datetime]: + """ + Convert a date to a UTC datetime range encompassing the calendar date. + + Ignores timezone effects - suitable for statistical/summary data + + :param date: A date or datetime object to take the day, month and year from + :return: A tuple of two UTC datetime objects, delimiting a calendar day. + """ + start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0, tzinfo=utc) + end = datetime.datetime(date.year, date.month, date.day, 23, 59, 59, tzinfo=utc) + return start, end + + +def tz_for_geometry(geom: Geometry) -> datetime.tzinfo: + """ + Determine the timezone from a geometry. Be clever if we can, + otherwise use a minimal timezone based on the longitude. 
+ + :param geom: A geometry object + :return: A timezone object + """ + crs_geo = CRS("EPSG:4326") + geo_geom: Geometry = geom.to_crs(crs_geo) + centroid: Geometry = geo_geom.centroid + try: + # 1. Try being smart with the centroid of the geometry + return tz_for_coord(centroid.coords[0][0], centroid.coords[0][1]) + except NoTimezoneException: + pass + for pt in geo_geom.boundary.coords: + try: + # 2. Try being smart with all the points in the geometry + return tz_for_coord(pt[0], pt[1]) + except NoTimezoneException: + pass + # 3. Meh, just use longitude + offset = round(centroid.coords[0][0] / 15.0) + return datetime.timezone(datetime.timedelta(hours=offset)) + + +def rolling_window_ndays( + available_dates: Sequence[datetime.datetime], + layer_cfg: OWSExtensibleConfigEntry, + ndays: int = 6) -> tuple[datetime.datetime, datetime.datetime]: + idx = -ndays + days = available_dates[idx:] + start, _ = layer_cfg.search_times(days[idx]) + _, end = layer_cfg.search_times(days[-1]) + return (start, end) diff --git a/datacube_ows/wcs1.py b/datacube_ows/wcs1.py index 9db8fe4ca..2eaedc3b1 100644 --- a/datacube_ows/wcs1.py +++ b/datacube_ows/wcs1.py @@ -5,9 +5,8 @@ # SPDX-License-Identifier: Apache-2.0 from flask import render_template -from datacube_ows.data import json_response from datacube_ows.ogc_exceptions import WCS1Exception -from datacube_ows.ogc_utils import cache_control_headers, get_service_base_url +from datacube_ows.http_utils import get_service_base_url, cache_control_headers, json_response from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.utils import log_call diff --git a/datacube_ows/wcs2.py b/datacube_ows/wcs2.py index 2c4ddeb37..b28096e93 100644 --- a/datacube_ows/wcs2.py +++ b/datacube_ows/wcs2.py @@ -16,10 +16,8 @@ kvp_decode_get_coverage) from ows.wcs.v21 import encoders as encoders_v21 -from datacube_ows.data import json_response from datacube_ows.ogc_exceptions import WCS2Exception 
-from datacube_ows.ogc_utils import (cache_control_headers, - get_service_base_url, resp_headers) +from datacube_ows.http_utils import resp_headers, get_service_base_url, cache_control_headers, json_response from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.utils import log_call diff --git a/datacube_ows/wms.py b/datacube_ows/wms.py index 3ce5b3eae..b05f305b0 100644 --- a/datacube_ows/wms.py +++ b/datacube_ows/wms.py @@ -5,10 +5,11 @@ # SPDX-License-Identifier: Apache-2.0 from flask import render_template -from datacube_ows.data import feature_info, get_map +from datacube_ows.data import get_map +from datacube_ows.feature_info import feature_info from datacube_ows.legend_generator import legend_graphic from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import cache_control_headers, get_service_base_url +from datacube_ows.http_utils import get_service_base_url, cache_control_headers from datacube_ows.ows_configuration import get_config from datacube_ows.utils import log_call diff --git a/datacube_ows/wmts.py b/datacube_ows/wmts.py index da3edefb9..a5f7419b4 100644 --- a/datacube_ows/wmts.py +++ b/datacube_ows/wmts.py @@ -7,9 +7,10 @@ from flask import render_template -from datacube_ows.data import feature_info, get_map +from datacube_ows.data import get_map +from datacube_ows.feature_info import feature_info from datacube_ows.ogc_exceptions import WMSException, WMTSException -from datacube_ows.ogc_utils import cache_control_headers, get_service_base_url +from datacube_ows.http_utils import get_service_base_url, cache_control_headers from datacube_ows.ows_configuration import get_config from datacube_ows.utils import log_call diff --git a/integration_tests/test_mv_index.py b/integration_tests/test_mv_index.py index 99f1d640e..5d423c712 100644 --- a/integration_tests/test_mv_index.py +++ b/integration_tests/test_mv_index.py @@ -8,7 +8,7 @@ from 
datacube_ows.cube_pool import cube from datacube_ows.mv_index import MVSelectOpts, mv_search -from datacube_ows.ogc_utils import local_solar_date_range +from datacube_ows.time_utils import local_solar_date_range from datacube_ows.ows_configuration import get_config diff --git a/tests/test_data.py b/tests/test_data.py index 290e59cf9..855ec667d 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -12,7 +12,8 @@ from xarray import Dataset import datacube_ows.data -from datacube_ows.data import get_s3_browser_uris +import datacube_ows.feature_info +from datacube_ows.feature_info import get_s3_browser_uris from datacube_ows.loading import ProductBandQuery, DataStacker from datacube_ows.ogc_exceptions import WMSException from tests.test_styles import product_layer # noqa: F401 @@ -135,7 +136,7 @@ def __init__(self): "fake": fake_style() } - band_dict = datacube_ows.data._make_derived_band_dict(fake_dataset(), style_dict) + band_dict = datacube_ows.feature_info._make_derived_band_dict(fake_dataset(), style_dict) assert band_dict["fake"] == "n/a" @@ -162,7 +163,7 @@ def __init__(self): "fake": fake_style() } - band_dict = datacube_ows.data._make_derived_band_dict(fake_dataset(), style_dict) + band_dict = datacube_ows.feature_info._make_derived_band_dict(fake_dataset(), style_dict) assert band_dict["fake"] == 10.10 @@ -186,7 +187,7 @@ def __getitem__(self, key): bands = ["fake"] - band_dict = datacube_ows.data._make_band_dict(product_layer, fake_dataset()) + band_dict = datacube_ows.feature_info._make_band_dict(product_layer, fake_dataset()) assert band_dict["fake"] == "n/a" @@ -232,13 +233,13 @@ def __getitem__(self, key): bands = ["fake"] - band_dict = datacube_ows.data._make_band_dict(product_layer, int_dataset()) + band_dict = datacube_ows.feature_info._make_band_dict(product_layer, int_dataset()) assert isinstance(band_dict["fake"], dict) assert band_dict["fake"] == { "Mask image as provided by JAXA - Ocean and water, lay over, shadowing, land.": 'lay_over' } - 
band_dict = datacube_ows.data._make_band_dict(product_layer, float_dataset()) + band_dict = datacube_ows.feature_info._make_band_dict(product_layer, float_dataset()) assert isinstance(band_dict["fake"], dict) assert band_dict["fake"] == { "Mask image as provided by JAXA - Ocean and water, lay over, shadowing, land.": 'lay_over' diff --git a/tests/test_ogc_utils.py b/tests/test_ogc_utils.py index 474d0b0f9..0bce0bfca 100644 --- a/tests/test_ogc_utils.py +++ b/tests/test_ogc_utils.py @@ -11,7 +11,9 @@ from odc.geo.geom import polygon from pytz import utc +import datacube_ows.http_utils import datacube_ows.ogc_utils +import datacube_ows.time_utils import datacube_ows.utils from tests.utils import dummy_da @@ -23,7 +25,7 @@ def __init__(self, meta): def test_dataset_center_time(): - dct = datacube_ows.ogc_utils.dataset_center_time + dct = datacube_ows.time_utils.dataset_center_time ds = DSCT({}) assert dct(ds).year == 1970 ds = DSCT({ @@ -61,28 +63,28 @@ def dummy_ds(): return ds def test_tz_for_dataset(dummy_ds): - ret = datacube_ows.ogc_utils.tz_for_dataset(dummy_ds) + ret = datacube_ows.time_utils.tz_for_dataset(dummy_ds) assert ret.zone == "Australia/Sydney" def test_tz_bad_coords(): with pytest.raises(Exception) as e: - tzinf = datacube_ows.ogc_utils.tz_for_coord(-88.8, 155.2) + tzinf = datacube_ows.time_utils.tz_for_coord(-88.8, 155.2) def test_local_date(dummy_ds): - ld = datacube_ows.ogc_utils.local_date(dummy_ds) + ld = datacube_ows.time_utils.local_date(dummy_ds) assert ld.year == 2020 assert ld.day == 26 - tz = datacube_ows.ogc_utils.tz_for_dataset(dummy_ds) - ld = datacube_ows.ogc_utils.local_date(dummy_ds, tz) + tz = datacube_ows.time_utils.tz_for_dataset(dummy_ds) + ld = datacube_ows.time_utils.local_date(dummy_ds, tz) assert ld.year == 2020 assert ld.day == 26 def test_month_date_range_wrap(): d = datetime.date(2019, 12, 1) - a, b = datacube_ows.ogc_utils.month_date_range(d) + a, b = datacube_ows.time_utils.month_date_range(d) assert a == 
datetime.datetime(2019, 12, 1, 0, 0, 0, tzinfo=utc) assert b == datetime.datetime(2019, 12, 31, 0, 0, 0, tzinfo=utc) @@ -92,43 +94,43 @@ def test_get_service_base_url(): # not a list allowed_urls = "https://foo.hello.world" request_url = "https://foo.bar.baz" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.hello.world" # Value not in list allowed_urls = ["https://foo.hello.world", "https://alice.bob.eve"] request_url = "https://foo.bar.baz" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.hello.world" # Value in list allowed_urls = ["https://foo.hello.world", "https://foo.bar.baz", "https://alice.bob.eve"] request_url = "https://foo.bar.baz" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.bar.baz" # Trailing / allowed_urls = ["https://foo.bar.baz", "https://alice.bob.eve"] request_url = "https://foo.bar.baz/" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.bar.baz" # include path allowed_urls = ["https://foo.bar.baz", "https://foo.bar.baz/wms/"] request_url = "https://foo.bar.baz/wms/" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.bar.baz/wms" # use value from list instead of request allowed_urls = ["https://foo.bar.baz", "https://foo.bar.baz/wms/"] request_url = "http://foo.bar.baz/wms/" - ret = datacube_ows.ogc_utils.get_service_base_url(allowed_urls, request_url) + ret = 
datacube_ows.http_utils.get_service_base_url(allowed_urls, request_url) assert ret == "https://foo.bar.baz/wms" def test_parse_for_base_url(): url = "https://hello.world.bar:8000/wms/?CheckSomething" - ret = datacube_ows.ogc_utils.parse_for_base_url(url) + ret = datacube_ows.http_utils.parse_for_base_url(url) assert ret == "hello.world.bar:8000/wms" @@ -248,7 +250,7 @@ def test_mask_by_nan(): def test_rolling_window(): - from datacube_ows.ogc_utils import rolling_window_ndays + from datacube_ows.time_utils import rolling_window_ndays class DummyLayer: def search_times(self, d): @@ -274,7 +276,7 @@ def search_times(self, d): def test_day_summary_date_range(): - start, end = datacube_ows.ogc_utils.day_summary_date_range(datetime.date(2015, 5, 12)) + start, end = datacube_ows.time_utils.day_summary_date_range(datetime.date(2015, 5, 12)) assert start == datetime.datetime(2015, 5, 12, 0, 0, 0, tzinfo=utc) assert end == datetime.datetime(2015, 5, 12, 23, 59, 59, tzinfo=utc) From 2421fef72a90eaf499a5c902509fb0583c48beb8 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 19:57:05 +1000 Subject: [PATCH 17/29] Deprecate old path to rolling_windows_ndays function - redirect to time_utils --- datacube_ows/http_utils.py | 18 ++++++++++++------ datacube_ows/ogc_utils.py | 23 +++++++++++++++++++++-- datacube_ows/time_utils.py | 2 +- integration_tests/cfg/ows_test_cfg.py | 2 +- 4 files changed, 35 insertions(+), 10 deletions(-) diff --git a/datacube_ows/http_utils.py b/datacube_ows/http_utils.py index 481576c05..421eac351 100644 --- a/datacube_ows/http_utils.py +++ b/datacube_ows/http_utils.py @@ -4,16 +4,19 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 import json +from typing import Optional from urllib.parse import urlparse - from flask import Request, request, render_template - from datacube_ows.config_utils import CFG_DICT -from datacube_ows.ows_configuration import OWSConfig, get_config + +TYPE_CHECKING = False +if 
TYPE_CHECKING: + from datacube_ows.ows_configuration import OWSConfig FlaskResponse = tuple[str | bytes, int, dict[str, str]] + def resp_headers(d: dict[str, str]) -> dict[str, str]: """ Take a dictionary of http response headers and all required response headers from the configuration. @@ -100,14 +103,16 @@ def lower_get_args() -> dict[str, str]: return d -def json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: +def json_response(result: CFG_DICT, cfg: Optional["OWSConfig"] = None) -> FlaskResponse: + from datacube_ows.ows_configuration import get_config if not cfg: cfg = get_config() assert cfg is not None # for type checker return json.dumps(result), 200, cfg.response_headers({"Content-Type": "application/json"}) -def html_json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskResponse: +def html_json_response(result: CFG_DICT, cfg: Optional["OWSConfig"] = None) -> FlaskResponse: + from datacube_ows.ows_configuration import get_config if not cfg: cfg = get_config() assert cfg is not None # for type checker @@ -115,7 +120,8 @@ def html_json_response(result: CFG_DICT, cfg: OWSConfig | None = None) -> FlaskR return html_content, 200, cfg.response_headers({"Content-Type": "text/html"}) -def png_response(body: bytes, cfg: OWSConfig | None = None, extra_headers: dict[str, str] | None = None) -> FlaskResponse: +def png_response(body: bytes, cfg: Optional["OWSConfig"] = None, extra_headers: dict[str, str] | None = None) -> FlaskResponse: + from datacube_ows.ows_configuration import get_config if not cfg: cfg = get_config() assert cfg is not None # For type checker diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index 476462943..2dc477d90 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -4,19 +4,38 @@ # Copyright (c) 2017-2023 OWS Contributors # SPDX-License-Identifier: Apache-2.0 import logging +import datetime from io import BytesIO from typing import Any, cast - +from deprecat 
import deprecat import numpy import xarray from affine import Affine from odc.geo.geobox import GeoBox from odc.geo.geom import CRS from PIL import Image +TYPE_CHECKING = False +if TYPE_CHECKING: + from datacube_ows.config_utils import OWSExtensibleConfigEntry + _LOG: logging.Logger = logging.getLogger(__name__) -# Extent Mask Functions + +@deprecat( + reason="The 'rolling_window_ndays' mosaicing function has moved to 'datacube_ows.time_utils' - " + "please import it from there.", + version="1.9.0" +) +def rolling_window_ndays( + available_dates: list[datetime.datetime], + layer_cfg: "OWSExtensibleConfigEntry", + ndays: int = 6) -> tuple[datetime.datetime, datetime.datetime]: + from datacube_ows.time_utils import rolling_window_ndays + return rolling_window_ndays(available_dates=available_dates, + layer_cfg=layer_cfg, + ndays=ndays) + def mask_by_val(data: xarray.Dataset, band: str, val: Any = None) -> xarray.DataArray: """ diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py index 581713fed..562a91944 100644 --- a/datacube_ows/time_utils.py +++ b/datacube_ows/time_utils.py @@ -192,7 +192,7 @@ def tz_for_geometry(geom: Geometry) -> datetime.tzinfo: def rolling_window_ndays( - available_dates: Sequence[datetime.datetime], + available_dates: list[datetime.datetime], layer_cfg: OWSExtensibleConfigEntry, ndays: int = 6) -> tuple[datetime.datetime, datetime.datetime]: idx = -ndays diff --git a/integration_tests/cfg/ows_test_cfg.py b/integration_tests/cfg/ows_test_cfg.py index a85441e25..7d2f592cc 100644 --- a/integration_tests/cfg/ows_test_cfg.py +++ b/integration_tests/cfg/ows_test_cfg.py @@ -873,7 +873,7 @@ Latest imagery mosaic with no time dimension. 
""", "mosaic_date_func": { - "function": "datacube_ows.ogc_utils.rolling_window_ndays", + "function": "datacube_ows.time_utils.rolling_window_ndays", "pass_layer_cfg": True, "kwargs": { "ndays": 6, From febd90f3bd2a0d6d123713003190084d767025a9 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:01:59 +1000 Subject: [PATCH 18/29] Added mypy check. Removed isort check. --- .github/workflows/lint.yml | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f5a28b0c3..3a18e6679 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -51,7 +51,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.10 - run: python -m pip install flake8 - name: flake8 cleanup imported but unused uses: liskin/gh-problem-matcher-wrap@v3 @@ -60,19 +60,25 @@ jobs: run: | flake8 . --exclude Dockerfile --ignore=E501 --select=F401,E201,E202,E203,E502,E241,E225,E306,E231,E226,E123,F811 - isort: + mypy: runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10"] + name: MyPy steps: - - name: Checkout + - name: checkout git uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 with: - python-version: 3.9 - - run: python -m pip install isort - - name: isort - uses: liskin/gh-problem-matcher-wrap@v3 + fetch-depth: 0 + - name: Setup conda + uses: s-weigand/setup-conda@v1 with: - linters: isort - run: | - isort --check --diff . + update-conda: true + python-version: ${{ matrix.python-version }} + conda-channels: anaconda, conda-forge + - name: run mypy + run: | + sudo apt-get remove python3-openssl + pip install --upgrade -e '.[types]' + mypy datacube_ows From d0a4333999bcbd8efa76f756dc6a3f44ca13cf91 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:04:51 +1000 Subject: [PATCH 19/29] Updated licence headers. 
--- cfg_parser.py | 3 ++- datacube_ows/__init__.py | 3 ++- datacube_ows/band_utils.py | 3 ++- datacube_ows/cfg_parser_impl.py | 3 ++- datacube_ows/config_toolkit.py | 3 ++- datacube_ows/config_utils.py | 3 ++- datacube_ows/cube_pool.py | 3 ++- datacube_ows/data.py | 3 ++- datacube_ows/feature_info.py | 3 ++- datacube_ows/gunicorn_config.py | 3 ++- datacube_ows/http_utils.py | 1 + datacube_ows/legend_generator.py | 3 ++- datacube_ows/legend_utils.py | 3 ++- datacube_ows/loading.py | 6 ++++++ datacube_ows/mv_index.py | 3 ++- datacube_ows/ogc.py | 3 ++- datacube_ows/ogc_exceptions.py | 3 ++- datacube_ows/ogc_utils.py | 3 ++- datacube_ows/ows_cfg_example.py | 3 ++- datacube_ows/ows_configuration.py | 3 ++- datacube_ows/product_ranges.py | 3 ++- datacube_ows/protocol_versions.py | 3 ++- datacube_ows/query_profiler.py | 3 ++- datacube_ows/resource_limits.py | 3 ++- datacube_ows/startup_utils.py | 3 ++- datacube_ows/styles/__init__.py | 3 ++- datacube_ows/styles/api/__init__.py | 3 ++- datacube_ows/styles/api/base.py | 3 ++- datacube_ows/styles/base.py | 3 ++- datacube_ows/styles/colormap.py | 3 ++- datacube_ows/styles/component.py | 3 ++- datacube_ows/styles/expression.py | 3 ++- datacube_ows/styles/hybrid.py | 3 ++- datacube_ows/styles/ramp.py | 3 ++- datacube_ows/tile_matrix_sets.py | 3 ++- datacube_ows/time_utils.py | 3 ++- datacube_ows/update_ranges_impl.py | 3 ++- datacube_ows/utils.py | 3 ++- datacube_ows/wcs1.py | 3 ++- datacube_ows/wcs1_utils.py | 3 ++- datacube_ows/wcs2.py | 3 ++- datacube_ows/wcs2_utils.py | 3 ++- datacube_ows/wcs_scaler.py | 3 ++- datacube_ows/wcs_utils.py | 3 ++- datacube_ows/wms.py | 3 ++- datacube_ows/wms_utils.py | 3 ++- datacube_ows/wmts.py | 3 ++- datacube_ows/wsgi.py | 3 ++- docs/conf.py | 3 ++- integration_tests/__init__.py | 3 ++- integration_tests/cfg/__init__.py | 3 ++- integration_tests/cfg/ows_test_cfg.py | 3 ++- integration_tests/cfg/ows_test_cfg_bad.py | 3 ++- integration_tests/cfg/ows_test_cfg_no_i18n.py | 3 ++- 
integration_tests/cfg/utils.py | 3 ++- integration_tests/conftest.py | 3 ++- integration_tests/test_cfg_parser.py | 3 ++- integration_tests/test_cube_pool.py | 3 ++- integration_tests/test_i18n.py | 3 ++- integration_tests/test_layers.py | 3 ++- integration_tests/test_mv_index.py | 3 ++- integration_tests/test_routes.py | 3 ++- integration_tests/test_update_ranges.py | 3 ++- integration_tests/test_version.py | 3 ++- integration_tests/test_wcs_server.py | 3 ++- integration_tests/test_wms_server.py | 3 ++- integration_tests/test_wmts_server.py | 3 ++- integration_tests/utils.py | 3 ++- setup.py | 3 ++- tests/__init__.py | 3 ++- tests/cfg/__init__.py | 3 ++- tests/cfg/broken_nested.py | 3 ++- tests/cfg/minimal_cfg.py | 3 ++- tests/cfg/mixed_nested.py | 3 ++- tests/cfg/nested.py | 3 ++- tests/cfg/simple.py | 3 ++- tests/conftest.py | 3 ++- tests/test_band_utils.py | 3 ++- tests/test_cfg_bandidx.py | 3 ++- tests/test_cfg_cache_ctrl.py | 3 ++- tests/test_cfg_global.py | 3 ++- tests/test_cfg_inclusion.py | 3 ++- tests/test_cfg_layer.py | 3 ++- tests/test_cfg_metadata_types.py | 3 ++- tests/test_cfg_tile_matrix_set.py | 3 ++- tests/test_cfg_wcs.py | 3 ++- tests/test_config_toolkit.py | 3 ++- tests/test_data.py | 3 ++- tests/test_legend_generator.py | 3 ++- tests/test_mpl_cmaps.py | 3 ++- tests/test_multidate_handler.py | 3 ++- tests/test_mv_selopts.py | 3 ++- tests/test_no_db_routes.py | 3 ++- tests/test_ogc_utils.py | 3 ++- tests/test_ows_configuration.py | 3 ++- tests/test_protocol_versions.py | 3 ++- tests/test_pyproj.py | 3 ++- tests/test_qprof.py | 3 ++- tests/test_resource_limits.py | 3 ++- tests/test_startup.py | 3 ++- tests/test_style_api.py | 3 ++- tests/test_styles.py | 3 ++- tests/test_time_res_method.py | 3 ++- tests/test_utils.py | 3 ++- tests/test_wcs2_utils.py | 3 ++- tests/test_wcs_scaler.py | 3 ++- tests/test_wms_utils.py | 3 ++- tests/utils.py | 3 ++- update_ranges.py | 3 ++- 109 files changed, 221 insertions(+), 107 deletions(-) diff --git 
a/cfg_parser.py b/cfg_parser.py index 3818ba7c3..5dbd99872 100644 --- a/cfg_parser.py +++ b/cfg_parser.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.cfg_parser_impl import main from datacube_ows.startup_utils import initialise_debugging diff --git a/datacube_ows/__init__.py b/datacube_ows/__init__.py index 7c6ad5568..3e7649d48 100644 --- a/datacube_ows/__init__.py +++ b/datacube_ows/__init__.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + try: from ._version import version as __version__ except ImportError: diff --git a/datacube_ows/band_utils.py b/datacube_ows/band_utils.py index 97d27af75..9470b17f3 100644 --- a/datacube_ows/band_utils.py +++ b/datacube_ows/band_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from __future__ import division import numpy diff --git a/datacube_ows/cfg_parser_impl.py b/datacube_ows/cfg_parser_impl.py index 353aca80c..f84313a08 100755 --- a/datacube_ows/cfg_parser_impl.py +++ b/datacube_ows/cfg_parser_impl.py @@ -2,9 +2,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import json import os import sys diff --git a/datacube_ows/config_toolkit.py b/datacube_ows/config_toolkit.py index f8a4e736d..ef6cf525b 100644 --- a/datacube_ows/config_toolkit.py +++ b/datacube_ows/config_toolkit.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from copy import deepcopy from typing import Any diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index b478da912..ecd6f156e 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import json import logging import os diff --git a/datacube_ows/cube_pool.py b/datacube_ows/cube_pool.py index 2739a68aa..40043ac25 100644 --- a/datacube_ows/cube_pool.py +++ b/datacube_ows/cube_pool.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging from contextlib import contextmanager from threading import Lock diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 9f7c08d06..ab421708c 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging from datetime import date, datetime, timedelta from typing import cast, Any diff --git a/datacube_ows/feature_info.py b/datacube_ows/feature_info.py index 26613fdcc..fb8de0468 100644 --- a/datacube_ows/feature_info.py +++ b/datacube_ows/feature_info.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging import re from datetime import datetime diff --git a/datacube_ows/gunicorn_config.py b/datacube_ows/gunicorn_config.py index 4ddb23235..36152c67f 100644 --- a/datacube_ows/gunicorn_config.py +++ b/datacube_ows/gunicorn_config.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Gunicorn config for Prometheus internal metrics """ import os diff --git a/datacube_ows/http_utils.py b/datacube_ows/http_utils.py index 421eac351..2a2f70d6f 100644 --- a/datacube_ows/http_utils.py +++ b/datacube_ows/http_utils.py @@ -3,6 +3,7 @@ # # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import json from typing import Optional from urllib.parse import urlparse diff --git a/datacube_ows/legend_generator.py b/datacube_ows/legend_generator.py index 42f5445f0..8f772b5ba 100644 --- a/datacube_ows/legend_generator.py +++ b/datacube_ows/legend_generator.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import io import logging diff --git a/datacube_ows/legend_utils.py b/datacube_ows/legend_utils.py index 0d43abb63..58b20748c 100644 --- a/datacube_ows/legend_utils.py +++ b/datacube_ows/legend_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import io import logging from typing import Optional diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py index 9503b7ea5..e566b636a 100644 --- a/datacube_ows/loading.py +++ b/datacube_ows/loading.py @@ -1,3 +1,9 @@ +# This file is part of datacube-ows, part of the Open Data Cube project. +# See https://opendatacube.org for more information. +# +# Copyright (c) 2017-2024 OWS Contributors +# SPDX-License-Identifier: Apache-2.0 + from collections import OrderedDict import datetime diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index bbaa903d4..3c9e62679 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime import json from enum import Enum diff --git a/datacube_ows/ogc.py b/datacube_ows/ogc.py index f662a0f10..5eac95218 100644 --- a/datacube_ows/ogc.py +++ b/datacube_ows/ogc.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import sys import traceback from time import monotonic diff --git a/datacube_ows/ogc_exceptions.py b/datacube_ows/ogc_exceptions.py index ea5e15e8c..6e2be393a 100644 --- a/datacube_ows/ogc_exceptions.py +++ b/datacube_ows/ogc_exceptions.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import traceback as tb from flask import render_template diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index 2dc477d90..9a89c9e29 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging import datetime from io import BytesIO diff --git a/datacube_ows/ows_cfg_example.py b/datacube_ows/ows_cfg_example.py index 4cd14e80e..395927c5b 100644 --- a/datacube_ows/ows_cfg_example.py +++ b/datacube_ows/ows_cfg_example.py @@ -2,10 +2,11 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + # Example configuration file for datacube_ows. # # The file was originally the only documentation for the configuration file format. 
diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index 02d8b80f8..79fe62814 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + # # Note this is NOT the configuration file! # diff --git a/datacube_ows/product_ranges.py b/datacube_ows/product_ranges.py index 25020b03d..bd4e1570c 100644 --- a/datacube_ows/product_ranges.py +++ b/datacube_ows/product_ranges.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + #pylint: skip-file import math diff --git a/datacube_ows/protocol_versions.py b/datacube_ows/protocol_versions.py index 62f1b6c0a..a53627681 100644 --- a/datacube_ows/protocol_versions.py +++ b/datacube_ows/protocol_versions.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import re from typing import Callable, Mapping, Sequence, Tuple diff --git a/datacube_ows/query_profiler.py b/datacube_ows/query_profiler.py index 6cdbde486..0d0fa3fb2 100644 --- a/datacube_ows/query_profiler.py +++ b/datacube_ows/query_profiler.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from time import time diff --git a/datacube_ows/resource_limits.py b/datacube_ows/resource_limits.py index 210bbfc2f..96f22f508 100644 --- a/datacube_ows/resource_limits.py +++ b/datacube_ows/resource_limits.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import math from typing import Any, Iterable, Mapping, cast diff --git a/datacube_ows/startup_utils.py b/datacube_ows/startup_utils.py index a0d9a5a41..893cd2e03 100644 --- a/datacube_ows/startup_utils.py +++ b/datacube_ows/startup_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging import os import warnings diff --git a/datacube_ows/styles/__init__.py b/datacube_ows/styles/__init__.py index 9d2ce7bc6..bc7589e15 100644 --- a/datacube_ows/styles/__init__.py +++ b/datacube_ows/styles/__init__.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.styles.base import StyleDefBase # noqa: F401 from datacube_ows.styles.colormap import ColorMapStyleDef # noqa: F401 from datacube_ows.styles.component import ComponentStyleDef # noqa: F401 diff --git a/datacube_ows/styles/api/__init__.py b/datacube_ows/styles/api/__init__.py index 7267194e7..1b7c253ea 100644 --- a/datacube_ows/styles/api/__init__.py +++ b/datacube_ows/styles/api/__init__.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.styles.api.base import ( # noqa: F401 isort:skip StandaloneStyle, apply_ows_style, apply_ows_style_cfg, generate_ows_legend_style, generate_ows_legend_style_cfg, diff --git a/datacube_ows/styles/api/base.py b/datacube_ows/styles/api/base.py index 12384313c..455505306 100644 --- a/datacube_ows/styles/api/base.py +++ b/datacube_ows/styles/api/base.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import xarray from datacube_ows.startup_utils import initialise_ignorable_warnings diff --git a/datacube_ows/styles/base.py b/datacube_ows/styles/base.py index 9d7073132..a5fa2ba37 100644 --- a/datacube_ows/styles/base.py +++ b/datacube_ows/styles/base.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import io import logging from typing import (Any, Iterable, Mapping, MutableMapping, Optional, Sized, Type, Union, cast) diff --git a/datacube_ows/styles/colormap.py b/datacube_ows/styles/colormap.py index 7700ecc3b..5fbcfacc1 100644 --- a/datacube_ows/styles/colormap.py +++ b/datacube_ows/styles/colormap.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import io import logging from datetime import datetime diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 48774302d..7aa95c186 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from typing import Any, Callable, Hashable, cast import numpy as np diff --git a/datacube_ows/styles/expression.py b/datacube_ows/styles/expression.py index 22acf77c7..3985637b0 100644 --- a/datacube_ows/styles/expression.py +++ b/datacube_ows/styles/expression.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from typing import Any, Type, cast from xarray import Dataset diff --git a/datacube_ows/styles/hybrid.py b/datacube_ows/styles/hybrid.py index d6dddac31..0f5f3ddb1 100644 --- a/datacube_ows/styles/hybrid.py +++ b/datacube_ows/styles/hybrid.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from typing import cast from xarray import DataArray, Dataset diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index e8c2966b9..7b15f513b 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import io import logging from collections import defaultdict diff --git a/datacube_ows/tile_matrix_sets.py b/datacube_ows/tile_matrix_sets.py index 99870d098..ad9b3d2d7 100644 --- a/datacube_ows/tile_matrix_sets.py +++ b/datacube_ows/tile_matrix_sets.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from typing import cast, Type from datacube_ows.config_utils import OWSConfigEntry, ConfigException, CFG_DICT, RAW_CFG diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py index 562a91944..08af46d35 100644 --- a/datacube_ows/time_utils.py +++ b/datacube_ows/time_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging import datetime from typing import Optional, Sequence diff --git a/datacube_ows/update_ranges_impl.py b/datacube_ows/update_ranges_impl.py index 652a9b8ab..fefb91ec7 100755 --- a/datacube_ows/update_ranges_impl.py +++ b/datacube_ows/update_ranges_impl.py @@ -2,9 +2,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import importlib.resources import re import sys diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index d6f28d66c..6f8b44b73 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime import logging from functools import wraps diff --git a/datacube_ows/wcs1.py b/datacube_ows/wcs1.py index 2eaedc3b1..92c2bfe44 100644 --- a/datacube_ows/wcs1.py +++ b/datacube_ows/wcs1.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. 
# See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from flask import render_template from datacube_ows.ogc_exceptions import WCS1Exception diff --git a/datacube_ows/wcs1_utils.py b/datacube_ows/wcs1_utils.py index c17b562a1..2c19b9215 100644 --- a/datacube_ows/wcs1_utils.py +++ b/datacube_ows/wcs1_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import numpy import pytz import xarray diff --git a/datacube_ows/wcs2.py b/datacube_ows/wcs2.py index b28096e93..0a31d18b1 100644 --- a/datacube_ows/wcs2.py +++ b/datacube_ows/wcs2.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging from flask import request diff --git a/datacube_ows/wcs2_utils.py b/datacube_ows/wcs2_utils.py index 6f09f6e93..5ad659607 100644 --- a/datacube_ows/wcs2_utils.py +++ b/datacube_ows/wcs2_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import collections import logging diff --git a/datacube_ows/wcs_scaler.py b/datacube_ows/wcs_scaler.py index 3b77e1185..503ab75b8 100644 --- a/datacube_ows/wcs_scaler.py +++ b/datacube_ows/wcs_scaler.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from affine import Affine from odc.geo import geom as odc_geom diff --git a/datacube_ows/wcs_utils.py b/datacube_ows/wcs_utils.py index 3ada02eb7..9d70beb03 100644 --- a/datacube_ows/wcs_utils.py +++ b/datacube_ows/wcs_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.ogc_exceptions import WCS1Exception, WCS2Exception diff --git a/datacube_ows/wms.py b/datacube_ows/wms.py index b05f305b0..5bcc94cac 100644 --- a/datacube_ows/wms.py +++ b/datacube_ows/wms.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from flask import render_template from datacube_ows.data import get_map diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index 9e73de0eb..3cb5bd634 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import math from datetime import datetime diff --git a/datacube_ows/wmts.py b/datacube_ows/wmts.py index a5f7419b4..479aaa4a7 100644 --- a/datacube_ows/wmts.py +++ b/datacube_ows/wmts.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import logging from flask import render_template diff --git a/datacube_ows/wsgi.py b/datacube_ows/wsgi.py index 6c7623918..a3034107e 100644 --- a/datacube_ows/wsgi.py +++ b/datacube_ows/wsgi.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + #pylint: skip-file import os import sys diff --git a/docs/conf.py b/docs/conf.py index 6b461f468..70a7107ae 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,9 +3,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os import sys diff --git a/integration_tests/__init__.py b/integration_tests/__init__.py index bdebc4f4a..7f2605021 100644 --- a/integration_tests/__init__.py +++ b/integration_tests/__init__.py @@ -1,5 +1,6 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + diff --git a/integration_tests/cfg/__init__.py b/integration_tests/cfg/__init__.py index bdebc4f4a..7f2605021 100644 --- a/integration_tests/cfg/__init__.py +++ b/integration_tests/cfg/__init__.py @@ -1,5 +1,6 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + diff --git a/integration_tests/cfg/ows_test_cfg.py b/integration_tests/cfg/ows_test_cfg.py index 7d2f592cc..a1066f7bf 100644 --- a/integration_tests/cfg/ows_test_cfg.py +++ b/integration_tests/cfg/ows_test_cfg.py @@ -2,9 +2,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os if os.environ.get("DATACUBE_OWS_CFG", "").startswith("integration_tests"): diff --git a/integration_tests/cfg/ows_test_cfg_bad.py b/integration_tests/cfg/ows_test_cfg_bad.py index 38088fe45..7b1255e2a 100644 --- a/integration_tests/cfg/ows_test_cfg_bad.py +++ b/integration_tests/cfg/ows_test_cfg_bad.py @@ -2,10 +2,11 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + # THIS IS A TESTING FILE FOR TESTING ERROR HANDLING. # Do not use it as an example, it is deliberately invalid. # diff --git a/integration_tests/cfg/ows_test_cfg_no_i18n.py b/integration_tests/cfg/ows_test_cfg_no_i18n.py index 597aff074..65e37e7d7 100644 --- a/integration_tests/cfg/ows_test_cfg_no_i18n.py +++ b/integration_tests/cfg/ows_test_cfg_no_i18n.py @@ -2,10 +2,11 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + # THIS IS A TESTING FILE # Please refer to datacube_ows/ows_cfg_example.py for EXAMPLE CONFIG diff --git a/integration_tests/cfg/utils.py b/integration_tests/cfg/utils.py index 8056a43bd..454fea466 100644 --- a/integration_tests/cfg/utils.py +++ b/integration_tests/cfg/utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + def trivial_identity(x): return x diff --git a/integration_tests/conftest.py b/integration_tests/conftest.py index 682b51ae0..a72a52e29 100644 --- a/integration_tests/conftest.py +++ b/integration_tests/conftest.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os pytest_plugins = ["helpers_namespace"] diff --git a/integration_tests/test_cfg_parser.py b/integration_tests/test_cfg_parser.py index d98788459..1f7d3b5e1 100644 --- a/integration_tests/test_cfg_parser.py +++ b/integration_tests/test_cfg_parser.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os import pytest diff --git a/integration_tests/test_cube_pool.py b/integration_tests/test_cube_pool.py index 054c2ec70..96252b4be 100644 --- a/integration_tests/test_cube_pool.py +++ b/integration_tests/test_cube_pool.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube import Datacube from datacube_ows.cube_pool import get_cube diff --git a/integration_tests/test_i18n.py b/integration_tests/test_i18n.py index 35edb778a..e19e230a4 100644 --- a/integration_tests/test_i18n.py +++ b/integration_tests/test_i18n.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import requests diff --git a/integration_tests/test_layers.py b/integration_tests/test_layers.py index 1a1e5c2e4..fd55ddc65 100644 --- a/integration_tests/test_layers.py +++ b/integration_tests/test_layers.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os from datacube_ows.cube_pool import cube diff --git a/integration_tests/test_mv_index.py b/integration_tests/test_mv_index.py index 5d423c712..1ddad0823 100644 --- a/integration_tests/test_mv_index.py +++ b/integration_tests/test_mv_index.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. 
# See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import pytest from odc.geo.geom import box diff --git a/integration_tests/test_routes.py b/integration_tests/test_routes.py index 84fb75b29..5c89fb460 100644 --- a/integration_tests/test_routes.py +++ b/integration_tests/test_routes.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Run with DB to simulate actual function """ diff --git a/integration_tests/test_update_ranges.py b/integration_tests/test_update_ranges.py index 35bdb9ac8..603b3024c 100644 --- a/integration_tests/test_update_ranges.py +++ b/integration_tests/test_update_ranges.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Test update ranges on DB using Click testing https://click.palletsprojects.com/en/7.x/testing/ """ diff --git a/integration_tests/test_version.py b/integration_tests/test_version.py index dc63df857..d6e98a75b 100644 --- a/integration_tests/test_version.py +++ b/integration_tests/test_version.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Test update ranges on DB using Click testing https://click.palletsprojects.com/en/7.x/testing/ """ diff --git a/integration_tests/test_wcs_server.py b/integration_tests/test_wcs_server.py index 4d5d5c8c4..9e3000351 100644 --- a/integration_tests/test_wcs_server.py +++ b/integration_tests/test_wcs_server.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from urllib import request import pytest diff --git a/integration_tests/test_wms_server.py b/integration_tests/test_wms_server.py index a75e8a259..54bfa3b47 100644 --- a/integration_tests/test_wms_server.py +++ b/integration_tests/test_wms_server.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from urllib import request import pytest diff --git a/integration_tests/test_wmts_server.py b/integration_tests/test_wmts_server.py index 5b17af940..84baf24d2 100644 --- a/integration_tests/test_wmts_server.py +++ b/integration_tests/test_wmts_server.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from urllib import request import pytest diff --git a/integration_tests/utils.py b/integration_tests/utils.py index 391984686..47cf548fd 100644 --- a/integration_tests/utils.py +++ b/integration_tests/utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import enum from odc.geo.geom import BoundingBox, Geometry, point diff --git a/setup.py b/setup.py index ef5cb8cee..22827bb71 100644 --- a/setup.py +++ b/setup.py @@ -3,9 +3,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from setuptools import find_packages, setup install_requirements = [ diff --git a/tests/__init__.py b/tests/__init__.py index f39503754..2c8b1f916 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -2,5 +2,6 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + diff --git a/tests/cfg/__init__.py b/tests/cfg/__init__.py index f39503754..2c8b1f916 100644 --- a/tests/cfg/__init__.py +++ b/tests/cfg/__init__.py @@ -2,5 +2,6 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + diff --git a/tests/cfg/broken_nested.py b/tests/cfg/broken_nested.py index 188d687e7..59bd3c332 100644 --- a/tests/cfg/broken_nested.py +++ b/tests/cfg/broken_nested.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + mixed_3 = { "test": 2634, "subtest": { diff --git a/tests/cfg/minimal_cfg.py b/tests/cfg/minimal_cfg.py index 8abe45790..3d4c6bfc7 100644 --- a/tests/cfg/minimal_cfg.py +++ b/tests/cfg/minimal_cfg.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + ows_cfg = { "global": { "title": "Minimal test config", diff --git a/tests/cfg/mixed_nested.py b/tests/cfg/mixed_nested.py index 995ab2776..db33ab055 100644 --- a/tests/cfg/mixed_nested.py +++ b/tests/cfg/mixed_nested.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + mixed_1 = { "include": "tests/cfg/simple.json", "type": "json" diff --git a/tests/cfg/nested.py b/tests/cfg/nested.py index 3e874b1f7..e38e7d17f 100644 --- a/tests/cfg/nested.py +++ b/tests/cfg/nested.py @@ -1,10 +1,11 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + nested = { "include": "tests.cfg.simple.simple", "type": "python", diff --git a/tests/cfg/simple.py b/tests/cfg/simple.py index 91e234212..ba217b802 100644 --- a/tests/cfg/simple.py +++ b/tests/cfg/simple.py @@ -1,10 +1,11 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + simple = { "test": 123, } diff --git a/tests/conftest.py b/tests/conftest.py index fce5023f4..65bbafc8d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock diff --git a/tests/test_band_utils.py b/tests/test_band_utils.py index 4c5f60118..e267a954f 100644 --- a/tests/test_band_utils.py +++ b/tests/test_band_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Test band math utilities """ import numpy as np diff --git a/tests/test_cfg_bandidx.py b/tests/test_cfg_bandidx.py index 1611be831..17cfd8637 100644 --- a/tests/test_cfg_bandidx.py +++ b/tests/test_cfg_bandidx.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_cfg_cache_ctrl.py b/tests/test_cfg_cache_ctrl.py index 95d0b1462..e030a4e7f 100644 --- a/tests/test_cfg_cache_ctrl.py +++ b/tests/test_cfg_cache_ctrl.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_cfg_global.py b/tests/test_cfg_global.py index ccf0c38b7..f6cd21b4f 100644 --- a/tests/test_cfg_global.py +++ b/tests/test_cfg_global.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import pytest from datacube_ows.config_utils import ConfigException diff --git a/tests/test_cfg_inclusion.py b/tests/test_cfg_inclusion.py index 422ff65a2..f87587cbb 100644 --- a/tests/test_cfg_inclusion.py +++ b/tests/test_cfg_inclusion.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os import sys diff --git a/tests/test_cfg_layer.py b/tests/test_cfg_layer.py index cfd8e58d7..8d4a981be 100644 --- a/tests/test_cfg_layer.py +++ b/tests/test_cfg_layer.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime import math from unittest.mock import MagicMock, patch diff --git a/tests/test_cfg_metadata_types.py b/tests/test_cfg_metadata_types.py index ecf46ca66..9a42c8962 100644 --- a/tests/test_cfg_metadata_types.py +++ b/tests/test_cfg_metadata_types.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_cfg_tile_matrix_set.py b/tests/test_cfg_tile_matrix_set.py index 85b010010..93977e913 100644 --- a/tests/test_cfg_tile_matrix_set.py +++ b/tests/test_cfg_tile_matrix_set.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_cfg_wcs.py b/tests/test_cfg_wcs.py index fe7b2aad7..ce4eb5043 100644 --- a/tests/test_cfg_wcs.py +++ b/tests/test_cfg_wcs.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import patch import pytest diff --git a/tests/test_config_toolkit.py b/tests/test_config_toolkit.py index 9c19955b8..3d1c7bd56 100644 --- a/tests/test_config_toolkit.py +++ b/tests/test_config_toolkit.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. 
# See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.config_toolkit import deepinherit diff --git a/tests/test_data.py b/tests/test_data.py index 855ec667d..3de13d56c 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock diff --git a/tests/test_legend_generator.py b/tests/test_legend_generator.py index 1b48ae623..d08c519bb 100644 --- a/tests/test_legend_generator.py +++ b/tests/test_legend_generator.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from decimal import Decimal from unittest.mock import MagicMock diff --git a/tests/test_mpl_cmaps.py b/tests/test_mpl_cmaps.py index 80e6eecfa..a0756b3e7 100644 --- a/tests/test_mpl_cmaps.py +++ b/tests/test_mpl_cmaps.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """ Test creation of colour maps from matplotlib """ diff --git a/tests/test_multidate_handler.py b/tests/test_multidate_handler.py index ae584371e..8d0821a56 100644 --- a/tests/test_multidate_handler.py +++ b/tests/test_multidate_handler.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. 
# See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import numpy as np import pandas as pd import pytest diff --git a/tests/test_mv_selopts.py b/tests/test_mv_selopts.py index 6192189d9..68b80fb56 100644 --- a/tests/test_mv_selopts.py +++ b/tests/test_mv_selopts.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.mv_index import MVSelectOpts diff --git a/tests/test_no_db_routes.py b/tests/test_no_db_routes.py index bf55d1560..5190b59c0 100644 --- a/tests/test_no_db_routes.py +++ b/tests/test_no_db_routes.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + """Run with no DB to simulate connection failure """ import os diff --git a/tests/test_ogc_utils.py b/tests/test_ogc_utils.py index 0bce0bfca..ca4aa4c4c 100644 --- a/tests/test_ogc_utils.py +++ b/tests/test_ogc_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock diff --git a/tests/test_ows_configuration.py b/tests/test_ows_configuration.py index 5ede2d99c..1e2267b1b 100644 --- a/tests/test_ows_configuration.py +++ b/tests/test_ows_configuration.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. 
# See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_protocol_versions.py b/tests/test_protocol_versions.py index a3bbd711f..4906d45a1 100644 --- a/tests/test_protocol_versions.py +++ b/tests/test_protocol_versions.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import pytest import datacube_ows.protocol_versions diff --git a/tests/test_pyproj.py b/tests/test_pyproj.py index 98f1eaacb..3922565f4 100644 --- a/tests/test_pyproj.py +++ b/tests/test_pyproj.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from pyproj import CRS SUPPORTED_CRS = [ diff --git a/tests/test_qprof.py b/tests/test_qprof.py index a26be503f..e1ed6d306 100644 --- a/tests/test_qprof.py +++ b/tests/test_qprof.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.query_profiler import QueryProfiler diff --git a/tests/test_resource_limits.py b/tests/test_resource_limits.py index 8ff469051..68fc389ff 100644 --- a/tests/test_resource_limits.py +++ b/tests/test_resource_limits.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import pytest from odc.geo import CRS diff --git a/tests/test_startup.py b/tests/test_startup.py index 3fa7975ef..d0f6e8109 100644 --- a/tests/test_startup.py +++ b/tests/test_startup.py @@ -1,9 +1,10 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import os import sys from unittest.mock import MagicMock, patch diff --git a/tests/test_style_api.py b/tests/test_style_api.py index c950d453c..e837140df 100644 --- a/tests/test_style_api.py +++ b/tests/test_style_api.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from decimal import Decimal import pytest diff --git a/tests/test_styles.py b/tests/test_styles.py index 136772284..8af561268 100644 --- a/tests/test_styles.py +++ b/tests/test_styles.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock, patch diff --git a/tests/test_time_res_method.py b/tests/test_time_res_method.py index 1bbf8cdef..cc04dd47e 100644 --- a/tests/test_time_res_method.py +++ b/tests/test_time_res_method.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datetime import datetime import pytest diff --git a/tests/test_utils.py b/tests/test_utils.py index 5b3422761..3266bb5c2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock diff --git a/tests/test_wcs2_utils.py b/tests/test_wcs2_utils.py index a3029d8ad..77693371f 100644 --- a/tests/test_wcs2_utils.py +++ b/tests/test_wcs2_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from unittest.mock import MagicMock import pytest diff --git a/tests/test_wcs_scaler.py b/tests/test_wcs_scaler.py index 72b6d86eb..b4732ad4c 100644 --- a/tests/test_wcs_scaler.py +++ b/tests/test_wcs_scaler.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime import pytest diff --git a/tests/test_wms_utils.py b/tests/test_wms_utils.py index cbb8f756a..8a8d15450 100644 --- a/tests/test_wms_utils.py +++ b/tests/test_wms_utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. 
# -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime from unittest.mock import MagicMock diff --git a/tests/utils.py b/tests/utils.py index a95ebffb7..70d7638c4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + import datetime import numpy as np diff --git a/update_ranges.py b/update_ranges.py index f994f52e1..502b7d123 100644 --- a/update_ranges.py +++ b/update_ranges.py @@ -1,8 +1,9 @@ # This file is part of datacube-ows, part of the Open Data Cube project. # See https://opendatacube.org for more information. # -# Copyright (c) 2017-2023 OWS Contributors +# Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 + from datacube_ows.update_ranges_impl import main if __name__ == '__main__': From d799c8db20eb6931f81c73fe11ec2fcddb8b6b36 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 24 Apr 2024 10:15:00 +0000 Subject: [PATCH 20/29] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- datacube_ows/cfg_parser_impl.py | 3 +-- datacube_ows/config_utils.py | 6 +++--- datacube_ows/data.py | 14 ++++++-------- datacube_ows/feature_info.py | 12 +++++++----- datacube_ows/http_utils.py | 4 +++- datacube_ows/legend_generator.py | 2 +- datacube_ows/loading.py | 12 +++++------- datacube_ows/mv_index.py | 9 +++------ datacube_ows/ogc.py | 3 ++- datacube_ows/ogc_utils.py | 6 ++++-- datacube_ows/ows_configuration.py | 24 +++++++++++++----------- datacube_ows/resource_limits.py | 5 +++-- datacube_ows/styles/base.py | 8 +++++--- datacube_ows/styles/colormap.py | 5 ++++- 
datacube_ows/styles/component.py | 3 ++- datacube_ows/styles/expression.py | 5 +++-- datacube_ows/styles/hybrid.py | 3 ++- datacube_ows/styles/ramp.py | 8 ++++---- datacube_ows/tile_matrix_sets.py | 6 ++++-- datacube_ows/time_utils.py | 4 ++-- datacube_ows/utils.py | 8 +++----- datacube_ows/wcs1.py | 3 ++- datacube_ows/wcs1_utils.py | 2 +- datacube_ows/wcs2.py | 4 +++- datacube_ows/wms.py | 2 +- datacube_ows/wms_utils.py | 4 ++-- datacube_ows/wmts.py | 2 +- integration_tests/__init__.py | 1 - integration_tests/cfg/__init__.py | 1 - integration_tests/test_mv_index.py | 2 +- tests/__init__.py | 1 - tests/cfg/__init__.py | 1 - tests/test_cfg_bandidx.py | 2 +- tests/test_cfg_inclusion.py | 2 +- tests/test_data.py | 2 +- tests/test_styles.py | 2 +- 36 files changed, 95 insertions(+), 86 deletions(-) diff --git a/datacube_ows/cfg_parser_impl.py b/datacube_ows/cfg_parser_impl.py index f84313a08..e76147463 100755 --- a/datacube_ows/cfg_parser_impl.py +++ b/datacube_ows/cfg_parser_impl.py @@ -16,9 +16,8 @@ from deepdiff import DeepDiff from datacube_ows import __version__ -from datacube_ows.ows_configuration import (OWSConfig, - OWSFolder, read_config) from datacube_ows.config_utils import ConfigException +from datacube_ows.ows_configuration import OWSConfig, OWSFolder, read_config @click.group(invoke_without_command=True) diff --git a/datacube_ows/config_utils.py b/datacube_ows/config_utils.py index ecd6f156e..5cf06705f 100644 --- a/datacube_ows/config_utils.py +++ b/datacube_ows/config_utils.py @@ -13,19 +13,19 @@ from urllib.parse import urlparse import fsspec -from datacube.utils.masking import make_mask +from babel.messages import Catalog, Message from datacube import Datacube from datacube.model import Product +from datacube.utils.masking import make_mask from flask_babel import gettext as _ -from babel.messages import Catalog, Message from xarray import DataArray from datacube_ows.config_toolkit import deepinherit TYPE_CHECKING = False if TYPE_CHECKING: - import 
datacube_ows.ows_configuration.OWSConfig import datacube_ows.ows_configuration.AttributionCfg + import datacube_ows.ows_configuration.OWSConfig import datacube_ows.ows_configuration.OWSNamedLayer import datacube_ows.styles.base.StyleMask diff --git a/datacube_ows/data.py b/datacube_ows/data.py index ab421708c..083d5424e 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -6,7 +6,7 @@ import logging from datetime import date, datetime, timedelta -from typing import cast, Any +from typing import Any, cast import numpy import numpy.ma @@ -19,18 +19,18 @@ from rasterio.io import MemoryFile from datacube_ows.cube_pool import cube +from datacube_ows.http_utils import FlaskResponse, json_response, png_response from datacube_ows.loading import DataStacker, ProductBandQuery from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.ogc_utils import (xarray_image_as_png) -from datacube_ows.http_utils import FlaskResponse, json_response, png_response -from datacube_ows.time_utils import solar_date, tz_for_geometry +from datacube_ows.ogc_utils import xarray_image_as_png from datacube_ows.ows_configuration import OWSNamedLayer -from datacube_ows.styles import StyleDef from datacube_ows.query_profiler import QueryProfiler from datacube_ows.resource_limits import ResourceLimited +from datacube_ows.styles import StyleDef +from datacube_ows.time_utils import solar_date, tz_for_geometry from datacube_ows.utils import default_to_utc, log_call -from datacube_ows.wms_utils import (GetMapParameters) +from datacube_ows.wms_utils import GetMapParameters _LOG = logging.getLogger(__name__) @@ -288,5 +288,3 @@ def _write_polygon(geobox: GeoBox, polygon: geom.Geometry, zoom_fill: list[int], for idx, fill in enumerate(zoom_fill, start=1): thing.write_band(idx, data * fill) return memfile.read() - - diff --git a/datacube_ows/feature_info.py b/datacube_ows/feature_info.py index fb8de0468..1fa961a87 100644 --- 
a/datacube_ows/feature_info.py +++ b/datacube_ows/feature_info.py @@ -18,16 +18,18 @@ from odc.geo.geobox import GeoBox from pandas import Timestamp -from datacube_ows.config_utils import ConfigException, CFG_DICT, RAW_CFG +from datacube_ows.config_utils import CFG_DICT, RAW_CFG, ConfigException from datacube_ows.cube_pool import cube -from datacube_ows.loading import ProductBandQuery, DataStacker +from datacube_ows.http_utils import (FlaskResponse, html_json_response, + json_response) +from datacube_ows.loading import DataStacker, ProductBandQuery from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.http_utils import FlaskResponse, json_response, html_json_response -from datacube_ows.time_utils import dataset_center_time, tz_for_geometry from datacube_ows.ows_configuration import OWSNamedLayer, get_config from datacube_ows.styles import StyleDef +from datacube_ows.time_utils import dataset_center_time, tz_for_geometry from datacube_ows.utils import log_call -from datacube_ows.wms_utils import GetFeatureInfoParameters, img_coords_to_geopoint +from datacube_ows.wms_utils import (GetFeatureInfoParameters, + img_coords_to_geopoint) @log_call diff --git a/datacube_ows/http_utils.py b/datacube_ows/http_utils.py index 2a2f70d6f..973b5c816 100644 --- a/datacube_ows/http_utils.py +++ b/datacube_ows/http_utils.py @@ -7,7 +7,9 @@ import json from typing import Optional from urllib.parse import urlparse -from flask import Request, request, render_template + +from flask import Request, render_template, request + from datacube_ows.config_utils import CFG_DICT TYPE_CHECKING = False diff --git a/datacube_ows/legend_generator.py b/datacube_ows/legend_generator.py index 8f772b5ba..066c18059 100644 --- a/datacube_ows/legend_generator.py +++ b/datacube_ows/legend_generator.py @@ -12,8 +12,8 @@ # from flask import make_response from PIL import Image -from datacube_ows.ogc_exceptions import WMSException from datacube_ows.http_utils import resp_headers +from 
datacube_ows.ogc_exceptions import WMSException from datacube_ows.wms_utils import GetLegendGraphicParameters # Do not use X Server backend diff --git a/datacube_ows/loading.py b/datacube_ows/loading.py index e566b636a..330037694 100644 --- a/datacube_ows/loading.py +++ b/datacube_ows/loading.py @@ -4,22 +4,20 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from collections import OrderedDict - import datetime import logging -from typing import Iterable, cast, Mapping +from collections import OrderedDict +from typing import Iterable, Mapping, cast from uuid import UUID import datacube import numpy import xarray - -from sqlalchemy.engine import Row - -from odc.geo.geom import Geometry from odc.geo.geobox import GeoBox +from odc.geo.geom import Geometry from odc.geo.warp import Resampling +from sqlalchemy.engine import Row + from datacube_ows.mv_index import MVSelectOpts, mv_search from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ows_configuration import OWSNamedLayer diff --git a/datacube_ows/mv_index.py b/datacube_ows/mv_index.py index 3c9e62679..1dc1572ad 100644 --- a/datacube_ows/mv_index.py +++ b/datacube_ows/mv_index.py @@ -12,21 +12,18 @@ from uuid import UUID as UUID_ import pytz +from datacube.index import Index +from datacube.model import Dataset, Product from geoalchemy2 import Geometry from odc.geo.geom import Geometry as ODCGeom from psycopg2.extras import DateTimeTZRange from sqlalchemy import (SMALLINT, Column, MetaData, Table, and_, or_, select, text) from sqlalchemy.dialects.postgresql import TSTZRANGE, UUID -from sqlalchemy.sql.functions import count, func - -from datacube.index import Index -from datacube.model import Product, Dataset - from sqlalchemy.engine import Row from sqlalchemy.engine.base import Engine from sqlalchemy.sql.elements import ClauseElement - +from sqlalchemy.sql.functions import count, func from datacube_ows.utils import default_to_utc diff --git a/datacube_ows/ogc.py 
b/datacube_ows/ogc.py index 5eac95218..f1447762a 100644 --- a/datacube_ows/ogc.py +++ b/datacube_ows/ogc.py @@ -13,9 +13,10 @@ from datacube_ows import __version__ from datacube_ows.cube_pool import cube +from datacube_ows.http_utils import (capture_headers, get_service_base_url, + lower_get_args, resp_headers) from datacube_ows.legend_generator import create_legend_for_style from datacube_ows.ogc_exceptions import OGCException, WMSException -from datacube_ows.http_utils import resp_headers, get_service_base_url, capture_headers, lower_get_args from datacube_ows.ows_configuration import get_config from datacube_ows.protocol_versions import supported_versions from datacube_ows.startup_utils import * # pylint: disable=wildcard-import,unused-wildcard-import diff --git a/datacube_ows/ogc_utils.py b/datacube_ows/ogc_utils.py index 9a89c9e29..c0721d09c 100644 --- a/datacube_ows/ogc_utils.py +++ b/datacube_ows/ogc_utils.py @@ -4,17 +4,19 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -import logging import datetime +import logging from io import BytesIO from typing import Any, cast -from deprecat import deprecat + import numpy import xarray from affine import Affine +from deprecat import deprecat from odc.geo.geobox import GeoBox from odc.geo.geom import CRS from PIL import Image + TYPE_CHECKING = False if TYPE_CHECKING: from datacube_ows.config_utils import OWSExtensibleConfigEntry diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index 79fe62814..586dca8a8 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -20,33 +20,35 @@ from collections.abc import Mapping from enum import Enum from importlib import import_module -from typing import Optional, cast, Union, Any, Iterable +from typing import Any, Iterable, Optional, Union, cast import numpy from babel.messages.catalog import Catalog from babel.messages.pofile import read_po +from datacube import Datacube +from 
datacube.api.query import GroupBy +from datacube.model import Measurement from odc.geo import CRS from odc.geo.geobox import GeoBox from ows import Version from slugify import slugify -from datacube import Datacube -from datacube.model import Measurement -from datacube.api.query import GroupBy - -from datacube_ows.config_utils import (FlagProductBands, OWSConfigEntry, - OWSEntryNotFound, +from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, ConfigException, + FlagProductBands, FunctionWrapper, + OWSConfigEntry, OWSEntryNotFound, OWSExtensibleConfigEntry, OWSFlagBand, OWSMetadataConfig, cfg_expand, get_file_loc, import_python_obj, - load_json_obj, ConfigException, FunctionWrapper, CFG_DICT, RAW_CFG) + load_json_obj) from datacube_ows.cube_pool import ODCInitException, cube, get_cube from datacube_ows.ogc_utils import create_geobox -from datacube_ows.time_utils import local_solar_date_range -from datacube_ows.resource_limits import OWSResourceManagementRules, parse_cache_age +from datacube_ows.resource_limits import (OWSResourceManagementRules, + parse_cache_age) from datacube_ows.styles import StyleDef from datacube_ows.tile_matrix_sets import TileMatrixSet -from datacube_ows.utils import group_by_begin_datetime, group_by_mosaic, group_by_solar +from datacube_ows.time_utils import local_solar_date_range +from datacube_ows.utils import (group_by_begin_datetime, group_by_mosaic, + group_by_solar) _LOG = logging.getLogger(__name__) diff --git a/datacube_ows/resource_limits.py b/datacube_ows/resource_limits.py index 96f22f508..a95ee701e 100644 --- a/datacube_ows/resource_limits.py +++ b/datacube_ows/resource_limits.py @@ -12,9 +12,10 @@ from odc.geo.geobox import GeoBox from odc.geo.geom import CRS, polygon -from datacube_ows.config_utils import CFG_DICT, RAW_CFG, OWSConfigEntry, ConfigException -from datacube_ows.ogc_utils import (create_geobox) +from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, ConfigException, + OWSConfigEntry) from 
datacube_ows.http_utils import cache_control_headers +from datacube_ows.ogc_utils import create_geobox TYPE_CHECKING = False if TYPE_CHECKING: diff --git a/datacube_ows/styles/base.py b/datacube_ows/styles/base.py index a5fa2ba37..32fde1dcc 100644 --- a/datacube_ows/styles/base.py +++ b/datacube_ows/styles/base.py @@ -6,7 +6,8 @@ import io import logging -from typing import (Any, Iterable, Mapping, MutableMapping, Optional, Sized, Type, Union, cast) +from typing import (Any, Iterable, Mapping, MutableMapping, Optional, Sized, + Type, Union, cast) import datacube.model import numpy as np @@ -16,12 +17,13 @@ import datacube_ows.band_utils from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, AbstractMaskRule, - FlagBand, FlagProductBands, + ConfigException, FlagBand, + FlagProductBands, FunctionWrapper, OWSConfigEntry, OWSEntryNotFound, OWSExtensibleConfigEntry, OWSFlagBandStandalone, OWSIndexedConfigEntry, - OWSMetadataConfig, ConfigException, FunctionWrapper) + OWSMetadataConfig) from datacube_ows.legend_utils import get_image_from_url from datacube_ows.ogc_exceptions import WMSException diff --git a/datacube_ows/styles/colormap.py b/datacube_ows/styles/colormap.py index 5fbcfacc1..ec8bc0d1a 100644 --- a/datacube_ows/styles/colormap.py +++ b/datacube_ows/styles/colormap.py @@ -17,8 +17,11 @@ from matplotlib import pyplot as plt from xarray import DataArray, Dataset -from datacube_ows.config_utils import CFG_DICT, AbstractMaskRule, ConfigException, OWSMetadataConfig, FlagSpec +from datacube_ows.config_utils import (CFG_DICT, AbstractMaskRule, + ConfigException, FlagSpec, + OWSMetadataConfig) from datacube_ows.styles.base import StyleDefBase + TYPE_CHECKING = False if TYPE_CHECKING: from datacube_ows.ows_configuration import OWSNamedLayer diff --git a/datacube_ows/styles/component.py b/datacube_ows/styles/component.py index 7aa95c186..5be3f2f13 100644 --- a/datacube_ows/styles/component.py +++ b/datacube_ows/styles/component.py @@ -9,7 +9,8 @@ import numpy as np 
from xarray import DataArray, Dataset -from datacube_ows.config_utils import CFG_DICT, ConfigException, FunctionWrapper +from datacube_ows.config_utils import (CFG_DICT, ConfigException, + FunctionWrapper) from datacube_ows.styles.base import StyleDefBase TYPE_CHECKING = False diff --git a/datacube_ows/styles/expression.py b/datacube_ows/styles/expression.py index 3985637b0..9e9804c32 100644 --- a/datacube_ows/styles/expression.py +++ b/datacube_ows/styles/expression.py @@ -4,14 +4,15 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 +import operator from typing import Any, Type, cast -from xarray import Dataset -import operator import lark from datacube.virtual.expr import formula_parser +from xarray import Dataset from datacube_ows.config_utils import ConfigException + TYPE_CHECKING = False if TYPE_CHECKING: import datacube_ows.styles.StyleDef diff --git a/datacube_ows/styles/hybrid.py b/datacube_ows/styles/hybrid.py index 0f5f3ddb1..2f33af8c2 100644 --- a/datacube_ows/styles/hybrid.py +++ b/datacube_ows/styles/hybrid.py @@ -10,8 +10,9 @@ from datacube_ows.config_utils import CFG_DICT, ConfigException from datacube_ows.styles.base import StyleDefBase -from datacube_ows.styles.component import ComponentStyleDef, LINEAR_COMP_DICT +from datacube_ows.styles.component import LINEAR_COMP_DICT, ComponentStyleDef from datacube_ows.styles.ramp import ColorRampDef + TYPE_CHECKING = False if TYPE_CHECKING: from datacube_ows.ows_configuration import OWSNamedLayer diff --git a/datacube_ows/styles/ramp.py b/datacube_ows/styles/ramp.py index 7b15f513b..e4c34279e 100644 --- a/datacube_ows/styles/ramp.py +++ b/datacube_ows/styles/ramp.py @@ -9,18 +9,18 @@ from collections import defaultdict from decimal import ROUND_HALF_UP, Decimal from math import isclose -from typing import (Any, Hashable, MutableMapping, Union, cast, Iterable) +from typing import Any, Hashable, Iterable, MutableMapping, Union, cast import matplotlib import numpy from colour 
import Color from matplotlib import pyplot as plt from matplotlib.colors import LinearSegmentedColormap, to_hex - from numpy import ubyte -from xarray import Dataset, DataArray +from xarray import DataArray, Dataset -from datacube_ows.config_utils import CFG_DICT, OWSMetadataConfig, ConfigException, FunctionWrapper +from datacube_ows.config_utils import (CFG_DICT, ConfigException, + FunctionWrapper, OWSMetadataConfig) from datacube_ows.styles.base import StyleDefBase from datacube_ows.styles.expression import Expression diff --git a/datacube_ows/tile_matrix_sets.py b/datacube_ows/tile_matrix_sets.py index ad9b3d2d7..6cc21b012 100644 --- a/datacube_ows/tile_matrix_sets.py +++ b/datacube_ows/tile_matrix_sets.py @@ -4,8 +4,10 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -from typing import cast, Type -from datacube_ows.config_utils import OWSConfigEntry, ConfigException, CFG_DICT, RAW_CFG +from typing import Type, cast + +from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, ConfigException, + OWSConfigEntry) TYPE_CHECKING = False if TYPE_CHECKING: diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py index 08af46d35..7bf6f0b5c 100644 --- a/datacube_ows/time_utils.py +++ b/datacube_ows/time_utils.py @@ -4,13 +4,13 @@ # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 -import logging import datetime +import logging from typing import Optional, Sequence from datacube.model import Dataset from dateutil.parser import parse -from odc.geo import Geometry, CRS +from odc.geo import CRS, Geometry from odc.geo.geobox import GeoBox from pytz import timezone, utc from timezonefinder import TimezoneFinder diff --git a/datacube_ows/utils.py b/datacube_ows/utils.py index 6f8b44b73..ee483fdf8 100644 --- a/datacube_ows/utils.py +++ b/datacube_ows/utils.py @@ -11,14 +11,12 @@ from typing import Any, Callable, TypeVar, cast import pytz -from numpy import datetime64 -from numpy import datetime64 as 
npdt64 - -from sqlalchemy.engine.base import Connection - from datacube import Datacube from datacube.api.query import GroupBy, solar_day from datacube.model import Dataset +from numpy import datetime64 +from numpy import datetime64 as npdt64 +from sqlalchemy.engine.base import Connection F = TypeVar('F', bound=Callable[..., Any]) diff --git a/datacube_ows/wcs1.py b/datacube_ows/wcs1.py index 92c2bfe44..f8eb9b224 100644 --- a/datacube_ows/wcs1.py +++ b/datacube_ows/wcs1.py @@ -6,8 +6,9 @@ from flask import render_template +from datacube_ows.http_utils import (cache_control_headers, + get_service_base_url, json_response) from datacube_ows.ogc_exceptions import WCS1Exception -from datacube_ows.http_utils import get_service_base_url, cache_control_headers, json_response from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.utils import log_call diff --git a/datacube_ows/wcs1_utils.py b/datacube_ows/wcs1_utils.py index 2c19b9215..9dea3d13c 100644 --- a/datacube_ows/wcs1_utils.py +++ b/datacube_ows/wcs1_utils.py @@ -14,11 +14,11 @@ from ows.util import Version from rasterio import MemoryFile +from datacube_ows.config_utils import ConfigException from datacube_ows.cube_pool import cube from datacube_ows.loading import DataStacker from datacube_ows.mv_index import MVSelectOpts from datacube_ows.ogc_exceptions import WCS1Exception -from datacube_ows.config_utils import ConfigException from datacube_ows.ows_configuration import get_config from datacube_ows.resource_limits import ResourceLimited from datacube_ows.wcs_utils import get_bands_from_styles diff --git a/datacube_ows/wcs2.py b/datacube_ows/wcs2.py index 0a31d18b1..5600749ed 100644 --- a/datacube_ows/wcs2.py +++ b/datacube_ows/wcs2.py @@ -17,8 +17,10 @@ kvp_decode_get_coverage) from ows.wcs.v21 import encoders as encoders_v21 +from datacube_ows.http_utils import (cache_control_headers, + get_service_base_url, json_response, + resp_headers) from 
datacube_ows.ogc_exceptions import WCS2Exception -from datacube_ows.http_utils import resp_headers, get_service_base_url, cache_control_headers, json_response from datacube_ows.ows_configuration import get_config from datacube_ows.query_profiler import QueryProfiler from datacube_ows.utils import log_call diff --git a/datacube_ows/wms.py b/datacube_ows/wms.py index 5bcc94cac..1a5ba9b86 100644 --- a/datacube_ows/wms.py +++ b/datacube_ows/wms.py @@ -8,9 +8,9 @@ from datacube_ows.data import get_map from datacube_ows.feature_info import feature_info +from datacube_ows.http_utils import cache_control_headers, get_service_base_url from datacube_ows.legend_generator import legend_graphic from datacube_ows.ogc_exceptions import WMSException -from datacube_ows.http_utils import get_service_base_url, cache_control_headers from datacube_ows.ows_configuration import get_config from datacube_ows.utils import log_call diff --git a/datacube_ows/wms_utils.py b/datacube_ows/wms_utils.py index 3cb5bd634..7c004d74c 100644 --- a/datacube_ows/wms_utils.py +++ b/datacube_ows/wms_utils.py @@ -17,10 +17,10 @@ from pytz import utc from rasterio.warp import Resampling +from datacube_ows.config_utils import ConfigException from datacube_ows.ogc_exceptions import WMSException from datacube_ows.ogc_utils import create_geobox -from datacube_ows.config_utils import ConfigException -from datacube_ows.ows_configuration import get_config, OWSNamedLayer +from datacube_ows.ows_configuration import OWSNamedLayer, get_config from datacube_ows.resource_limits import RequestScale from datacube_ows.styles import StyleDef from datacube_ows.styles.expression import ExpressionException diff --git a/datacube_ows/wmts.py b/datacube_ows/wmts.py index 479aaa4a7..41f0094e5 100644 --- a/datacube_ows/wmts.py +++ b/datacube_ows/wmts.py @@ -10,8 +10,8 @@ from datacube_ows.data import get_map from datacube_ows.feature_info import feature_info +from datacube_ows.http_utils import cache_control_headers, 
get_service_base_url from datacube_ows.ogc_exceptions import WMSException, WMTSException -from datacube_ows.http_utils import get_service_base_url, cache_control_headers from datacube_ows.ows_configuration import get_config from datacube_ows.utils import log_call diff --git a/integration_tests/__init__.py b/integration_tests/__init__.py index 7f2605021..8c7f306e7 100644 --- a/integration_tests/__init__.py +++ b/integration_tests/__init__.py @@ -3,4 +3,3 @@ # # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 - diff --git a/integration_tests/cfg/__init__.py b/integration_tests/cfg/__init__.py index 7f2605021..8c7f306e7 100644 --- a/integration_tests/cfg/__init__.py +++ b/integration_tests/cfg/__init__.py @@ -3,4 +3,3 @@ # # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 - diff --git a/integration_tests/test_mv_index.py b/integration_tests/test_mv_index.py index 1ddad0823..d7568b9f4 100644 --- a/integration_tests/test_mv_index.py +++ b/integration_tests/test_mv_index.py @@ -9,8 +9,8 @@ from datacube_ows.cube_pool import cube from datacube_ows.mv_index import MVSelectOpts, mv_search -from datacube_ows.time_utils import local_solar_date_range from datacube_ows.ows_configuration import get_config +from datacube_ows.time_utils import local_solar_date_range def test_full_layer(): diff --git a/tests/__init__.py b/tests/__init__.py index 2c8b1f916..8af01a7d9 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -4,4 +4,3 @@ # # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 - diff --git a/tests/cfg/__init__.py b/tests/cfg/__init__.py index 2c8b1f916..8af01a7d9 100644 --- a/tests/cfg/__init__.py +++ b/tests/cfg/__init__.py @@ -4,4 +4,3 @@ # # Copyright (c) 2017-2024 OWS Contributors # SPDX-License-Identifier: Apache-2.0 - diff --git a/tests/test_cfg_bandidx.py b/tests/test_cfg_bandidx.py index 17cfd8637..5b4e324e7 100644 --- a/tests/test_cfg_bandidx.py +++ 
b/tests/test_cfg_bandidx.py @@ -8,7 +8,7 @@ import pytest -from datacube_ows.config_utils import OWSConfigNotReady, ConfigException +from datacube_ows.config_utils import ConfigException, OWSConfigNotReady from datacube_ows.ows_configuration import BandIndex diff --git a/tests/test_cfg_inclusion.py b/tests/test_cfg_inclusion.py index f87587cbb..0c2035941 100644 --- a/tests/test_cfg_inclusion.py +++ b/tests/test_cfg_inclusion.py @@ -9,7 +9,7 @@ import pytest -from datacube_ows.config_utils import get_file_loc, ConfigException +from datacube_ows.config_utils import ConfigException, get_file_loc from datacube_ows.ows_configuration import read_config src_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) diff --git a/tests/test_data.py b/tests/test_data.py index 3de13d56c..015c0225e 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -15,7 +15,7 @@ import datacube_ows.data import datacube_ows.feature_info from datacube_ows.feature_info import get_s3_browser_uris -from datacube_ows.loading import ProductBandQuery, DataStacker +from datacube_ows.loading import DataStacker, ProductBandQuery from datacube_ows.ogc_exceptions import WMSException from tests.test_styles import product_layer # noqa: F401 diff --git a/tests/test_styles.py b/tests/test_styles.py index 8af561268..d5da21f61 100644 --- a/tests/test_styles.py +++ b/tests/test_styles.py @@ -12,7 +12,7 @@ from xarray import DataArray, Dataset, concat import datacube_ows.styles -from datacube_ows.config_utils import OWSEntryNotFound, ConfigException +from datacube_ows.config_utils import ConfigException, OWSEntryNotFound from datacube_ows.ows_configuration import BandIndex, OWSProductLayer From 8b0f62c369df25a57cbd999c3a13fe780768b571 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:16:53 +1000 Subject: [PATCH 21/29] 3.10 as a string please. 
--- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 3a18e6679..00a4ee764 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -51,7 +51,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.10 + python-version: ["3.10"] - run: python -m pip install flake8 - name: flake8 cleanup imported but unused uses: liskin/gh-problem-matcher-wrap@v3 From 4ef7cc10fe06182346e35e58c50d021d46ac9a2a Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:18:45 +1000 Subject: [PATCH 22/29] Update dev requirements --- .github/workflows/lint.yml | 2 +- setup.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 00a4ee764..6fee828a5 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -80,5 +80,5 @@ jobs: - name: run mypy run: | sudo apt-get remove python3-openssl - pip install --upgrade -e '.[types]' + pip install --upgrade -e '.[dev]' mypy datacube_ows diff --git a/setup.py b/setup.py index 22827bb71..ad7f1abde 100644 --- a/setup.py +++ b/setup.py @@ -56,6 +56,7 @@ 'pre-commit', 'pipdeptree', 'mypy', + 'flake8', 'types-pytz', 'types-python-dateutil', 'types-requests', From 8849ffb60be49dac59488775f7a407f1dc2083aa Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:30:17 +1000 Subject: [PATCH 23/29] Lintage. 
--- datacube_ows/data.py | 3 +-- datacube_ows/time_utils.py | 2 +- integration_tests/utils.py | 3 ++- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 083d5424e..628354bfd 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -52,8 +52,7 @@ def check_date(time_res, user_date, odc_date): elif time_res.is_summary(): norm_date = date(ts.year, ts.month, - ts.day, - tzinfo=pytz.utc) + ts.day) return norm_date == user_date else: norm_date = datetime(ts.year, diff --git a/datacube_ows/time_utils.py b/datacube_ows/time_utils.py index 7bf6f0b5c..6e5f27955 100644 --- a/datacube_ows/time_utils.py +++ b/datacube_ows/time_utils.py @@ -6,7 +6,7 @@ import datetime import logging -from typing import Optional, Sequence +from typing import Optional from datacube.model import Dataset from dateutil.parser import parse diff --git a/integration_tests/utils.py b/integration_tests/utils.py index 47cf548fd..f337b0396 100644 --- a/integration_tests/utils.py +++ b/integration_tests/utils.py @@ -303,7 +303,8 @@ def wcs1_args( crs_extent = extent.to_crs(crs) crs_bbox = crs_extent.boundingbox return { - "bbox": f"{min(crs_bbox.left,crs_bbox.right)},{min(crs_bbox.top,crs_bbox.bottom)},{max(crs_bbox.left,crs_bbox.right)},{max(crs_bbox.top,crs_bbox.bottom)}", + "bbox": f"{min(crs_bbox.left, crs_bbox.right)},{min(crs_bbox.top, crs_bbox.bottom)}," + f"{max(crs_bbox.left, crs_bbox.right)},{max(crs_bbox.top, crs_bbox.bottom)}", "times": ",".join(time_strs), } From f667502c0ae77c1e4e862244d802eb5f07c293c1 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Wed, 24 Apr 2024 20:34:34 +1000 Subject: [PATCH 24/29] Lintage. 
--- datacube_ows/data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/datacube_ows/data.py b/datacube_ows/data.py index 628354bfd..9e03686a9 100644 --- a/datacube_ows/data.py +++ b/datacube_ows/data.py @@ -10,7 +10,6 @@ import numpy import numpy.ma -import pytz import xarray from odc.geo import geom from odc.geo.geobox import GeoBox From a3b1be1e5406477f31ae22afd96568a2c4bcec5a Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Mon, 29 Apr 2024 14:28:23 +1000 Subject: [PATCH 25/29] Type hints in cfg_parser_impl.py --- datacube_ows/cfg_parser_impl.py | 75 +++++++++++++++++-------------- datacube_ows/ows_configuration.py | 2 +- 2 files changed, 42 insertions(+), 35 deletions(-) diff --git a/datacube_ows/cfg_parser_impl.py b/datacube_ows/cfg_parser_impl.py index e76147463..0c3a5d4f3 100755 --- a/datacube_ows/cfg_parser_impl.py +++ b/datacube_ows/cfg_parser_impl.py @@ -9,6 +9,7 @@ import json import os import sys +from typing import cast import click from babel.messages.pofile import write_po @@ -17,14 +18,14 @@ from datacube_ows import __version__ from datacube_ows.config_utils import ConfigException -from datacube_ows.ows_configuration import OWSConfig, OWSFolder, read_config +from datacube_ows.ows_configuration import OWSConfig, OWSFolder, read_config, OWSNamedLayer, OWSLayer @click.group(invoke_without_command=True) @click.option( "--version", is_flag=True, default=False, help="Show OWS version number and exit" ) -def main(version): +def main(version: bool): # type: ignore[return] # --version if version: click.echo(f"Open Data Cube Open Web Services (datacube-ows) version {__version__}") @@ -64,7 +65,7 @@ def main(version): "--output-file", help="Provide an output inventory file name with extension .json", ) -def check(parse_only, folders, styles, input_file, output_file, paths): +def check(parse_only: bool, folders: bool, styles: bool, input_file: str, output_file: str, paths: list[str]) -> int: """Check configuration files Takes a list of configuration 
specifications which are each loaded and validated in turn, @@ -92,7 +93,7 @@ def check(parse_only, folders, styles, input_file, output_file, paths): return 0 -def parse_path(path, parse_only, folders, styles, input_file, output_file): +def parse_path(path: str | None, parse_only: bool, folders: bool, styles: bool, input_file: str, output_file: str) -> bool: try: raw_cfg = read_config(path) cfg = OWSConfig(refresh=True, cfg=raw_cfg) @@ -139,7 +140,7 @@ def parse_path(path, parse_only, folders, styles, input_file, output_file): help="Write to a message file with the translatable metadata from the configuration. (Defaults to 'messages.po')" ) @click.argument("path", nargs=1, required=False) -def extract(path, cfg_only, msg_file): +def extract(path: str, cfg_only: bool, msg_file: str) -> int: """Extract metadata from existing configuration into a message file template. Takes a configuration specification which is loaded as per the $DATACUBE_OWS_CFG environment variable. @@ -195,7 +196,8 @@ def extract(path, cfg_only, msg_file): help="Configuration specification to use to determine translations directory and domain (defaults to environment $DATACUBE_OWS_CFG)" ) @click.argument("languages", nargs=-1) -def translation(languages, msg_file, new, domain, translations_dir, cfg): +def translation(languages: list[str], msg_file: str | None, new: bool, + domain: str | None, translations_dir: str | None, cfg: str | None) -> int: """Generate a new translations catalog based on the specified message file. Takes a list of languages to generate catalogs for. 
"all" can be included as a shorthand @@ -207,28 +209,31 @@ def translation(languages, msg_file, new, domain, translations_dir, cfg): if msg_file is None or domain is None or translations_dir is None or "all" in languages: try: raw_cfg = read_config(cfg) - cfg = OWSConfig(refresh=True, cfg=raw_cfg) + config = OWSConfig(refresh=True, cfg=raw_cfg) except ConfigException as e: click.echo(f"Config exception for path: {str(e)}") sys.exit(1) if domain is None: - click.echo(f"Using message domain '{cfg.message_domain}' from configuration") - domain = cfg.message_domain - if translations_dir is None and cfg.translations_dir is None: + click.echo(f"Using message domain '{config.message_domain}' from configuration") + domain = config.message_domain + if translations_dir is None and config.translations_dir is None: click.echo("No translations directory was supplied or is configured") sys.exit(1) elif translations_dir is None: - click.echo(f"Using translations directory '{cfg.translations_dir}' from configuration") - translations_dir = cfg.translations_dir - if msg_file is None and cfg.msg_file_name is None: + click.echo(f"Using translations directory '{config.translations_dir}' from configuration") + translations_dir = config.translations_dir + if msg_file is None and config.msg_file_name is None: click.echo("No message file name was supplied or is configured") sys.exit(1) elif msg_file is None: - click.echo(f"Using message file location '{cfg.msg_file_name}' from configuration") - msg_file = cfg.msg_file_name - all_langs = cfg.locales + click.echo(f"Using message file location '{config.msg_file_name}' from configuration") + msg_file = config.msg_file_name + all_langs = config.locales else: all_langs = [] + assert msg_file is not None # For type checker + assert domain is not None # For type checker + assert translations_dir is not None # For type checker try: fp = open(msg_file, "rb") fp.close() @@ -251,15 +256,16 @@ def translation(languages, msg_file, new, domain, 
translations_dir, cfg): return 0 -def create_translation(msg_file, translations_dir, domain, locale): +def create_translation(msg_file: str, translations_dir: str, domain: str, locale: str) -> bool: click.echo(f"Creating template for language: {locale}") os.system(f"pybabel init -i {msg_file} -d {translations_dir} -D {domain} -l {locale}") return True -def update_translation(msg_file, translations_dir, domain, locale): +def update_translation(msg_file: str, translations_dir: str, domain: str, locale: str) -> bool: click.echo(f"Updating template for language: {locale}") - os.system(f"pybabel update --no-fuzzy-matching --ignore-obsolete -i {msg_file} -d {translations_dir} -D {domain} -l {locale}") + os.system(f"pybabel update --no-fuzzy-matching --ignore-obsolete " + f"-i {msg_file} -d {translations_dir} -D {domain} -l {locale}") return True @@ -283,7 +289,7 @@ def update_translation(msg_file, translations_dir, domain, locale): help="Configuration specification to use to determine translations directory and domain (defaults to environment $DATACUBE_OWS_CFG)" ) @click.argument("languages", nargs=-1) -def compile_cmd(languages, domain, translations_dir, cfg): +def compile_cmd(languages: list[str], domain: str | None, translations_dir: str | None, cfg: str | None) -> int: """Compile completed translation files. Takes a list of languages to generate catalogs for. 
"all" can be included as a shorthand @@ -295,22 +301,23 @@ def compile_cmd(languages, domain, translations_dir, cfg): if domain is None or translations_dir is None or "all" in languages: try: raw_cfg = read_config(cfg) - cfg = OWSConfig(refresh=True, cfg=raw_cfg) + config = OWSConfig(refresh=True, cfg=raw_cfg) except ConfigException as e: click.echo(f"Config exception for path: {str(e)}") sys.exit(1) if domain is None: - click.echo(f"Using message domain '{cfg.message_domain}' from configuration") - domain = cfg.message_domain - if translations_dir is None and cfg.translations_dir is None: + click.echo(f"Using message domain '{config.message_domain}' from configuration") + domain = config.message_domain + if translations_dir is None and config.translations_dir is None: click.echo("No translations directory was supplied or is configured") sys.exit(1) elif translations_dir is None: - click.echo(f"Using translations directory '{cfg.translations_dir}' from configuration") - translations_dir = cfg.translations_dir - all_langs = cfg.locales + click.echo(f"Using translations directory '{config.translations_dir}' from configuration") + translations_dir = config.translations_dir + all_langs = config.locales else: all_langs = [] + assert translations_dir is not None for language in languages: if language == "all": for supp_lang in all_langs: @@ -321,18 +328,18 @@ def compile_cmd(languages, domain, translations_dir, cfg): return 0 -def compile_translation(translations_dir, domain, language): +def compile_translation(translations_dir: str, domain: str, language: str) -> bool: click.echo(f"Compiling template for language: {language}") os.system(f"pybabel compile -d {translations_dir} -D {domain} -l {language}") return True -def write_msg_file(msg_file, cfg): +def write_msg_file(msg_file: str, cfg: OWSConfig) -> None: with open(msg_file, "wb") as fp: write_po(fp, cfg.export_metadata()) -def layers_report(config_values, input_file, output_file): +def layers_report(config_values: 
dict[str, OWSNamedLayer], input_file: str, output_file: str): report = {"total_layers_count": len(config_values.values()), "layers": []} for lyr in config_values.values(): layer = { @@ -341,7 +348,7 @@ def layers_report(config_values, input_file, output_file): "styles_count": len(lyr.styles), "styles_list": [styl.name for styl in lyr.styles], } - report["layers"].append(layer) + cast(list[dict], report["layers"]).append(layer) if input_file: with open(input_file) as f: input_file_data = json.load(f) @@ -357,7 +364,7 @@ def layers_report(config_values, input_file, output_file): return True -def print_layers(layers, styles, depth): +def print_layers(layers: list[OWSLayer], styles: bool, depth: int): for lyr in layers: if isinstance(lyr, OWSFolder): indent(depth) @@ -370,13 +377,13 @@ def print_layers(layers, styles, depth): click.echo(f"{lyr} {depth}") -def print_styles(lyr, depth=0): +def print_styles(lyr: OWSNamedLayer, depth: int = 0): for styl in lyr.styles: indent(0, for_styles=True) print(f". 
{styl.name}") -def indent(depth, for_styles=False): +def indent(depth: int, for_styles: bool = False): for i in range(depth): click.echo(" ", nl=False) if for_styles: diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index 586dca8a8..b6dd4a14e 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -1299,7 +1299,7 @@ def parse_global(self, cfg: CFG_DICT, ignore_msgfile: bool): raise ConfigException("You must support at least one language.") self.default_locale = self.locales[0] self.message_domain = cast(str, cfg.get("message_domain", "ows_cfg")) - self.translations_dir = cfg.get("translations_directory") + self.translations_dir = cast(str | None, cfg.get("translations_directory")) self.internationalised = self.translations_dir and len(self.locales) > 1 if self.internationalised: _LOG.info("Internationalisation enabled.") From 678079da493ec249c1ed2968f51dadfe61b5d66f Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Mon, 29 Apr 2024 16:28:22 +1000 Subject: [PATCH 26/29] Type hints in product_ranges.py --- datacube_ows/ows_configuration.py | 16 +++++---- datacube_ows/product_ranges.py | 56 +++++++++++++++---------------- 2 files changed, 37 insertions(+), 35 deletions(-) diff --git a/datacube_ows/ows_configuration.py b/datacube_ows/ows_configuration.py index b6dd4a14e..989c6a1f5 100644 --- a/datacube_ows/ows_configuration.py +++ b/datacube_ows/ows_configuration.py @@ -887,24 +887,25 @@ def force_range_update(self, ext_dc: Datacube | None = None) -> None: self.hide = False try: from datacube_ows.product_ranges import get_ranges - self._ranges: dict[str, Any] = get_ranges(dc, self) + self._ranges = get_ranges(dc, self) if self._ranges is None: raise Exception("Null product range") self.bboxes = self.extract_bboxes() if self.default_time_rule == DEF_TIME_EARLIEST: - self.default_time = self._ranges["start_time"] + self.default_time = cast(datetime.datetime | datetime.date, self._ranges["start_time"]) 
elif isinstance(self.default_time_rule, - datetime.date) and self.default_time_rule in self._ranges["time_set"]: - self.default_time = self.default_time_rule + datetime.date) and self.default_time_rule in cast(set[datetime.datetime | datetime.date], + self._ranges["time_set"]): + self.default_time = cast(datetime.datetime | datetime.date, self.default_time_rule) elif isinstance(self.default_time_rule, datetime.date): _LOG.warning("default_time for named_layer %s is explicit date (%s) that is " " not available for the layer. Using most recent available date instead.", self.name, self.default_time_rule.isoformat() ) - self.default_time = self._ranges["end_time"] + self.default_time = cast(datetime.datetime | datetime.date, self._ranges["end_time"]) else: - self.default_time = self._ranges["end_time"] + self.default_time = cast(datetime.datetime | datetime.date, self._ranges["end_time"]) # pylint: disable=broad-except except Exception as a: @@ -930,13 +931,14 @@ def time_range(self, ranges: dict[str, Any] | None = None): def ranges(self) -> dict[str, Any]: if self.dynamic: self.force_range_update() + assert self._ranges is not None # For type checker return self._ranges def extract_bboxes(self) -> dict[str, Any]: if self._ranges is None: return {} bboxes = {} - for crs_id, bbox in self._ranges["bboxes"].items(): + for crs_id, bbox in cast(dict[str, dict[str, float]], self._ranges["bboxes"]).items(): if crs_id in self.global_cfg.published_CRSs: # Assume we've already handled coordinate swapping for # Vertical-coord first CRSs. Top is top, left is left. 
diff --git a/datacube_ows/product_ranges.py b/datacube_ows/product_ranges.py index bd4e1570c..ed39adf06 100644 --- a/datacube_ows/product_ranges.py +++ b/datacube_ows/product_ranges.py @@ -8,28 +8,30 @@ #pylint: skip-file import math -from datetime import datetime, timezone +from datetime import datetime, date, timezone +from typing import cast, Iterable, Callable, Any import datacube import odc.geo from psycopg2.extras import Json from sqlalchemy import text -from datacube_ows.ows_configuration import get_config +from datacube_ows.config_utils import CFG_DICT +from datacube_ows.ows_configuration import get_config, OWSConfig, OWSMultiProductLayer, TimeRes, OWSNamedLayer from datacube_ows.utils import get_sqlconn -def get_crsids(cfg=None): +def get_crsids(cfg: OWSConfig | None = None) -> Iterable[str]: if not cfg: cfg = get_config() return cfg.internal_CRSs.keys() -def get_crses(cfg=None): +def get_crses(cfg: OWSConfig | None = None) -> dict[str, odc.geo.CRS]: return {crsid: odc.geo.CRS(crsid) for crsid in get_crsids(cfg)} -def jsonise_bbox(bbox): +def jsonise_bbox(bbox: odc.geo.geom.BoundingBox) -> dict[str, float]: if isinstance(bbox, dict): return bbox else: @@ -41,7 +43,8 @@ def jsonise_bbox(bbox): } -def create_multiprod_range_entry(dc, product, crses): +def create_multiprod_range_entry(dc: datacube.Datacube, product: OWSMultiProductLayer, + crses: dict[str, odc.geo.CRS]) -> None: print("Merging multiproduct ranges for %s (ODC products: %s)" % ( product.name, repr(product.product_names) @@ -148,7 +151,8 @@ def create_multiprod_range_entry(dc, product, crses): return -def create_range_entry(dc, product, crses, time_resolution): +def create_range_entry(dc: datacube.Datacube, product: datacube.model.Product, + crses: dict[str, odc.geo.CRS], time_resolution: TimeRes) -> None: print("Updating range for ODC product %s..." % product.name) # NB. 
product is an ODC product conn = get_sqlconn(dc) @@ -237,14 +241,14 @@ def create_range_entry(dc, product, crses, time_resolution): ) # calculate bounding boxes - results = list(conn.execute(text(""" + lres = list(conn.execute(text(""" SELECT lat_min,lat_max,lon_min,lon_max FROM wms.product_ranges WHERE id=:p_id """), {"p_id": prodid})) - r = results[0] + r = lres[0] epsg4326 = odc.geo.CRS("EPSG:4326") box = odc.geo.geom.box( @@ -268,7 +272,7 @@ def create_range_entry(dc, product, crses, time_resolution): conn.close() -def bbox_projections(starting_box, crses): +def bbox_projections(starting_box: odc.geo.Geometry, crses: dict[str, odc.geo.CRS]) -> dict[str, dict[str, float]]: result = {} for crsid, crs in crses.items(): if crs.valid_region is not None: @@ -286,8 +290,8 @@ def bbox_projections(starting_box, crses): return result -def sanitise_bbox(bbox): - def sanitise_coordinate(coord, fallback): +def sanitise_bbox(bbox: odc.geo.geom.BoundingBox) -> dict[str, float]: + def sanitise_coordinate(coord: float, fallback: float) -> float: return coord if math.isfinite(coord) else fallback return { "top": sanitise_coordinate(bbox.top, float("9.999999999e99")), @@ -297,7 +301,7 @@ def sanitise_coordinate(coord, fallback): } -def datasets_exist(dc, product_name): +def datasets_exist(dc: datacube.Datacube, product_name: str) -> bool: conn = get_sqlconn(dc) results = conn.execute(text(""" @@ -313,9 +317,9 @@ def datasets_exist(dc, product_name): return list(results)[0][0] > 0 -def add_ranges(dc, product_names, merge_only=False): - odc_products = {} - ows_multiproducts = [] +def add_ranges(dc: datacube.Datacube, product_names: list[str], merge_only: bool = False) -> bool: + odc_products: dict[str, dict[str, list[OWSNamedLayer]]] = {} # Maps OWS layer names to + ows_multiproducts: list[OWSMultiProductLayer] = [] errors = False for pname in product_names: dc_product = None @@ -333,16 +337,14 @@ def add_ranges(dc, product_names, merge_only=False): repr(ows_product.product_names) 
)) if ows_product.multi_product: - ows_multiproducts.append(ows_product) + ows_multiproducts.append(cast(OWSMultiProductLayer, ows_product)) if not ows_product: print("Could not find product", pname, "in OWS config") dc_product = dc.index.products.get_by_name(pname) if dc_product: print("ODC Layer: %s" % pname) - if pname in odc_products: - odc_products[pname]["ows"].append(None) - else: - odc_products[pname] = {"ows": [None]} + if pname not in odc_products: + odc_products[pname] = {"ows": []} else: print("Unrecognised product name:", pname) errors = True @@ -384,10 +386,11 @@ def add_ranges(dc, product_names, merge_only=False): print("Done.") return errors -def get_ranges(dc, product, path=None, is_dc_product=False): +def get_ranges(dc: datacube.Datacube, product: OWSNamedLayer, + path: str | None = None) -> dict[str, Any] | None: cfg = product.global_cfg conn = get_sqlconn(dc) - if not is_dc_product and product.multi_product: + if product.multi_product: if path is not None: raise Exception("Combining subproducts and multiproducts is not yet supported") results = conn.execute(text(""" @@ -397,10 +400,7 @@ def get_ranges(dc, product, path=None, is_dc_product=False): {"pname": product.name} ) else: - if is_dc_product: - prod_id = product.id - else: - prod_id = product.product.id + prod_id = product.product.id if path is not None: results = conn.execute(text(""" SELECT * @@ -421,7 +421,7 @@ def get_ranges(dc, product, path=None, is_dc_product=False): for result in results: conn.close() if product.time_resolution.is_subday(): - dt_parser = lambda dts: datetime.fromisoformat(dts) + dt_parser: Callable[[str], datetime | date] = lambda dts: datetime.fromisoformat(dts) else: dt_parser = lambda dts: datetime.strptime(dts, "%Y-%m-%d").date() times = [dt_parser(d) for d in result.dates if d is not None] From a690d12d91519c713225cdac0c58e934dc7ff961 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 30 Apr 2024 09:23:01 +1000 Subject: [PATCH 27/29] Fix type hints for 
update_ranges schema and fix associated test. --- datacube_ows/update_ranges_impl.py | 36 +++++++++++++++--------------- integration_tests/conftest.py | 4 +++- 2 files changed, 21 insertions(+), 19 deletions(-) diff --git a/datacube_ows/update_ranges_impl.py b/datacube_ows/update_ranges_impl.py index fefb91ec7..4e80babb0 100755 --- a/datacube_ows/update_ranges_impl.py +++ b/datacube_ows/update_ranges_impl.py @@ -10,6 +10,7 @@ import re import sys +import datacube import click import psycopg2 import sqlalchemy @@ -30,9 +31,9 @@ @click.option("--merge-only/--no-merge-only", default=False, help="When used with a multiproduct layer, the ranges for underlying datacube products are not updated.") @click.option("--version", is_flag=True, default=False, help="Print version string and exit") @click.argument("layers", nargs=-1) -def main(layers, - merge_only, - schema, views, role, version): +def main(layers: list[str], + merge_only: bool, + schema: bool, views: bool, role: str | None, version: bool) -> int: """Manage datacube-ows range tables. 
Valid invocations: @@ -78,6 +79,7 @@ def main(layers, dc = Datacube(app="ows_update_ranges") cfg = get_config(called_from_update_ranges=True) if schema: + assert role is not None # for type checker print("Checking schema....") print("Creating or replacing WMS database schema...") create_schema(dc, role) @@ -109,27 +111,29 @@ def main(layers, return 0 -def create_views(dc): +def create_views(dc: datacube.Datacube): from datacube.cfg import ODCConfig odc_cfg = ODCConfig().get_environment() dbname = odc_cfg.db_database run_sql(dc, "extent_views/create", database=dbname) -def refresh_views(dc): +def refresh_views(dc: datacube.Datacube): run_sql(dc, "extent_views/refresh") -def create_schema(dc, role): +def create_schema(dc: datacube.Datacube, role: str): run_sql(dc, "wms_schema/create", role=role) -def run_sql(dc, path, **params): +def run_sql(dc: datacube.Datacube, path: str, **params: str): if not importlib.resources.files("datacube_ows").joinpath(f"sql/{path}").is_dir(): print("Cannot find SQL resource directory - check your datacube-ows installation") return - files = sorted(importlib.resources.files("datacube_ows").joinpath(f"sql/{path}").iterdir()) + files = sorted( + importlib.resources.files("datacube_ows").joinpath(f"sql/{path}").iterdir() # type: ignore[type-var] + ) filename_req_pattern = re.compile(r"\d+[_a-zA-Z0-9]+_requires_(?P[_a-zA-Z0-9]+)\.sql") filename_pattern = re.compile(r"\d+[_a-zA-Z0-9]+\.sql") @@ -151,11 +155,11 @@ def run_sql(dc, path, **params): sql = "" first = True for line in fp: - line = str(line, "utf-8") - if first and line.startswith("--"): - print(line[2:]) + sline = str(line, "utf-8") + if first and sline.startswith("--"): + print(sline[2:]) else: - sql = sql + "\n" + line + sql = sql + "\n" + sline if first: print(f"Running {f}") first = False @@ -166,10 +170,6 @@ def run_sql(dc, path, **params): print(f"Required parameter {e} for file {f} not supplied - skipping") continue sql = sql.format(**kwargs) - if f.endswith("_raw.sql"): - q 
= SQL(sql) - with conn.connection.cursor() as psycopg2connection: - psycopg2connection.execute(q) - else: - conn.execute(text(sql)) + # Special handling of "_raw.sql" scripts no longer required in SQLAlchemy 2? + conn.execute(text(sql)) conn.close() diff --git a/integration_tests/conftest.py b/integration_tests/conftest.py index a72a52e29..f0f2be3de 100644 --- a/integration_tests/conftest.py +++ b/integration_tests/conftest.py @@ -12,6 +12,7 @@ from click.testing import CliRunner from pytest_localserver.http import WSGIServer +from datacube.cfg import ODCConfig from datacube_ows import ogc from datacube_ows.ogc import app @@ -96,7 +97,8 @@ def product_name(): @pytest.fixture def role_name(): - return os.getenv("DB_USERNAME") + odc_env = ODCConfig.get_environment() + return odc_env.db_username @pytest.fixture From 16c5e52dfd4e24f84d35bf0a1d731e0dd05ea272 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 23:24:12 +0000 Subject: [PATCH 28/29] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- datacube_ows/cfg_parser_impl.py | 3 ++- datacube_ows/product_ranges.py | 7 ++++--- datacube_ows/update_ranges_impl.py | 2 +- integration_tests/conftest.py | 2 +- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/datacube_ows/cfg_parser_impl.py b/datacube_ows/cfg_parser_impl.py index 0c3a5d4f3..502931299 100755 --- a/datacube_ows/cfg_parser_impl.py +++ b/datacube_ows/cfg_parser_impl.py @@ -18,7 +18,8 @@ from datacube_ows import __version__ from datacube_ows.config_utils import ConfigException -from datacube_ows.ows_configuration import OWSConfig, OWSFolder, read_config, OWSNamedLayer, OWSLayer +from datacube_ows.ows_configuration import (OWSConfig, OWSFolder, OWSLayer, + OWSNamedLayer, read_config) @click.group(invoke_without_command=True) diff --git a/datacube_ows/product_ranges.py b/datacube_ows/product_ranges.py index 
ed39adf06..d81204951 100644 --- a/datacube_ows/product_ranges.py +++ b/datacube_ows/product_ranges.py @@ -8,8 +8,8 @@ #pylint: skip-file import math -from datetime import datetime, date, timezone -from typing import cast, Iterable, Callable, Any +from datetime import date, datetime, timezone +from typing import Any, Callable, Iterable, cast import datacube import odc.geo @@ -17,7 +17,8 @@ from sqlalchemy import text from datacube_ows.config_utils import CFG_DICT -from datacube_ows.ows_configuration import get_config, OWSConfig, OWSMultiProductLayer, TimeRes, OWSNamedLayer +from datacube_ows.ows_configuration import (OWSConfig, OWSMultiProductLayer, + OWSNamedLayer, TimeRes, get_config) from datacube_ows.utils import get_sqlconn diff --git a/datacube_ows/update_ranges_impl.py b/datacube_ows/update_ranges_impl.py index 4e80babb0..69ab4367c 100755 --- a/datacube_ows/update_ranges_impl.py +++ b/datacube_ows/update_ranges_impl.py @@ -10,8 +10,8 @@ import re import sys -import datacube import click +import datacube import psycopg2 import sqlalchemy from datacube import Datacube diff --git a/integration_tests/conftest.py b/integration_tests/conftest.py index f0f2be3de..6fdf3982d 100644 --- a/integration_tests/conftest.py +++ b/integration_tests/conftest.py @@ -10,9 +10,9 @@ pytest_plugins = ["helpers_namespace"] import pytest from click.testing import CliRunner +from datacube.cfg import ODCConfig from pytest_localserver.http import WSGIServer -from datacube.cfg import ODCConfig from datacube_ows import ogc from datacube_ows.ogc import app From d6253056f8f2d05d540358eadfdc151faa92d6b5 Mon Sep 17 00:00:00 2001 From: Paul Haesler Date: Tue, 30 Apr 2024 09:45:41 +1000 Subject: [PATCH 29/29] Remove unused imports. 
--- datacube_ows/product_ranges.py | 1 - datacube_ows/update_ranges_impl.py | 1 - 2 files changed, 2 deletions(-) diff --git a/datacube_ows/product_ranges.py b/datacube_ows/product_ranges.py index ed39adf06..c891ad533 100644 --- a/datacube_ows/product_ranges.py +++ b/datacube_ows/product_ranges.py @@ -16,7 +16,6 @@ from psycopg2.extras import Json from sqlalchemy import text -from datacube_ows.config_utils import CFG_DICT from datacube_ows.ows_configuration import get_config, OWSConfig, OWSMultiProductLayer, TimeRes, OWSNamedLayer from datacube_ows.utils import get_sqlconn diff --git a/datacube_ows/update_ranges_impl.py b/datacube_ows/update_ranges_impl.py index 4e80babb0..fd98ff768 100755 --- a/datacube_ows/update_ranges_impl.py +++ b/datacube_ows/update_ranges_impl.py @@ -15,7 +15,6 @@ import psycopg2 import sqlalchemy from datacube import Datacube -from psycopg2.sql import SQL from sqlalchemy import text from datacube_ows import __version__