diff --git a/resources/lib/cheroot/__pycache__/server.cpython-37.opt-1.pyc b/resources/lib/cheroot/__pycache__/server.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..065c6fdd5ca9506a598855f5e92497b7dc0306d6
diff --git a/resources/lib/cheroot/_compat.py b/resources/lib/cheroot/_compat.py
new file mode 100644
index 0000000..e98f91f
--- /dev/null
+++ b/resources/lib/cheroot/_compat.py
@@ -0,0 +1,66 @@
+"""Compatibility code for using Cheroot with various versions of Python."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import re
+
+import six
+
+if six.PY3:
+ def ntob(n, encoding='ISO-8859-1'):
+ """Return the native string as bytes in the given encoding."""
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n.encode(encoding)
+
+ def ntou(n, encoding='ISO-8859-1'):
+ """Return the native string as unicode with the given encoding."""
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n
+
+ def bton(b, encoding='ISO-8859-1'):
+ """Return the byte string as native string in the given encoding."""
+ return b.decode(encoding)
+else:
+ # Python 2
+ def ntob(n, encoding='ISO-8859-1'):
+ """Return the native string as bytes in the given encoding."""
+ assert_native(n)
+ # In Python 2, the native string type is bytes. Assume it's already
+ # in the given encoding, which for ISO-8859-1 is almost always what
+ # was intended.
+ return n
+
+ def ntou(n, encoding='ISO-8859-1'):
+ """Return the native string as unicode with the given encoding."""
+ assert_native(n)
+ # In Python 2, the native string type is bytes.
+ # First, check for the special encoding 'escape'. The test suite uses
+ # this to signal that it wants to pass a string with embedded \uXXXX
+ # escapes, without having to prefix it with u'' for Python 2
+ # (no prefix is needed for Python 3).
+ if encoding == 'escape':
+ return six.u(
+ re.sub(r'\\u([0-9a-zA-Z]{4})',
+ lambda m: six.unichr(int(m.group(1), 16)),
+ n.decode('ISO-8859-1')))
+ # Assume it's already in the given encoding, which for ISO-8859-1
+ # is almost always what was intended.
+ return n.decode(encoding)
+
+ def bton(b, encoding='ISO-8859-1'):
+ """Return the byte string as native string in the given encoding."""
+ return b
+
+
+def assert_native(n):
+ """Check whether the input is of native ``str`` type.
+
+ Raises:
+ TypeError: in case of failed check
+
+ """
+ if not isinstance(n, str):
+ raise TypeError('n must be a native str (got %s)' % type(n).__name__)
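
A minimal usage sketch for the compatibility helpers added above (illustrative only: it assumes ``six`` is installed and that the vendored package is importable as ``cheroot``, e.g. with ``resources/lib`` on ``sys.path``)::

    from cheroot._compat import assert_native, bton, ntob, ntou

    name = 'Content-Type'      # native str on both Python 2 and Python 3
    assert_native(name)        # passes; raises TypeError for non-native strings

    raw = ntob(name)           # bytes, encoded as ISO-8859-1 by default
    text = ntou(name)          # text (unicode on Python 2, str on Python 3)
    assert bton(raw) == name   # round-trips back to the native str type
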
diff --git a/resources/lib/cheroot/cli.py b/resources/lib/cheroot/cli.py
new file mode 100644
index 0000000..6d59fb5
--- /dev/null
+++ b/resources/lib/cheroot/cli.py
@@ -0,0 +1,233 @@
+"""Command line tool for starting a Cheroot WSGI/HTTP server instance.
+
+Basic usage::
+
+ # Start a server on 127.0.0.1:8000 with the default settings
+ # for the WSGI app myapp/wsgi.py:application()
+ cheroot myapp.wsgi
+
+ # Start a server on 0.0.0.0:9000 with 8 threads
+ # for the WSGI app myapp/wsgi.py:main_app()
+ cheroot myapp.wsgi:main_app --bind 0.0.0.0:9000 --threads 8
+
+ # Start a server for the cheroot.server.Gateway subclass
+ # myapp/gateway.py:HTTPGateway
+ cheroot myapp.gateway:HTTPGateway
+
+ # Start a server on the UNIX socket /var/spool/myapp.sock
+ cheroot myapp.wsgi --bind /var/spool/myapp.sock
+
+ # Start a server on the abstract UNIX socket CherootServer
+ cheroot myapp.wsgi --bind @CherootServer
+"""
+
+import argparse
+from importlib import import_module
+import os
+import sys
+import contextlib
+
+import six
+
+from . import server
+from . import wsgi
+
+
+__metaclass__ = type
+
+
+class BindLocation:
+ """A class for storing the bind location for a Cheroot instance."""
+
+
+class TCPSocket(BindLocation):
+ """TCPSocket."""
+
+ def __init__(self, address, port):
+ """Initialize.
+
+ Args:
+ address (str): Host name or IP address
+ port (int): TCP port number
+ """
+ self.bind_addr = address, port
+
+
+class UnixSocket(BindLocation):
+ """UnixSocket."""
+
+ def __init__(self, path):
+ """Initialize."""
+ self.bind_addr = path
+
+
+class AbstractSocket(BindLocation):
+ """AbstractSocket."""
+
+ def __init__(self, addr):
+ """Initialize."""
+ self.bind_addr = '\0{}'.format(addr)
+
+
+class Application:
+ """Application."""
+
+ @classmethod
+ def resolve(cls, full_path):
+ """Read WSGI app/Gateway path string and import application module."""
+ mod_path, _, app_path = full_path.partition(':')
+ app = getattr(import_module(mod_path), app_path or 'application')
+
+ with contextlib.suppress(TypeError):
+ if issubclass(app, server.Gateway):
+ return GatewayYo(app)
+
+ return cls(app)
+
+ def __init__(self, wsgi_app):
+ """Initialize."""
+ if not callable(wsgi_app):
+ raise TypeError(
+ 'Application must be a callable object or '
+ 'cheroot.server.Gateway subclass'
+ )
+ self.wsgi_app = wsgi_app
+
+ def server_args(self, parsed_args):
+ """Return keyword args for Server class."""
+ args = {
+ arg: value
+ for arg, value in vars(parsed_args).items()
+ if not arg.startswith('_') and value is not None
+ }
+ args.update(vars(self))
+ return args
+
+ def server(self, parsed_args):
+ """Server."""
+ return wsgi.Server(**self.server_args(parsed_args))
+
+
+class GatewayYo:
+ """Gateway."""
+
+ def __init__(self, gateway):
+ """Init."""
+ self.gateway = gateway
+
+ def server(self, parsed_args):
+ """Server."""
+ server_args = vars(self)
+ server_args['bind_addr'] = parsed_args.bind_addr
+ if parsed_args.max is not None:
+ server_args['maxthreads'] = parsed_args.max
+ if parsed_args.numthreads is not None:
+ server_args['minthreads'] = parsed_args.numthreads
+ return server.HTTPServer(**server_args)
+
+
+def parse_wsgi_bind_location(bind_addr_string):
+ """Convert bind address string to a BindLocation."""
+ # try and match for an IP/hostname and port
+ match = six.moves.urllib.parse.urlparse('//{}'.format(bind_addr_string))
+ try:
+ addr = match.hostname
+ port = match.port
+ if addr is not None or port is not None:
+ return TCPSocket(addr, port)
+ except ValueError:
+ pass
+
+ # else, assume a UNIX socket path
+ # if the string begins with an @ symbol, use an abstract socket
+ if bind_addr_string.startswith('@'):
+ return AbstractSocket(bind_addr_string[1:])
+ return UnixSocket(path=bind_addr_string)
+
+
+def parse_wsgi_bind_addr(bind_addr_string):
+ """Convert bind address string to bind address parameter."""
+ return parse_wsgi_bind_location(bind_addr_string).bind_addr
+
+
+_arg_spec = {
+ '_wsgi_app': dict(
+ metavar='APP_MODULE',
+ type=Application.resolve,
+ help='WSGI application callable or cheroot.server.Gateway subclass',
+ ),
+ '--bind': dict(
+ metavar='ADDRESS',
+ dest='bind_addr',
+ type=parse_wsgi_bind_addr,
+ default='[::1]:8000',
+ help='Network interface to listen on (default: [::1]:8000)',
+ ),
+ '--chdir': dict(
+ metavar='PATH',
+ type=os.chdir,
+ help='Set the working directory',
+ ),
+ '--server-name': dict(
+ dest='server_name',
+ type=str,
+ help='Web server name to be advertised via Server HTTP header',
+ ),
+ '--threads': dict(
+ metavar='INT',
+ dest='numthreads',
+ type=int,
+ help='Minimum number of worker threads',
+ ),
+ '--max-threads': dict(
+ metavar='INT',
+ dest='max',
+ type=int,
+ help='Maximum number of worker threads',
+ ),
+ '--timeout': dict(
+ metavar='INT',
+ dest='timeout',
+ type=int,
+ help='Timeout in seconds for accepted connections',
+ ),
+ '--shutdown-timeout': dict(
+ metavar='INT',
+ dest='shutdown_timeout',
+ type=int,
+ help='Time in seconds to wait for worker threads to cleanly exit',
+ ),
+ '--request-queue-size': dict(
+ metavar='INT',
+ dest='request_queue_size',
+ type=int,
+ help='Maximum number of queued connections',
+ ),
+ '--accepted-queue-size': dict(
+ metavar='INT',
+ dest='accepted_queue_size',
+ type=int,
+ help='Maximum number of active requests in queue',
+ ),
+ '--accepted-queue-timeout': dict(
+ metavar='INT',
+ dest='accepted_queue_timeout',
+ type=int,
+ help='Timeout in seconds for putting requests into queue',
+ ),
+}
+
+
+def main():
+ """Create a new Cheroot instance with arguments from the command line."""
+ parser = argparse.ArgumentParser(
+ description='Start an instance of the Cheroot WSGI/HTTP server.')
+ for arg, spec in _arg_spec.items():
+ parser.add_argument(arg, **spec)
+ raw_args = parser.parse_args()
+
+ # ensure cwd in sys.path
+ '' in sys.path or sys.path.insert(0, '')
+
+ # create a server based on the arguments provided
+ raw_args._wsgi_app.server(raw_args).safe_start()
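
A quick illustration of how the bind-address parsing above resolves the supported forms (a sketch; it assumes the vendored package is importable, and note that importing ``cheroot.cli`` pulls in ``cheroot.server``, which in this vendored copy imports ``xbmc``, so outside Kodi that module would need to be stubbed)::

    from cheroot.cli import parse_wsgi_bind_addr

    parse_wsgi_bind_addr('0.0.0.0:9000')           # -> ('0.0.0.0', 9000)
    parse_wsgi_bind_addr('[::1]:8000')             # -> ('::1', 8000)
    parse_wsgi_bind_addr('/var/spool/myapp.sock')  # -> '/var/spool/myapp.sock'
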
diff --git a/resources/lib/cheroot/errors.py b/resources/lib/cheroot/errors.py
new file mode 100644
index 0000000..82412b4
--- /dev/null
+++ b/resources/lib/cheroot/errors.py
@@ -0,0 +1,58 @@
+"""Collection of exceptions raised and/or processed by Cheroot."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import errno
+import sys
+
+
+class MaxSizeExceeded(Exception):
+ """Exception raised when a client sends more data than is acceptable within the limit.
+
+ Depends on the ``request.body.maxbytes`` config option if used within CherryPy.
+ """
+
+
+class NoSSLError(Exception):
+ """Exception raised when a client speaks HTTP to an HTTPS socket."""
+
+
+class FatalSSLAlert(Exception):
+ """Exception raised when the SSL implementation signals a fatal alert."""
+
+
+def plat_specific_errors(*errnames):
+ """Return error numbers for all errors in errnames on this platform.
+
+ The 'errno' module contains different global constants depending on
+ the specific platform (OS). This function will return the list of
+ numeric values for a given list of potential names.
+ """
+ errno_names = dir(errno)
+ nums = [getattr(errno, k) for k in errnames if k in errno_names]
+ # de-dupe the list
+ return list(dict.fromkeys(nums).keys())
+
+
+socket_error_eintr = plat_specific_errors('EINTR', 'WSAEINTR')
+
+socket_errors_to_ignore = plat_specific_errors(
+ 'EPIPE',
+ 'EBADF', 'WSAEBADF',
+ 'ENOTSOCK', 'WSAENOTSOCK',
+ 'ETIMEDOUT', 'WSAETIMEDOUT',
+ 'ECONNREFUSED', 'WSAECONNREFUSED',
+ 'ECONNRESET', 'WSAECONNRESET',
+ 'ECONNABORTED', 'WSAECONNABORTED',
+ 'ENETRESET', 'WSAENETRESET',
+ 'EHOSTDOWN', 'EHOSTUNREACH',
+)
+socket_errors_to_ignore.append('timed out')
+socket_errors_to_ignore.append('The read operation timed out')
+socket_errors_nonblocking = plat_specific_errors(
+ 'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
+
+if sys.platform == 'darwin':
+ socket_errors_to_ignore.extend(plat_specific_errors('EPROTOTYPE'))
+ socket_errors_nonblocking.extend(plat_specific_errors('EPROTOTYPE'))
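
``plat_specific_errors`` silently drops names that are not defined in this platform's ``errno`` module, which is why the lists above can freely mix POSIX and Windows (``WSA*``) names. A small sketch, assuming the vendored package is importable::

    import errno
    from cheroot.errors import plat_specific_errors

    nums = plat_specific_errors('EPIPE', 'WSAEBADF', 'ECONNRESET')
    # On Linux only EPIPE and ECONNRESET resolve; WSAEBADF is skipped.
    assert errno.EPIPE in nums and errno.ECONNRESET in nums
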
diff --git a/resources/lib/cheroot/makefile.py b/resources/lib/cheroot/makefile.py
new file mode 100644
index 0000000..a76f2ed
--- /dev/null
+++ b/resources/lib/cheroot/makefile.py
@@ -0,0 +1,387 @@
+"""Socket file object."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+
+try:
+ # prefer slower Python-based io module
+ import _pyio as io
+except ImportError:
+ # Python 2.6
+ import io
+
+import six
+
+from . import errors
+
+
+class BufferedWriter(io.BufferedWriter):
+ """Faux file object attached to a socket object."""
+
+ def write(self, b):
+ """Write bytes to buffer."""
+ self._checkClosed()
+ if isinstance(b, str):
+ raise TypeError("can't write str to binary stream")
+
+ with self._write_lock:
+ self._write_buf.extend(b)
+ self._flush_unlocked()
+ return len(b)
+
+ def _flush_unlocked(self):
+ self._checkClosed('flush of closed file')
+ while self._write_buf:
+ try:
+ # ssl sockets only accept 'bytes', not bytearrays
+ # so perhaps we should conditionally wrap this for perf?
+ n = self.raw.write(bytes(self._write_buf))
+ except io.BlockingIOError as e:
+ n = e.characters_written
+ del self._write_buf[:n]
+
+
+def MakeFile_PY3(sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
+ """File object attached to a socket object."""
+ if 'r' in mode:
+ return io.BufferedReader(socket.SocketIO(sock, mode), bufsize)
+ else:
+ return BufferedWriter(socket.SocketIO(sock, mode), bufsize)
+
+
+class MakeFile_PY2(getattr(socket, '_fileobject', object)):
+ """Faux file object attached to a socket object."""
+
+ def __init__(self, *args, **kwargs):
+ """Initialize faux file object."""
+ self.bytes_read = 0
+ self.bytes_written = 0
+ socket._fileobject.__init__(self, *args, **kwargs)
+
+ def write(self, data):
+ """Sendall for non-blocking sockets."""
+ while data:
+ try:
+ bytes_sent = self.send(data)
+ data = data[bytes_sent:]
+ except socket.error as e:
+ if e.args[0] not in errors.socket_errors_nonblocking:
+ raise
+
+ def send(self, data):
+ """Send some part of message to the socket."""
+ bytes_sent = self._sock.send(data)
+ self.bytes_written += bytes_sent
+ return bytes_sent
+
+ def flush(self):
+ """Write all data from buffer to socket and reset write buffer."""
+ if self._wbuf:
+ buffer = ''.join(self._wbuf)
+ self._wbuf = []
+ self.write(buffer)
+
+ def recv(self, size):
+ """Receive message of a size from the socket."""
+ while True:
+ try:
+ data = self._sock.recv(size)
+ self.bytes_read += len(data)
+ return data
+ except socket.error as e:
+ what = (
+ e.args[0] not in errors.socket_errors_nonblocking
+ and e.args[0] not in errors.socket_error_eintr
+ )
+ if what:
+ raise
+
+ class FauxSocket:
+ """Faux socket with the minimal interface required by pypy."""
+
+ def _reuse(self):
+ pass
+
+ _fileobject_uses_str_type = six.PY2 and isinstance(
+ socket._fileobject(FauxSocket())._rbuf, six.string_types)
+
+ # FauxSocket is no longer needed
+ del FauxSocket
+
+ if not _fileobject_uses_str_type:
+ def read(self, size=-1):
+ """Read data from the socket to buffer."""
+ # Use max, disallow tiny reads in a loop as they are very
+ # inefficient.
+ # We never leave read() with any leftover data from a new recv()
+ # call in our internal buffer.
+ rbufsize = max(self._rbufsize, self.default_bufsize)
+ # Our use of StringIO rather than lists of string objects returned
+ # by recv() minimizes memory usage and fragmentation that occurs
+ # when rbufsize is large compared to the typical return value of
+ # recv().
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if size < 0:
+ # Read until EOF
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(rbufsize)
+ if not data:
+ break
+ buf.write(data)
+ return buf.getvalue()
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ buf_len = buf.tell()
+ if buf_len >= size:
+ # Already have size bytes in our buffer? Extract and
+ # return.
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return rv
+
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ left = size - buf_len
+ # recv() will malloc the amount of memory given as its
+ # parameter even though it often returns much less data
+ # than that. The returned data string is short lived
+ # as we copy it into a StringIO and free it. This avoids
+ # fragmentation issues on many platforms.
+ data = self.recv(left)
+ if not data:
+ break
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid buffer data copies when:
+ # - We have no data in our buffer.
+ # AND
+ # - Our call to recv returned exactly the
+ # number of bytes we were asked to read.
+ return data
+ if n == left:
+ buf.write(data)
+ del data # explicit free
+ break
+ assert n <= left, 'recv(%d) returned %d bytes' % (left, n)
+ buf.write(data)
+ buf_len += n
+ del data # explicit free
+ # assert buf_len == buf.tell()
+ return buf.getvalue()
+
+ def readline(self, size=-1):
+ """Read line from the socket to buffer."""
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if buf.tell() > 0:
+ # check if we already have it in our buffer
+ buf.seek(0)
+ bline = buf.readline(size)
+ if bline.endswith('\n') or len(bline) == size:
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return bline
+ del bline
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ buf.seek(0)
+ buffers = [buf.read()]
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ data = None
+ recv = self.recv
+ while data != '\n':
+ data = recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+
+ buf.seek(0, 2) # seek end
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ buf.write(data[:nl])
+ self._rbuf.write(data[nl:])
+ del data
+ break
+ buf.write(data)
+ return buf.getvalue()
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes
+ # first
+ buf.seek(0, 2) # seek end
+ buf_len = buf.tell()
+ if buf_len >= size:
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return rv
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ left = size - buf_len
+ # did we just receive a newline?
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ # save the excess data to _rbuf
+ self._rbuf.write(data[nl:])
+ if buf_len:
+ buf.write(data[:nl])
+ break
+ else:
+ # Shortcut. Avoid data copy through buf when
+ # returning a substring of our first recv().
+ return data[:nl]
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid data copy through buf when
+ # returning exactly all of our first recv().
+ return data
+ if n >= left:
+ buf.write(data[:left])
+ self._rbuf.write(data[left:])
+ break
+ buf.write(data)
+ buf_len += n
+ # assert buf_len == buf.tell()
+ return buf.getvalue()
+ else:
+ def read(self, size=-1):
+ """Read data from the socket to buffer."""
+ if size < 0:
+ # Read until EOF
+ buffers = [self._rbuf]
+ self._rbuf = ''
+ if self._rbufsize <= 1:
+ recv_size = self.default_bufsize
+ else:
+ recv_size = self._rbufsize
+
+ while True:
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ data = self._rbuf
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ left = size - buf_len
+ recv_size = max(self._rbufsize, left)
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return ''.join(buffers)
+
+ def readline(self, size=-1):
+ """Read line from the socket to buffer."""
+ data = self._rbuf
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ assert data == ''
+ buffers = []
+ while data != '\n':
+ data = self.recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ return ''.join(buffers)
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes
+ # first
+ nl = data.find('\n', 0, size)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ left = size - buf_len
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return ''.join(buffers)
+
+
+MakeFile = MakeFile_PY2 if six.PY2 else MakeFile_PY3
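
A short sketch of how ``MakeFile`` wraps a connected socket into buffered reader/writer file objects, mirroring what the server does for each accepted connection (illustrative only; ``socket.socketpair()`` stands in for a real client connection and is not part of Cheroot's API)::

    import socket
    from cheroot.makefile import MakeFile

    a, b = socket.socketpair()
    wfile = MakeFile(a, 'wb')         # buffered writer around socket a
    rfile = MakeFile(b, 'rb', 65536)  # buffered reader around socket b

    wfile.write(b'GET / HTTP/1.1\r\n')
    wfile.flush()
    print(rfile.readline())           # b'GET / HTTP/1.1\r\n'
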
diff --git a/resources/lib/cheroot/server.py b/resources/lib/cheroot/server.py
new file mode 100644
index 0000000..4407049
--- /dev/null
+++ b/resources/lib/cheroot/server.py
@@ -0,0 +1,2001 @@
+"""
+A high-speed, production ready, thread pooled, generic HTTP server.
+
+For those of you wanting to understand internals of this module, here's the
+basic call flow. The server's listening thread runs a very tight loop,
+sticking incoming connections onto a Queue::
+
+ server = HTTPServer(...)
+ server.start()
+ -> while True:
+ tick()
+ # This blocks until a request comes in:
+ child = socket.accept()
+ conn = HTTPConnection(child, ...)
+ server.requests.put(conn)
+
+Worker threads are kept in a pool and poll the Queue, popping off and then
+handling each connection in turn. Each connection can consist of an arbitrary
+number of requests and their responses, so we run a nested loop::
+
+ while True:
+ conn = server.requests.get()
+ conn.communicate()
+ -> while True:
+ req = HTTPRequest(...)
+ req.parse_request()
+ -> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
+ req.rfile.readline()
+ read_headers(req.rfile, req.inheaders)
+ req.respond()
+ -> response = app(...)
+ try:
+ for chunk in response:
+ if chunk:
+ req.write(chunk)
+ finally:
+ if hasattr(response, "close"):
+ response.close()
+ if req.close_connection:
+ return
+
+For running a server you can invoke :func:`start()` (it
+will run the server forever) or invoke :func:`prepare()`
+and :func:`serve()` like this::
+
+ server = HTTPServer(...)
+ server.prepare()
+ try:
+ threading.Thread(target=server.serve).start()
+
+ # waiting/detecting some appropriate stop condition here
+ ...
+
+ finally:
+ server.stop()
+
+And now for a trivial doctest to exercise the test suite
+
+>>> 'HTTPServer' in globals()
+True
+
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import os
+import io
+import re
+import email.utils
+import socket
+import sys
+import time
+import traceback as traceback_
+import logging
+import platform
+import xbmc
+
+try:
+ from functools import lru_cache
+except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+import six
+from six.moves import queue
+from six.moves import urllib
+
+from . import errors, __version__
+from ._compat import bton, ntou
+from .workers import threadpool
+from .makefile import MakeFile
+
+
+__all__ = ('HTTPRequest', 'HTTPConnection', 'HTTPServer',
+ 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
+ 'Gateway', 'get_ssl_adapter_class')
+
+"""
+Special Kodi case:
+Android does not support the grp and pwd modules, but Python has trouble
+reporting that it is running on Android (the platform shows up as linux2),
+so we use the xbmc library to detect it instead.
+"""
+IS_WINDOWS = platform.system() == 'Windows'
+IS_ANDROID = xbmc.getCondVisibility('system.platform.linux') and xbmc.getCondVisibility('system.platform.android')
+
+if not (IS_WINDOWS or IS_ANDROID):
+ import grp
+ import pwd
+ import struct
+
+
+if IS_WINDOWS and hasattr(socket, 'AF_INET6'):
+ if not hasattr(socket, 'IPPROTO_IPV6'):
+ socket.IPPROTO_IPV6 = 41
+ if not hasattr(socket, 'IPV6_V6ONLY'):
+ socket.IPV6_V6ONLY = 27
+
+
+if not hasattr(socket, 'SO_PEERCRED'):
+ """
+ NOTE: the value for SO_PEERCRED can be architecture specific, in
+ which case the getsockopt() will hopefully fail. The arch
+ specific value could be derived from platform.processor()
+ """
+ socket.SO_PEERCRED = 17
+
+
+LF = b'\n'
+CRLF = b'\r\n'
+TAB = b'\t'
+SPACE = b' '
+COLON = b':'
+SEMICOLON = b';'
+EMPTY = b''
+ASTERISK = b'*'
+FORWARD_SLASH = b'/'
+QUOTED_SLASH = b'%2F'
+QUOTED_SLASH_REGEX = re.compile(b'(?i)' + QUOTED_SLASH)
+
+
+comma_separated_headers = [
+ b'Accept', b'Accept-Charset', b'Accept-Encoding',
+ b'Accept-Language', b'Accept-Ranges', b'Allow', b'Cache-Control',
+ b'Connection', b'Content-Encoding', b'Content-Language', b'Expect',
+ b'If-Match', b'If-None-Match', b'Pragma', b'Proxy-Authenticate', b'TE',
+ b'Trailer', b'Transfer-Encoding', b'Upgrade', b'Vary', b'Via', b'Warning',
+ b'WWW-Authenticate',
+]
+
+
+if not hasattr(logging, 'statistics'):
+ logging.statistics = {}
+
+
+class HeaderReader:
+ """Object for reading headers from an HTTP request.
+
+ Interface and default implementation.
+ """
+
+ def __call__(self, rfile, hdict=None):
+ """
+ Read headers from the given stream into the given header dict.
+
+ If hdict is None, a new header dict is created. Returns the populated
+ header dict.
+
+ Headers which are repeated are folded together using a comma if their
+ specification so dictates.
+
+ This function raises ValueError when the read bytes violate the HTTP
+ spec.
+ You should probably return "400 Bad Request" if this happens.
+ """
+ if hdict is None:
+ hdict = {}
+
+ while True:
+ line = rfile.readline()
+ if not line:
+ # No more data--illegal end of headers
+ raise ValueError('Illegal end of headers.')
+
+ if line == CRLF:
+ # Normal end of headers
+ break
+ if not line.endswith(CRLF):
+ raise ValueError('HTTP requires CRLF terminators')
+
+ if line[:1] in (SPACE, TAB):  # slice keeps this a bytes-to-bytes comparison on Python 3
+ # It's a continuation line.
+ v = line.strip()
+ else:
+ try:
+ k, v = line.split(COLON, 1)
+ except ValueError:
+ raise ValueError('Illegal header line.')
+ v = v.strip()
+ k = self._transform_key(k)
+ hname = k
+
+ if not self._allow_header(k):
+ continue
+
+ if k in comma_separated_headers:
+ existing = hdict.get(hname)
+ if existing:
+ v = b', '.join((existing, v))
+ hdict[hname] = v
+
+ return hdict
+
+ def _allow_header(self, key_name):
+ return True
+
+ def _transform_key(self, key_name):
+ # TODO: what about TE and WWW-Authenticate?
+ return key_name.strip().title()
+
+
+class DropUnderscoreHeaderReader(HeaderReader):
+ """Custom HeaderReader to exclude any headers with underscores in them."""
+
+ def _allow_header(self, key_name):
+ orig = super(DropUnderscoreHeaderReader, self)._allow_header(key_name)
+ return orig and '_' not in key_name
+
+
+class SizeCheckWrapper:
+ """Wraps a file-like object, raising MaxSizeExceeded if too large."""
+
+ def __init__(self, rfile, maxlen):
+ """Initialize SizeCheckWrapper instance.
+
+ Args:
+ rfile (file): file of a limited size
+ maxlen (int): maximum length of the file being read
+ """
+ self.rfile = rfile
+ self.maxlen = maxlen
+ self.bytes_read = 0
+
+ def _check_length(self):
+ if self.maxlen and self.bytes_read > self.maxlen:
+ raise errors.MaxSizeExceeded()
+
+ def read(self, size=None):
+ """Read a chunk from rfile buffer and return it.
+
+ Args:
+ size (int): amount of data to read
+
+ Returns:
+ bytes: Chunk from rfile, limited by size if specified.
+
+ """
+ data = self.rfile.read(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ def readline(self, size=None):
+ """Read a single line from rfile buffer and return it.
+
+ Args:
+ size (int): maximum amount of data to read
+
+ Returns:
+ bytes: One line from rfile.
+
+ """
+ if size is not None:
+ data = self.rfile.readline(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ # User didn't specify a size ...
+ # We read the line in chunks to make sure it's not a 100MB line !
+ res = []
+ while True:
+ data = self.rfile.readline(256)
+ self.bytes_read += len(data)
+ self._check_length()
+ res.append(data)
+ # See https://github.com/cherrypy/cherrypy/issues/421
+ if len(data) < 256 or data[-1:] == LF:
+ return EMPTY.join(res)
+
+ def readlines(self, sizehint=0):
+ """Read all lines from rfile buffer and return them.
+
+ Args:
+ sizehint (int): hint of minimum amount of data to read
+
+ Returns:
+ list[bytes]: Lines of bytes read from rfile.
+
+ """
+ # Shamelessly stolen from StringIO
+ total = 0
+ lines = []
+ line = self.readline(sizehint)
+ while line:
+ lines.append(line)
+ total += len(line)
+ if 0 < sizehint <= total:
+ break
+ line = self.readline(sizehint)
+ return lines
+
+ def close(self):
+ """Release resources allocated for rfile."""
+ self.rfile.close()
+
+ def __iter__(self):
+ """Return file iterator."""
+ return self
+
+ def __next__(self):
+ """Generate next file chunk."""
+ data = next(self.rfile)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ next = __next__
+
+
+class KnownLengthRFile:
+ """Wraps a file-like object, returning an empty string when exhausted."""
+
+ def __init__(self, rfile, content_length):
+ """Initialize KnownLengthRFile instance.
+
+ Args:
+ rfile (file): file of a known size
+ content_length (int): length of the file being read
+
+ """
+ self.rfile = rfile
+ self.remaining = content_length
+
+ def read(self, size=None):
+ """Read a chunk from rfile buffer and return it.
+
+ Args:
+ size (int): amount of data to read
+
+ Returns:
+ bytes: Chunk from rfile, limited by size if specified.
+
+ """
+ if self.remaining == 0:
+ return b''
+ if size is None:
+ size = self.remaining
+ else:
+ size = min(size, self.remaining)
+
+ data = self.rfile.read(size)
+ self.remaining -= len(data)
+ return data
+
+ def readline(self, size=None):
+ """Read a single line from rfile buffer and return it.
+
+ Args:
+ size (int): maximum amount of data to read
+
+ Returns:
+ bytes: One line from rfile.
+
+ """
+ if self.remaining == 0:
+ return b''
+ if size is None:
+ size = self.remaining
+ else:
+ size = min(size, self.remaining)
+
+ data = self.rfile.readline(size)
+ self.remaining -= len(data)
+ return data
+
+ def readlines(self, sizehint=0):
+ """Read all lines from rfile buffer and return them.
+
+ Args:
+ sizehint (int): hint of minimum amount of data to read
+
+ Returns:
+ list[bytes]: Lines of bytes read from rfile.
+
+ """
+ # Shamelessly stolen from StringIO
+ total = 0
+ lines = []
+ line = self.readline(sizehint)
+ while line:
+ lines.append(line)
+ total += len(line)
+ if 0 < sizehint <= total:
+ break
+ line = self.readline(sizehint)
+ return lines
+
+ def close(self):
+ """Release resources allocated for rfile."""
+ self.rfile.close()
+
+ def __iter__(self):
+ """Return file iterator."""
+ return self
+
+ def __next__(self):
+ """Generate next file chunk."""
+ data = next(self.rfile)
+ self.remaining -= len(data)
+ return data
+
+ next = __next__
+
+
+class ChunkedRFile:
+ """Wraps a file-like object, returning an empty string when exhausted.
+
+ This class is intended to provide a conforming wsgi.input value for
+ request entities that have been encoded with the 'chunked' transfer
+ encoding.
+ """
+
+ def __init__(self, rfile, maxlen, bufsize=8192):
+ """Initialize ChunkedRFile instance.
+
+ Args:
+ rfile (file): file encoded with the 'chunked' transfer encoding
+ maxlen (int): maximum length of the file being read
+ bufsize (int): size of the buffer used to read the file
+ """
+ self.rfile = rfile
+ self.maxlen = maxlen
+ self.bytes_read = 0
+ self.buffer = EMPTY
+ self.bufsize = bufsize
+ self.closed = False
+
+ def _fetch(self):
+ if self.closed:
+ return
+
+ line = self.rfile.readline()
+ self.bytes_read += len(line)
+
+ if self.maxlen and self.bytes_read > self.maxlen:
+ raise errors.MaxSizeExceeded(
+ 'Request Entity Too Large', self.maxlen)
+
+ line = line.strip().split(SEMICOLON, 1)
+
+ try:
+ chunk_size = line.pop(0)
+ chunk_size = int(chunk_size, 16)
+ except ValueError:
+ raise ValueError('Bad chunked transfer size: ' + repr(chunk_size))
+
+ if chunk_size <= 0:
+ self.closed = True
+ return
+
+# if line: chunk_extension = line[0]
+
+ if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
+ raise IOError('Request Entity Too Large')
+
+ chunk = self.rfile.read(chunk_size)
+ self.bytes_read += len(chunk)
+ self.buffer += chunk
+
+ crlf = self.rfile.read(2)
+ if crlf != CRLF:
+ raise ValueError(
+ "Bad chunked transfer coding (expected '\\r\\n', "
+ 'got ' + repr(crlf) + ')')
+
+ def read(self, size=None):
+ """Read a chunk from rfile buffer and return it.
+
+ Args:
+ size (int): amount of data to read
+
+ Returns:
+ bytes: Chunk from rfile, limited by size if specified.
+
+ """
+ data = EMPTY
+
+ if size == 0:
+ return data
+
+ while True:
+ if size and len(data) >= size:
+ return data
+
+ if not self.buffer:
+ self._fetch()
+ if not self.buffer:
+ # EOF
+ return data
+
+ if size:
+ remaining = size - len(data)
+ data += self.buffer[:remaining]
+ self.buffer = self.buffer[remaining:]
+ else:
+ data += self.buffer
+ self.buffer = EMPTY
+
+ def readline(self, size=None):
+ """Read a single line from rfile buffer and return it.
+
+ Args:
+ size (int): maximum amount of data to read
+
+ Returns:
+ bytes: One line from rfile.
+
+ """
+ data = EMPTY
+
+ if size == 0:
+ return data
+
+ while True:
+ if size and len(data) >= size:
+ return data
+
+ if not self.buffer:
+ self._fetch()
+ if not self.buffer:
+ # EOF
+ return data
+
+ newline_pos = self.buffer.find(LF)
+ if size:
+ if newline_pos == -1:
+ remaining = size - len(data)
+ data += self.buffer[:remaining]
+ self.buffer = self.buffer[remaining:]
+ else:
+ remaining = min(size - len(data), newline_pos)
+ data += self.buffer[:remaining]
+ self.buffer = self.buffer[remaining:]
+ else:
+ if newline_pos == -1:
+ data += self.buffer
+ self.buffer = EMPTY
+ else:
+ data += self.buffer[:newline_pos]
+ self.buffer = self.buffer[newline_pos:]
+
+ def readlines(self, sizehint=0):
+ """Read all lines from rfile buffer and return them.
+
+ Args:
+ sizehint (int): hint of minimum amount of data to read
+
+ Returns:
+ list[bytes]: Lines of bytes read from rfile.
+
+ """
+ # Shamelessly stolen from StringIO
+ total = 0
+ lines = []
+ line = self.readline(sizehint)
+ while line:
+ lines.append(line)
+ total += len(line)
+ if 0 < sizehint <= total:
+ break
+ line = self.readline(sizehint)
+ return lines
+
+ def read_trailer_lines(self):
+ """Read HTTP headers and yield them.
+
+ Returns:
+ Generator: yields CRLF separated lines.
+
+ """
+ if not self.closed:
+ raise ValueError(
+ 'Cannot read trailers until the request body has been read.')
+
+ while True:
+ line = self.rfile.readline()
+ if not line:
+ # No more data--illegal end of headers
+ raise ValueError('Illegal end of headers.')
+
+ self.bytes_read += len(line)
+ if self.maxlen and self.bytes_read > self.maxlen:
+ raise IOError('Request Entity Too Large')
+
+ if line == CRLF:
+ # Normal end of headers
+ break
+ if not line.endswith(CRLF):
+ raise ValueError('HTTP requires CRLF terminators')
+
+ yield line
+
+ def close(self):
+ """Release resources allocated for rfile."""
+ self.rfile.close()
+
+
+class HTTPRequest:
+ """An HTTP Request (and response).
+
+ A single HTTP connection may consist of multiple request/response pairs.
+ """
+
+ server = None
+ """The HTTPServer object which is receiving this request."""
+
+ conn = None
+ """The HTTPConnection object on which this request connected."""
+
+ inheaders = {}
+ """A dict of request headers."""
+
+ outheaders = []
+ """A list of header tuples to write in the response."""
+
+ ready = False
+ """When True, the request has been parsed and is ready to begin generating
+ the response. When False, signals the calling Connection that the response
+ should not be generated and the connection should close."""
+
+ close_connection = False
+ """Signals the calling Connection that the request should close. This does
+ not imply an error! The client and/or server may each request that the
+ connection be closed."""
+
+ chunked_write = False
+ """If True, output will be encoded with the "chunked" transfer-coding.
+
+ This value is set automatically inside send_headers."""
+
+ header_reader = HeaderReader()
+ """
+ A HeaderReader instance or compatible reader.
+ """
+
+ def __init__(self, server, conn, proxy_mode=False, strict_mode=True):
+ """Initialize HTTP request container instance.
+
+ Args:
+ server (HTTPServer): web server object receiving this request
+ conn (HTTPConnection): HTTP connection object for this request
+ proxy_mode (bool): whether this HTTPServer should behave as a PROXY
+ server for certain requests
+ strict_mode (bool): whether we should return a 400 Bad Request when
+ we encounter a request that an HTTP-compliant client should not be
+ making
+ """
+ self.server = server
+ self.conn = conn
+
+ self.ready = False
+ self.started_request = False
+ self.scheme = b'http'
+ if self.server.ssl_adapter is not None:
+ self.scheme = b'https'
+ # Use the lowest-common protocol in case read_request_line errors.
+ self.response_protocol = 'HTTP/1.0'
+ self.inheaders = {}
+
+ self.status = ''
+ self.outheaders = []
+ self.sent_headers = False
+ self.close_connection = self.__class__.close_connection
+ self.chunked_read = False
+ self.chunked_write = self.__class__.chunked_write
+ self.proxy_mode = proxy_mode
+ self.strict_mode = strict_mode
+
+ def parse_request(self):
+ """Parse the next HTTP request start-line and message-headers."""
+ self.rfile = SizeCheckWrapper(self.conn.rfile,
+ self.server.max_request_header_size)
+ try:
+ success = self.read_request_line()
+ except errors.MaxSizeExceeded:
+ self.simple_response(
+ '414 Request-URI Too Long',
+ 'The Request-URI sent with the request exceeds the maximum '
+ 'allowed bytes.')
+ return
+ else:
+ if not success:
+ return
+
+ try:
+ success = self.read_request_headers()
+ except errors.MaxSizeExceeded:
+ self.simple_response(
+ '413 Request Entity Too Large',
+ 'The headers sent with the request exceed the maximum '
+ 'allowed bytes.')
+ return
+ else:
+ if not success:
+ return
+
+ self.ready = True
+
+ def read_request_line(self):
+ """Read and parse first line of the HTTP request.
+
+ Returns:
+ bool: True if the request line is valid or False if it's malformed.
+
+ """
+ # HTTP/1.1 connections are persistent by default. If a client
+ # requests a page, then idles (leaves the connection open),
+ # then rfile.readline() will raise socket.error("timed out").
+ # Note that it does this based on the value given to settimeout(),
+ # and doesn't need the client to request or acknowledge the close
+ # (although your TCP stack might suffer for it: cf Apache's history
+ # with FIN_WAIT_2).
+ request_line = self.rfile.readline()
+
+ # Set started_request to True so communicate() knows to send 408
+ # from here on out.
+ self.started_request = True
+ if not request_line:
+ return False
+
+ if request_line == CRLF:
+ # RFC 2616 sec 4.1: "...if the server is reading the protocol
+ # stream at the beginning of a message and receives a CRLF
+ # first, it should ignore the CRLF."
+ # But only ignore one leading line! else we enable a DoS.
+ request_line = self.rfile.readline()
+ if not request_line:
+ return False
+
+ if not request_line.endswith(CRLF):
+ self.simple_response(
+ '400 Bad Request', 'HTTP requires CRLF terminators')
+ return False
+
+ try:
+ method, uri, req_protocol = request_line.strip().split(SPACE, 2)
+ if not req_protocol.startswith(b'HTTP/'):
+ self.simple_response(
+ '400 Bad Request', 'Malformed Request-Line: bad protocol'
+ )
+ return False
+ rp = req_protocol[5:].split(b'.', 1)
+ rp = tuple(map(int, rp))  # major and minor version numbers must be treated as integers
+ if rp > (1, 1):
+ self.simple_response(
+ '505 HTTP Version Not Supported', 'Cannot fulfill request'
+ )
+ return False
+ except (ValueError, IndexError):
+ self.simple_response('400 Bad Request', 'Malformed Request-Line')
+ return False
+
+ self.uri = uri
+ self.method = method.upper()
+
+ if self.strict_mode and method != self.method:
+ resp = (
+ 'Malformed method name: According to RFC 2616 '
+ '(section 5.1.1) and its successors '
+ 'RFC 7230 (section 3.1.1) and RFC 7231 (section 4.1) '
+ 'method names are case-sensitive and uppercase.'
+ )
+ self.simple_response('400 Bad Request', resp)
+ return False
+
+ try:
+ if six.PY2: # FIXME: Figure out better way to do this
+ # Ref: https://stackoverflow.com/a/196392/595220 (like this?)
+ """This is a dummy check for unicode in URI."""
+ ntou(bton(uri, 'ascii'), 'ascii')
+ scheme, authority, path, qs, fragment = urllib.parse.urlsplit(uri)
+ except UnicodeError:
+ self.simple_response('400 Bad Request', 'Malformed Request-URI')
+ return False
+
+ if self.method == b'OPTIONS':
+ # TODO: cover this branch with tests
+ path = (uri
+ # https://tools.ietf.org/html/rfc7230#section-5.3.4
+ if self.proxy_mode or uri == ASTERISK
+ else path)
+ elif self.method == b'CONNECT':
+ # TODO: cover this branch with tests
+ if not self.proxy_mode:
+ self.simple_response('405 Method Not Allowed')
+ return False
+
+ # `urlsplit()` above parses "example.com:3128" as path part of URI.
+ # this is a workaround, which makes it detect netloc correctly
+ uri_split = urllib.parse.urlsplit(b'//' + uri)
+ _scheme, _authority, _path, _qs, _fragment = uri_split
+ _port = EMPTY
+ try:
+ _port = uri_split.port
+ except ValueError:
+ pass
+
+ # FIXME: use third-party validation to make checks against RFC
+ # the validation doesn't take into account, that urllib parses
+ # invalid URIs without raising errors
+ # https://tools.ietf.org/html/rfc7230#section-5.3.3
+ invalid_path = (
+ _authority != uri
+ or not _port
+ or any((_scheme, _path, _qs, _fragment))
+ )
+ if invalid_path:
+ self.simple_response('400 Bad Request',
+ 'Invalid path in Request-URI: request-'
+ 'target must match authority-form.')
+ return False
+
+ authority = path = _authority
+ scheme = qs = fragment = EMPTY
+ else:
+ uri_is_absolute_form = (scheme or authority)
+
+ disallowed_absolute = (
+ self.strict_mode
+ and not self.proxy_mode
+ and uri_is_absolute_form
+ )
+ if disallowed_absolute:
+ # https://tools.ietf.org/html/rfc7230#section-5.3.2
+ # (absolute form)
+ """Absolute URI is only allowed within proxies."""
+ self.simple_response(
+ '400 Bad Request',
+ 'Absolute URI not allowed if server is not a proxy.',
+ )
+ return False
+
+ invalid_path = (
+ self.strict_mode
+ and not uri.startswith(FORWARD_SLASH)
+ and not uri_is_absolute_form
+ )
+ if invalid_path:
+ # https://tools.ietf.org/html/rfc7230#section-5.3.1
+ # (origin-form)
+ """Path should start with a forward slash."""
+ resp = (
+ 'Invalid path in Request-URI: request-target must contain '
+ 'origin-form which starts with absolute-path (URI '
+ 'starting with a slash "/").'
+ )
+ self.simple_response('400 Bad Request', resp)
+ return False
+
+ if fragment:
+ self.simple_response('400 Bad Request',
+ 'Illegal #fragment in Request-URI.')
+ return False
+
+ if path is None:
+ # FIXME: It looks like this case cannot happen
+ self.simple_response('400 Bad Request',
+ 'Invalid path in Request-URI.')
+ return False
+
+ # Unquote the path+params (e.g. "/this%20path" -> "/this path").
+ # https://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
+ #
+ # But note that "...a URI must be separated into its components
+ # before the escaped characters within those components can be
+ # safely decoded." https://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
+ # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not
+ # "/this/path".
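+ # For example, b'/this%20path%2Fthat' is split on the encoded slash
+ # into [b'/this%20path', b'that'], each piece is unquoted to
+ # [b'/this path', b'that'], and the pieces are rejoined with
+ # QUOTED_SLASH, preserving the encoded slash.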
+ try:
+ # TODO: Figure out whether exception can really happen here.
+ # It looks like it's caught on urlsplit() call above.
+ atoms = [
+ urllib.parse.unquote_to_bytes(x)
+ for x in QUOTED_SLASH_REGEX.split(path)
+ ]
+ except ValueError as ex:
+ self.simple_response('400 Bad Request', ex.args[0])
+ return False
+ path = QUOTED_SLASH.join(atoms)
+
+ if not path.startswith(FORWARD_SLASH):
+ path = FORWARD_SLASH + path
+
+ if scheme is not EMPTY:
+ self.scheme = scheme
+ self.authority = authority
+ self.path = path
+
+ # Note that, like wsgiref and most other HTTP servers,
+ # we "% HEX HEX"-unquote the path but not the query string.
+ self.qs = qs
+
+ # Compare request and server HTTP protocol versions, in case our
+ # server does not support the requested protocol. Limit our output
+ # to min(req, server). We want the following output:
+ # request server actual written supported response
+ # protocol protocol response protocol feature set
+ # a 1.0 1.0 1.0 1.0
+ # b 1.0 1.1 1.1 1.0
+ # c 1.1 1.0 1.0 1.0
+ # d 1.1 1.1 1.1 1.1
+ # Notice that, in (b), the response will be "HTTP/1.1" even though
+ # the client only understands 1.0. RFC 2616 10.5.6 says we should
+ # only return 505 if the _major_ version is different.
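+ # For example, a server protocol of 'HTTP/1.1' yields sp == (1, 1).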
+ sp = int(self.server.protocol[5]), int(self.server.protocol[7])
+
+ if sp[0] != rp[0]:
+ self.simple_response('505 HTTP Version Not Supported')
+ return False
+
+ self.request_protocol = req_protocol
+ self.response_protocol = 'HTTP/%s.%s' % min(rp, sp)
+
+ return True
+
+ def read_request_headers(self):
+ """Read self.rfile into self.inheaders. Return success."""
+ # Then read all of the HTTP headers.
+ try:
+ self.header_reader(self.rfile, self.inheaders)
+ except ValueError as ex:
+ self.simple_response('400 Bad Request', ex.args[0])
+ return False
+
+ mrbs = self.server.max_request_body_size
+
+ try:
+ cl = int(self.inheaders.get(b'Content-Length', 0))
+ except ValueError:
+ self.simple_response(
+ '400 Bad Request',
+ 'Malformed Content-Length Header.')
+ return False
+
+ if mrbs and cl > mrbs:
+ self.simple_response(
+ '413 Request Entity Too Large',
+ 'The entity sent with the request exceeds the maximum '
+ 'allowed bytes.')
+ return False
+
+ # Persistent connection support
+ if self.response_protocol == 'HTTP/1.1':
+ # Both server and client are HTTP/1.1
+ if self.inheaders.get(b'Connection', b'') == b'close':
+ self.close_connection = True
+ else:
+ # Either the server or client (or both) are HTTP/1.0
+ if self.inheaders.get(b'Connection', b'') != b'Keep-Alive':
+ self.close_connection = True
+
+ # Transfer-Encoding support
+ te = None
+ if self.response_protocol == 'HTTP/1.1':
+ te = self.inheaders.get(b'Transfer-Encoding')
+ if te:
+ te = [x.strip().lower() for x in te.split(b',') if x.strip()]
+
+ self.chunked_read = False
+
+ if te:
+ for enc in te:
+ if enc == b'chunked':
+ self.chunked_read = True
+ else:
+ # Note that, even if we see "chunked", we must reject
+ # if there is an extension we don't recognize.
+ self.simple_response('501 Unimplemented')
+ self.close_connection = True
+ return False
+
+ # From PEP 333:
+ # "Servers and gateways that implement HTTP 1.1 must provide
+ # transparent support for HTTP 1.1's "expect/continue" mechanism.
+ # This may be done in any of several ways:
+ # 1. Respond to requests containing an Expect: 100-continue request
+ # with an immediate "100 Continue" response, and proceed normally.
+ # 2. Proceed with the request normally, but provide the application
+ # with a wsgi.input stream that will send the "100 Continue"
+ # response if/when the application first attempts to read from
+ # the input stream. The read request must then remain blocked
+ # until the client responds.
+ # 3. Wait until the client decides that the server does not support
+ # expect/continue, and sends the request body on its own.
+ # (This is suboptimal, and is not recommended.)
+ #
+ # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
+ # but it seems like it would be a big slowdown for such a rare case.
+ if self.inheaders.get(b'Expect', b'') == b'100-continue':
+ # Don't use simple_response here, because it emits headers
+ # we don't want. See
+ # https://github.com/cherrypy/cherrypy/issues/951
+ msg = self.server.protocol.encode('ascii')
+ msg += b' 100 Continue\r\n\r\n'
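+ # e.g. b'HTTP/1.1 100 Continue\r\n\r\n' with the default protocol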
+ try:
+ self.conn.wfile.write(msg)
+ except socket.error as ex:
+ if ex.args[0] not in errors.socket_errors_to_ignore:
+ raise
+ return True
+
+ def respond(self):
+ """Call the gateway and write its iterable output."""
+ mrbs = self.server.max_request_body_size
+ if self.chunked_read:
+ self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
+ else:
+ cl = int(self.inheaders.get(b'Content-Length', 0))
+ if mrbs and mrbs < cl:
+ if not self.sent_headers:
+ self.simple_response(
+ '413 Request Entity Too Large',
+ 'The entity sent with the request exceeds the '
+ 'maximum allowed bytes.')
+ return
+ self.rfile = KnownLengthRFile(self.conn.rfile, cl)
+
+ self.server.gateway(self).respond()
+ self.ready and self.ensure_headers_sent()
+
+ if self.chunked_write:
+ self.conn.wfile.write(b'0\r\n\r\n')
+
+ def simple_response(self, status, msg=''):
+ """Write a simple response back to the client."""
+ status = str(status)
+ proto_status = '%s %s\r\n' % (self.server.protocol, status)
+ content_length = 'Content-Length: %s\r\n' % len(msg)
+ content_type = 'Content-Type: text/plain\r\n'
+ buf = [
+ proto_status.encode('ISO-8859-1'),
+ content_length.encode('ISO-8859-1'),
+ content_type.encode('ISO-8859-1'),
+ ]
+
+ if status[:3] in ('413', '414'):
+ # Request Entity Too Large / Request-URI Too Long
+ self.close_connection = True
+ if self.response_protocol == 'HTTP/1.1':
+ # This will not be true for 414, since read_request_line
+ # usually raises 414 before reading the whole line, and we
+ # therefore cannot know the proper response_protocol.
+ buf.append(b'Connection: close\r\n')
+ else:
+ # HTTP/1.0 had no 413/414 status nor Connection header.
+ # Emit 400 instead and trust the message body is enough.
+ status = '400 Bad Request'
+
+ buf.append(CRLF)
+ if msg:
+ if isinstance(msg, six.text_type):
+ msg = msg.encode('ISO-8859-1')
+ buf.append(msg)
+
+ try:
+ self.conn.wfile.write(EMPTY.join(buf))
+ except socket.error as ex:
+ if ex.args[0] not in errors.socket_errors_to_ignore:
+ raise
+
+ def ensure_headers_sent(self):
+ """Ensure headers are sent to the client if not already sent."""
+ if not self.sent_headers:
+ self.sent_headers = True
+ self.send_headers()
+
+ def write(self, chunk):
+ """Write unbuffered data to the client."""
+ if self.chunked_write and chunk:
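+ # Frame the data as a single chunk: hex size, CRLF, payload, CRLF;
+ # e.g. b'Hello' is written as b'5\r\nHello\r\n'.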
+ chunk_size_hex = hex(len(chunk))[2:].encode('ascii')
+ buf = [chunk_size_hex, CRLF, chunk, CRLF]
+ self.conn.wfile.write(EMPTY.join(buf))
+ else:
+ self.conn.wfile.write(chunk)
+
+ def send_headers(self):
+ """Assert, process, and send the HTTP response message-headers.
+
+ You must set self.status, and self.outheaders before calling this.
+ """
+ hkeys = [key.lower() for key, value in self.outheaders]
+ status = int(self.status[:3])
+
+ if status == 413:
+ # Request Entity Too Large. Close conn to avoid garbage.
+ self.close_connection = True
+ elif b'content-length' not in hkeys:
+ # "All 1xx (informational), 204 (no content),
+ # and 304 (not modified) responses MUST NOT
+ # include a message-body." So no point chunking.
+ if status < 200 or status in (204, 205, 304):
+ pass
+ else:
+ needs_chunked = (
+ self.response_protocol == 'HTTP/1.1'
+ and self.method != b'HEAD'
+ )
+ if needs_chunked:
+ # Use the chunked transfer-coding
+ self.chunked_write = True
+ self.outheaders.append((b'Transfer-Encoding', b'chunked'))
+ else:
+ # Closing the conn is the only way to determine len.
+ self.close_connection = True
+
+ if b'connection' not in hkeys:
+ if self.response_protocol == 'HTTP/1.1':
+ # Both server and client are HTTP/1.1 or better
+ if self.close_connection:
+ self.outheaders.append((b'Connection', b'close'))
+ else:
+ # Server and/or client are HTTP/1.0
+ if not self.close_connection:
+ self.outheaders.append((b'Connection', b'Keep-Alive'))
+
+ if (not self.close_connection) and (not self.chunked_read):
+ # Read any remaining request body data on the socket.
+ # "If an origin server receives a request that does not include an
+ # Expect request-header field with the "100-continue" expectation,
+ # the request includes a request body, and the server responds
+ # with a final status code before reading the entire request body
+ # from the transport connection, then the server SHOULD NOT close
+ # the transport connection until it has read the entire request,
+ # or until the client closes the connection. Otherwise, the client
+ # might not reliably receive the response message. However, this
+ # requirement is not to be construed as preventing a server from
+ # defending itself against denial-of-service attacks, or from
+ # badly broken client implementations."
+ remaining = getattr(self.rfile, 'remaining', 0)
+ if remaining > 0:
+ self.rfile.read(remaining)
+
+ if b'date' not in hkeys:
+ self.outheaders.append((
+ b'Date',
+ email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
+ ))
+
+ if b'server' not in hkeys:
+ self.outheaders.append((
+ b'Server',
+ self.server.server_name.encode('ISO-8859-1'),
+ ))
+
+ proto = self.server.protocol.encode('ascii')
+ buf = [proto + SPACE + self.status + CRLF]
+ for k, v in self.outheaders:
+ buf.append(k + COLON + SPACE + v + CRLF)
+ buf.append(CRLF)
+ self.conn.wfile.write(EMPTY.join(buf))
+
+
+class HTTPConnection:
+ """An HTTP connection (active socket)."""
+
+ remote_addr = None
+ remote_port = None
+ ssl_env = None
+ rbufsize = io.DEFAULT_BUFFER_SIZE
+ wbufsize = io.DEFAULT_BUFFER_SIZE
+ RequestHandlerClass = HTTPRequest
+ peercreds_enabled = False
+ peercreds_resolve_enabled = False
+
+ def __init__(self, server, sock, makefile=MakeFile):
+ """Initialize HTTPConnection instance.
+
+ Args:
+ server (HTTPServer): web server object receiving this request
+ sock (socket._socketobject): the raw socket object (usually
+ TCP) for this connection
+ makefile (file): a fileobject class for reading from the socket
+ """
+ self.server = server
+ self.socket = sock
+ self.rfile = makefile(sock, 'rb', self.rbufsize)
+ self.wfile = makefile(sock, 'wb', self.wbufsize)
+ self.requests_seen = 0
+
+ self.peercreds_enabled = self.server.peercreds_enabled
+ self.peercreds_resolve_enabled = self.server.peercreds_resolve_enabled
+
+ # LRU cached methods:
+ # Ref: https://stackoverflow.com/a/14946506/595220
+ self.resolve_peer_creds = (
+ lru_cache(maxsize=1)(self.resolve_peer_creds)
+ )
+ self.get_peer_creds = (
+ lru_cache(maxsize=1)(self.get_peer_creds)
+ )
+
+ def communicate(self):
+ """Read each request and respond appropriately."""
+ request_seen = False
+ try:
+ while True:
+ # (re)set req to None so that if something goes wrong in
+ # the RequestHandlerClass constructor, the error doesn't
+ # get written to the previous request.
+ req = None
+ req = self.RequestHandlerClass(self.server, self)
+
+ # This order of operations should guarantee correct pipelining.
+ req.parse_request()
+ if self.server.stats['Enabled']:
+ self.requests_seen += 1
+ if not req.ready:
+ # Something went wrong in the parsing (and the server has
+ # probably already made a simple_response). Return and
+ # let the conn close.
+ return
+
+ request_seen = True
+ req.respond()
+ if req.close_connection:
+ return
+ except socket.error as ex:
+ errnum = ex.args[0]
+ # Sadly, SSL sockets return a different (longer) timeout string.
+ timeout_errs = 'timed out', 'The read operation timed out'
+ if errnum in timeout_errs:
+ # Don't error if we're between requests; only error
+ # if 1) no request has been started at all, or 2) we're
+ # in the middle of a request.
+ # See https://github.com/cherrypy/cherrypy/issues/853
+ if (not request_seen) or (req and req.started_request):
+ self._conditional_error(req, '408 Request Timeout')
+ elif errnum not in errors.socket_errors_to_ignore:
+ self.server.error_log('socket.error %s' % repr(errnum),
+ level=logging.WARNING, traceback=True)
+ self._conditional_error(req, '500 Internal Server Error')
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except errors.FatalSSLAlert:
+ pass
+ except errors.NoSSLError:
+ self._handle_no_ssl(req)
+ except Exception as ex:
+ self.server.error_log(
+ repr(ex), level=logging.ERROR, traceback=True)
+ self._conditional_error(req, '500 Internal Server Error')
+
+ linger = False
+
+ def _handle_no_ssl(self, req):
+ if not req or req.sent_headers:
+ return
+ # Unwrap wfile
+ self.wfile = MakeFile(self.socket._sock, 'wb', self.wbufsize)
+ msg = (
+ 'The client sent a plain HTTP request, but '
+ 'this server only speaks HTTPS on this port.'
+ )
+ req.simple_response('400 Bad Request', msg)
+ self.linger = True
+
+ def _conditional_error(self, req, response):
+ """Respond with an error.
+
+ Don't bother writing if a response
+ has already started being written.
+ """
+ if not req or req.sent_headers:
+ return
+
+ try:
+ req.simple_response(response)
+ except errors.FatalSSLAlert:
+ pass
+ except errors.NoSSLError:
+ self._handle_no_ssl(req)
+
+ def close(self):
+ """Close the socket underlying this connection."""
+ self.rfile.close()
+
+ if not self.linger:
+ self._close_kernel_socket()
+ self.socket.close()
+ else:
+ # On the other hand, sometimes we want to hang around for a bit
+ # to make sure the client has a chance to read our entire
+ # response. Skipping the close() calls here delays the FIN
+ # packet until the socket object is garbage-collected later.
+ # Someday, perhaps, we'll do the full lingering_close that
+ # Apache does, but not today.
+ pass
+
+ def get_peer_creds(self): # LRU cached on per-instance basis, see __init__
+ """Return the PID/UID/GID tuple of the peer socket for UNIX sockets.
+
+ This function uses SO_PEERCRED to query the UNIX PID, UID, GID
+ of the peer, which is only available if the bind address is
+ a UNIX domain socket.
+
+ Raises:
+ NotImplementedError: in case of unsupported socket type
+ RuntimeError: in case of SO_PEERCRED lookup unsupported or disabled
+
+ """
+ PEERCRED_STRUCT_DEF = '3i'
+
+ if IS_WINDOWS or self.socket.family != socket.AF_UNIX:
+ raise NotImplementedError(
+ 'SO_PEERCRED is only supported in Linux kernel and WSL'
+ )
+ elif not self.peercreds_enabled:
+ raise RuntimeError(
+ 'Peer creds lookup is disabled within this server'
+ )
+
+ try:
+ peer_creds = self.socket.getsockopt(
+ socket.SOL_SOCKET, socket.SO_PEERCRED,
+ struct.calcsize(PEERCRED_STRUCT_DEF)
+ )
+ except socket.error as socket_err:
+ """Non-Linux kernels don't support SO_PEERCRED.
+
+ Refs:
+ http://welz.org.za/notes/on-peer-cred.html
+ https://github.com/daveti/tcpSockHack
+ msdn.microsoft.com/en-us/commandline/wsl/release_notes#build-15025
+ """
+ six.raise_from( # 3.6+: raise RuntimeError from socket_err
+ RuntimeError,
+ socket_err,
+ )
+ else:
+ pid, uid, gid = struct.unpack(PEERCRED_STRUCT_DEF, peer_creds)
+ return pid, uid, gid
+
+ @property
+ def peer_pid(self):
+ """Return the id of the connected peer process."""
+ pid, _, _ = self.get_peer_creds()
+ return pid
+
+ @property
+ def peer_uid(self):
+ """Return the user id of the connected peer process."""
+ _, uid, _ = self.get_peer_creds()
+ return uid
+
+ @property
+ def peer_gid(self):
+ """Return the group id of the connected peer process."""
+ _, _, gid = self.get_peer_creds()
+ return gid
+
+ def resolve_peer_creds(self): # LRU cached on per-instance basis
+ """Return the username and group tuple of the peercreds if available.
+
+ Raises:
+ NotImplementedError: in case of unsupported OS
+ RuntimeError: in case of UID/GID lookup unsupported or disabled
+
+ """
+ if (IS_WINDOWS or IS_ANDROID):
+ raise NotImplementedError(
+ 'UID/GID lookup can only be done under UNIX-like OS'
+ )
+ elif not self.peercreds_resolve_enabled:
+ raise RuntimeError(
+ 'UID/GID lookup is disabled within this server'
+ )
+
+ user = pwd.getpwuid(self.peer_uid).pw_name # [0]
+ group = grp.getgrgid(self.peer_gid).gr_name # [0]
+
+ return user, group
+
+ @property
+ def peer_user(self):
+ """Return the username of the connected peer process."""
+ user, _ = self.resolve_peer_creds()
+ return user
+
+ @property
+ def peer_group(self):
+ """Return the group of the connected peer process."""
+ _, group = self.resolve_peer_creds()
+ return group
+
+ def _close_kernel_socket(self):
+ """Close kernel socket in outdated Python versions.
+
+ On old Python versions,
+ Python's socket module does NOT call close on the kernel
+ socket when you call socket.close(). We do so manually here
+ because we want this server to send a FIN TCP segment
+ immediately. Note this must be called *before* calling
+ socket.close(), because the latter drops its reference to
+ the kernel socket.
+ """
+ if six.PY2 and hasattr(self.socket, '_sock'):
+ self.socket._sock.close()
+
+
+def prevent_socket_inheritance(sock):
+ """Stub inheritance prevention.
+
+ Dummy function, since neither fcntl nor ctypes is available.
+ """
+ pass
+
+
+class HTTPServer:
+ """An HTTP server."""
+
+ _bind_addr = '127.0.0.1'
+ _interrupt = None
+
+ gateway = None
+ """A Gateway instance."""
+
+ minthreads = None
+ """The minimum number of worker threads to create (default 10)."""
+
+ maxthreads = None
+ """The maximum number of worker threads to create.
+
+ (default -1 = no limit)"""
+
+ server_name = None
+ """The name of the server; defaults to ``self.version``."""
+
+ protocol = 'HTTP/1.1'
+ """The version string to write in the Status-Line of all HTTP responses.
+
+ For example, "HTTP/1.1" is the default. This also limits the supported
+ features used in the response."""
+
+ request_queue_size = 5
+ """The 'backlog' arg to socket.listen(); max queued connections.
+
+ (default 5)."""
+
+ shutdown_timeout = 5
+ """The total time to wait for worker threads to cleanly exit.
+
+ Specified in seconds."""
+
+ timeout = 10
+ """The timeout in seconds for accepted connections (default 10)."""
+
+ version = 'Cheroot/' + __version__
+ """A version string for the HTTPServer."""
+
+ software = None
+ """The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
+
+ If None, this defaults to ``'%s Server' % self.version``.
+ """
+
+ ready = False
+ """Internal flag indicating whether the socket is accepting connections."""
+
+ max_request_header_size = 0
+ """The maximum size, in bytes, for request headers, or 0 for no limit."""
+
+ max_request_body_size = 0
+ """The maximum size, in bytes, for request bodies, or 0 for no limit."""
+
+ nodelay = True
+ """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
+
+ ConnectionClass = HTTPConnection
+ """The class to use for handling HTTP connections."""
+
+ ssl_adapter = None
+ """An instance of ssl.Adapter (or a subclass).
+
+ You must have the corresponding SSL driver library installed.
+ """
+
+ peercreds_enabled = False
+ """If True, peer cred lookup can be performed via UNIX domain socket."""
+
+ peercreds_resolve_enabled = False
+ """If True, username/group will be looked up in the OS from peercreds."""
+
+ def __init__(
+ self, bind_addr, gateway,
+ minthreads=10, maxthreads=-1, server_name=None,
+ peercreds_enabled=False, peercreds_resolve_enabled=False,
+ ):
+ """Initialize HTTPServer instance.
+
+ Args:
+ bind_addr (tuple): network interface to listen to
+ gateway (Gateway): gateway for processing HTTP requests
+ minthreads (int): minimum number of threads for HTTP thread pool
+ maxthreads (int): maximum number of threads for HTTP thread pool
+ server_name (str): web server name to be advertised via Server
+ HTTP header
+ """
+ self.bind_addr = bind_addr
+ self.gateway = gateway
+
+ self.requests = threadpool.ThreadPool(
+ self, min=minthreads or 1, max=maxthreads)
+
+ if not server_name:
+ server_name = self.version
+ self.server_name = server_name
+ self.peercreds_enabled = peercreds_enabled
+ self.peercreds_resolve_enabled = (
+ peercreds_resolve_enabled and peercreds_enabled
+ )
+ self.clear_stats()
+
+ def clear_stats(self):
+ """Reset server stat counters."""
+ self._start_time = None
+ self._run_time = 0
+ self.stats = {
+ 'Enabled': False,
+ 'Bind Address': lambda s: repr(self.bind_addr),
+ 'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
+ 'Accepts': 0,
+ 'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
+ 'Queue': lambda s: getattr(self.requests, 'qsize', None),
+ 'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
+ 'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
+ 'Socket Errors': 0,
+ 'Requests': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Requests'](w) for w in s['Worker Threads'].values()], 0),
+ 'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0),
+ 'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Bytes Written'](w) for w in s['Worker Threads'].values()],
+ 0),
+ 'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Work Time'](w) for w in s['Worker Threads'].values()], 0),
+ 'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
+ for w in s['Worker Threads'].values()], 0),
+ 'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
+ [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
+ for w in s['Worker Threads'].values()], 0),
+ 'Worker Threads': {},
+ }
+ logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
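+ # Usage sketch: the lambdas above take the stats dict itself, so a
+ # caller can do e.g. server.stats['Requests'](server.stats) once
+ # server.stats['Enabled'] has been set to True.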
+
+ def runtime(self):
+ """Return server uptime."""
+ if self._start_time is None:
+ return self._run_time
+ else:
+ return self._run_time + (time.time() - self._start_time)
+
+ def __str__(self):
+ """Render Server instance representing bind address."""
+ return '%s.%s(%r)' % (self.__module__, self.__class__.__name__,
+ self.bind_addr)
+
+ @property
+ def bind_addr(self):
+ """Return the interface on which to listen for connections.
+
+ For TCP sockets, a (host, port) tuple. Host values may be any IPv4
+ or IPv6 address, or any valid hostname. The string 'localhost' is a
+ synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
+ The string '0.0.0.0' is a special IPv4 entry meaning "any active
+ interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
+ IPv6. The empty string or None are not allowed.
+
+ For UNIX sockets, supply the filename as a string.
+
+ Systemd socket activation is automatic and doesn't require tampering
+ with this variable.
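+
+ For example (illustrative values)::
+
+     server.bind_addr = ('0.0.0.0', 8080)    # all IPv4 interfaces
+     server.bind_addr = ('::', 8080)         # all IPv6 interfaces
+     server.bind_addr = '/run/example.sock'  # UNIX domain socket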
+ """
+ return self._bind_addr
+
+ @bind_addr.setter
+ def bind_addr(self, value):
+ """Set the interface on which to listen for connections."""
+ if isinstance(value, tuple) and value[0] in ('', None):
+ # Despite the socket module docs, using '' does not
+ # allow AI_PASSIVE to work. Passing None instead
+ # returns '0.0.0.0' like we want. In other words:
+ # host AI_PASSIVE result
+ # '' Y 192.168.x.y
+ # '' N 192.168.x.y
+ # None Y 0.0.0.0
+ # None N 127.0.0.1
+ # But since you can get the same effect with an explicit
+ # '0.0.0.0', we deny both the empty string and None as values.
+ raise ValueError("Host values of '' or None are not allowed. "
+ "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
+ 'to listen on all active interfaces.')
+ self._bind_addr = value
+
+ def safe_start(self):
+ """Run the server forever, and stop it cleanly on exit."""
+ try:
+ self.start()
+ except (KeyboardInterrupt, IOError):
+ # The time.sleep call might raise
+ # "IOError: [Errno 4] Interrupted function call" on KBInt.
+ self.error_log('Keyboard Interrupt: shutting down')
+ self.stop()
+ raise
+ except SystemExit:
+ self.error_log('SystemExit raised: shutting down')
+ self.stop()
+ raise
+
+ def prepare(self):
+ """Prepare the server for serving requests.
+
+ It binds the socket, sets it up to ``listen()`` and does other
+ preparatory work.
+ """
+ self._interrupt = None
+
+ if self.software is None:
+ self.software = '%s Server' % self.version
+
+ # Select the appropriate socket
+ self.socket = None
+ if os.getenv('LISTEN_PID', None):
+ # systemd socket activation
+ self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
+ elif isinstance(self.bind_addr, six.string_types):
+ # AF_UNIX socket
+
+ # So we can reuse the socket...
+ try:
+ os.unlink(self.bind_addr)
+ except Exception:
+ pass
+
+ # So everyone can access the socket...
+ try:
+ os.chmod(self.bind_addr, 0o777)
+ except Exception:
+ pass
+
+ info = [
+ (socket.AF_UNIX, socket.SOCK_STREAM, 0, '', self.bind_addr)]
+ else:
+ # AF_INET or AF_INET6 socket
+ # Get the correct address family for our host (allows IPv6
+ # addresses)
+ host, port = self.bind_addr
+ try:
+ info = socket.getaddrinfo(
+ host, port, socket.AF_UNSPEC,
+ socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
+ except socket.gaierror:
+ sock_type = socket.AF_INET
+ bind_addr = self.bind_addr
+
+ if ':' in host:
+ sock_type = socket.AF_INET6
+ bind_addr = bind_addr + (0, 0)
+
+ info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
+
+ if not self.socket:
+ msg = 'No socket could be created'
+ for res in info:
+ af, socktype, proto, canonname, sa = res
+ try:
+ self.bind(af, socktype, proto)
+ break
+ except socket.error as serr:
+ msg = '%s -- (%s: %s)' % (msg, sa, serr)
+ if self.socket:
+ self.socket.close()
+ self.socket = None
+
+ if not self.socket:
+ raise socket.error(msg)
+
+ # Timeout so KeyboardInterrupt can be caught on Win32
+ self.socket.settimeout(1)
+ self.socket.listen(self.request_queue_size)
+
+ # Create worker threads
+ self.requests.start()
+
+ self.ready = True
+ self._start_time = time.time()
+
+ def serve(self):
+ """Serve requests, after invoking :func:`prepare()`."""
+ while self.ready:
+ try:
+ self.tick()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ self.error_log('Error in HTTPServer.tick', level=logging.ERROR,
+ traceback=True)
+
+ if self.interrupt:
+ while self.interrupt is True:
+ # Wait for self.stop() to complete. See _set_interrupt.
+ time.sleep(0.1)
+ if self.interrupt:
+ raise self.interrupt
+
+ def start(self):
+ """Run the server forever.
+
+ It is a shortcut for invoking :func:`prepare()` and then :func:`serve()`.
+ """
+ # We don't have to trap KeyboardInterrupt or SystemExit here,
+ # because cherrypy.server already does so, calling self.stop() for us.
+ # If you're using this server with another framework, you should
+ # trap those exceptions in whatever code block calls start().
+ self.prepare()
+ self.serve()
+
+ def error_log(self, msg='', level=20, traceback=False):
+ """Write error message to log.
+
+ Args:
+ msg (str): error message
+ level (int): logging level
+ traceback (bool): add traceback to output or not
+ """
+ # Override this in subclasses as desired
+ sys.stderr.write(msg + '\n')
+ sys.stderr.flush()
+ if traceback:
+ tblines = traceback_.format_exc()
+ sys.stderr.write(tblines)
+ sys.stderr.flush()
+
+ def bind(self, family, type, proto=0):
+ """Create (or recreate) the actual socket object."""
+ self.socket = socket.socket(family, type, proto)
+ prevent_socket_inheritance(self.socket)
+ if not IS_WINDOWS:
+ # Windows has different semantics for SO_REUSEADDR,
+ # so don't set it.
+ # https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ if self.nodelay and not isinstance(self.bind_addr, str):
+ self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+
+ if self.ssl_adapter is not None:
+ self.socket = self.ssl_adapter.bind(self.socket)
+
+ host, port = self.bind_addr[:2]
+
+ # If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
+ # activate dual-stack. See
+ # https://github.com/cherrypy/cherrypy/issues/871.
+ listening_ipv6 = (
+ hasattr(socket, 'AF_INET6')
+ and family == socket.AF_INET6
+ and host in ('::', '::0', '::0.0.0.0')
+ )
+ if listening_ipv6:
+ try:
+ self.socket.setsockopt(
+ socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
+ except (AttributeError, socket.error):
+ # Apparently, the socket option is not available in
+ # this machine's TCP stack
+ pass
+
+ self.socket.bind(self.bind_addr)
+ # TODO: keep the requested bind_addr separate from the real bound_addr
+ # (the port differs when an ephemeral port 0 is requested)
+ self.bind_addr = self.socket.getsockname()
+ if family in (
+ # Windows doesn't have socket.AF_UNIX, so not using it in check
+ socket.AF_INET,
+ socket.AF_INET6,
+ ):
+ """UNIX domain sockets are strings or bytes.
+
+ In case of bytes with a leading null-byte it's an abstract socket.
+ """
+ self.bind_addr = self.bind_addr[:2]
+
+ def tick(self):
+ """Accept a new connection and put it on the Queue."""
+ try:
+ s, addr = self.socket.accept()
+ if self.stats['Enabled']:
+ self.stats['Accepts'] += 1
+ if not self.ready:
+ return
+
+ prevent_socket_inheritance(s)
+ if hasattr(s, 'settimeout'):
+ s.settimeout(self.timeout)
+
+ mf = MakeFile
+ ssl_env = {}
+ # if ssl cert and key are set, we try to be a secure HTTP server
+ if self.ssl_adapter is not None:
+ try:
+ s, ssl_env = self.ssl_adapter.wrap(s)
+ except errors.NoSSLError:
+ msg = ('The client sent a plain HTTP request, but '
+ 'this server only speaks HTTPS on this port.')
+ buf = ['%s 400 Bad Request\r\n' % self.protocol,
+ 'Content-Length: %s\r\n' % len(msg),
+ 'Content-Type: text/plain\r\n\r\n',
+ msg]
+
+ sock_to_make = s if six.PY3 else s._sock
+ wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
+ try:
+ wfile.write(''.join(buf).encode('ISO-8859-1'))
+ except socket.error as ex:
+ if ex.args[0] not in errors.socket_errors_to_ignore:
+ raise
+ return
+ if not s:
+ return
+ mf = self.ssl_adapter.makefile
+ # Re-apply our timeout since we may have a new socket object
+ if hasattr(s, 'settimeout'):
+ s.settimeout(self.timeout)
+
+ conn = self.ConnectionClass(self, s, mf)
+
+ if not isinstance(self.bind_addr, six.string_types):
+ # optional values
+ # Until we do DNS lookups, omit REMOTE_HOST
+ if addr is None: # sometimes this can happen
+ # figure out if AF_INET or AF_INET6.
+ if len(s.getsockname()) == 2:
+ # AF_INET
+ addr = ('0.0.0.0', 0)
+ else:
+ # AF_INET6
+ addr = ('::', 0)
+ conn.remote_addr = addr[0]
+ conn.remote_port = addr[1]
+
+ conn.ssl_env = ssl_env
+
+ try:
+ self.requests.put(conn)
+ except queue.Full:
+ # Just drop the conn. TODO: write 503 back?
+ conn.close()
+ return
+ except socket.timeout:
+ # The only reason for the timeout in start() is so we can
+ # notice keyboard interrupts on Win32, which don't interrupt
+ # accept() by default
+ return
+ except socket.error as ex:
+ if self.stats['Enabled']:
+ self.stats['Socket Errors'] += 1
+ if ex.args[0] in errors.socket_error_eintr:
+ # I *think* this is right. EINTR should occur when a signal
+ # is received during the accept() call; all docs say retry
+ # the call, and I *think* I'm reading it right that Python
+ # will then go ahead and poll for and handle the signal
+ # elsewhere. See
+ # https://github.com/cherrypy/cherrypy/issues/707.
+ return
+ if ex.args[0] in errors.socket_errors_nonblocking:
+ # Just try again. See
+ # https://github.com/cherrypy/cherrypy/issues/479.
+ return
+ if ex.args[0] in errors.socket_errors_to_ignore:
+ # Our socket was closed.
+ # See https://github.com/cherrypy/cherrypy/issues/686.
+ return
+ raise
+
+ @property
+ def interrupt(self):
+ """Flag interrupt of the server."""
+ return self._interrupt
+
+ @interrupt.setter
+ def interrupt(self, interrupt):
+ """Perform the shutdown of this server and save the exception."""
+ self._interrupt = True
+ self.stop()
+ self._interrupt = interrupt
+
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ self.ready = False
+ if self._start_time is not None:
+ self._run_time += (time.time() - self._start_time)
+ self._start_time = None
+
+ sock = getattr(self, 'socket', None)
+ if sock:
+ if not isinstance(self.bind_addr, six.string_types):
+ # Touch our own socket to make accept() return immediately.
+ try:
+ host, port = sock.getsockname()[:2]
+ except socket.error as ex:
+ if ex.args[0] not in errors.socket_errors_to_ignore:
+ # Changed to use error code and not message
+ # See
+ # https://github.com/cherrypy/cherrypy/issues/860.
+ raise
+ else:
+ # Note that we're explicitly NOT using AI_PASSIVE,
+ # here, because we want an actual IP to touch.
+ # localhost won't work if we've bound to a public IP,
+ # but it will if we bound to '0.0.0.0' (INADDR_ANY).
+ for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
+ socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ s = None
+ try:
+ s = socket.socket(af, socktype, proto)
+ # See
+ # https://groups.google.com/group/cherrypy-users/
+ # browse_frm/thread/bbfe5eb39c904fe0
+ s.settimeout(1.0)
+ s.connect((host, port))
+ s.close()
+ except socket.error:
+ if s:
+ s.close()
+ if hasattr(sock, 'close'):
+ sock.close()
+ self.socket = None
+
+ self.requests.stop(self.shutdown_timeout)
+
+
+class Gateway:
+ """Base class to interface HTTPServer with other systems, such as WSGI."""
+
+ def __init__(self, req):
+ """Initialize Gateway instance with request.
+
+ Args:
+ req (HTTPRequest): current HTTP request
+ """
+ self.req = req
+
+ def respond(self):
+ """Process the current request. Must be overridden in a subclass."""
+ raise NotImplementedError
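+
+
+# A minimal Gateway subclass might look like the following sketch
+# (illustrative only, not shipped with Cheroot):
+#
+#     class EchoGateway(Gateway):
+#         def respond(self):
+#             body = b'Hello, world!'
+#             self.req.status = b'200 OK'
+#             self.req.outheaders.append(
+#                 (b'Content-Length', str(len(body)).encode('ascii')))
+#             self.req.ensure_headers_sent()
+#             self.req.write(body)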
+
+
+# These may either be ssl.Adapter subclasses or the string names
+# of such classes (in which case they will be lazily loaded).
+ssl_adapters = {
+ 'builtin': 'cheroot.ssl.builtin.BuiltinSSLAdapter',
+ 'pyopenssl': 'cheroot.ssl.pyopenssl.pyOpenSSLAdapter',
+}
+
+
+def get_ssl_adapter_class(name='builtin'):
+ """Return an SSL adapter class for the given name."""
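+ # For example, get_ssl_adapter_class('builtin') lazily imports and
+ # returns cheroot.ssl.builtin.BuiltinSSLAdapter.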
+ adapter = ssl_adapters[name.lower()]
+ if isinstance(adapter, six.string_types):
+ last_dot = adapter.rfind('.')
+ attr_name = adapter[last_dot + 1:]
+ mod_path = adapter[:last_dot]
+
+ try:
+ mod = sys.modules[mod_path]
+ if mod is None:
+ raise KeyError()
+ except KeyError:
+ # The last [''] is important.
+ mod = __import__(mod_path, globals(), locals(), [''])
+
+ # Let an AttributeError propagate outward.
+ try:
+ adapter = getattr(mod, attr_name)
+ except AttributeError:
+ raise AttributeError("'%s' object has no attribute '%s'"
+ % (mod_path, attr_name))
+
+ return adapter
diff --git a/resources/lib/cheroot/ssl/__init__.py b/resources/lib/cheroot/ssl/__init__.py
new file mode 100644
index 0000000..ec1a0d9
--- /dev/null
+++ b/resources/lib/cheroot/ssl/__init__.py
@@ -0,0 +1,51 @@
+"""Implementation of the SSL adapter base interface."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class Adapter:
+ """Base class for SSL driver library adapters.
+
+ Required methods:
+
+ * ``wrap(sock) -> (wrapped socket, ssl environ dict)``
+ * ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) ->
+ socket file object``
+ """
+
+ @abstractmethod
+ def __init__(
+ self, certificate, private_key, certificate_chain=None,
+ ciphers=None):
+ """Set up certificates, private key, ciphers and reset the context."""
+ self.certificate = certificate
+ self.private_key = private_key
+ self.certificate_chain = certificate_chain
+ self.ciphers = ciphers
+ self.context = None
+
+ @abstractmethod
+ def bind(self, sock):
+ """Wrap and return the given socket."""
+ return sock
+
+ @abstractmethod
+ def wrap(self, sock):
+ """Wrap and return the given socket, plus WSGI environ entries."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def get_environ(self):
+ """Return WSGI environ entries to be merged into each request."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def makefile(self, sock, mode='r', bufsize=-1):
+ """Return socket file object."""
+ raise NotImplementedError
diff --git a/resources/lib/cheroot/ssl/builtin.py b/resources/lib/cheroot/ssl/builtin.py
new file mode 100644
index 0000000..a19f7ee
--- /dev/null
+++ b/resources/lib/cheroot/ssl/builtin.py
@@ -0,0 +1,162 @@
+"""
+A library for integrating Python's builtin ``ssl`` library with Cheroot.
+
+The ssl module must be importable for SSL functionality.
+
+To use this module, set ``HTTPServer.ssl_adapter`` to an instance of
+``BuiltinSSLAdapter``.
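+
+For example (a minimal sketch; the certificate paths and ``my_wsgi_app``
+are placeholders)::
+
+    from cheroot import wsgi
+    from cheroot.ssl.builtin import BuiltinSSLAdapter
+
+    server = wsgi.Server(('0.0.0.0', 8443), my_wsgi_app)
+    server.ssl_adapter = BuiltinSSLAdapter('server.crt', 'server.key')
+    server.safe_start()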
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+try:
+ from _pyio import DEFAULT_BUFFER_SIZE
+except ImportError:
+ try:
+ from io import DEFAULT_BUFFER_SIZE
+ except ImportError:
+ DEFAULT_BUFFER_SIZE = -1
+
+import six
+
+from . import Adapter
+from .. import errors
+from ..makefile import MakeFile
+
+
+if six.PY3:
+ generic_socket_error = OSError
+else:
+ import socket
+ generic_socket_error = socket.error
+ del socket
+
+
+def _assert_ssl_exc_contains(exc, *msgs):
+ """Check whether SSL exception contains either of messages provided."""
+ if len(msgs) < 1:
+ raise TypeError(
+ '_assert_ssl_exc_contains() requires '
+ 'at least one message to be passed.'
+ )
+ err_msg_lower = exc.args[1].lower()
+ return any(m.lower() in err_msg_lower for m in msgs)
+
+
+class BuiltinSSLAdapter(Adapter):
+ """A wrapper for integrating Python's builtin ssl module with Cheroot."""
+
+ certificate = None
+ """The filename of the server SSL certificate."""
+
+ private_key = None
+ """The filename of the server's private key file."""
+
+ certificate_chain = None
+ """The filename of the certificate chain file."""
+
+ context = None
+ """The ssl.SSLContext that will be used to wrap sockets."""
+
+ ciphers = None
+ """The ciphers list of SSL."""
+
+ def __init__(
+ self, certificate, private_key, certificate_chain=None,
+ ciphers=None):
+ """Set up context in addition to base class properties if available."""
+ if ssl is None:
+ raise ImportError('You must install the ssl module to use HTTPS.')
+
+ super(BuiltinSSLAdapter, self).__init__(
+ certificate, private_key, certificate_chain, ciphers)
+
+ self.context = ssl.create_default_context(
+ purpose=ssl.Purpose.CLIENT_AUTH,
+ cafile=certificate_chain
+ )
+ self.context.load_cert_chain(certificate, private_key)
+ if self.ciphers is not None:
+ self.context.set_ciphers(ciphers)
+
+ def bind(self, sock):
+ """Wrap and return the given socket."""
+ return super(BuiltinSSLAdapter, self).bind(sock)
+
+ def wrap(self, sock):
+ """Wrap and return the given socket, plus WSGI environ entries."""
+ EMPTY_RESULT = None, {}
+ try:
+ s = self.context.wrap_socket(
+ sock, do_handshake_on_connect=True, server_side=True,
+ )
+ except ssl.SSLError as ex:
+ if ex.errno == ssl.SSL_ERROR_EOF:
+ # This is almost certainly due to the cherrypy engine
+ # 'pinging' the socket to assert it's connectable;
+ # the 'ping' isn't SSL.
+ return EMPTY_RESULT
+ elif ex.errno == ssl.SSL_ERROR_SSL:
+ if _assert_ssl_exc_contains(ex, 'http request'):
+ # The client is speaking HTTP to an HTTPS server.
+ raise errors.NoSSLError
+
+ # Check if it's one of the known errors
+ # Errors that are caught by PyOpenSSL, but thrown by
+ # built-in ssl
+ _block_errors = (
+ 'unknown protocol', 'unknown ca', 'unknown_ca',
+ 'unknown error',
+ 'https proxy request', 'inappropriate fallback',
+ 'wrong version number',
+ 'no shared cipher', 'certificate unknown',
+ 'ccs received early',
+ )
+ if _assert_ssl_exc_contains(ex, *_block_errors):
+ # Accepted error, let's pass
+ return EMPTY_RESULT
+ elif _assert_ssl_exc_contains(ex, 'handshake operation timed out'):
+ # This error is thrown by builtin SSL after a timeout
+ # when client is speaking HTTP to an HTTPS server.
+ # The connection can safely be dropped.
+ return EMPTY_RESULT
+ raise
+ except generic_socket_error as exc:
+ """It is unclear why exactly this happens.
+
+ It's reproducible only under Python 2 with openssl>1.0 and stdlib
+ ``ssl`` wrapper, and only with CherryPy.
+ So it looks like some healthcheck tries to connect to this socket
+ during startup (from the same process).
+
+
+ Ref: https://github.com/cherrypy/cherrypy/issues/1618
+ """
+ if six.PY2 and exc.args == (0, 'Error'):
+ return EMPTY_RESULT
+ raise
+ return s, self.get_environ(s)
+
+ # TODO: fill this out more with mod ssl env
+ def get_environ(self, sock):
+ """Create WSGI environ entries to be merged into each request."""
+ cipher = sock.cipher()
+ ssl_environ = {
+ 'wsgi.url_scheme': 'https',
+ 'HTTPS': 'on',
+ 'SSL_PROTOCOL': cipher[1],
+ 'SSL_CIPHER': cipher[0]
+ # SSL_VERSION_INTERFACE string The mod_ssl program version
+ # SSL_VERSION_LIBRARY string The OpenSSL program version
+ }
+ return ssl_environ
+
+ def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
+ """Return socket file object."""
+ return MakeFile(sock, mode, bufsize)
diff --git a/resources/lib/cheroot/ssl/pyopenssl.py b/resources/lib/cheroot/ssl/pyopenssl.py
new file mode 100644
index 0000000..2185f85
--- /dev/null
+++ b/resources/lib/cheroot/ssl/pyopenssl.py
@@ -0,0 +1,267 @@
+"""
+A library for integrating pyOpenSSL with Cheroot.
+
+The OpenSSL module must be importable for SSL functionality.
+You can obtain it from `here `_.
+
+To use this module, set HTTPServer.ssl_adapter to an instance of
+ssl.Adapter. There are two ways to use SSL:
+
+Method One
+----------
+
+ * ``ssl_adapter.context``: an instance of SSL.Context.
+
+If this is not None, it is assumed to be an SSL.Context instance,
+and will be passed to SSL.Connection on bind(). The developer is
+responsible for forming a valid Context object. This approach is
+to be preferred for more flexibility, e.g. if the cert and key are
+streams instead of files, or need decryption, or SSL.SSLv3_METHOD
+is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
+the pyOpenSSL documentation for complete options.
+
+Method Two (shortcut)
+---------------------
+
+ * ``ssl_adapter.certificate``: the filename of the server SSL certificate.
+ * ``ssl_adapter.private_key``: the filename of the server's private key file.
+
+Both are None by default. If ssl_adapter.context is None, but .private_key
+and .certificate are both given and valid, they will be read, and the
+context will be automatically created from them.
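+
+For example, using Method Two (a minimal sketch; the file names and
+``my_wsgi_app`` are placeholders)::
+
+    from cheroot import wsgi
+    from cheroot.ssl.pyopenssl import pyOpenSSLAdapter
+
+    server = wsgi.Server(('0.0.0.0', 8443), my_wsgi_app)
+    server.ssl_adapter = pyOpenSSLAdapter(
+        'server.crt', 'server.key', certificate_chain='chain.pem')
+    server.safe_start()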
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+import threading
+import time
+
+try:
+ from OpenSSL import SSL
+ from OpenSSL import crypto
+except ImportError:
+ SSL = None
+
+from . import Adapter
+from .. import errors, server as cheroot_server
+from ..makefile import MakeFile
+
+
+class SSL_fileobject(MakeFile):
+ """SSL file object attached to a socket object."""
+
+ ssl_timeout = 3
+ ssl_retry = .01
+
+ def _safe_call(self, is_reader, call, *args, **kwargs):
+ """Wrap the given call with SSL error-trapping.
+
+ is_reader: if False EOF errors will be raised. If True, EOF errors
+ will return "" (to emulate normal sockets).
+ """
+ start = time.time()
+ while True:
+ try:
+ return call(*args, **kwargs)
+ except SSL.WantReadError:
+ # Sleep and try again. This is dangerous, because it means
+ # the rest of the stack has no way of differentiating
+ # between a "new handshake" error and "client dropped".
+ # Note this isn't an endless loop: there's a timeout below.
+ time.sleep(self.ssl_retry)
+ except SSL.WantWriteError:
+ time.sleep(self.ssl_retry)
+ except SSL.SysCallError as e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return ''
+
+ errnum = e.args[0]
+ if is_reader and errnum in errors.socket_errors_to_ignore:
+ return ''
+ raise socket.error(errnum)
+ except SSL.Error as e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return ''
+
+ thirdarg = None
+ try:
+ thirdarg = e.args[0][0][2]
+ except IndexError:
+ pass
+
+ if thirdarg == 'http request':
+ # The client is talking HTTP to an HTTPS server.
+ raise errors.NoSSLError()
+
+ raise errors.FatalSSLAlert(*e.args)
+
+ if time.time() - start > self.ssl_timeout:
+ raise socket.timeout('timed out')
+
+ def recv(self, size):
+ """Receive message of a size from the socket."""
+ return self._safe_call(True, super(SSL_fileobject, self).recv, size)
+
+ def sendall(self, *args, **kwargs):
+ """Send whole message to the socket."""
+ return self._safe_call(False, super(SSL_fileobject, self).sendall,
+ *args, **kwargs)
+
+ def send(self, *args, **kwargs):
+ """Send some part of message to the socket."""
+ return self._safe_call(False, super(SSL_fileobject, self).send,
+ *args, **kwargs)
+
+
+class SSLConnection:
+ """A thread-safe wrapper for an SSL.Connection.
+
+ ``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``.
+ """
+
+ def __init__(self, *args):
+ """Initialize SSLConnection instance."""
+ self._ssl_conn = SSL.Connection(*args)
+ self._lock = threading.RLock()
+
+ for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
+ 'renegotiate', 'bind', 'listen', 'connect', 'accept',
+ 'setblocking', 'fileno', 'close', 'get_cipher_list',
+ 'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
+ 'makefile', 'get_app_data', 'set_app_data', 'state_string',
+ 'sock_shutdown', 'get_peer_certificate', 'want_read',
+ 'want_write', 'set_connect_state', 'set_accept_state',
+ 'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
+ exec("""def %s(self, *args):
+ self._lock.acquire()
+ try:
+ return self._ssl_conn.%s(*args)
+ finally:
+ self._lock.release()
+""" % (f, f))
+
+ def shutdown(self, *args):
+ """Shutdown the SSL connection.
+
+ Ignore all incoming args since pyOpenSSL.socket.shutdown takes no args.
+ """
+ self._lock.acquire()
+ try:
+ return self._ssl_conn.shutdown()
+ finally:
+ self._lock.release()
+
+
+class pyOpenSSLAdapter(Adapter):
+ """A wrapper for integrating pyOpenSSL with Cheroot."""
+
+ certificate = None
+ """The filename of the server SSL certificate."""
+
+ private_key = None
+ """The filename of the server's private key file."""
+
+ certificate_chain = None
+ """Optional. The filename of CA's intermediate certificate bundle.
+
+ This is needed for cheaper "chained root" SSL certificates, and should be
+ left as None if not required."""
+
+ context = None
+ """An instance of SSL.Context."""
+
+ ciphers = None
+ """The ciphers list of SSL."""
+
+ def __init__(
+ self, certificate, private_key, certificate_chain=None,
+ ciphers=None):
+ """Initialize OpenSSL Adapter instance."""
+ if SSL is None:
+ raise ImportError('You must install pyOpenSSL to use HTTPS.')
+
+ super(pyOpenSSLAdapter, self).__init__(
+ certificate, private_key, certificate_chain, ciphers)
+
+ self._environ = None
+
+ def bind(self, sock):
+ """Wrap and return the given socket."""
+ if self.context is None:
+ self.context = self.get_context()
+ conn = SSLConnection(self.context, sock)
+ self._environ = self.get_environ()
+ return conn
+
+ def wrap(self, sock):
+ """Wrap and return the given socket, plus WSGI environ entries."""
+ return sock, self._environ.copy()
+
+ def get_context(self):
+ """Return an SSL.Context from self attributes."""
+ # See https://code.activestate.com/recipes/442473/
+ c = SSL.Context(SSL.SSLv23_METHOD)
+ c.use_privatekey_file(self.private_key)
+ if self.certificate_chain:
+ c.load_verify_locations(self.certificate_chain)
+ c.use_certificate_file(self.certificate)
+ return c
+
+ def get_environ(self):
+ """Return WSGI environ entries to be merged into each request."""
+ ssl_environ = {
+ 'HTTPS': 'on',
+ # pyOpenSSL doesn't provide access to any of these AFAICT
+ # 'SSL_PROTOCOL': 'SSLv2',
+ # SSL_CIPHER string The cipher specification name
+ # SSL_VERSION_INTERFACE string The mod_ssl program version
+ # SSL_VERSION_LIBRARY string The OpenSSL program version
+ }
+
+ if self.certificate:
+ # Server certificate attributes
+ cert = open(self.certificate, 'rb').read()
+ cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+ ssl_environ.update({
+ 'SSL_SERVER_M_VERSION': cert.get_version(),
+ 'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
+ # 'SSL_SERVER_V_START':
+ # Validity of server's certificate (start time),
+ # 'SSL_SERVER_V_END':
+ # Validity of server's certificate (end time),
+ })
+
+ for prefix, dn in [('I', cert.get_issuer()),
+ ('S', cert.get_subject())]:
+ # X509Name objects don't seem to have a way to get the
+ # complete DN string. Use str() and slice it instead,
+ # because str(dn) looks like "<X509Name object '/C=US/ST=...'>".
+ dnstr = str(dn)[18:-2]
+
+ wsgikey = 'SSL_SERVER_%s_DN' % prefix
+ ssl_environ[wsgikey] = dnstr
+
+ # The DN should be of the form: /k1=v1/k2=v2, but we must allow
+ # for any value to contain slashes itself (in a URL).
+ while dnstr:
+ pos = dnstr.rfind('=')
+ dnstr, value = dnstr[:pos], dnstr[pos + 1:]
+ pos = dnstr.rfind('/')
+ dnstr, key = dnstr[:pos], dnstr[pos + 1:]
+ if key and value:
+ wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
+ ssl_environ[wsgikey] = value
+
+ return ssl_environ
+
+ def makefile(self, sock, mode='r', bufsize=-1):
+ """Return socket file object."""
+ if SSL and isinstance(sock, SSL.ConnectionType):
+ timeout = sock.gettimeout()
+ f = SSL_fileobject(sock, mode, bufsize)
+ f.ssl_timeout = timeout
+ return f
+ else:
+ return cheroot_server.CP_fileobject(sock, mode, bufsize)
diff --git a/resources/lib/cheroot/test/__init__.py b/resources/lib/cheroot/test/__init__.py
new file mode 100644
index 0000000..e2a7b34
--- /dev/null
+++ b/resources/lib/cheroot/test/__init__.py
@@ -0,0 +1 @@
+"""Cheroot test suite."""
diff --git a/resources/lib/cheroot/test/conftest.py b/resources/lib/cheroot/test/conftest.py
new file mode 100644
index 0000000..9f5f928
--- /dev/null
+++ b/resources/lib/cheroot/test/conftest.py
@@ -0,0 +1,27 @@
+"""Pytest configuration module.
+
+Contains fixtures, which are tightly bound to the Cheroot framework
+itself, useless for end-users' app testing.
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pytest
+
+from ..testing import ( # noqa: F401
+ native_server, wsgi_server,
+)
+from ..testing import get_server_client
+
+
+@pytest.fixture # noqa: F811
+def wsgi_server_client(wsgi_server):
+ """Create a test client out of given WSGI server."""
+ return get_server_client(wsgi_server)
+
+
+@pytest.fixture # noqa: F811
+def native_server_client(native_server):
+ """Create a test client out of given HTTP server."""
+ return get_server_client(native_server)
diff --git a/resources/lib/cheroot/test/helper.py b/resources/lib/cheroot/test/helper.py
new file mode 100644
index 0000000..38f40b2
--- /dev/null
+++ b/resources/lib/cheroot/test/helper.py
@@ -0,0 +1,169 @@
+"""A library of helper functions for the Cheroot test suite."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import datetime
+import logging
+import os
+import sys
+import time
+import threading
+import types
+
+from six.moves import http_client
+
+import six
+
+import cheroot.server
+import cheroot.wsgi
+
+from cheroot.test import webtest
+
+log = logging.getLogger(__name__)
+thisdir = os.path.abspath(os.path.dirname(__file__))
+serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem')
+
+
+config = {
+ 'bind_addr': ('127.0.0.1', 54583),
+ 'server': 'wsgi',
+ 'wsgi_app': None,
+}
+
+
+class CherootWebCase(webtest.WebCase):
+ """Helper class for a web app test suite."""
+
+ script_name = ''
+ scheme = 'http'
+
+ available_servers = {
+ 'wsgi': cheroot.wsgi.Server,
+ 'native': cheroot.server.HTTPServer,
+ }
+
+ @classmethod
+ def setup_class(cls):
+ """Create and run one HTTP server per class."""
+ conf = config.copy()
+ conf.update(getattr(cls, 'config', {}))
+
+ s_class = conf.pop('server', 'wsgi')
+ server_factory = cls.available_servers.get(s_class)
+ if server_factory is None:
+ raise RuntimeError('Unknown server in config: %s' % s_class)
+ cls.httpserver = server_factory(**conf)
+
+ cls.HOST, cls.PORT = cls.httpserver.bind_addr
+ if cls.httpserver.ssl_adapter is None:
+ ssl = ''
+ cls.scheme = 'http'
+ else:
+ ssl = ' (ssl)'
+ cls.HTTP_CONN = http_client.HTTPSConnection
+ cls.scheme = 'https'
+
+ v = sys.version.split()[0]
+ log.info('Python version used to run this test script: %s' % v)
+ log.info('Cheroot version: %s' % cheroot.__version__)
+ log.info('HTTP server version: %s%s' % (cls.httpserver.protocol, ssl))
+ log.info('PID: %s' % os.getpid())
+
+ if hasattr(cls, 'setup_server'):
+ # Clear the wsgi server so that
+ # it can be updated with the new root
+ cls.setup_server()
+ cls.start()
+
+ @classmethod
+ def teardown_class(cls):
+ """Cleanup HTTP server."""
+ if hasattr(cls, 'setup_server'):
+ cls.stop()
+
+ @classmethod
+ def start(cls):
+ """Load and start the HTTP server."""
+ threading.Thread(target=cls.httpserver.safe_start).start()
+ while not cls.httpserver.ready:
+ time.sleep(0.1)
+
+ @classmethod
+ def stop(cls):
+ """Terminate HTTP server."""
+ cls.httpserver.stop()
+ td = getattr(cls, 'teardown', None)
+ if td:
+ td()
+
+ date_tolerance = 2
+
+ def assertEqualDates(self, dt1, dt2, seconds=None):
+ """Assert abs(dt1 - dt2) is within Y seconds."""
+ if seconds is None:
+ seconds = self.date_tolerance
+
+ if dt1 > dt2:
+ diff = dt1 - dt2
+ else:
+ diff = dt2 - dt1
+ if not diff < datetime.timedelta(seconds=seconds):
+ raise AssertionError('%r and %r are not within %r seconds.' %
+ (dt1, dt2, seconds))
+
+
+class Request:
+ """HTTP request container."""
+
+ def __init__(self, environ):
+ """Initialize HTTP request."""
+ self.environ = environ
+
+
+class Response:
+ """HTTP response container."""
+
+ def __init__(self):
+ """Initialize HTTP response."""
+ self.status = '200 OK'
+ self.headers = {'Content-Type': 'text/html'}
+ self.body = None
+
+ def output(self):
+ """Generate iterable response body object."""
+ if self.body is None:
+ return []
+ elif isinstance(self.body, six.text_type):
+ return [self.body.encode('iso-8859-1')]
+ elif isinstance(self.body, six.binary_type):
+ return [self.body]
+ else:
+ return [x.encode('iso-8859-1') for x in self.body]
+
+
+class Controller:
+ """WSGI app for tests."""
+
+ def __call__(self, environ, start_response):
+ """WSGI request handler."""
+ req, resp = Request(environ), Response()
+ try:
+ # Python 3 supports unicode attribute names
+ # Python 2 encodes them
+ handler = self.handlers[environ['PATH_INFO']]
+ except KeyError:
+ resp.status = '404 Not Found'
+ else:
+ output = handler(req, resp)
+ if (output is not None and
+ not any(resp.status.startswith(status_code)
+ for status_code in ('204', '304'))):
+ resp.body = output
+ try:
+ resp.headers.setdefault('Content-Length', str(len(output)))
+ except TypeError:
+ if not isinstance(output, types.GeneratorType):
+ raise
+ start_response(resp.status, resp.headers.items())
+ return resp.output()
diff --git a/resources/lib/cheroot/test/test.pem b/resources/lib/cheroot/test/test.pem
new file mode 100644
index 0000000..47a4704
--- /dev/null
+++ b/resources/lib/cheroot/test/test.pem
@@ -0,0 +1,38 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXAIBAAKBgQDBKo554mzIMY+AByUNpaUOP9bJnQ7ZLQe9XgHwoLJR4VzpyZZZ
+R9L4WtImEew05FY3Izerfm3MN3+MC0tJ6yQU9sOiU3vBW6RrLIMlfKsnRwBRZ0Kn
+da+O6xldVSosu8Ev3z9VZ94iC/ZgKzrH7Mjj/U8/MQO7RBS/LAqee8bFNQIDAQAB
+AoGAWOCF0ZrWxn3XMucWq2LNwPKqlvVGwbIwX3cDmX22zmnM4Fy6arXbYh4XlyCj
+9+ofqRrxIFz5k/7tFriTmZ0xag5+Jdx+Kwg0/twiP7XCNKipFogwe1Hznw8OFAoT
+enKBdj2+/n2o0Bvo/tDB59m9L/538d46JGQUmJlzMyqYikECQQDyoq+8CtMNvE18
+8VgHcR/KtApxWAjj4HpaHYL637ATjThetUZkW92mgDgowyplthusxdNqhHWyv7E8
+tWNdYErZAkEAy85ShTR0M5aWmrE7o0r0SpWInAkNBH9aXQRRARFYsdBtNfRu6I0i
+0lvU9wiu3eF57FMEC86yViZ5UBnQfTu7vQJAVesj/Zt7pwaCDfdMa740OsxMUlyR
+MVhhGx4OLpYdPJ8qUecxGQKq13XZ7R1HGyNEY4bd2X80Smq08UFuATfC6QJAH8UB
+yBHtKz2GLIcELOg6PIYizW/7v3+6rlVF60yw7sb2vzpjL40QqIn4IKoR2DSVtOkb
+8FtAIX3N21aq0VrGYQJBAIPiaEc2AZ8Bq2GC4F3wOz/BxJ/izvnkiotR12QK4fh5
+yjZMhTjWCas5zwHR5PDjlD88AWGDMsZ1PicD4348xJQ=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAy6gAwIBAgIJAI18BD7eQxlGMA0GCSqGSIb3DQEBBAUAMIGeMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTESMBAGA1UEBxMJU2FuIERpZWdv
+MRkwFwYDVQQKExBDaGVycnlQeSBQcm9qZWN0MREwDwYDVQQLEwhkZXYtdGVzdDEW
+MBQGA1UEAxMNQ2hlcnJ5UHkgVGVhbTEgMB4GCSqGSIb3DQEJARYRcmVtaUBjaGVy
+cnlweS5vcmcwHhcNMDYwOTA5MTkyMDIwWhcNMzQwMTI0MTkyMDIwWjCBnjELMAkG
+A1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExEjAQBgNVBAcTCVNhbiBEaWVn
+bzEZMBcGA1UEChMQQ2hlcnJ5UHkgUHJvamVjdDERMA8GA1UECxMIZGV2LXRlc3Qx
+FjAUBgNVBAMTDUNoZXJyeVB5IFRlYW0xIDAeBgkqhkiG9w0BCQEWEXJlbWlAY2hl
+cnJ5cHkub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDBKo554mzIMY+A
+ByUNpaUOP9bJnQ7ZLQe9XgHwoLJR4VzpyZZZR9L4WtImEew05FY3Izerfm3MN3+M
+C0tJ6yQU9sOiU3vBW6RrLIMlfKsnRwBRZ0Knda+O6xldVSosu8Ev3z9VZ94iC/Zg
+KzrH7Mjj/U8/MQO7RBS/LAqee8bFNQIDAQABo4IBBzCCAQMwHQYDVR0OBBYEFDIQ
+2feb71tVZCWpU0qJ/Tw+wdtoMIHTBgNVHSMEgcswgciAFDIQ2feb71tVZCWpU0qJ
+/Tw+wdtooYGkpIGhMIGeMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5p
+YTESMBAGA1UEBxMJU2FuIERpZWdvMRkwFwYDVQQKExBDaGVycnlQeSBQcm9qZWN0
+MREwDwYDVQQLEwhkZXYtdGVzdDEWMBQGA1UEAxMNQ2hlcnJ5UHkgVGVhbTEgMB4G
+CSqGSIb3DQEJARYRcmVtaUBjaGVycnlweS5vcmeCCQCNfAQ+3kMZRjAMBgNVHRME
+BTADAQH/MA0GCSqGSIb3DQEBBAUAA4GBAL7AAQz7IePV48ZTAFHKr88ntPALsL5S
+8vHCZPNMevNkLTj3DYUw2BcnENxMjm1kou2F2BkvheBPNZKIhc6z4hAml3ed1xa2
+D7w6e6OTcstdK/+KrPDDHeOP1dhMWNs2JE1bNlfF1LiXzYKSXpe88eCKjCXsCT/T
+NluCaWQys3MS
+-----END CERTIFICATE-----
diff --git a/resources/lib/cheroot/test/test__compat.py b/resources/lib/cheroot/test/test__compat.py
new file mode 100644
index 0000000..d34e5eb
--- /dev/null
+++ b/resources/lib/cheroot/test/test__compat.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""Test suite for cross-python compatibility helpers."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pytest
+import six
+
+from cheroot._compat import ntob, ntou, bton
+
+
+@pytest.mark.parametrize(
+ 'func,inp,out',
+ [
+ (ntob, 'bar', b'bar'),
+ (ntou, 'bar', u'bar'),
+ (bton, b'bar', 'bar'),
+ ],
+)
+def test_compat_functions_positive(func, inp, out):
+ """Check that compat functions work with correct input."""
+ assert func(inp, encoding='utf-8') == out
+
+
+@pytest.mark.parametrize(
+ 'func',
+ [
+ ntob,
+ ntou,
+ ],
+)
+def test_compat_functions_negative_nonnative(func):
+ """Check that compat functions fail loudly for incorrect input."""
+ non_native_test_str = b'bar' if six.PY3 else u'bar'
+ with pytest.raises(TypeError):
+ func(non_native_test_str, encoding='utf-8')
+
+
+@pytest.mark.skip(reason='This test does not work now')
+@pytest.mark.skipif(
+ six.PY3,
+ reason='This code path only appears in Python 2 version.',
+)
+def test_ntou_escape():
+ """Check that ntou supports escape-encoding under Python 2."""
+ expected = u''
+ actual = ntou('hi'.encode('ISO-8859-1'), encoding='escape')
+ assert actual == expected
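+
+
+# Illustrative sketch only (not collected by pytest, hence no ``test_``
+# prefix): the round-trip property the parametrized tests above rely on,
+# assuming the ntob/bton signatures exercised there.
+def _roundtrip_sketch():
+    """Show that bton(ntob(s)) returns the original native string."""
+    native = 'bar'
+    assert bton(ntob(native, encoding='utf-8'), encoding='utf-8') == native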
diff --git a/resources/lib/cheroot/test/test_conn.py b/resources/lib/cheroot/test/test_conn.py
new file mode 100644
index 0000000..f543dd9
--- /dev/null
+++ b/resources/lib/cheroot/test/test_conn.py
@@ -0,0 +1,897 @@
+"""Tests for TCP connection handling, including proper and timely close."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+import time
+
+from six.moves import range, http_client, urllib
+
+import six
+import pytest
+
+from cheroot.test import helper, webtest
+
+
+timeout = 1
+pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN'
+
+
+class Controller(helper.Controller):
+ """Controller for serving WSGI apps."""
+
+ def hello(req, resp):
+ """Render Hello world."""
+ return 'Hello, world!'
+
+ def pov(req, resp):
+ """Render pov value."""
+ return pov
+
+ def stream(req, resp):
+ """Render streaming response."""
+ if 'set_cl' in req.environ['QUERY_STRING']:
+ resp.headers['Content-Length'] = str(10)
+
+ def content():
+ for x in range(10):
+ yield str(x)
+
+ return content()
+
+ def upload(req, resp):
+        """Process file upload and render thanks."""
+ if not req.environ['REQUEST_METHOD'] == 'POST':
+ raise AssertionError("'POST' != request.method %r" %
+ req.environ['REQUEST_METHOD'])
+ return "thanks for '%s'" % req.environ['wsgi.input'].read()
+
+ def custom_204(req, resp):
+ """Render response with status 204."""
+ resp.status = '204'
+ return 'Code = 204'
+
+ def custom_304(req, resp):
+ """Render response with status 304."""
+ resp.status = '304'
+ return 'Code = 304'
+
+ def err_before_read(req, resp):
+ """Render response with status 500."""
+ resp.status = '500 Internal Server Error'
+ return 'ok'
+
+ def one_megabyte_of_a(req, resp):
+ """Render 1MB response."""
+ return ['a' * 1024] * 1024
+
+ def wrong_cl_buffered(req, resp):
+ """Render buffered response with invalid length value."""
+ resp.headers['Content-Length'] = '5'
+ return 'I have too many bytes'
+
+ def wrong_cl_unbuffered(req, resp):
+ """Render unbuffered response with invalid length value."""
+ resp.headers['Content-Length'] = '5'
+ return ['I too', ' have too many bytes']
+
+ def _munge(string):
+ """Encode PATH_INFO correctly depending on Python version.
+
+ WSGI 1.0 is a mess around unicode. Create endpoints
+ that match the PATH_INFO that it produces.
+ """
+ if six.PY3:
+ return string.encode('utf-8').decode('latin-1')
+ return string
+
+ handlers = {
+ '/hello': hello,
+ '/pov': pov,
+ '/page1': pov,
+ '/page2': pov,
+ '/page3': pov,
+ '/stream': stream,
+ '/upload': upload,
+ '/custom/204': custom_204,
+ '/custom/304': custom_304,
+ '/err_before_read': err_before_read,
+ '/one_megabyte_of_a': one_megabyte_of_a,
+ '/wrong_cl_buffered': wrong_cl_buffered,
+ '/wrong_cl_unbuffered': wrong_cl_unbuffered,
+ }
+
+
+@pytest.fixture
+def testing_server(wsgi_server_client):
+ """Attach a WSGI app to the given server and pre-configure it."""
+ app = Controller()
+
+ def _timeout(req, resp):
+ return str(wsgi_server.timeout)
+ app.handlers['/timeout'] = _timeout
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = app
+ wsgi_server.max_request_body_size = 1001
+ wsgi_server.timeout = timeout
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+@pytest.fixture
+def test_client(testing_server):
+ """Get and return a test client out of the given server."""
+ return testing_server.server_client
+
+
+def header_exists(header_name, headers):
+ """Check that a header is present."""
+ return header_name.lower() in (k.lower() for (k, _) in headers)
+
+
+def header_has_value(header_name, header_value, headers):
+ """Check that a header with a given value is present."""
+ return header_name.lower() in (k.lower() for (k, v) in headers
+ if v == header_value)
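+
+
+# Illustrative only: the tests below repeatedly split the status line
+# returned by the test client by hand ('200 OK' -> 200 and 'OK'); a
+# hypothetical helper doing the same thing would look like this.
+def _split_status_line(status_line):
+    """Return (status code, reason phrase) for a '200 OK'-style line."""
+    return int(status_line[:3]), status_line[4:]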
+
+
+def test_HTTP11_persistent_connections(test_client):
+ """Test persistent HTTP/1.1 connections."""
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert there's no "Connection: close".
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Make another request on the same connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page1', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Test client-side close.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page2', http_conn=http_connection,
+ headers=[('Connection', 'close')]
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert header_has_value('Connection', 'close', actual_headers)
+
+ # Make another request on the same connection, which should error.
+ with pytest.raises(http_client.NotConnected):
+ test_client.get('/pov', http_conn=http_connection)
+
+
+@pytest.mark.parametrize(
+ 'set_cl',
+ (
+ False, # Without Content-Length
+ True, # With Content-Length
+ )
+)
+def test_streaming_11(test_client, set_cl):
+ """Test serving of streaming responses with HTTP/1.1 protocol."""
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert there's no "Connection: close".
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Make another, streamed request on the same connection.
+ if set_cl:
+ # When a Content-Length is provided, the content should stream
+ # without closing the connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream?set_cl=Yes', http_conn=http_connection
+ )
+ assert header_exists('Content-Length', actual_headers)
+ assert not header_has_value('Connection', 'close', actual_headers)
+ assert not header_exists('Transfer-Encoding', actual_headers)
+
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+ else:
+ # When no Content-Length response header is provided,
+ # streamed output will either close the connection, or use
+ # chunked encoding, to determine transfer-length.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream', http_conn=http_connection
+ )
+ assert not header_exists('Content-Length', actual_headers)
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+
+ chunked_response = False
+ for k, v in actual_headers:
+ if k.lower() == 'transfer-encoding':
+ if str(v) == 'chunked':
+ chunked_response = True
+
+ if chunked_response:
+ assert not header_has_value('Connection', 'close', actual_headers)
+ else:
+ assert header_has_value('Connection', 'close', actual_headers)
+
+ # Make another request on the same connection, which should
+ # error.
+ with pytest.raises(http_client.NotConnected):
+ test_client.get('/pov', http_conn=http_connection)
+
+ # Try HEAD.
+ # See https://www.bitbucket.org/cherrypy/cherrypy/issue/864.
+        # TODO: figure out how this can be possible on a closed connection
+        # (chunked_response case)
+ status_line, actual_headers, actual_resp_body = test_client.head(
+ '/stream', http_conn=http_connection
+ )
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b''
+ assert not header_exists('Transfer-Encoding', actual_headers)
+
+
+@pytest.mark.parametrize(
+ 'set_cl',
+ (
+ False, # Without Content-Length
+ True, # With Content-Length
+ )
+)
+def test_streaming_10(test_client, set_cl):
+ """Test serving of streaming responses with HTTP/1.0 protocol."""
+ original_server_protocol = test_client.server_instance.protocol
+ test_client.server_instance.protocol = 'HTTP/1.0'
+
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert Keep-Alive.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection,
+ headers=[('Connection', 'Keep-Alive')],
+ protocol='HTTP/1.0',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert header_has_value('Connection', 'Keep-Alive', actual_headers)
+
+ # Make another, streamed request on the same connection.
+ if set_cl:
+ # When a Content-Length is provided, the content should
+ # stream without closing the connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream?set_cl=Yes', http_conn=http_connection,
+ headers=[('Connection', 'Keep-Alive')],
+ protocol='HTTP/1.0',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+
+ assert header_exists('Content-Length', actual_headers)
+ assert header_has_value('Connection', 'Keep-Alive', actual_headers)
+ assert not header_exists('Transfer-Encoding', actual_headers)
+ else:
+ # When a Content-Length is not provided,
+ # the server should close the connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream', http_conn=http_connection,
+ headers=[('Connection', 'Keep-Alive')],
+ protocol='HTTP/1.0',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+
+ assert not header_exists('Content-Length', actual_headers)
+ assert not header_has_value('Connection', 'Keep-Alive', actual_headers)
+ assert not header_exists('Transfer-Encoding', actual_headers)
+
+ # Make another request on the same connection, which should error.
+ with pytest.raises(http_client.NotConnected):
+ test_client.get(
+ '/pov', http_conn=http_connection,
+ protocol='HTTP/1.0',
+ )
+
+ test_client.server_instance.protocol = original_server_protocol
+
+
+@pytest.mark.parametrize(
+ 'http_server_protocol',
+ (
+ 'HTTP/1.0',
+ 'HTTP/1.1',
+ )
+)
+def test_keepalive(test_client, http_server_protocol):
+ """Test Keep-Alive enabled connections."""
+ original_server_protocol = test_client.server_instance.protocol
+ test_client.server_instance.protocol = http_server_protocol
+
+ http_client_protocol = 'HTTP/1.0'
+
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Test a normal HTTP/1.0 request.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page2',
+ protocol=http_client_protocol,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Test a keep-alive HTTP/1.0 request.
+
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page3', headers=[('Connection', 'Keep-Alive')],
+ http_conn=http_connection, protocol=http_client_protocol,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert header_has_value('Connection', 'Keep-Alive', actual_headers)
+
+ # Remove the keep-alive header again.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page3', http_conn=http_connection,
+ protocol=http_client_protocol,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ test_client.server_instance.protocol = original_server_protocol
+
+
+@pytest.mark.parametrize(
+ 'timeout_before_headers',
+ (
+ True,
+ False,
+ )
+)
+def test_HTTP11_Timeout(test_client, timeout_before_headers):
+ """Check timeout without sending any data.
+
+ The server will close the conn with a 408.
+ """
+ conn = test_client.get_connection()
+ conn.auto_open = False
+ conn.connect()
+
+ if not timeout_before_headers:
+ # Connect but send half the headers only.
+ conn.send(b'GET /hello HTTP/1.1')
+ conn.send(('Host: %s' % conn.host).encode('ascii'))
+ # else: Connect but send nothing.
+
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+
+ # The request should have returned 408 already.
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ assert response.status == 408
+ conn.close()
+
+
+def test_HTTP11_Timeout_after_request(test_client):
+ """Check timeout after at least one request has succeeded.
+
+ The server should close the connection without 408.
+ """
+ fail_msg = "Writing to timed out socket didn't fail as it should have: %s"
+
+ # Make an initial request
+ conn = test_client.get_connection()
+ conn.putrequest('GET', '/timeout?t=%s' % timeout, skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ assert response.status == 200
+ actual_body = response.read()
+ expected_body = str(timeout).encode()
+ assert actual_body == expected_body
+
+ # Make a second request on the same socket
+ conn._output(b'GET /hello HTTP/1.1')
+ conn._output(('Host: %s' % conn.host).encode('ascii'))
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ assert response.status == 200
+ actual_body = response.read()
+ expected_body = b'Hello, world!'
+ assert actual_body == expected_body
+
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+
+ # Make another request on the same socket, which should error
+ conn._output(b'GET /hello HTTP/1.1')
+ conn._output(('Host: %s' % conn.host).encode('ascii'))
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='GET')
+ try:
+ response.begin()
+ except (socket.error, http_client.BadStatusLine):
+ pass
+ except Exception as ex:
+ pytest.fail(fail_msg % ex)
+ else:
+ if response.status != 408:
+ pytest.fail(fail_msg % response.read())
+
+ conn.close()
+
+ # Make another request on a new socket, which should work
+ conn = test_client.get_connection()
+ conn.putrequest('GET', '/pov', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ assert response.status == 200
+ actual_body = response.read()
+ expected_body = pov.encode()
+ assert actual_body == expected_body
+
+ # Make another request on the same socket,
+ # but timeout on the headers
+ conn.send(b'GET /hello HTTP/1.1')
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+ response = conn.response_class(conn.sock, method='GET')
+ try:
+ response.begin()
+ except (socket.error, http_client.BadStatusLine):
+ pass
+ except Exception as ex:
+ pytest.fail(fail_msg % ex)
+ else:
+ if response.status != 408:
+ pytest.fail(fail_msg % response.read())
+
+ conn.close()
+
+ # Retry the request on a new connection, which should work
+ conn = test_client.get_connection()
+ conn.putrequest('GET', '/pov', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ assert response.status == 200
+ actual_body = response.read()
+ expected_body = pov.encode()
+ assert actual_body == expected_body
+ conn.close()
+
+
+def test_HTTP11_pipelining(test_client):
+ """Test HTTP/1.1 pipelining.
+
+ httplib doesn't support this directly.
+ """
+ conn = test_client.get_connection()
+
+ # Put request 1
+ conn.putrequest('GET', '/hello', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.endheaders()
+
+ for trial in range(5):
+ # Put next request
+ conn._output(
+ ('GET /hello?%s HTTP/1.1' % trial).encode('iso-8859-1')
+ )
+ conn._output(('Host: %s' % conn.host).encode('ascii'))
+ conn._send_output()
+
+ # Retrieve previous response
+ response = conn.response_class(conn.sock, method='GET')
+        # There is a bug in Python 3 regarding the buffering of
+        # ``conn.sock``. Until that bug gets fixed we will
+        # monkey-patch the ``response`` instance.
+        # https://bugs.python.org/issue23377
+ if six.PY3:
+ response.fp = conn.sock.makefile('rb', 0)
+ response.begin()
+ body = response.read(13)
+ assert response.status == 200
+ assert body == b'Hello, world!'
+
+ # Retrieve final response
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ body = response.read()
+ assert response.status == 200
+ assert body == b'Hello, world!'
+
+ conn.close()
+
+
+def test_100_Continue(test_client):
+ """Test 100-continue header processing."""
+ conn = test_client.get_connection()
+
+ # Try a page without an Expect request header first.
+ # Note that httplib's response.begin automatically ignores
+ # 100 Continue responses, so we must manually check for it.
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '4')
+ conn.endheaders()
+ conn.send(b"d'oh")
+ response = conn.response_class(conn.sock, method='POST')
+ version, status, reason = response._read_status()
+ assert status != 100
+ conn.close()
+
+ # Now try a page with an Expect header...
+ conn.connect()
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '17')
+ conn.putheader('Expect', '100-continue')
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ assert status == 100
+ while True:
+ line = response.fp.readline().strip()
+ if line:
+ pytest.fail(
+ '100 Continue should not output any headers. Got %r' %
+ line)
+ else:
+ break
+
+ # ...send the body
+ body = b'I am a small file'
+ conn.send(body)
+
+ # ...get the final response
+ response.begin()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ expected_resp_body = ("thanks for '%s'" % body).encode()
+ assert actual_resp_body == expected_resp_body
+ conn.close()
+
+
+@pytest.mark.parametrize(
+ 'max_request_body_size',
+ (
+ 0,
+ 1001,
+ )
+)
+def test_readall_or_close(test_client, max_request_body_size):
+ """Test a max_request_body_size of 0 (the default) and 1001."""
+ old_max = test_client.server_instance.max_request_body_size
+
+ test_client.server_instance.max_request_body_size = max_request_body_size
+
+ conn = test_client.get_connection()
+
+ # Get a POST page with an error
+ conn.putrequest('POST', '/err_before_read', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '1000')
+ conn.putheader('Expect', '100-continue')
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ assert status == 100
+ skip = True
+ while skip:
+ skip = response.fp.readline().strip()
+
+ # ...send the body
+ conn.send(b'x' * 1000)
+
+ # ...get the final response
+ response.begin()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 500
+
+ # Now try a working page with an Expect header...
+ conn._output(b'POST /upload HTTP/1.1')
+ conn._output(('Host: %s' % conn.host).encode('ascii'))
+ conn._output(b'Content-Type: text/plain')
+ conn._output(b'Content-Length: 17')
+ conn._output(b'Expect: 100-continue')
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ assert status == 100
+ skip = True
+ while skip:
+ skip = response.fp.readline().strip()
+
+ # ...send the body
+ body = b'I am a small file'
+ conn.send(body)
+
+ # ...get the final response
+ response.begin()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ expected_resp_body = ("thanks for '%s'" % body).encode()
+ assert actual_resp_body == expected_resp_body
+ conn.close()
+
+ test_client.server_instance.max_request_body_size = old_max
+
+
+def test_No_Message_Body(test_client):
+ """Test HTTP queries with an empty response body."""
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert there's no "Connection: close".
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Make a 204 request on the same connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/custom/204', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 204
+ assert not header_exists('Content-Length', actual_headers)
+ assert actual_resp_body == b''
+ assert not header_exists('Connection', actual_headers)
+
+ # Make a 304 request on the same connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/custom/304', http_conn=http_connection
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 304
+ assert not header_exists('Content-Length', actual_headers)
+ assert actual_resp_body == b''
+ assert not header_exists('Connection', actual_headers)
+
+
+@pytest.mark.xfail(
+ reason='Server does not correctly read trailers/ending of the previous '
+ 'HTTP request, thus the second request fails as the server tries '
+ r"to parse b'Content-Type: application/json\r\n' as a "
+           'Request-Line. This results in HTTP status code 400, instead of 413. '
+ 'Ref: https://github.com/cherrypy/cheroot/issues/69'
+)
+def test_Chunked_Encoding(test_client):
+ """Test HTTP uploads with chunked transfer-encoding."""
+ # Initialize a persistent HTTP connection
+ conn = test_client.get_connection()
+
+ # Try a normal chunked request (with extensions)
+ body = (
+ b'8;key=value\r\nxx\r\nxxxx\r\n5\r\nyyyyy\r\n0\r\n'
+ b'Content-Type: application/json\r\n'
+ b'\r\n'
+ )
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Transfer-Encoding', 'chunked')
+ conn.putheader('Trailer', 'Content-Type')
+ # Note that this is somewhat malformed:
+ # we shouldn't be sending Content-Length.
+ # RFC 2616 says the server should ignore it.
+ conn.putheader('Content-Length', '3')
+ conn.endheaders()
+ conn.send(body)
+ response = conn.getresponse()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ expected_resp_body = ("thanks for '%s'" % b'xx\r\nxxxxyyyyy').encode()
+ assert actual_resp_body == expected_resp_body
+
+ # Try a chunked request that exceeds server.max_request_body_size.
+ # Note that the delimiters and trailer are included.
+ body = b'3e3\r\n' + (b'x' * 995) + b'\r\n0\r\n\r\n'
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Transfer-Encoding', 'chunked')
+ conn.putheader('Content-Type', 'text/plain')
+ # Chunked requests don't need a content-length
+ # conn.putheader("Content-Length", len(body))
+ conn.endheaders()
+ conn.send(body)
+ response = conn.getresponse()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 413
+ conn.close()
+
+
+def test_Content_Length_in(test_client):
+ """Try a non-chunked request where Content-Length exceeds limit.
+
+ (server.max_request_body_size).
+ Assert error before body send.
+ """
+ # Initialize a persistent HTTP connection
+ conn = test_client.get_connection()
+
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '9999')
+ conn.endheaders()
+ response = conn.getresponse()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+ assert actual_status == 413
+ expected_resp_body = (
+ b'The entity sent with the request exceeds '
+ b'the maximum allowed bytes.'
+ )
+ assert actual_resp_body == expected_resp_body
+ conn.close()
+
+
+def test_Content_Length_not_int(test_client):
+ """Test that malicious Content-Length header returns 400."""
+ status_line, actual_headers, actual_resp_body = test_client.post(
+ '/upload',
+ headers=[
+ ('Content-Type', 'text/plain'),
+ ('Content-Length', 'not-an-integer'),
+ ],
+ )
+ actual_status = int(status_line[:3])
+
+ assert actual_status == 400
+ assert actual_resp_body == b'Malformed Content-Length Header.'
+
+
+@pytest.mark.parametrize(
+ 'uri,expected_resp_status,expected_resp_body',
+ (
+ ('/wrong_cl_buffered', 500,
+ (b'The requested resource returned more bytes than the '
+ b'declared Content-Length.')),
+ ('/wrong_cl_unbuffered', 200, b'I too'),
+ )
+)
+def test_Content_Length_out(
+ test_client,
+ uri, expected_resp_status, expected_resp_body
+):
+ """Test response with Content-Length less than the response body.
+
+ (non-chunked response)
+ """
+ conn = test_client.get_connection()
+ conn.putrequest('GET', uri, skip_host=True)
+ conn.putheader('Host', conn.host)
+ conn.endheaders()
+
+ response = conn.getresponse()
+ status_line, actual_headers, actual_resp_body = webtest.shb(response)
+ actual_status = int(status_line[:3])
+
+ assert actual_status == expected_resp_status
+ assert actual_resp_body == expected_resp_body
+
+ conn.close()
+
+
+@pytest.mark.xfail(
+ reason='Sometimes this test fails due to low timeout. '
+ 'Ref: https://github.com/cherrypy/cherrypy/issues/598'
+)
+def test_598(test_client):
+ """Test serving large file with a read timeout in place."""
+ # Initialize a persistent HTTP connection
+ conn = test_client.get_connection()
+ remote_data_conn = urllib.request.urlopen(
+ '%s://%s:%s/one_megabyte_of_a'
+ % ('http', conn.host, conn.port)
+ )
+ buf = remote_data_conn.read(512)
+ time.sleep(timeout * 0.6)
+ remaining = (1024 * 1024) - 512
+ while remaining:
+ data = remote_data_conn.read(remaining)
+ if not data:
+ break
+ buf += data
+ remaining -= len(data)
+
+ assert len(buf) == 1024 * 1024
+ assert buf == b'a' * 1024 * 1024
+ assert remaining == 0
+ remote_data_conn.close()
+
+
+@pytest.mark.parametrize(
+ 'invalid_terminator',
+ (
+ b'\n\n',
+ b'\r\n\n',
+ )
+)
+def test_No_CRLF(test_client, invalid_terminator):
+ """Test HTTP queries with no valid CRLF terminators."""
+ # Initialize a persistent HTTP connection
+ conn = test_client.get_connection()
+
+ # (b'%s' % b'') is not supported in Python 3.4, so just use +
+ conn.send(b'GET /hello HTTP/1.1' + invalid_terminator)
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ actual_resp_body = response.read()
+ expected_resp_body = b'HTTP requires CRLF terminators'
+ assert actual_resp_body == expected_resp_body
+ conn.close()
diff --git a/resources/lib/cheroot/test/test_core.py b/resources/lib/cheroot/test/test_core.py
new file mode 100644
index 0000000..7c91b13
--- /dev/null
+++ b/resources/lib/cheroot/test/test_core.py
@@ -0,0 +1,405 @@
+"""Tests for managing HTTP issues (malformed requests, etc)."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import errno
+import socket
+
+import pytest
+import six
+from six.moves import urllib
+
+from cheroot.test import helper
+
+
+HTTP_BAD_REQUEST = 400
+HTTP_LENGTH_REQUIRED = 411
+HTTP_NOT_FOUND = 404
+HTTP_OK = 200
+HTTP_VERSION_NOT_SUPPORTED = 505
+
+
+class HelloController(helper.Controller):
+ """Controller for serving WSGI apps."""
+
+ def hello(req, resp):
+ """Render Hello world."""
+ return 'Hello world!'
+
+ def body_required(req, resp):
+ """Render Hello world or set 411."""
+ if req.environ.get('Content-Length', None) is None:
+ resp.status = '411 Length Required'
+ return
+ return 'Hello world!'
+
+ def query_string(req, resp):
+ """Render QUERY_STRING value."""
+ return req.environ.get('QUERY_STRING', '')
+
+ def asterisk(req, resp):
+ """Render request method value."""
+ method = req.environ.get('REQUEST_METHOD', 'NO METHOD FOUND')
+ tmpl = 'Got asterisk URI path with {method} method'
+ return tmpl.format(**locals())
+
+ def _munge(string):
+ """Encode PATH_INFO correctly depending on Python version.
+
+ WSGI 1.0 is a mess around unicode. Create endpoints
+ that match the PATH_INFO that it produces.
+ """
+ if six.PY3:
+ return string.encode('utf-8').decode('latin-1')
+ return string
+
+ handlers = {
+ '/hello': hello,
+ '/no_body': hello,
+ '/body_required': body_required,
+ '/query_string': query_string,
+ _munge('/привіт'): hello,
+ _munge('/Юххууу'): hello,
+ '/\xa0Ðblah key 0 900 4 data': hello,
+ '/*': asterisk,
+ }
+
+
+def _get_http_response(connection, method='GET'):
+ c = connection
+ kwargs = {'strict': c.strict} if hasattr(c, 'strict') else {}
+ # Python 3.2 removed the 'strict' feature, saying:
+ # "http.client now always assumes HTTP/1.x compliant servers."
+ return c.response_class(c.sock, method=method, **kwargs)
+
+
+@pytest.fixture
+def testing_server(wsgi_server_client):
+ """Attach a WSGI app to the given server and pre-configure it."""
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = HelloController()
+ wsgi_server.max_request_body_size = 30000000
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+@pytest.fixture
+def test_client(testing_server):
+ """Get and return a test client out of the given server."""
+ return testing_server.server_client
+
+
+def test_http_connect_request(test_client):
+ """Check that CONNECT query results in Method Not Allowed status."""
+ status_line = test_client.connect('/anything')[0]
+ actual_status = int(status_line[:3])
+ assert actual_status == 405
+
+
+def test_normal_request(test_client):
+ """Check that normal GET query succeeds."""
+ status_line, _, actual_resp_body = test_client.get('/hello')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_query_string_request(test_client):
+ """Check that GET param is parsed well."""
+ status_line, _, actual_resp_body = test_client.get(
+ '/query_string?test=True'
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'test=True'
+
+
+@pytest.mark.parametrize(
+ 'uri',
+ (
+ '/hello', # plain
+ '/query_string?test=True', # query
+ '/{0}?{1}={2}'.format( # quoted unicode
+ *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо'))
+ ),
+ )
+)
+def test_parse_acceptable_uri(test_client, uri):
+ """Check that server responds with OK to valid GET queries."""
+ status_line = test_client.get(uri)[0]
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+
+
+@pytest.mark.xfail(six.PY2, reason='Fails on Python 2')
+def test_parse_uri_unsafe_uri(test_client):
+ """Test that malicious URI does not allow HTTP injection.
+
+ This effectively checks that sending GET request with URL
+
+ /%A0%D0blah%20key%200%20900%204%20data
+
+ is not converted into
+
+ GET /
+ blah key 0 900 4 data
+ HTTP/1.1
+
+ which would be a security issue otherwise.
+ """
+ c = test_client.get_connection()
+ resource = '/\xa0Ðblah key 0 900 4 data'.encode('latin-1')
+ quoted = urllib.parse.quote(resource)
+ assert quoted == '/%A0%D0blah%20key%200%20900%204%20data'
+ request = 'GET {quoted} HTTP/1.1'.format(**locals())
+ c._output(request.encode('utf-8'))
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == HTTP_OK
+ assert response.fp.read(12) == b'Hello world!'
+ c.close()
+
+
+def test_parse_uri_invalid_uri(test_client):
+ """Check that server responds with Bad Request to invalid GET queries.
+
+ Invalid request line test case: it should only contain US-ASCII.
+ """
+ c = test_client.get_connection()
+ c._output(u'GET /йопта! HTTP/1.1'.encode('utf-8'))
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == HTTP_BAD_REQUEST
+ assert response.fp.read(21) == b'Malformed Request-URI'
+ c.close()
+
+
+@pytest.mark.parametrize(
+ 'uri',
+ (
+ 'hello', # ascii
+ 'привіт', # non-ascii
+ )
+)
+def test_parse_no_leading_slash_invalid(test_client, uri):
+ """Check that server responds with Bad Request to invalid GET queries.
+
+    Invalid request line test case: it should start with a slash (be absolute).
+ """
+ status_line, _, actual_resp_body = test_client.get(
+ urllib.parse.quote(uri)
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ assert b'starting with a slash' in actual_resp_body
+
+
+def test_parse_uri_absolute_uri(test_client):
+ """Check that server responds with Bad Request to Absolute URI.
+
+ Only proxy servers should allow this.
+ """
+ status_line, _, actual_resp_body = test_client.get('http://google.com/')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ expected_body = b'Absolute URI not allowed if server is not a proxy.'
+ assert actual_resp_body == expected_body
+
+
+def test_parse_uri_asterisk_uri(test_client):
+ """Check that server responds with OK to OPTIONS with "*" Absolute URI."""
+ status_line, _, actual_resp_body = test_client.options('*')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ expected_body = b'Got asterisk URI path with OPTIONS method'
+ assert actual_resp_body == expected_body
+
+
+def test_parse_uri_fragment_uri(test_client):
+ """Check that server responds with Bad Request to URI with fragment."""
+ status_line, _, actual_resp_body = test_client.get(
+ '/hello?test=something#fake',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ expected_body = b'Illegal #fragment in Request-URI.'
+ assert actual_resp_body == expected_body
+
+
+def test_no_content_length(test_client):
+ """Test POST query with an empty body being successful."""
+ # "The presence of a message-body in a request is signaled by the
+ # inclusion of a Content-Length or Transfer-Encoding header field in
+ # the request's message-headers."
+ #
+ # Send a message with neither header and no body.
+ c = test_client.get_connection()
+ c.request('POST', '/no_body')
+ response = c.getresponse()
+ actual_resp_body = response.fp.read()
+ actual_status = response.status
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_content_length_required(test_client):
+ """Test POST query with body failing because of missing Content-Length."""
+ # Now send a message that has no Content-Length, but does send a body.
+ # Verify that CP times out the socket and responds
+ # with 411 Length Required.
+
+ c = test_client.get_connection()
+ c.request('POST', '/body_required')
+ response = c.getresponse()
+ response.fp.read()
+
+ actual_status = response.status
+ assert actual_status == HTTP_LENGTH_REQUIRED
+
+
+@pytest.mark.parametrize(
+ 'request_line,status_code,expected_body',
+ (
+ (b'GET /', # missing proto
+ HTTP_BAD_REQUEST, b'Malformed Request-Line'),
+ (b'GET / HTTPS/1.1', # invalid proto
+ HTTP_BAD_REQUEST, b'Malformed Request-Line: bad protocol'),
+ (b'GET / HTTP/2.15', # invalid ver
+ HTTP_VERSION_NOT_SUPPORTED, b'Cannot fulfill request'),
+ )
+)
+def test_malformed_request_line(
+ test_client, request_line,
+ status_code, expected_body
+):
+ """Test missing or invalid HTTP version in Request-Line."""
+ c = test_client.get_connection()
+ c._output(request_line)
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == status_code
+ assert response.fp.read(len(expected_body)) == expected_body
+ c.close()
+
+
+def test_malformed_http_method(test_client):
+ """Test non-uppercase HTTP method."""
+ c = test_client.get_connection()
+ c.putrequest('GeT', '/malformed_method_case')
+ c.putheader('Content-Type', 'text/plain')
+ c.endheaders()
+
+ response = c.getresponse()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.fp.read(21)
+ assert actual_resp_body == b'Malformed method name'
+
+
+def test_malformed_header(test_client):
+ """Check that broken HTTP header results in Bad Request."""
+ c = test_client.get_connection()
+ c.putrequest('GET', '/')
+ c.putheader('Content-Type', 'text/plain')
+ # See https://www.bitbucket.org/cherrypy/cherrypy/issue/941
+ c._output(b'Re, 1.2.3.4#015#012')
+ c.endheaders()
+
+ response = c.getresponse()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.fp.read(20)
+ assert actual_resp_body == b'Illegal header line.'
+
+
+def test_request_line_split_issue_1220(test_client):
+    """Check that an HTTP request line of exactly 256 characters is OK."""
+ Request_URI = (
+ '/hello?'
+ 'intervenant-entreprise-evenement_classaction='
+ 'evenement-mailremerciements'
+ '&_path=intervenant-entreprise-evenement'
+ '&intervenant-entreprise-evenement_action-id=19404'
+ '&intervenant-entreprise-evenement_id=19404'
+ '&intervenant-entreprise_id=28092'
+ )
+ assert len('GET %s HTTP/1.1\r\n' % Request_URI) == 256
+
+ actual_resp_body = test_client.get(Request_URI)[2]
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_garbage_in(test_client):
+ """Test that server sends an error for garbage received over TCP."""
+ # Connect without SSL regardless of server.scheme
+
+ c = test_client.get_connection()
+ c._output(b'gjkgjklsgjklsgjkljklsg')
+ c._send_output()
+ response = c.response_class(c.sock, method='GET')
+ try:
+ response.begin()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.fp.read(22)
+ assert actual_resp_body == b'Malformed Request-Line'
+ c.close()
+ except socket.error as ex:
+ # "Connection reset by peer" is also acceptable.
+ if ex.errno != errno.ECONNRESET:
+ raise
+
+
+class CloseController:
+ """Controller for testing the close callback."""
+
+ def __call__(self, environ, start_response):
+        """Grab the request object so ``close`` can write to it later."""
+ self.req = start_response.__self__.req
+ resp = CloseResponse(self.close)
+ start_response(resp.status, resp.headers.items())
+ return resp
+
+ def close(self):
+ """Close, writing hello."""
+ self.req.write(b'hello')
+
+
+class CloseResponse:
+ """Dummy empty response to trigger the no body status."""
+
+ def __init__(self, close):
+ """Use some defaults to ensure we have a header."""
+ self.status = '200 OK'
+ self.headers = {'Content-Type': 'text/html'}
+ self.close = close
+
+ def __getitem__(self, index):
+ """Ensure we don't have a body."""
+ raise IndexError()
+
+ def output(self):
+ """Return self to hook the close method."""
+ return self
+
+
+@pytest.fixture
+def testing_server_close(wsgi_server_client):
+ """Attach a WSGI app to the given server and pre-configure it."""
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = CloseController()
+ wsgi_server.max_request_body_size = 30000000
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+def test_send_header_before_closing(testing_server_close):
+ """Test we are actually sending the headers before calling 'close'."""
+ _, _, resp_body = testing_server_close.server_client.get('/')
+ assert resp_body == b'hello'
diff --git a/resources/lib/cheroot/test/test_server.py b/resources/lib/cheroot/test/test_server.py
new file mode 100644
index 0000000..c53f7a8
--- /dev/null
+++ b/resources/lib/cheroot/test/test_server.py
@@ -0,0 +1,193 @@
+"""Tests for the HTTP server."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import os
+import socket
+import tempfile
+import threading
+import time
+
+import pytest
+
+from .._compat import bton
+from ..server import Gateway, HTTPServer
+from ..testing import (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ EPHEMERAL_PORT,
+ get_server_client,
+)
+
+
+def make_http_server(bind_addr):
+ """Create and start an HTTP server bound to bind_addr."""
+ httpserver = HTTPServer(
+ bind_addr=bind_addr,
+ gateway=Gateway,
+ )
+
+ threading.Thread(target=httpserver.safe_start).start()
+
+ while not httpserver.ready:
+ time.sleep(0.1)
+
+ return httpserver
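+
+
+# Illustrative only: how make_http_server can be driven directly, using the
+# IPv4 any-interface address and an ephemeral port imported above. The
+# helper name is hypothetical and nothing below calls it.
+def _make_http_server_sketch():
+    """Start a throwaway server and make sure it is stopped again."""
+    httpserver = make_http_server((ANY_INTERFACE_IPV4, EPHEMERAL_PORT))
+    try:
+        # make_http_server only returns once the server reports ready.
+        assert httpserver.ready
+    finally:
+        httpserver.stop()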
+
+
+non_windows_sock_test = pytest.mark.skipif(
+ not hasattr(socket, 'AF_UNIX'),
+ reason='UNIX domain sockets are only available under UNIX-based OS',
+)
+
+
+@pytest.fixture
+def http_server():
+ """Provision a server creator as a fixture."""
+ def start_srv():
+ bind_addr = yield
+ httpserver = make_http_server(bind_addr)
+ yield httpserver
+ yield httpserver
+
+ srv_creator = iter(start_srv())
+ next(srv_creator)
+ yield srv_creator
+ try:
+ while True:
+ httpserver = next(srv_creator)
+ if httpserver is not None:
+ httpserver.stop()
+ except StopIteration:
+ pass
+
+
+@pytest.fixture
+def unix_sock_file():
+    """Provide a temporary UNIX socket file path and clean it up afterwards."""
+ tmp_sock_fh, tmp_sock_fname = tempfile.mkstemp()
+
+ yield tmp_sock_fname
+
+ os.close(tmp_sock_fh)
+ os.unlink(tmp_sock_fname)
+
+
+def test_prepare_makes_server_ready():
+ """Check that prepare() makes the server ready, and stop() clears it."""
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ assert not httpserver.ready
+ assert not httpserver.requests._threads
+
+ httpserver.prepare()
+
+ assert httpserver.ready
+ assert httpserver.requests._threads
+ for thr in httpserver.requests._threads:
+ assert thr.ready
+
+ httpserver.stop()
+
+ assert not httpserver.requests._threads
+ assert not httpserver.ready
+
+
+def test_stop_interrupts_serve():
+ """Check that stop() interrupts running of serve()."""
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ httpserver.prepare()
+ serve_thread = threading.Thread(target=httpserver.serve)
+ serve_thread.start()
+
+ serve_thread.join(0.5)
+ assert serve_thread.is_alive()
+
+ httpserver.stop()
+
+ serve_thread.join(0.5)
+ assert not serve_thread.is_alive()
+
+
+@pytest.mark.parametrize(
+ 'ip_addr',
+ (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ )
+)
+def test_bind_addr_inet(http_server, ip_addr):
+ """Check that bound IP address is stored in server."""
+ httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
+
+ assert httpserver.bind_addr[0] == ip_addr
+ assert httpserver.bind_addr[1] != EPHEMERAL_PORT
+
+
+@non_windows_sock_test
+def test_bind_addr_unix(http_server, unix_sock_file):
+ """Check that bound UNIX socket address is stored in server."""
+ httpserver = http_server.send(unix_sock_file)
+
+ assert httpserver.bind_addr == unix_sock_file
+
+
+@pytest.mark.skip(reason="Abstract sockets don't work currently")
+@non_windows_sock_test
+def test_bind_addr_unix_abstract(http_server):
+    """Check that bound UNIX abstract socket address is stored in server."""
+ unix_abstract_sock = b'\x00cheroot/test/socket/here.sock'
+ httpserver = http_server.send(unix_abstract_sock)
+
+ assert httpserver.bind_addr == unix_abstract_sock
+
+
+PEERCRED_IDS_URI = '/peer_creds/ids'
+PEERCRED_TEXTS_URI = '/peer_creds/texts'
+
+
+class _TestGateway(Gateway):
+ def respond(self):
+ req = self.req
+ conn = req.conn
+ req_uri = bton(req.uri)
+ if req_uri == PEERCRED_IDS_URI:
+ peer_creds = conn.peer_pid, conn.peer_uid, conn.peer_gid
+ return ['|'.join(map(str, peer_creds))]
+ elif req_uri == PEERCRED_TEXTS_URI:
+ return ['!'.join((conn.peer_user, conn.peer_group))]
+ return super(_TestGateway, self).respond()
+
+
+@pytest.mark.skip(
+ reason='Test HTTP client is not able to work through UNIX socket currently'
+)
+@non_windows_sock_test
+def test_peercreds_unix_sock(http_server, unix_sock_file):
+ """Check that peercred lookup and resolution work when enabled."""
+ httpserver = http_server.send(unix_sock_file)
+ httpserver.gateway = _TestGateway
+ httpserver.peercreds_enabled = True
+
+ testclient = get_server_client(httpserver)
+
+ expected_peercreds = os.getpid(), os.getuid(), os.getgid()
+ expected_peercreds = '|'.join(map(str, expected_peercreds))
+ assert testclient.get(PEERCRED_IDS_URI) == expected_peercreds
+ assert 'RuntimeError' in testclient.get(PEERCRED_TEXTS_URI)
+
+ httpserver.peercreds_resolve_enabled = True
+ import grp
+ expected_textcreds = os.getlogin(), grp.getgrgid(os.getgid()).gr_name
+ expected_textcreds = '!'.join(map(str, expected_textcreds))
+ assert testclient.get(PEERCRED_TEXTS_URI) == expected_textcreds
diff --git a/resources/lib/cheroot/test/webtest.py b/resources/lib/cheroot/test/webtest.py
new file mode 100644
index 0000000..43448f5
--- /dev/null
+++ b/resources/lib/cheroot/test/webtest.py
@@ -0,0 +1,581 @@
+"""Extensions to unittest for web frameworks.
+
+Use the WebCase.getPage method to request a page from your HTTP server.
+Framework Integration
+=====================
+If you have control over your server process, you can handle errors
+in the server-side of the HTTP conversation a bit better. You must run
+both the client (your WebCase tests) and the server in the same process
+(but in separate threads, obviously).
+When an error occurs in the framework, call server_error. It will print
+the traceback to stdout, and keep any assertions you have from running
+(the assumption is that, if the server errors, the page output will not
+be of further significance to your tests).
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pprint
+import re
+import socket
+import sys
+import time
+import traceback
+import os
+import json
+import unittest
+import warnings
+
+from six.moves import range, http_client, map, urllib_parse
+import six
+
+from more_itertools.more import always_iterable
+
+
+def interface(host):
+ """Return an IP address for a client connection given the server host.
+
+ If the server is listening on '0.0.0.0' (INADDR_ANY)
+ or '::' (IN6ADDR_ANY), this will return the proper localhost.
+ """
+ if host == '0.0.0.0':
+ # INADDR_ANY, which should respond on localhost.
+ return '127.0.0.1'
+ if host == '::':
+ # IN6ADDR_ANY, which should respond on localhost.
+ return '::1'
+ return host
+
+
+try:
+ # Jython support
+ if sys.platform[:4] == 'java':
+ def getchar():
+ """Get a key press."""
+ # Hopefully this is enough
+ return sys.stdin.read(1)
+ else:
+ # On Windows, msvcrt.getch reads a single char without output.
+ import msvcrt
+
+ def getchar():
+ """Get a key press."""
+ return msvcrt.getch()
+except ImportError:
+    # Unix getchar
+ import tty
+ import termios
+
+ def getchar():
+ """Get a key press."""
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(sys.stdin.fileno())
+ ch = sys.stdin.read(1)
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ return ch
+
+
+# from jaraco.properties
+class NonDataProperty:
+ """Non-data property decorator."""
+
+ def __init__(self, fget):
+ """Initialize a non-data property."""
+ assert fget is not None, 'fget cannot be none'
+ assert callable(fget), 'fget must be callable'
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ """Return a class property."""
+ if obj is None:
+ return self
+ return self.fget(obj)
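+
+
+# Illustrative only: a hypothetical class showing what NonDataProperty does.
+# Because it defines no __set__, accessing the attribute on the class returns
+# the descriptor itself, and an instance (or subclass) can shadow it with a
+# plain value instead of the computed one.
+class _NonDataPropertySketch:
+    """Not used by the test machinery; for illustration only."""
+
+    @NonDataProperty
+    def answer(self):
+        # Computed on instance access unless shadowed by an instance attr.
+        return 42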
+
+
+class WebCase(unittest.TestCase):
+ """Helper web test suite base."""
+
+ HOST = '127.0.0.1'
+ PORT = 8000
+ HTTP_CONN = http_client.HTTPConnection
+ PROTOCOL = 'HTTP/1.1'
+
+ scheme = 'http'
+ url = None
+
+ status = None
+ headers = None
+ body = None
+
+ encoding = 'utf-8'
+
+ time = None
+
+ @property
+ def _Conn(self):
+        """Return HTTPConnection or HTTPSConnection based on self.scheme.
+
+        Both connection classes come from http.client.
+        """
+ cls_name = '{scheme}Connection'.format(scheme=self.scheme.upper())
+ return getattr(http_client, cls_name)
+
+ def get_conn(self, auto_open=False):
+ """Return a connection to our HTTP server."""
+ conn = self._Conn(self.interface(), self.PORT)
+ # Automatically re-connect?
+ conn.auto_open = auto_open
+ conn.connect()
+ return conn
+
+ def set_persistent(self, on=True, auto_open=False):
+ """Make our HTTP_CONN persistent (or not).
+
+ If the 'on' argument is True (the default), then self.HTTP_CONN
+ will be set to an instance of HTTP(S)?Connection
+ to persist across requests.
+ As this class only allows for a single open connection, if
+ self already has an open connection, it will be closed.
+ """
+ try:
+ self.HTTP_CONN.close()
+ except (TypeError, AttributeError):
+ pass
+
+ self.HTTP_CONN = (
+ self.get_conn(auto_open=auto_open)
+ if on
+ else self._Conn
+ )
+
+ @property
+ def persistent(self): # noqa: D401; irrelevant for properties
+        """Presence of the persistent HTTP connection."""
+ return hasattr(self.HTTP_CONN, '__class__')
+
+ @persistent.setter
+ def persistent(self, on):
+ self.set_persistent(on)
+
+ def interface(self):
+ """Return an IP address for a client connection.
+
+ If the server is listening on '0.0.0.0' (INADDR_ANY)
+ or '::' (IN6ADDR_ANY), this will return the proper localhost.
+ """
+ return interface(self.HOST)
+
+ def getPage(self, url, headers=None, method='GET', body=None,
+ protocol=None, raise_subcls=None):
+ """Open the url with debugging support. Return status, headers, body.
+
+ url should be the identifier passed to the server, typically a
+ server-absolute path and query string (sent between method and
+ protocol), and should only be an absolute URI if proxy support is
+ enabled in the server.
+
+ If the application under test generates absolute URIs, be sure
+ to wrap them first with strip_netloc::
+
+ class MyAppWebCase(WebCase):
+                def getPage(self, url, *args, **kwargs):
+ super(MyAppWebCase, self).getPage(
+ cheroot.test.webtest.strip_netloc(url),
+ *args, **kwargs
+ )
+
+        `raise_subcls` must be a tuple of exception classes (or a single
+        exception class) that should not be treated as a socket.error even
+        though they are subclasses of socket.error, and therefore should not
+        trigger a connection retry.
+ """
+ ServerError.on = False
+
+ if isinstance(url, six.text_type):
+ url = url.encode('utf-8')
+ if isinstance(body, six.text_type):
+ body = body.encode('utf-8')
+
+ self.url = url
+ self.time = None
+ start = time.time()
+ result = openURL(url, headers, method, body, self.HOST, self.PORT,
+ self.HTTP_CONN, protocol or self.PROTOCOL,
+ raise_subcls)
+ self.time = time.time() - start
+ self.status, self.headers, self.body = result
+
+ # Build a list of request cookies from the previous response cookies.
+ self.cookies = [('Cookie', v) for k, v in self.headers
+ if k.lower() == 'set-cookie']
+
+ if ServerError.on:
+ raise ServerError()
+ return result
+
+ @NonDataProperty
+ def interactive(self):
+ """Determine whether tests are run in interactive mode.
+
+ Load interactivity setting from environment, where
+        the value can be numeric or a string such as 'True', 'False',
+        '1', or '0'.
+ """
+ env_str = os.environ.get('WEBTEST_INTERACTIVE', 'True')
+ is_interactive = bool(json.loads(env_str.lower()))
+ if is_interactive:
+ warnings.warn(
+ 'Interactive test failure interceptor support via '
+ 'WEBTEST_INTERACTIVE environment variable is deprecated.',
+ DeprecationWarning
+ )
+ return is_interactive
+
+ console_height = 30
+
+ def _handlewebError(self, msg):
+ print('')
+ print(' ERROR: %s' % msg)
+
+ if not self.interactive:
+ raise self.failureException(msg)
+
+ p = (' Show: '
+ '[B]ody [H]eaders [S]tatus [U]RL; '
+ '[I]gnore, [R]aise, or sys.e[X]it >> ')
+ sys.stdout.write(p)
+ sys.stdout.flush()
+ while True:
+ i = getchar().upper()
+ if not isinstance(i, type('')):
+ i = i.decode('ascii')
+ if i not in 'BHSUIRX':
+ continue
+ print(i.upper()) # Also prints new line
+ if i == 'B':
+ for x, line in enumerate(self.body.splitlines()):
+ if (x + 1) % self.console_height == 0:
+                        # The trailing \r lets the next line overwrite this prompt
+ sys.stdout.write('<-- More -->\r')
+ m = getchar().lower()
+ # Erase our "More" prompt
+ sys.stdout.write(' \r')
+ if m == 'q':
+ break
+ print(line)
+ elif i == 'H':
+ pprint.pprint(self.headers)
+ elif i == 'S':
+ print(self.status)
+ elif i == 'U':
+ print(self.url)
+ elif i == 'I':
+ # return without raising the normal exception
+ return
+ elif i == 'R':
+ raise self.failureException(msg)
+ elif i == 'X':
+ sys.exit()
+ sys.stdout.write(p)
+ sys.stdout.flush()
+
+ @property
+ def status_code(self): # noqa: D401; irrelevant for properties
+ """Integer HTTP status code."""
+ return int(self.status[:3])
+
+ def status_matches(self, expected):
+ """Check whether actual status matches expected."""
+ actual = (
+ self.status_code
+ if isinstance(expected, int) else
+ self.status
+ )
+ return expected == actual
+
+ def assertStatus(self, status, msg=None):
+ """Fail if self.status != status.
+
+ status may be integer code, exact string status, or
+ iterable of allowed possibilities.
+ """
+ if any(map(self.status_matches, always_iterable(status))):
+ return
+
+ tmpl = 'Status {self.status} does not match {status}'
+ msg = msg or tmpl.format(**locals())
+ self._handlewebError(msg)
+
+ def assertHeader(self, key, value=None, msg=None):
+ """Fail if (key, [value]) not in self.headers."""
+ lowkey = key.lower()
+ for k, v in self.headers:
+ if k.lower() == lowkey:
+ if value is None or str(value) == v:
+ return v
+
+ if msg is None:
+ if value is None:
+ msg = '%r not in headers' % key
+ else:
+ msg = '%r:%r not in headers' % (key, value)
+ self._handlewebError(msg)
+
+ def assertHeaderIn(self, key, values, msg=None):
+ """Fail if header indicated by key doesn't have one of the values."""
+ lowkey = key.lower()
+ for k, v in self.headers:
+ if k.lower() == lowkey:
+ matches = [value for value in values if str(value) == v]
+ if matches:
+ return matches
+
+ if msg is None:
+ msg = '%(key)r not in %(values)r' % vars()
+ self._handlewebError(msg)
+
+ def assertHeaderItemValue(self, key, value, msg=None):
+ """Fail if the header does not contain the specified value."""
+ actual_value = self.assertHeader(key, msg=msg)
+        header_values = list(map(str.strip, actual_value.split(',')))
+ if value in header_values:
+ return value
+
+ if msg is None:
+ msg = '%r not in %r' % (value, header_values)
+ self._handlewebError(msg)
+
+ def assertNoHeader(self, key, msg=None):
+ """Fail if key in self.headers."""
+ lowkey = key.lower()
+ matches = [k for k, v in self.headers if k.lower() == lowkey]
+ if matches:
+ if msg is None:
+ msg = '%r in headers' % key
+ self._handlewebError(msg)
+
+ def assertNoHeaderItemValue(self, key, value, msg=None):
+ """Fail if the header contains the specified value."""
+ lowkey = key.lower()
+ hdrs = self.headers
+ matches = [k for k, v in hdrs if k.lower() == lowkey and v == value]
+ if matches:
+ if msg is None:
+ msg = '%r:%r in %r' % (key, value, hdrs)
+ self._handlewebError(msg)
+
+ def assertBody(self, value, msg=None):
+ """Fail if value != self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value != self.body:
+ if msg is None:
+ msg = 'expected body:\n%r\n\nactual body:\n%r' % (
+ value, self.body)
+ self._handlewebError(msg)
+
+ def assertInBody(self, value, msg=None):
+ """Fail if value not in self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value not in self.body:
+ if msg is None:
+ msg = '%r not in body: %s' % (value, self.body)
+ self._handlewebError(msg)
+
+ def assertNotInBody(self, value, msg=None):
+ """Fail if value in self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value in self.body:
+ if msg is None:
+ msg = '%r found in body' % value
+ self._handlewebError(msg)
+
+ def assertMatchesBody(self, pattern, msg=None, flags=0):
+ """Fail if value (a regex pattern) is not in self.body."""
+ if isinstance(pattern, six.text_type):
+ pattern = pattern.encode(self.encoding)
+ if re.search(pattern, self.body, flags) is None:
+ if msg is None:
+ msg = 'No match for %r in body' % pattern
+ self._handlewebError(msg)
+
+
+methods_with_bodies = ('POST', 'PUT', 'PATCH')
+
+
+def cleanHeaders(headers, method, body, host, port):
+ """Return request headers, with required headers added (if missing)."""
+ if headers is None:
+ headers = []
+
+ # Add the required Host request header if not present.
+ # [This specifies the host:port of the server, not the client.]
+ found = False
+ for k, v in headers:
+ if k.lower() == 'host':
+ found = True
+ break
+ if not found:
+ if port == 80:
+ headers.append(('Host', host))
+ else:
+ headers.append(('Host', '%s:%s' % (host, port)))
+
+ if method in methods_with_bodies:
+ # Stick in default type and length headers if not present
+ found = False
+ for k, v in headers:
+ if k.lower() == 'content-type':
+ found = True
+ break
+ if not found:
+ headers.append(
+ ('Content-Type', 'application/x-www-form-urlencoded'))
+ headers.append(('Content-Length', str(len(body or ''))))
+
+ return headers
+
+
+def shb(response):
+ """Return status, headers, body the way we like from a response."""
+ if six.PY3:
+ h = response.getheaders()
+ else:
+ h = []
+ key, value = None, None
+ for line in response.msg.headers:
+ if line:
+ if line[0] in ' \t':
+ value += line.strip()
+ else:
+ if key and value:
+ h.append((key, value))
+ key, value = line.split(':', 1)
+ key = key.strip()
+ value = value.strip()
+ if key and value:
+ h.append((key, value))
+
+ return '%s %s' % (response.status, response.reason), h, response.read()
+
+
+def openURL(url, headers=None, method='GET', body=None,
+ host='127.0.0.1', port=8000, http_conn=http_client.HTTPConnection,
+ protocol='HTTP/1.1', raise_subcls=None):
+ """
+ Open the given HTTP resource and return status, headers, and body.
+
+    `raise_subcls` may be a single exception class or a tuple of exception
+    classes that should not be treated as a socket.error, even though they
+    are subclasses of socket.error, and therefore should not trigger a
+    connection retry.
+ """
+ headers = cleanHeaders(headers, method, body, host, port)
+
+ # Trying 10 times is simply in case of socket errors.
+ # Normal case--it should run once.
+ for trial in range(10):
+ try:
+ # Allow http_conn to be a class or an instance
+ if hasattr(http_conn, 'host'):
+ conn = http_conn
+ else:
+ conn = http_conn(interface(host), port)
+
+ conn._http_vsn_str = protocol
+ conn._http_vsn = int(''.join([x for x in protocol if x.isdigit()]))
+
+ if six.PY3 and isinstance(url, bytes):
+ url = url.decode()
+ conn.putrequest(method.upper(), url, skip_host=True,
+ skip_accept_encoding=True)
+
+ for key, value in headers:
+ conn.putheader(key, value.encode('Latin-1'))
+ conn.endheaders()
+
+ if body is not None:
+ conn.send(body)
+
+ # Handle response
+ response = conn.getresponse()
+
+ s, h, b = shb(response)
+
+ if not hasattr(http_conn, 'host'):
+ # We made our own conn instance. Close it.
+ conn.close()
+
+ return s, h, b
+ except socket.error as e:
+ if raise_subcls is not None and isinstance(e, raise_subcls):
+ raise
+ else:
+ time.sleep(0.5)
+ if trial == 9:
+ raise
+
+
+def strip_netloc(url):
+ """Return absolute-URI path from URL.
+
+ Strip the scheme and host from the URL, returning the
+ server-absolute portion.
+
+ Useful for wrapping an absolute-URI for which only the
+ path is expected (such as in calls to getPage).
+
+ >>> strip_netloc('https://google.com/foo/bar?bing#baz')
+ '/foo/bar?bing'
+
+ >>> strip_netloc('//google.com/foo/bar?bing#baz')
+ '/foo/bar?bing'
+
+ >>> strip_netloc('/foo/bar?bing#baz')
+ '/foo/bar?bing'
+ """
+ parsed = urllib_parse.urlparse(url)
+ scheme, netloc, path, params, query, fragment = parsed
+ stripped = '', '', path, params, query, ''
+ return urllib_parse.urlunparse(stripped)
+
+
+# Add any exceptions which your web framework handles
+# normally (that you don't want server_error to trap).
+ignored_exceptions = []
+
+# You'll want to set this to True when you can't guarantee
+# that each response will immediately follow each request;
+# for example, when handling requests via multiple threads.
+ignore_all = False
+
+
+class ServerError(Exception):
+ """Exception for signalling server error."""
+
+ on = False
+
+
+def server_error(exc=None):
+ """Server debug hook.
+
+ Return True if exception handled, False if ignored.
+ You probably want to wrap this, so you can still handle an error using
+ your framework when it's ignored.
+ """
+ if exc is None:
+ exc = sys.exc_info()
+
+ if ignore_all or exc[0] in ignored_exceptions:
+ return False
+ else:
+ ServerError.on = True
+ print('')
+ print(''.join(traceback.format_exception(*exc)))
+ return True
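
As a point of reference for how the helpers in this file fit together: openURL()
returns the (status, headers, body) triple built by shb(), and server_error() is
the debug hook the docstring suggests wrapping. A minimal sketch, assuming a
server is already listening on 127.0.0.1:8080; the path, header, and hook name
below are illustrative only::

    from cheroot.test import webtest

    # openURL() retries on socket.error and returns a "NNN Reason" status
    # string, a list of (name, value) header pairs, and the body as bytes.
    status, headers, body = webtest.openURL(
        '/', headers=[('Accept', 'text/plain')],
        host='127.0.0.1', port=8080,
    )
    assert status.startswith('200')

    # server_error() returns False when it ignores an exception, so a
    # wrapper can still hand the error to the framework's own handler.
    def my_error_hook(exc=None):  # illustrative name
        handled = webtest.server_error(exc)
        if not handled:
            pass  # framework-specific error handling would go here
        return handled
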
diff --git a/resources/lib/cheroot/testing.py b/resources/lib/cheroot/testing.py
new file mode 100644
index 0000000..f01d0aa
--- /dev/null
+++ b/resources/lib/cheroot/testing.py
@@ -0,0 +1,144 @@
+"""Pytest fixtures and other helpers for doing testing by end-users."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from contextlib import closing
+import errno
+import socket
+import threading
+import time
+
+import pytest
+from six.moves import http_client
+
+import cheroot.server
+from cheroot.test import webtest
+import cheroot.wsgi
+
+EPHEMERAL_PORT = 0
+NO_INTERFACE = None # Using this or '' will cause an exception
+ANY_INTERFACE_IPV4 = '0.0.0.0'
+ANY_INTERFACE_IPV6 = '::'
+
+config = {
+ cheroot.wsgi.Server: {
+ 'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
+ 'wsgi_app': None,
+ },
+ cheroot.server.HTTPServer: {
+ 'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
+ 'gateway': cheroot.server.Gateway,
+ },
+}
+
+
+def cheroot_server(server_factory):
+ """Set up and tear down a Cheroot server instance."""
+ conf = config[server_factory].copy()
+ bind_port = conf.pop('bind_addr')[-1]
+
+ for interface in ANY_INTERFACE_IPV6, ANY_INTERFACE_IPV4:
+ try:
+ actual_bind_addr = (interface, bind_port)
+ httpserver = server_factory( # create it
+ bind_addr=actual_bind_addr,
+ **conf
+ )
+ except OSError:
+ pass
+ else:
+ break
+
+ threading.Thread(target=httpserver.safe_start).start() # spawn it
+ while not httpserver.ready: # wait until fully initialized and bound
+ time.sleep(0.1)
+
+ yield httpserver
+
+ httpserver.stop() # destroy it
+
+
+@pytest.fixture(scope='module')
+def wsgi_server():
+ """Set up and tear down a Cheroot WSGI server instance."""
+ for srv in cheroot_server(cheroot.wsgi.Server):
+ yield srv
+
+
+@pytest.fixture(scope='module')
+def native_server():
+ """Set up and tear down a Cheroot HTTP server instance."""
+ for srv in cheroot_server(cheroot.server.HTTPServer):
+ yield srv
+
+
+class _TestClient:
+ def __init__(self, server):
+ self._interface, self._host, self._port = _get_conn_data(server)
+ self._http_connection = self.get_connection()
+ self.server_instance = server
+
+ def get_connection(self):
+ name = '{interface}:{port}'.format(
+ interface=self._interface,
+ port=self._port,
+ )
+ return http_client.HTTPConnection(name)
+
+ def request(
+ self, uri, method='GET', headers=None, http_conn=None,
+ protocol='HTTP/1.1',
+ ):
+ return webtest.openURL(
+ uri, method=method,
+ headers=headers,
+ host=self._host, port=self._port,
+ http_conn=http_conn or self._http_connection,
+ protocol=protocol,
+ )
+
+ def __getattr__(self, attr_name):
+ def _wrapper(uri, **kwargs):
+ http_method = attr_name.upper()
+ return self.request(uri, method=http_method, **kwargs)
+
+ return _wrapper
+
+
+def _probe_ipv6_sock(interface):
+ # Alternate way is to check IPs on interfaces using glibc, like:
+ # github.com/Gautier/minifail/blob/master/minifail/getifaddrs.py
+ try:
+ with closing(socket.socket(family=socket.AF_INET6)) as sock:
+ sock.bind((interface, 0))
+ except (OSError, socket.error) as sock_err:
+ # In Python 3 socket.error is an alias for OSError
+ # In Python 2 socket.error is a subclass of IOError
+ if sock_err.errno != errno.EADDRNOTAVAIL:
+ raise
+ else:
+ return True
+
+ return False
+
+
+def _get_conn_data(server):
+ if isinstance(server.bind_addr, tuple):
+ host, port = server.bind_addr
+ else:
+ host, port = server.bind_addr, 0
+
+ interface = webtest.interface(host)
+
+ if ':' in interface and not _probe_ipv6_sock(interface):
+ interface = '127.0.0.1'
+ if ':' in host:
+ host = interface
+
+ return interface, host, port
+
+
+def get_server_client(server):
+ """Create and return a test client for the given server."""
+ return _TestClient(server)
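
The fixtures and _TestClient above are meant to be consumed from an end-user
test suite. A sketch of one way to do that; `hello_app` and `test_hello` are
illustrative names, and the fixtures are assumed to be registered with pytest
(for example via `pytest_plugins = 'cheroot.testing'` in a conftest.py)::

    from cheroot.testing import get_server_client

    def hello_app(environ, start_response):
        """Serve a constant body; used only for this illustration."""
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'Hello world!']

    def test_hello(wsgi_server):
        # The module-scoped fixture yields a started cheroot.wsgi.Server
        # bound to an ephemeral port. wsgi.Gateway looks up server.wsgi_app
        # per request, so assigning it after startup is fine.
        wsgi_server.wsgi_app = hello_app
        client = get_server_client(wsgi_server)

        # _TestClient.__getattr__ turns attribute access into HTTP verbs,
        # so client.get() issues a GET through webtest.openURL and returns
        # the (status, headers, body) triple.
        status, headers, body = client.get('/')
        assert status == '200 OK'
        assert body == b'Hello world!'
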
diff --git a/resources/lib/cheroot/workers/__init__.py b/resources/lib/cheroot/workers/__init__.py
new file mode 100644
index 0000000..098b8f2
--- /dev/null
+++ b/resources/lib/cheroot/workers/__init__.py
@@ -0,0 +1 @@
+"""HTTP workers pool."""
diff --git a/resources/lib/cheroot/workers/__pycache__/__init__.cpython-37.opt-1.pyc b/resources/lib/cheroot/workers/__pycache__/__init__.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..12d5b1a8019f8508d3abd702fd24138928ded017
GIT binary patch
literal 220
zcmZ?b<>g`kg6noC<0OIfV-N=h7=a82ATH(r5-AK(3@MDk44O<;LLMO@0Se{$McJuE
z#R>)a`8j%invA#DCRR$p~XOTF*(ImF^&ZVE{P?HF+ur>
zxtV$CG2ZzpnK6keDfxNDF$FoL>6v+YiKQu-`Fh0#`6ZcYl`%!B#rdU0$*Dl?nMpCp
p8L372`6V$>t7GEhGxIV_;^XxSDsOSvTk{82~ApLDc{N
literal 0
HcmV?d00001
diff --git a/resources/lib/cheroot/workers/__pycache__/threadpool.cpython-37.opt-1.pyc b/resources/lib/cheroot/workers/__pycache__/threadpool.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ead052e1cc24197d59b2908522e2297f3765e66e
GIT binary patch
literal 8031
zcmd5>O^hSQb?!eln;Z`3XLm=Vl_PZIH8iwWgc2vQqiojuGpk6DF<$MAJQ6WhqlaCb
zX->17TV0$Vf@2t%4Wf$x`<57n!O6vN;DawgklgHpfB=CHz65CG66E4F;7cw6bVQTZz^u!G_g
zEhwHDjd?bIrZuX8*;qik6O_xzBO{NR;c
zKV^Y?l<)(_-ENXZ^{1GkRx`5&e>Y8{o?u?s=_XucP7of3X_&-Wwadd;c&%RC6u950
z=~>}%kM&s1$ST`X)|M2XmDTS<2yT+x(Z@jL4zAQi!L)`Br5J(UFasknpJ)v$uz~`9
z3xSOZ7qik9@3E5x<4J%08@p}R6zt%dtqGndqJ4g({3Wd*2xZM
zY0~3OhS$U0ohD=_iP%wFknX5`kvq{n$%>u_QJ!~|rYGmD_m{@HsOL{Vbv1I{HG>mE
z`P^tBYDpE`W{&4^InZ+x11(Puq#ke7Z0Z~P9p-si+4DL{(2J;E@jTj!{0ZhABuxmN
zr@^YzBT7xuu-rkhge$dCIJ#vxrfj<$D>u<;8Y{qw0gRY19EF#nG7A`vfo*g+$@O2y
z1}hxqplK$t2;*MTOQREjJ|Y0QZxevsR>Ix;NgM-VK(?DmtflpG*~Moa9Yi9LB|_S4
zH%!wWP?xl%7;4D{-9%;55)?Y)L`Xr{^yYyZ3cPs~yDyU^p1ka~A|LO!1OOWfci&Il
zRQO!5pzdxZs3%=Dv^1@oQV{xusf*dNH1+uj%`O!oWc#kS`Nlh2>)+Vkc=X`ycOGnR
zA^+SJgljXZdJ9wAxrLW1Q&>ekX$A?h(T5gARbzgsDYwO{rd{leQ
zJz~3~ehLs%5=aWi4uBn@HL;Hk;h-g!s=cPiuXEyiQN
zIDA-=5v{;mogS-WeE)rFl1Rt5IOe2Bbfzg$Dm_9t?A0z$Hta9
zw<5o@8~C@^X;iWdOE2q|-hX*=r@<*9^yzu^TT#*kVcteXy?6QaU%YJk*X8uBiv<+2
z^6rUXsYh7nL}&aO6)PyR0&j&8!;Fc2o|!mbMeCcmCg%M6O=Vt9<%owuu=ttT9DV9+
zHa^R2UP7m5_VqM6GOo$~@$A!q))i`8>)q9W_JM
zx~qZt0^!Tr=$G%&=D3frIsFpB!QH~xdmsMhABX?*(NAwH|DpCrd5tlw6Kp&RRL2TQg
zWz^@WKGF9V1I^Of6|@xFm9HS|@=!<1ylknq=Vcr2(6%7k7TXK54R>f;8jaO1t!lD;
z3FBa7Sr*^b9=qEZJAT#D#Im^3G7ie$)pwX1?H*oZR1_-JuOBl
zok*e29`b~oD4a$!k*=LQfJ=0$LV-~XKy?0y2_CXT7|gi$L`S#9lMegS17V7&n9b?PV4O2oh=T9@8pC%cKygEtW{bKdKUth{-W3f6gW9Ez+&
zakOl%naZn3+BVqn3>jo5f(H*`Q;OBJ!isJHv4&pJDY&!oTc$h1L2X&Titj3(FX}5g
zjBO%ss6$Mx#209Si&RjM$d{?0k)@KPZaA3<|060SBV82Ih{(ygxKg@vik4xS7Cz3G
zWMJ4tD0MVYHIR?!h_%9N{7V75xK&U7SdGPy#$SqbKmuatsnuz=rkuox`i
zcP@B7xDs4_qBkmd>sqk#1Qgtu51s?444Zf-*f?*wc1S=l(vTO1vMEvtkoneDRZLGa*do#(i1h;
z7eF`iC5Ax=?N5n4p8hXBP0}ZQ=$Cj!a>{F{NN&)`$~=8Qc4_PpFPOq=7Y0_Ke@NV6
z4+DnBF&is0jrnpW7-Q^l
z{~AJ4N>{aok+-daq54-Y3~d}BPkC6f2x$#{RE}>5>HE&~b?sX#@S2eVyv6HhfEJMTiL;^l5eB)4{)VK>zYB-
zPp+VXuLCDAp=LS4SjV7<3&x=^OCWj)j3-jh>mb!LI=3N832l
zSJ8TAoEotsaPV=apBk`w*frWrdDvkF))Pk*1}1R>o?FMRzyU~L1Id=caoQHep%WCP
zZGU~6Ah0*iK%
zUMw2+ZZAL()+odtwk#_uJ*K&Ol_MkNWfslppHR!`v8=C}j!`yN5GhynDHErjQ+gYl
z_!{0H`5)5V5C`ihaq5tJ7JX>R?2H)GK<ycI?Wom;bE
zo3f)_79~fy9ffhN{*=6sI?Ah+6(WjZdN)G6mD!wiV09CSk=~S0BAH3@jUuHyba|T|
z(b5~m*aL95-H4oBxQO$gK3;P?BkD@gMY(23LqAFP54f8#Vi*9kfjNgtG-VO
z7Gs4|!a;sSDtr~IIli7Nd=UzVQy3b;lqwepnUr?FH(I%>PAX{)-1?{wp1l0Tub2
zMp1eGXiUUP_t0}mEaztgRn`AJwBr=v9-yHjiNO}*2vU(lyD;(@p8Z58I0~2Z5S?zx
zmyuhO|1SuY{Hw!y9P00JzsXv?C^{LtxDAQ2+7$`18T|0iV+XVOW{-0g3$^2tgW&v<
zE`NufTJ0o^Rjy2NOlHedfB*=B*TflulkWFKfIlCyBK=L^JzB?KY$sjzfb)cB)w?N<
z4CoIKdCy;gh+0wc)ElUCB7v-!&mb#;@@SEbDv;|VA&U3Z!L;6-MX*Jw=6~Yt^p8Py6$If!t&Q1=Oco!Tfa;hR(tt#UeA^m8w{fL(
zHsTa46BK;SnX43>IcLGKoonSU$)g6qiH=a{Z@X3x>cHQ2oK(h5Du{@2
+        if self.max > 0:
+ budget = max(self.max - len(self._threads), 0)
+ else:
+ # self.max <= 0 indicates no maximum
+ budget = float('inf')
+
+ n_new = min(amount, budget)
+
+ workers = [self._spawn_worker() for i in range(n_new)]
+ while not all(worker.ready for worker in workers):
+ time.sleep(.1)
+ self._threads.extend(workers)
+
+ def _spawn_worker(self):
+ worker = WorkerThread(self.server)
+ worker.setName('CP Server ' + worker.getName())
+ worker.start()
+ return worker
+
+ def shrink(self, amount):
+ """Kill off worker threads (not below self.min)."""
+        # Remove any dead threads from our list; iterate over a copy so
+        # that removing entries doesn't skip the following thread.
+        for t in list(self._threads):
+ if not t.isAlive():
+ self._threads.remove(t)
+ amount -= 1
+
+ # calculate the number of threads above the minimum
+ n_extra = max(len(self._threads) - self.min, 0)
+
+ # don't remove more than amount
+ n_to_remove = min(amount, n_extra)
+
+ # put shutdown requests on the queue equal to the number of threads
+ # to remove. As each request is processed by a worker, that worker
+ # will terminate and be culled from the list.
+ for n in range(n_to_remove):
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ def stop(self, timeout=5):
+ """Terminate all worker threads.
+
+ Args:
+ timeout (int): time to wait for threads to stop gracefully
+ """
+ # Must shut down threads here so the code that calls
+ # this method can know when all threads are stopped.
+ for worker in self._threads:
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ # Don't join currentThread (when stop is called inside a request).
+ current = threading.currentThread()
+ if timeout is not None and timeout >= 0:
+ endtime = time.time() + timeout
+ while self._threads:
+ worker = self._threads.pop()
+ if worker is not current and worker.isAlive():
+ try:
+ if timeout is None or timeout < 0:
+ worker.join()
+ else:
+ remaining_time = endtime - time.time()
+ if remaining_time > 0:
+ worker.join(remaining_time)
+ if worker.isAlive():
+ # We exhausted the timeout.
+ # Forcibly shut down the socket.
+ c = worker.conn
+ if c and not c.rfile.closed:
+ try:
+ c.socket.shutdown(socket.SHUT_RD)
+ except TypeError:
+ # pyOpenSSL sockets don't take an arg
+ c.socket.shutdown()
+ worker.join()
+ except (AssertionError,
+ # Ignore repeated Ctrl-C.
+ # See
+ # https://github.com/cherrypy/cherrypy/issues/691.
+ KeyboardInterrupt):
+ pass
+
+ @property
+ def qsize(self):
+ """Return the queue size."""
+ return self._queue.qsize()
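
The pool above is normally owned by a server rather than driven directly; the
wsgi.Server in the next file creates one as ThreadPool(self, min=numthreads or
1, max=max, ...) and keeps it as server.requests. A small sketch of the sizing
and shutdown semantics on an idle pool (nothing is actually serving, so the
calls are effectively no-ops); the address and thread counts are arbitrary::

    from cheroot import wsgi

    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'ok']

    # numthreads/max become the pool's min/max bounds (see wsgi.py below).
    server = wsgi.Server(('127.0.0.1', 0), app, numthreads=4, max=8)
    pool = server.requests

    print(pool.min, pool.max)  # sizing bounds enforced as the pool resizes
    print(pool.qsize)          # connections currently queued for workers

    pool.shrink(2)         # queues shutdown requests, never dropping the
                           # live worker count below pool.min
    pool.stop(timeout=5)   # ask each worker to exit and join it, forcing
                           # its socket shut once the deadline passes
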
diff --git a/resources/lib/cheroot/wsgi.py b/resources/lib/cheroot/wsgi.py
new file mode 100644
index 0000000..a04c943
--- /dev/null
+++ b/resources/lib/cheroot/wsgi.py
@@ -0,0 +1,423 @@
+"""This class holds Cheroot WSGI server implementation.
+
+Simplest example on how to use this server::
+
+ from cheroot import wsgi
+
+ def my_crazy_app(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ return [b'Hello world!']
+
+ addr = '0.0.0.0', 8070
+ server = wsgi.Server(addr, my_crazy_app)
+ server.start()
+
+The Cheroot WSGI server can serve as many WSGI applications
+as you want in one instance by using a PathInfoDispatcher::
+
+ path_map = {
+ '/': my_crazy_app,
+ '/blog': my_blog_app,
+ }
+ d = wsgi.PathInfoDispatcher(path_map)
+ server = wsgi.Server(addr, d)
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import sys
+
+import six
+from six.moves import filter
+
+from . import server
+from .workers import threadpool
+from ._compat import ntob, bton
+
+
+class Server(server.HTTPServer):
+ """A subclass of HTTPServer which calls a WSGI application."""
+
+ wsgi_version = (1, 0)
+ """The version of WSGI to produce."""
+
+ def __init__(
+ self, bind_addr, wsgi_app, numthreads=10, server_name=None,
+ max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5,
+ accepted_queue_size=-1, accepted_queue_timeout=10,
+ peercreds_enabled=False, peercreds_resolve_enabled=False,
+ ):
+ """Initialize WSGI Server instance.
+
+ Args:
+ bind_addr (tuple): network interface to listen to
+ wsgi_app (callable): WSGI application callable
+ numthreads (int): number of threads for WSGI thread pool
+ server_name (str): web server name to be advertised via
+ Server HTTP header
+ max (int): maximum number of worker threads
+ request_queue_size (int): the 'backlog' arg to
+ socket.listen(); max queued connections
+ timeout (int): the timeout in seconds for accepted connections
+ shutdown_timeout (int): the total time, in seconds, to
+ wait for worker threads to cleanly exit
+ accepted_queue_size (int): maximum number of active
+ requests in queue
+ accepted_queue_timeout (int): timeout for putting request
+ into queue
+ """
+ super(Server, self).__init__(
+ bind_addr,
+ gateway=wsgi_gateways[self.wsgi_version],
+ server_name=server_name,
+ peercreds_enabled=peercreds_enabled,
+ peercreds_resolve_enabled=peercreds_resolve_enabled,
+ )
+ self.wsgi_app = wsgi_app
+ self.request_queue_size = request_queue_size
+ self.timeout = timeout
+ self.shutdown_timeout = shutdown_timeout
+ self.requests = threadpool.ThreadPool(
+ self, min=numthreads or 1, max=max,
+ accepted_queue_size=accepted_queue_size,
+ accepted_queue_timeout=accepted_queue_timeout)
+
+ @property
+ def numthreads(self):
+ """Set minimum number of threads."""
+ return self.requests.min
+
+ @numthreads.setter
+ def numthreads(self, value):
+ self.requests.min = value
+
+
+class Gateway(server.Gateway):
+ """A base class to interface HTTPServer with WSGI."""
+
+ def __init__(self, req):
+ """Initialize WSGI Gateway instance with request.
+
+ Args:
+ req (HTTPRequest): current HTTP request
+ """
+ super(Gateway, self).__init__(req)
+ self.started_response = False
+ self.env = self.get_environ()
+ self.remaining_bytes_out = None
+
+ @classmethod
+ def gateway_map(cls):
+ """Create a mapping of gateways and their versions.
+
+ Returns:
+ dict[tuple[int,int],class]: map of gateway version and
+ corresponding class
+
+ """
+ return dict(
+ (gw.version, gw)
+ for gw in cls.__subclasses__()
+ )
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ raise NotImplementedError
+
+ def respond(self):
+ """Process the current request.
+
+ From :pep:`333`:
+
+ The start_response callable must not actually transmit
+ the response headers. Instead, it must store them for the
+ server or gateway to transmit only after the first
+ iteration of the application return value that yields
+ a NON-EMPTY string, or upon the application's first
+ invocation of the write() callable.
+ """
+ response = self.req.server.wsgi_app(self.env, self.start_response)
+ try:
+ for chunk in filter(None, response):
+ if not isinstance(chunk, six.binary_type):
+ raise ValueError('WSGI Applications must yield bytes')
+ self.write(chunk)
+ finally:
+ # Send headers if not already sent
+ self.req.ensure_headers_sent()
+ if hasattr(response, 'close'):
+ response.close()
+
+ def start_response(self, status, headers, exc_info=None):
+ """WSGI callable to begin the HTTP response."""
+ # "The application may call start_response more than once,
+ # if and only if the exc_info argument is provided."
+ if self.started_response and not exc_info:
+ raise AssertionError('WSGI start_response called a second '
+ 'time with no exc_info.')
+ self.started_response = True
+
+ # "if exc_info is provided, and the HTTP headers have already been
+ # sent, start_response must raise an error, and should raise the
+ # exc_info tuple."
+ if self.req.sent_headers:
+ try:
+ six.reraise(*exc_info)
+ finally:
+ exc_info = None
+
+ self.req.status = self._encode_status(status)
+
+ for k, v in headers:
+ if not isinstance(k, str):
+ raise TypeError(
+ 'WSGI response header key %r is not of type str.' % k)
+ if not isinstance(v, str):
+ raise TypeError(
+ 'WSGI response header value %r is not of type str.' % v)
+ if k.lower() == 'content-length':
+ self.remaining_bytes_out = int(v)
+ out_header = ntob(k), ntob(v)
+ self.req.outheaders.append(out_header)
+
+ return self.write
+
+ @staticmethod
+ def _encode_status(status):
+ """Cast status to bytes representation of current Python version.
+
+ According to :pep:`3333`, when using Python 3, the response status
+ and headers must be bytes masquerading as unicode; that is, they
+ must be of type "str" but are restricted to code points in the
+ "latin-1" set.
+ """
+ if six.PY2:
+ return status
+ if not isinstance(status, str):
+ raise TypeError('WSGI response status is not of type str.')
+ return status.encode('ISO-8859-1')
+
+ def write(self, chunk):
+ """WSGI callable to write unbuffered data to the client.
+
+ This method is also used internally by start_response (to write
+ data from the iterable returned by the WSGI application).
+ """
+ if not self.started_response:
+ raise AssertionError('WSGI write called before start_response.')
+
+ chunklen = len(chunk)
+ rbo = self.remaining_bytes_out
+ if rbo is not None and chunklen > rbo:
+ if not self.req.sent_headers:
+ # Whew. We can send a 500 to the client.
+ self.req.simple_response(
+ '500 Internal Server Error',
+ 'The requested resource returned more bytes than the '
+ 'declared Content-Length.')
+ else:
+ # Dang. We have probably already sent data. Truncate the chunk
+ # to fit (so the client doesn't hang) and raise an error later.
+ chunk = chunk[:rbo]
+
+ self.req.ensure_headers_sent()
+
+ self.req.write(chunk)
+
+ if rbo is not None:
+ rbo -= chunklen
+ if rbo < 0:
+ raise ValueError(
+ 'Response body exceeds the declared Content-Length.')
+
+
+class Gateway_10(Gateway):
+ """A Gateway class to interface HTTPServer with WSGI 1.0.x."""
+
+ version = 1, 0
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ req = self.req
+ req_conn = req.conn
+ env = {
+ # set a non-standard environ entry so the WSGI app can know what
+ # the *real* server protocol is (and what features to support).
+ # See http://www.faqs.org/rfcs/rfc2145.html.
+ 'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
+ 'PATH_INFO': bton(req.path),
+ 'QUERY_STRING': bton(req.qs),
+ 'REMOTE_ADDR': req_conn.remote_addr or '',
+ 'REMOTE_PORT': str(req_conn.remote_port or ''),
+ 'REQUEST_METHOD': bton(req.method),
+ 'REQUEST_URI': bton(req.uri),
+ 'SCRIPT_NAME': '',
+ 'SERVER_NAME': req.server.server_name,
+ # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
+ 'SERVER_PROTOCOL': bton(req.request_protocol),
+ 'SERVER_SOFTWARE': req.server.software,
+ 'wsgi.errors': sys.stderr,
+ 'wsgi.input': req.rfile,
+ 'wsgi.input_terminated': bool(req.chunked_read),
+ 'wsgi.multiprocess': False,
+ 'wsgi.multithread': True,
+ 'wsgi.run_once': False,
+ 'wsgi.url_scheme': bton(req.scheme),
+ 'wsgi.version': self.version,
+ }
+
+ if isinstance(req.server.bind_addr, six.string_types):
+ # AF_UNIX. This isn't really allowed by WSGI, which doesn't
+ # address unix domain sockets. But it's better than nothing.
+ env['SERVER_PORT'] = ''
+ try:
+ env['X_REMOTE_PID'] = str(req_conn.peer_pid)
+ env['X_REMOTE_UID'] = str(req_conn.peer_uid)
+ env['X_REMOTE_GID'] = str(req_conn.peer_gid)
+
+ env['X_REMOTE_USER'] = str(req_conn.peer_user)
+ env['X_REMOTE_GROUP'] = str(req_conn.peer_group)
+
+ env['REMOTE_USER'] = env['X_REMOTE_USER']
+ except RuntimeError:
+ """Unable to retrieve peer creds data.
+
+ Unsupported by current kernel or socket error happened, or
+ unsupported socket type, or disabled.
+ """
+ else:
+ env['SERVER_PORT'] = str(req.server.bind_addr[1])
+
+ # Request headers
+ env.update(
+ ('HTTP_' + bton(k).upper().replace('-', '_'), bton(v))
+ for k, v in req.inheaders.items()
+ )
+
+ # CONTENT_TYPE/CONTENT_LENGTH
+ ct = env.pop('HTTP_CONTENT_TYPE', None)
+ if ct is not None:
+ env['CONTENT_TYPE'] = ct
+ cl = env.pop('HTTP_CONTENT_LENGTH', None)
+ if cl is not None:
+ env['CONTENT_LENGTH'] = cl
+
+ if req.conn.ssl_env:
+ env.update(req.conn.ssl_env)
+
+ return env
+
+
+class Gateway_u0(Gateway_10):
+ """A Gateway class to interface HTTPServer with WSGI u.0.
+
+ WSGI u.0 is an experimental protocol, which uses unicode for keys
+ and values in both Python 2 and Python 3.
+ """
+
+ version = 'u', 0
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ req = self.req
+ env_10 = super(Gateway_u0, self).get_environ()
+ env = dict(map(self._decode_key, env_10.items()))
+
+ # Request-URI
+ enc = env.setdefault(six.u('wsgi.url_encoding'), six.u('utf-8'))
+ try:
+ env['PATH_INFO'] = req.path.decode(enc)
+ env['QUERY_STRING'] = req.qs.decode(enc)
+ except UnicodeDecodeError:
+ # Fall back to latin 1 so apps can transcode if needed.
+ env['wsgi.url_encoding'] = 'ISO-8859-1'
+ env['PATH_INFO'] = env_10['PATH_INFO']
+ env['QUERY_STRING'] = env_10['QUERY_STRING']
+
+ env.update(map(self._decode_value, env.items()))
+
+ return env
+
+ @staticmethod
+ def _decode_key(item):
+ k, v = item
+ if six.PY2:
+ k = k.decode('ISO-8859-1')
+ return k, v
+
+ @staticmethod
+ def _decode_value(item):
+ k, v = item
+ skip_keys = 'REQUEST_URI', 'wsgi.input'
+ if six.PY3 or not isinstance(v, bytes) or k in skip_keys:
+ return k, v
+ return k, v.decode('ISO-8859-1')
+
+
+wsgi_gateways = Gateway.gateway_map()
+
+
+class PathInfoDispatcher:
+ """A WSGI dispatcher for dispatch based on the PATH_INFO."""
+
+ def __init__(self, apps):
+ """Initialize path info WSGI app dispatcher.
+
+ Args:
+ apps (dict[str,object]|list[tuple[str,object]]): URI prefix
+ and WSGI app pairs
+ """
+ try:
+ apps = list(apps.items())
+ except AttributeError:
+ pass
+
+ # Sort the apps by len(path), descending
+ def by_path_len(app):
+ return len(app[0])
+ apps.sort(key=by_path_len, reverse=True)
+
+ # The path_prefix strings must start, but not end, with a slash.
+ # Use "" instead of "/".
+ self.apps = [(p.rstrip('/'), a) for p, a in apps]
+
+ def __call__(self, environ, start_response):
+ """Process incoming WSGI request.
+
+ Ref: :pep:`3333`
+
+ Args:
+ environ (Mapping): a dict containing WSGI environment variables
+ start_response (callable): function, which sets response
+ status and headers
+
+ Returns:
+ list[bytes]: iterable containing bytes to be returned in
+ HTTP response body
+
+ """
+ path = environ['PATH_INFO'] or '/'
+ for p, app in self.apps:
+ # The apps list should be sorted by length, descending.
+ if path.startswith(p + '/') or path == p:
+ environ = environ.copy()
+ environ['SCRIPT_NAME'] = environ['SCRIPT_NAME'] + p
+ environ['PATH_INFO'] = path[len(p):]
+ return app(environ, start_response)
+
+ start_response('404 Not Found', [('Content-Type', 'text/plain'),
+ ('Content-Length', '0')])
+        return [b'']
+
+
+# compatibility aliases
+globals().update(
+ WSGIServer=Server,
+ WSGIGateway=Gateway,
+ WSGIGateway_u0=Gateway_u0,
+ WSGIGateway_10=Gateway_10,
+ WSGIPathInfoDispatcher=PathInfoDispatcher,
+)
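
The module docstring's example calls server.start(), which blocks. The test
helpers earlier in this diff (cheroot.testing.cheroot_server) instead call
safe_start() on a background thread and poll the ready flag, which also leaves
a clean shutdown path. A sketch of that pattern; the address and app are
placeholders::

    import threading
    import time

    from cheroot import wsgi

    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'Hello world!']

    server = wsgi.Server(('127.0.0.1', 8080), app)

    # Same pattern as cheroot.testing.cheroot_server above: serve from a
    # background thread and wait until the listener reports readiness.
    threading.Thread(target=server.safe_start).start()
    while not server.ready:
        time.sleep(0.1)

    # ... the application is now reachable at http://127.0.0.1:8080/ ...

    server.stop()  # gracefully shut the server down
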
diff --git a/resources/lib/cherrypy/__init__.py b/resources/lib/cherrypy/__init__.py
new file mode 100644
index 0000000..8e27c81
--- /dev/null
+++ b/resources/lib/cherrypy/__init__.py
@@ -0,0 +1,370 @@
+"""CherryPy is a pythonic, object-oriented HTTP framework.
+
+CherryPy consists of not one, but four separate API layers.
+
+The APPLICATION LAYER is the simplest. CherryPy applications are written as
+a tree of classes and methods, where each branch in the tree corresponds to
+a branch in the URL path. Each method is a 'page handler', which receives
+GET and POST params as keyword arguments, and returns or yields the (HTML)
+body of the response. The special method name 'index' is used for paths
+that end in a slash, and the special method name 'default' is used to
+handle multiple paths via a single handler. This layer also includes:
+
+ * the 'exposed' attribute (and cherrypy.expose)
+ * cherrypy.quickstart()
+ * _cp_config attributes
+ * cherrypy.tools (including cherrypy.session)
+ * cherrypy.url()
+
+The ENVIRONMENT LAYER is used by developers at all levels. It provides
+information about the current request and response, plus the application
+and server environment, via a (default) set of top-level objects:
+
+ * cherrypy.request
+ * cherrypy.response
+ * cherrypy.engine
+ * cherrypy.server
+ * cherrypy.tree
+ * cherrypy.config
+ * cherrypy.thread_data
+ * cherrypy.log
+ * cherrypy.HTTPError, NotFound, and HTTPRedirect
+ * cherrypy.lib
+
+The EXTENSION LAYER allows advanced users to construct and share their own
+plugins. It consists of:
+
+ * Hook API
+ * Tool API
+ * Toolbox API
+ * Dispatch API
+ * Config Namespace API
+
+Finally, there is the CORE LAYER, which uses the core API's to construct
+the default components which are available at higher layers. You can think
+of the default components as the 'reference implementation' for CherryPy.
+Megaframeworks (and advanced users) may replace the default components
+with customized or extended components. The core API's are:
+
+ * Application API
+ * Engine API
+ * Request API
+ * Server API
+ * WSGI API
+
+These API's are described in the CherryPy specification.
+"""
+
+try:
+ import pkg_resources
+except ImportError:
+ pass
+
+from threading import local as _local
+
+from ._cperror import (
+ HTTPError, HTTPRedirect, InternalRedirect,
+ NotFound, CherryPyException,
+)
+
+from . import _cpdispatch as dispatch
+
+from ._cptools import default_toolbox as tools, Tool
+from ._helper import expose, popargs, url
+
+from . import _cprequest, _cpserver, _cptree, _cplogging, _cpconfig
+
+import cherrypy.lib.httputil as _httputil
+
+from ._cptree import Application
+from . import _cpwsgi as wsgi
+
+from . import process
+try:
+ from .process import win32
+ engine = win32.Win32Bus()
+ engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
+ del win32
+except ImportError:
+ engine = process.bus
+
+from . import _cpchecker
+
+__all__ = (
+ 'HTTPError', 'HTTPRedirect', 'InternalRedirect',
+ 'NotFound', 'CherryPyException',
+ 'dispatch', 'tools', 'Tool', 'Application',
+ 'wsgi', 'process', 'tree', 'engine',
+ 'quickstart', 'serving', 'request', 'response', 'thread_data',
+ 'log', 'expose', 'popargs', 'url', 'config',
+)
+
+
+__import__('cherrypy._cptools')
+__import__('cherrypy._cprequest')
+
+
+tree = _cptree.Tree()
+
+
+try:
+ __version__ = pkg_resources.require('cherrypy')[0].version
+except Exception:
+ __version__ = 'unknown'
+
+
+engine.listeners['before_request'] = set()
+engine.listeners['after_request'] = set()
+
+
+engine.autoreload = process.plugins.Autoreloader(engine)
+engine.autoreload.subscribe()
+
+engine.thread_manager = process.plugins.ThreadManager(engine)
+engine.thread_manager.subscribe()
+
+engine.signal_handler = process.plugins.SignalHandler(engine)
+
+
+class _HandleSignalsPlugin(object):
+ """Handle signals from other processes.
+
+ Based on the configured platform handlers above.
+ """
+
+ def __init__(self, bus):
+ self.bus = bus
+
+ def subscribe(self):
+ """Add the handlers based on the platform."""
+ if hasattr(self.bus, 'signal_handler'):
+ self.bus.signal_handler.subscribe()
+ if hasattr(self.bus, 'console_control_handler'):
+ self.bus.console_control_handler.subscribe()
+
+
+engine.signals = _HandleSignalsPlugin(engine)
+
+
+server = _cpserver.Server()
+server.subscribe()
+
+
+def quickstart(root=None, script_name='', config=None):
+ """Mount the given root, start the builtin server (and engine), then block.
+
+ root: an instance of a "controller class" (a collection of page handler
+ methods) which represents the root of the application.
+ script_name: a string containing the "mount point" of the application.
+ This should start with a slash, and be the path portion of the URL
+ at which to mount the given root. For example, if root.index() will
+ handle requests to "http://www.example.com:8080/dept/app1/", then
+ the script_name argument would be "/dept/app1".
+
+ It MUST NOT end in a slash. If the script_name refers to the root
+ of the URI, it MUST be an empty string (not "/").
+ config: a file or dict containing application config. If this contains
+ a [global] section, those entries will be used in the global
+ (site-wide) config.
+ """
+ if config:
+ _global_conf_alias.update(config)
+
+ tree.mount(root, script_name, config)
+
+ engine.signals.subscribe()
+ engine.start()
+ engine.block()
+
+
+class _Serving(_local):
+ """An interface for registering request and response objects.
+
+ Rather than have a separate "thread local" object for the request and
+ the response, this class works as a single threadlocal container for
+ both objects (and any others which developers wish to define). In this
+ way, we can easily dump those objects when we stop/start a new HTTP
+ conversation, yet still refer to them as module-level globals in a
+ thread-safe way.
+ """
+
+ request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
+ _httputil.Host('127.0.0.1', 1111))
+ """
+ The request object for the current thread. In the main thread,
+ and any threads which are not receiving HTTP requests, this is None."""
+
+ response = _cprequest.Response()
+ """
+ The response object for the current thread. In the main thread,
+ and any threads which are not receiving HTTP requests, this is None."""
+
+ def load(self, request, response):
+ self.request = request
+ self.response = response
+
+ def clear(self):
+ """Remove all attributes of self."""
+ self.__dict__.clear()
+
+
+serving = _Serving()
+
+
+class _ThreadLocalProxy(object):
+
+ __slots__ = ['__attrname__', '__dict__']
+
+ def __init__(self, attrname):
+ self.__attrname__ = attrname
+
+ def __getattr__(self, name):
+ child = getattr(serving, self.__attrname__)
+ return getattr(child, name)
+
+ def __setattr__(self, name, value):
+ if name in ('__attrname__', ):
+ object.__setattr__(self, name, value)
+ else:
+ child = getattr(serving, self.__attrname__)
+ setattr(child, name, value)
+
+ def __delattr__(self, name):
+ child = getattr(serving, self.__attrname__)
+ delattr(child, name)
+
+ @property
+ def __dict__(self):
+ child = getattr(serving, self.__attrname__)
+ d = child.__class__.__dict__.copy()
+ d.update(child.__dict__)
+ return d
+
+ def __getitem__(self, key):
+ child = getattr(serving, self.__attrname__)
+ return child[key]
+
+ def __setitem__(self, key, value):
+ child = getattr(serving, self.__attrname__)
+ child[key] = value
+
+ def __delitem__(self, key):
+ child = getattr(serving, self.__attrname__)
+ del child[key]
+
+ def __contains__(self, key):
+ child = getattr(serving, self.__attrname__)
+ return key in child
+
+ def __len__(self):
+ child = getattr(serving, self.__attrname__)
+ return len(child)
+
+ def __nonzero__(self):
+ child = getattr(serving, self.__attrname__)
+ return bool(child)
+ # Python 3
+ __bool__ = __nonzero__
+
+
+# Create request and response object (the same objects will be used
+# throughout the entire life of the webserver, but will redirect
+# to the "serving" object)
+request = _ThreadLocalProxy('request')
+response = _ThreadLocalProxy('response')
+
+# Create thread_data object as a thread-specific all-purpose storage
+
+
+class _ThreadData(_local):
+ """A container for thread-specific data."""
+
+
+thread_data = _ThreadData()
+
+
+# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
+# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
+# The only other way would be to change what is returned from type(request)
+# and that's not possible in pure Python (you'd have to fake ob_type).
+def _cherrypy_pydoc_resolve(thing, forceload=0):
+ """Given an object or a path to an object, get the object and its name."""
+ if isinstance(thing, _ThreadLocalProxy):
+ thing = getattr(serving, thing.__attrname__)
+ return _pydoc._builtin_resolve(thing, forceload)
+
+
+try:
+ import pydoc as _pydoc
+ _pydoc._builtin_resolve = _pydoc.resolve
+ _pydoc.resolve = _cherrypy_pydoc_resolve
+except ImportError:
+ pass
+
+
+class _GlobalLogManager(_cplogging.LogManager):
+ """A site-wide LogManager; routes to app.log or global log as appropriate.
+
+ This :class:`LogManager` implements
+ cherrypy.log() and cherrypy.log.access(). If either
+ function is called during a request, the message will be sent to the
+ logger for the current Application. If they are called outside of a
+ request, the message will be sent to the site-wide logger.
+ """
+
+ def __call__(self, *args, **kwargs):
+ """Log the given message to the app.log or global log.
+
+        The message goes to the current Application's logger when one
+        is serving the request, and to the site-wide logger otherwise.
+ """
+ # Do NOT use try/except here. See
+ # https://github.com/cherrypy/cherrypy/issues/945
+ if hasattr(request, 'app') and hasattr(request.app, 'log'):
+ log = request.app.log
+ else:
+ log = self
+ return log.error(*args, **kwargs)
+
+ def access(self):
+ """Log an access message to the app.log or global log.
+
+        The access entry goes to the current Application's logger when
+        one is serving the request, and to the site-wide logger otherwise.
+ """
+ try:
+ return request.app.log.access()
+ except AttributeError:
+ return _cplogging.LogManager.access(self)
+
+
+log = _GlobalLogManager()
+# Set a default screen handler on the global log.
+log.screen = True
+log.error_file = ''
+# Using an access file makes CP about 10% slower. Leave off by default.
+log.access_file = ''
+
+
+@engine.subscribe('log')
+def _buslog(msg, level):
+ log.error(msg, 'ENGINE', severity=level)
+
+
+# Use _global_conf_alias so quickstart can use 'config' as an arg
+# without shadowing cherrypy.config.
+config = _global_conf_alias = _cpconfig.Config()
+config.defaults = {
+ 'tools.log_tracebacks.on': True,
+ 'tools.log_headers.on': True,
+ 'tools.trailing_slash.on': True,
+ 'tools.encode.on': True
+}
+config.namespaces['log'] = lambda k, v: setattr(log, k, v)
+config.namespaces['checker'] = lambda k, v: setattr(checker, k, v)
+# Must reset to get our defaults applied.
+config.reset()
+
+checker = _cpchecker.Checker()
+engine.subscribe('start', checker)
diff --git a/resources/lib/cherrypy/__main__.py b/resources/lib/cherrypy/__main__.py
new file mode 100644
index 0000000..6674f7c
--- /dev/null
+++ b/resources/lib/cherrypy/__main__.py
@@ -0,0 +1,5 @@
+"""CherryPy'd cherryd daemon runner."""
+from cherrypy.daemon import run
+
+
+__name__ == '__main__' and run()
diff --git a/resources/lib/cherrypy/__pycache__/__init__.cpython-37.opt-1.pyc b/resources/lib/cherrypy/__pycache__/__init__.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f9316f218cdd6f920ef361e4bca8aa63166658f
GIT binary patch
literal 11421
zcmc&)OK%*!6w6^i3hITVBTb5JMHD<9wP;8Z)QmgEgTO-a$tKw8zqt2*oD+Xh&
zac4q4mx5!hN#}U$gmXf^mxI?@C!Le6Q_iW@Y3H=88$tgw&Y9L(=WOeobFOvXIp4bA
zTtFM)bg}ii0W4aVoHu0eN^q`aJGOit#mG0EDfv8x=gZDzUF%&1e&fOA))faU;8*tF
zI?$Y}xKHz|2Rgs%&U}3ZtGuXI`J*m?8U6X?&8l-XU9H6?0PmV}4X}?nZ}V9;`9epZ
zS$16B&+T6a)LQQ(*Z1exi36SK=d{fm&P_l*56HK+DuBGHAhXw;n+h&}=Y__vZvKj$
zyrbpMr|^7v^WFVlr}({x9`EhHf1q%BcmFqO{cY6W9;p8yt-o_mW2f1fA2fD`|MrFH
ze8@lKS6|Rdf5*@6-vyrYd|r)XH`zIM{s-Op2xBg=ix_i}7cgdlFTBv4k5T(NyM)?H
z64D}HWN*CColkJLaleQ5H`x^0r{t*n{60o4v0Lmiy8@`6_RdS5edavi4>lk22ktWe
z6y-xu<#bAKL+mN4D2f2$TkI+zUghUE9|4oc{4rpD&Zhb2?g~4L@`)^0`PKdR*$gOM
zW!HYtfX!2WVgHK*&C;A_&X;UD{lEQJ7Jue`#b(*{A2j}K(_u4rG*-*s=0NRd`(I1y
zUfW*-b-t01Z}4vp3~<|cqE+AV{-&g93maUB-s7I_N495odhtfs_8YTySl{H0_tw%3K%;ya<(s#Plaz(&}P{3wp>aNTZ)u^qPgtX=QM_IlVAcEmfL@M3Py
zKVGr}ug67%!K)io@%X{g!u;yeqhp~~pTp~P0>9zKKIXDL
z!R;O4$1!i)UR3eySa43WHv%t;I6AbM-QrjYi)QT|ERfsWYi!tc;k9w|+j1H?su2po
zqfXdnm^%cl;XY4S9@rf(-l*A&1V8~*q?+z{O>S>su7Hban#V^E!5iG)=27L|;;O{s
z@uMfJ00F`QugKowJt>AH2U}>SSdp-{c84zOT{_4XA)k-~NJz5@B
zDP$vFvx$UJhc|pL$na`|JNC5SX8idyA=!<*+2l#jzFvpHM=DG
zQJ@%K_qsvc2L=+U727S;`(TrTZEyP?!SvfrRHZ~A9D#*oy6pu~h>06Pm+|O!rD9)`
z>zU@yJ0Z}Vw!JtOKEwvgCWKo?MF7ZBOc$5GSXz0s{BUu3wJ)ZUvh|+L_%;v14ul#L
zV~v3wP)$^`mtwmk!fhX`t@v#)p(Ul)_Ua+F22s4xh3K_ou=Kkwq&<}blEnxcs1tOR
z5DbWZg$NUIvCReexb2IuO)@l_Qhz38R~3D*4IwaL=eoov-OVZ8b26qg4-qO8u%Sxc
zZu)ILRHe`us-g`wRH0ZhR26Rs?lG5nu{Ts7gw3IMQa6i2gksiS4&zV4Zks7trG^#G
zd~CFMXhh)GQ-*!Hy14wLuT{X6a0mQh+t~fs5a1oy6^Aw{u2^&%iuBP2=@l@>7k0SQ
zu7CkpwBqD|ic_L{KMc1>qY(zH5Us)IdiXqh{n(GN^`I8gt_w;=m$6UL(2z>8Qu)Mh
z1IOMhO%4=NEx7P#Wl;%Srdxox5(y|RIdpn>8Q9<)5-GPV7T1C#k%mb%6RpnM_WZ!B
zV~avsH~c2%%XG5+RoJx~9;rsZy;Vuo%@NR`#Au#r!PkK;C~8Y}O@b&Td|GOpOmo*N
z4|&te?FP0W$?z~Qs&>okVHT!dER&tU-=p8IQ6YH@%*
zSheEVeBXj(+*p)SoxQE3+c0I-6Sd9Kw`Wi8Ey<3g5~7UwL{jYU2J{xNNX^Q(+0vDE
zUQf+d<<3SNccR;Kb4{$ITdx7>xoju)A9nmLe=dXY1UmV{Z{1quC-P~Rt9nuh!iE?8
zq~o<(NJjEql8g=Ri)3;MHd{cdvYnEYvt5)N&!8_pZ}1LTRKOjD24$(RR*mFX%6FI4
zCgeG>NXV;ZQdHVFDRn~Fv}Tl;(3HTWg7K*~Nk;Ijv_n$DBWVTMjxCN2)s*c@Gtk5X
ze8DRX*om4xJwkmppquen*zw!%+`O{aVlq0Q!4eZ|5@Q5>oQeqoTU2Y?
zJ)SYQMnoX-3S6`MLbv+GwATQnz)ABe}B7hd+jM{?lll{4N~(lOvu_w=(WJ8wa-H4
zuX&6?qpvB&UGuuk4{I=|vA^D11OMQSLY~%OC)P5Oueq+@_G8zrb$UrTd-($)Co$G{
zUq6aowXA1@HX{A#ZsL;b`V}5xZBN@oy!65lXX$Cv*f+@=PwNfT-;ni|s%IuBVC_!N
zGbS})>Ow_!eURQ6k!#6>VwIcrOU{55Q5bM?gJKZ|S;I0mXKBNW$h(sC6xV@WG7@#`
zN^gl8-mAK(;wCArinrUOXJe3lj|9y)F!5$>7WRvh71n+!y_f0Ebg;ni+5qN@+^Khcc)#t;7n3$k>5;0m;%&PQMNLr>Z5bMU8}1z}1VE6*gmv}osgzX+
zA5`Qr1>*=?76fo@pq8oEV2mMQ5xg|CspcUERBt3LJ=sp0EsGWgd`GnriRYbIG6xu8
zMMC<*kQomf0d6rtm}<%8cS64%PrVWpVIuweXd~RPZ9YoKUzH$av?NOq1+%`=}bvPX+~mUjza2*JzZJCs#B<#7vjWQow%3raE3zf
zsky0Ys&S>KNjzTn;qHNS%!i{(BA0S{KsKlvu~Ui}{Li!-La6lT*dVR4Wl_@%@E8+ggcD0PL@kQ~TN-1bo?UYndNkK|pQjl8?K8i);b4CZqE3RPFfOe6JsN(?H71)zeL9Nz(}901LMGO%4i!w-lf7|
zdSQS^S&5Z#A7dj>GZm;v@-VUGyuXL>m?!0crLL0)3^NMZZ6c?_<@QY7fS~{}i|A63
zTfy#v8bO5Lh5_D&cz{o7;$=#CEVgtVv7cpPM@}MTtsi|#W~jO>3PmsgX`r+UBwlRE
z6b3*8Z3vZuE5T+G0~~<81XPFrkYSm6{cWT%2+u4e8Hk>D{Ahz>GB}=yG^-Fr8Hp$f
z-0=_`?I2<%q4y#`K;YeNby5jb3nG}%)aZo}p);qnk!QF0j!a!ilpxj!onZ^1U1#kc
zg5)SBv6Z`zW(RUD%F-y4mB!9WFeCEs5wqf_)_pzl){%$udTPV%j@-QUUhM|{H-CBz
zG4Y?lJsmPpx!*I1X65(n8SsAKvMKM!5H?!Gn<%QLGRS$wA?Z!5AoSQjpl3v0u%;LF
z-SJGKYScF5n&eZHhYGg?wy|&Vb}y~)7F;!%zR0sMGMz_WSZx{p7z{e;4;b-6BjBER
zg~Kop@c)QQc~8dfM2?aiwOkz+H_`Vp<@TEG=i3>9lJ{P#_Gv)>!=Y>hqmyGoYL
zq}1d{2*B%n105AkN~jZyRs})Y@B?-TEd-)&I?i?fg{D#B4Y%|4qY%vd<~xo0=guHW
z*B-p^t)e(99m3oVY@dyd7;fRF1>DWJuxFBk_yP{1+>Hie5ruOxDJlg+s3EULOM=sS
zpB9y`j3P$aQnl2+T|#qG*!F@h7oSpBDVu!(eTce$#icxJ7@bLY*GDfgpEDPLAvT-`
zlp!{liVf*P5L8(&A0&ctph!QkqJDfgA)*
zP53`Qk6hmO7xEU`V2e^F*g)pdycMrR4az(7pk0@81AXRRI!cT6JLfxxD428ikl^W1
zT>1-&R*%}q3kIf|r=d=`#t;+|9r;tOhcM#Yy@d_~3A
zRD6SiydJ_Fprq#R7t(@$f-e7rDjy#semY(V80P_AqDt@&XuJNHBB7!)QO>;5Gjw1iH&Ec#T
z=m?|fj35$e;B?MAY!8w%Zk7O4P_nU?i58S+(hvxa>0%vEL;NC|+{N`vv#rP7dopT%
z5H=rrZJZYi@2@e(JR+zx61MwIA0Y0>@u$oJAUKB?-=w%*g&&CCaJGr~t3v`Te8k0R
z=qGb9x20jc{cRuGootDee#T%*z1RB9AAZ|EwoyS=-XEQjP$7m4R@J-)W#eZkG+Vbh
z&M>%&^VYjm*9|^ySi_Bwm0|nWBnbEcB
zY13eo#kJ{a)BN6STYK8~<{n%c@+AL~)U&z!BPcFcqY_ye2~?;_%zT;414*PmZLnNC
z#iv-!OQJ&c^h->VpvzcIwIJqjQ<`amo~Sk{;B$~rq<(B-$wQvv)(+iehXGYcI)rp2
zF~W84(B>(lN-Lj6F2&M!&%Y%72q{$O+^7(-Y0v_$h*HtLu{ckcKY;>@EqtUkqr-|K
zcU#yjQZDxZ#HEk5Kw?9Y7>Y&7XD(&`eeL_&(
zs$~0+S;*rn<(On}zKOv1Iz6}mBxAxy0X=QD
zq9#r>>4S*@jHFbRpw?l=o4+T7Nb72b5H!l#JtNQ3Xy3T8K)S5%;Zu+ryoZzhh7EjU
zfJ6_~rrpui51<0!9N@DTYB;94;{-co92G+=%nctU2bIqcmy6D5;Bpya8MSgLScSfo
z=-Y?!FW(8gR-Jht23Wv3FS+r`^-%Cfp8SiX0rNNWOVaezcz7r6!l*eR5rM5sPK}1iAuFUQ^W!Mm@
z7hZYhDyI@77$nZ2kRv34MTP3fMUtnmPRZTi0Rbb*_K!_v4oHw2B#u)-J43vN!a3Eq
zLVmrLr-n?N<2mK*RgnIWC%obmJzb!JG8Ezr3a9*x4v;_UMlyfwoLEqYWec$g?#ts)
zA&DEC`pMjipVEYu#`Sx)JT4~
zo|p)WoRNUCxovcHO6i9s&e(i6#wjm3C|o!dFMpG1ZD(A$eyL_~CKPMjmhvN5
zp?{q)e?Y}F6~95@6e+{ZMN&l440oB=mI*MGxES_JrSm7IXfU}wNkRHoqCtSLSm|2{
zM$I7=q#5K60Q3f^(oQN;KwLU*NJf<+k}q=KIz^BiM-1W$jr%nU9MY>VRU)TCPM`c9
zCQ1r)lodOr1ts@NocjWwtE;4ZMmZ3aj`IFOzJ0%i^d5$w!}qk
zWn4vllHN^gq^!eXHx2zHYAPmrnGX$-T|pCC+7Frfc0Fn1Dj
literal 0
HcmV?d00001
diff --git a/resources/lib/cherrypy/__pycache__/_cpchecker.cpython-37.opt-1.pyc b/resources/lib/cherrypy/__pycache__/_cpchecker.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dde88f45f6bbc7f1c8c93e0a89cbb5e65e06c211
GIT binary patch
literal 10406
zcmb_i&2Jn>cJHt0=@||`M9Gvaf3;=H;%Mo%q;+hqENShPV#m8)MvxuLJEAgbPBqCU
zXQqc$J>;;_1cK1XCOJ7d1&bgU9s(plPC-sd5G;`W3!DT9`mmQhEs#UbKIQkSx@X8S
zB^#T}V5+ODtE%hcz2E!2SM!7E>8gg$?$2f)EqqVYenmIM$3o^^{KQ=(OlxRNXGU9Z
z=<;eb40$yhCaz}N>XaHKUDdH0_6HiXSm|?(m4fnt*>ITgu~xT}r8^rzYb)UHTEt!C
zIp4dt=ZY{6gzI;i+ll(!IAE^d>k04Ic(1lx*E46eAGYJLD_T@ie5R0j7eDcHB!Px;
zX*4E`$7op0WY*_eqr^(g#?@vXb6Dkby-{XWjAn|}P+DQrYzEgVn`LvjPO(#L9@iQ>
z&CcLD&7NTktjx~hcaA;F&a>y(^X!Gcsy1dqtuf19WG|uC9CO*rxSm?q8uRP|dj{WIV*E6X1410}TLVkfQvO2D3*=6<}T+gv3_ByW5vNG1@jjWunjsKszroHF3p1xA?
zMiBC&>(vfhejG+!;Z>_HJ`Xm6t{ZQJ!i`opgI4S!*YewKTHV+Uy6aa7FC%xZhiH
zLlg~yR-ZN^@LLeXBd>*rV)i*YR%pCcC_lZBxWKUF(udAm9EVja4
zFJMjd*392?OLd~U_z7mx@iD@-z%=$SaV&E?A2MW^2feo63PuZX`SIh?T45@3{dfI6
zPrm+>7Is#C=xX412R~6ml4?WkP$T_;EEvi2A9lky^xI()keW!9f&9mQw-tD#63;Yy
zQLo>|jeT2EUfX`-)Y=Nr(dn!xv}zlBH&`BEs~Y@dcC`T?62I9FpKW`
z%8w!zu3+gQGb_Dz9|Ga|eHKQZ=tXh3wzt9qfvso-XdbSvV8NiSy}gxYt5+;yZ!dG2
z%@BiaHlL@t(4;gYnac<56%R)iBVfpP4M=SyhU@
zAkojt>p|?tG0(~yzMxFq;w8+hF&DMj@wOJ}O=c@yk~^f5GOF_m5*I&l28m|ax~)69
zq0i`5d}s8TV~d&(wne*4AG(g@iI$h?slK2|SvvK;oB$LopM#eyV0#qw=KFM`dQnuX
zax`^Y4nHeH#f85fWDam8SdTcidM#`RY7V5U-6*r@Iyn=PXAi$Z7@jGF;n-@tKN1NV
zJS~9~iN8c<@E5VR34s`zhZ+Rn4_6=>`$lY~2FgnZhIn;*CN(x~v4ktk(U!sAqTH~|
zj6;10V{@qQn}gq>I>UJX{>547zW5-KcaDS1cEMVc?DlUqO&VW0rC5&lBy0k8mu02k5XVJ~jdqIsG2mHx(Z
z^2sL(y$Zk{gLv#-VkW;U}(bPCBZL^m_hdBYLi
zrXNS>-bDRI1&O+!kcjL}46U_w1Z11})S3XFxGAUBByDOQS+C#1i^YpbG-E-pK^1KL
zNG0av_l#~KZ|KR*LcD&HmyCpv)Q`B36vCe)Gx(=i8|tyKX>MAFghPLui(v_32*H9F
zzP3HLu0hb&s3wH1EbD!Wde(jk;^s)qx<2@Ud|r}I-^5dUzdX3i4D@+n-%0JElRCgQ
zmYTx~u**vAw7h2K;>RFU6>0aOp=%_t@$}}*a5gm$fshc`IS8u_
zVJ+t?BZajfR!6ozm#Ynb2Zk9^^15h6Fon7BdHh){0>HDZsDhW2VMulY?jFE1f^
zBg5n+wTBnQBln_kZz17i+me5Ox{yQYVjlu2E%{p1Zb!QS+X=Z}%1yq{12^iD&82!6
zAq=Y=@NUS0Vz};)Bg_NDphGmkAKCpvEhW9|`9i)@*1_Nv&&@I-{y@&&!
zxu8FCrgH5T>#)0XnJL0SR>5jDV`VDw2
z6^dV^Cq^W)G9jo4dYQEuh245Zf-irKT3@0%W=E`NSbB*q88$;g0$-$Or5(TB4>G+w
zsc)B*D$pY3tY)NeAyjt6bv{5Z!bPGP=Sam2OJ9JsIU{Y3t)GXALFp_QiI}_l|-4ySY|<9o%P>4TGiEHp~27vh;65+hS)}Az24k0wj;1*-E}E
zhxl!zy&Dx$lUyS?J)#sRfU)0ogHA8r8@2g``(f<1qi9QjvTjMCDuu>V+|sK+5oucEziALVx_iBXa9`-cnxY
z)_CXZ%k}D{@V9!rb%Y4KbGGob-$ZmtUpc$h513419Jjm!7YY
z?%&5#F^^=7^$roBQdLh10{leAeoWRzM52q%1{=p@?XxG!8i>T;x}*zbNf#~?zf|}w
z?nzt*rQp}&>TrtKWonM-0dQH$pe53`e#aJ)1uodI!
zV&JWNM`U>{QFWI^Za0d7hqZ4ZLi`nMghE0GFepj$)8vMbZBlNYRdO>H-)1px+U1-v
zk4eHD5DetNOkS4T0TcDa_9NLLnnN5w;?!rbVI(>G`WO1(hsc2i3@wt3FZ4&(Aw}>%
z;E9-cLOj6RUPvJo%`$4AOU+G3)~kVX>cA>^E@vf`O!ZA?vkGQVVg@tewcKXbzKy%m
zfxchH)y5TEfR(o#{>RMOx4VCj8kKlzvo@TT9N|AwXTOq``IoG+Z_DzTw2T&2RwfdA
zXzW*I>1=
zOg`aJ6ep)AOVMUIdFg~wijzRPp_s9kTt_u3fDQ0hMbt)oL=I=k4R&OBCF-wlfNjt_
zs2)NRJ0K;@`?X|2pH}l=NGI^SF_{F(p%m*OE!+ds=)h#P8De5^7cTN8cn+PM%D8RN
z5aLCm?TZa}jYl1XQb4$tieaHLVgMnS4=s}9I52Kkl`DI0)e-6S7L@5ExpN0UWUV%gU=0@_+OQJAfTzD59yN3OLWM}JlO6B
za4*I*X~NPF(UF8HtHGIW7Ga0X#2qJ3GPCHV@MRE268s94*m7t`Ezp#rPT!}_M*N9i
zlg)|ZNsE+OG)z8?(nfi$-)>{DjWVKMF*1{|Gp}Gn37N8Z`)DXWLZX?(kCa<7XUyo1
zQ3d8W@@s=f)o_Pz?z$L8&9DsHgdY!=#WBuE?;iD0)7F#o$8qZD3zHX>jABuGa4v)9
z24n&e9WTEov+Zcz^IM40h$ih)GD8I*9uZgfxk8|H)HdoSRjLJVD+s#D*?fmQu@UVy
zVWwKaDlC*p&J+*g4UQ+WQ<2=ovM63NHVTUnH+nnwI{sEb6mxM@>xiB&-pA@M*6CAC
zp35By7OeUGc1#>22EUb|tOiZ1d1P9Rc`BfN4A+}cPkP!7hsx-+>M7-#(B0-5!pJN;
zO;7U~6jN_ZQ(+vDqlu|!vlCO!YR9HJHJVn`ZOPaccAla;qY&0Fa5MN<06oHYaOnOa
zHx8f(F$Jb}27jHJC|{=d1Bk4-W%G|>YghugmG`NI*n=^0_b$j%8?`RTT86rxtOYo&
zrbgc1&ty$g-KQ`EBXXkZP;XUP2P7M9?BjI+%O$|Fo!q*|BNC$mj42>bem|Tq852ez
z84wr%0)f&j=mDeQFfqiEW4`AluPc}-DtRs1o$`*uMe>HKKJMq68ocxk{R5ID
z&ON0^MECdzdoqitPUaK?9+Q%+I_{%hQn2D;pCy>^-$pLewr_kF6Ewt=XGH&cH9R$R_lc90K$~1diW}LG0oxH2mJ+Us4Mi{*Oe4
z$UQ)fa$HNx2RgI1EWVSLS&4LftIYpL<(Z9Gp^W=vy2o$P9utj`Tb|q^pG4uW(s-%cE7^OBy8q
z^?70%FiR|}Q03KV5Fna58Wzq63JcQ$NfTrB$j(YftT0*Q77BqVG^CvEA5rcCC9hIK
z>}Cu_lBw{IQC5V8h%9ABSB$}^F&hfWPMSF(P>Mgqz`hZx&KA%kdB=DY9Vmt%5>Nyzl9!70
zctVTO>JW_}jHjPqNa!5>bNHLFexMHx!u13FPqm+FKh@XZC*mC2!O55&1NGr4BIYae
z`Pcx6!;nKPSbdQh-7dVBUOik*9qz63DCLiYK(9_9S<_?0H-<>!ub=8xDT!{
znm8#k&f@7=?Ize$j(X;DHnkeDJ?IbjJA&Uw_lRYYf50D6GK(aeS7)p_`&37*R%YU0
zJ*$jlf}f(Qr5$>V_0dcGCgv@tPwyL!2+!eoswxd)>9`MM?+1wZ`nRv8V
zM1aK#^?hsb84gwy*Zv4~N-_kYZB_U+V#7lVVH663lo6g$;gm9-IB9vyL?FW+mcf7#
zxJjK&lx>a!6xZ@Fo;)&LP9C0Ut;W&1oQmRr0GkbSRUB2Y2*-64J-~@E{;WW@453@Z
zGAJOds(JwO5eMZG{9Bkme~%I=aukYE2Lw2lCh=-E3-EX8k&LX8Un{Ao0(*U@FBFPYC>?IX3b%BX7c&m`_d
zzHsDqqz4y^>*sH9EPc%T||<11MaF|mi$u>{-tuPA8OX|dlUc*
zaLS9yDERQ+&YR>(s_a|zCm>*1e(C|Yyx(SWOqZ@+d)vE$@6}6G{nEo5H?BUqG;%AK
z;i1V>IG6TZYSu`B3iD2Ui1#R&P>5M&tVb(xi#C_)XrMqKIcEzUn5b9Rm+R+wgnC)E
z+3ZBDkH|wat2LY3eZQ@AHY+!AyxnRxIY|pAvLOHD!3p{K+mzHPAz9$pDY;3>2b55R
ziT{`q>2uwq+-H;!Y2hoBNYEe+RKm_F7gI8x;z;p1X)CRYco&INvkc30EN7-#vJ7Y5
zIqS?hv(DRentuH2Sa|-7Q$c#sagd%yy^8D&h_s8oWOz=|)yufeDmWkYD1NC-j@+5d
eR{ubg?M@nA=G;_EcpJ;FETasi!8XUZw7y*4ONRn1tw_s-m3=X~co
z=broN+?;3O+4$|!x_yyE%
zRP2@fl5L4XcSe52887nEQ`;}|Gkk`Z(K^Ie_#y70<;AD?Sw1`LKgTP>{_}j!^z*}f
z{;B29iV9Yn>mKF{+3{G~xfYw@5utuz;OTMz!7r|AK8nl9jO}e;7-0GJ4na8)&X`8s#*
zTD1$o-(1VOmh}2T8g7MAm=0Jg;exdj$ubqj+w59LNIAGYU^`*jVUGeCCYfT7gj8V?
zE0(m`?Lpc};=1Q;CQOPn2xF`*zxprmUcqLdI`@W!TK_J6mp?h;AKj*
zvxo&-DwRPi#a1lAo)|6|58G`aMcl&3Es^dB5x=^wN_rv=Ur)L5s{e$_NF+EnY_W=DYdARB8*0Y$m-f}sH~)-P>btbgzl
z+d=hWXM7WrcL8RDPTY^x{W;;tWDVoiiW({Ga*l3VPG=!-`$?D)c
z9P@{DE;K#B?oPttD!^U{F0kKoip95Tc3#S46op$ayqkBw+_-%CgAX^}Z#mS7hibUI
zbq#-Y1Vw7?Sx;;m*FsqC!Y(H5;AQXjPsJw3mtZxmq#=HVo5qsUIf=ZYWxDjRU=ZnjZgah>p>bc?j!*a
z-fsLX;b9}-9C4`8kFxDBt_Kkk^uR^O1J-XG)z
zqVy?3qkzIHlpRT@+!SC0ng*Xi<)igm3i71q5Y{%$~w4cDsmLyfpktI}e
zh6)146o=5#@@K1P9mqe6w<-C@246-G$)DO!9JvUSJI3UDme%EyS9CE^#n^kw$EPgL
z#*)|E+zWATs5C%)&WoDQdC8FWb!wDHURNhoNL$PGY|obSXiX{eI%9lqynUk`G--ZJ
zvJYugm=c;yr5SV?jWL6e96fMkJ++@Xsr#(J9q#hNQ*y;3FYep>F1aJU4IQ(z`@OOa
z&Zt^JU+f-jG_tjC-rM*v*x3DK!@vKlAAMVEA+OU9)nuQ$_^TxpDI!PeKuqLz`^fs9
zI68{Bx1Q6`=L;xmh1^x)V?-vLZXKDsI1uvUwoI}<;+5TdnbhSgQPA7s!PPG?`Cs^3
zyGwK&)UQMd&Ovn*4KrYOBFNQB+FRsNjL#3H;&IxfLnwEo!1mdeJdBRlkWiB2e2l-M
zfd~bqvR$xC_#JkqtU30l;}~ZAsSCB`5`pml_NaoSsH2sVw4W@Zc%2sHG1@rBiz@J<
zDdQ1AH5VBgqiCQ`rby9Kmkxt&VF~4LZB9|axwU&5CmQk}BwGyWlTJ5unzOcz6ZX8S
z)rwj~V4TYQ&?X?iA!Q=-f=0jGM6B=831^!qHXXpU$-aRk#&O8yNx*#m^UclMcdsRJ
zEOhqqqL`)ajm!Cr6#Xb@i53}w9;(MG;aUQfx^SpHYu_eg>^b|C>Dsqy1xcYJFCuqS
zBClle!z@Wf6DNl<%y9FPiFZ2C!W&2n^)fdXo0Bt}<|N@*S<&*AE!R-~uhPkLn4HfH
zcNmH#DZHEDU!~SGfcgAbAR|i$V@9%EKzm9S;)FK-BvK=s%;`Tu%E(gGC>heZZ;)iJ
zX&^OnL=$qXlN_T6LO7AdS~Oq<#p9DEBwn}fFMNwEe4pC3q=5C=
zl)uwx4ch4Z--Cd^BU(ATzrJJm$W&jn!T@=CC!i|}k`}hIy0XqzuB_k+O_|{?Vm`4o~f
za#J3q{L17iuV69k?4-TuPnc<_Qk0j>Due{qd}HNEvYb3i!@sL-e^%SbXPQl(w3Y
literal 0
HcmV?d00001
diff --git a/resources/lib/cherrypy/__pycache__/_cpconfig.cpython-37.opt-1.pyc b/resources/lib/cherrypy/__pycache__/_cpconfig.cpython-37.opt-1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b0c97fa7eb6fb665041fc2cca0b64ca8e35b1cf5
GIT binary patch
literal 8919
zcmc&(-ESP%b)S#@;%G_HvTR4PV_YoalB-fH)OCPXkzFa0EgP{}MW!NTO<^?Lxl0Z?
zJF~iXR@82mK~V}wpNhT}2n>|$TYv&BiUjC$-}*1iLthFM$iI-M{LZ;EyGtqw;+J;G
zy*qQ~-gD1AAHQ?Xo$oI%dlo*2Kfn0oU*EH=|Dl`ma{(8(@zwv0CbYU%XopU0cWwN-
zu`_VHu1$TFxH71AtGKR)wU<`67S_WCe(U$Ga4Ga&TA?Sa`;B9_+d$8Su!)|gT*AE<
zF3r^P
zmKDAkzJ;C@xxzi)LeG`(Dta!;i`?@zdYmt<_B+A)Ma%N;rb$2A9jPFT(nRR7&g4M!
zQzh>1Nu|aQ#vSk6m?)V&6Ok6tU>M7ROfngY=TRWSs7Is%6-nI@KagWB0wsmcRFv#$
z?{F{b?TI89NRjPHF$ji3ni#|*xgpHMK`<6OQlxzmBx8|{hY~z^cT{gL%49DaDd}Bb
z_<+BHRq;?h8%dp+6^DV!3=wf>IENj|TJpJ6<3sQ-
z3n`@v;)oKS-s61@#~s5Cq_D6N9`W(-APM$l02vR`5i5ljDmK5c22Y2^NeK&7MY^*O
z8H*^>GVX7PK#MgHC^%T_h^;-+IV3Ey!}NL#{$nOZYRArKBfR>79GhXoAluszFsPQ2
zF;wteiboGWZ;5p>GWjAP^Vtxs8!e1sO063-sebD_d>w0i`fqwiH9zNQDiq
zBOd8d5Xa*UY$(!(Bhnf;9jf$s6tYx#w>^v_9cIv4hlRAR(llGgDu&{R^wZv8B0&rt
zk9POOI2{=ih;=GP&^k$FAl0s1=iyLJVSPOBW70s;QOb0fYAxv>!qaeuSc?&<^=(u~5)^Hj2oO;m_D)oTdlF(P&6}VLGH%
zu?MBbQi*j9_etP;ZSwPbvKI#ijuzegJPOTx5=fy(Wa3y4)*6ThW2h{d;}C3u`}^R9
zaA(8A#;7L|@YqC9f1#p|c)W)m(lyhB$Ma)`E6k#y8#DCtU4CxcL`n*bIuU-4eT9&4(9yYVJA)2
zZn9X9+AMP3o46+jX`vcY&zy*S0e3gXe)fT3ovF{XwwE6InSu&-g5H6~I-kJ*WU&xU
z4`m;U3FQm;O5AVXWDe*7eUwB@OhoHb8ONzOOjR7VJg?IcNt(%WdyLyVaxZuu8NW^w
z=xKKZS3{J5?+_f5