From d54803d039cc6fbb2bee1adb6604258d8e8184b8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lo=C3=AFc=20GREFFIER?=
Date: Wed, 6 Sep 2023 23:21:49 +0200
Subject: [PATCH 1/4] Add Check Style

---
 .checkstyle/checkstyle.xml                    | 382 +++++++++++
 .github/workflows/on_pull_request.yml         |   3 +
 .github/workflows/on_push_main.yml            |   3 +
 .readme/contributing/check_style.png          | Bin 0 -> 25430 bytes
 .readme/contributing/reformat_code.png        | Bin 0 -> 16410 bytes
 .readme/contributing/save_actions.png         | Bin 0 -> 22277 bytes
 .readme/contributing/scan.png                 | Bin 0 -> 14729 bytes
 CONTRIBUTING.md                               |  27 +-
 .../kstreamplify/KafkaStreamsStarterTest.java |  53 +-
 .../TopologyErrorHandlerTest.java             |  77 ++-
 .../constants/HttpServerConstants.java        |  14 +-
 .../constants/InitializerConstants.java       |  10 +-
 .../constants/PropertyConstants.java          |  16 +-
 .../context/KafkaStreamsExecutionContext.java |  26 +-
 .../converter/AvroToJsonConverter.java        |  40 +-
 .../converter/JsonToAvroConverter.java        | 246 ++++---
 .../deduplication/DedupKeyProcessor.java      |  51 +-
 .../deduplication/DedupKeyValueProcessor.java |  39 +-
 .../DedupWithPredicateProcessor.java          |  58 +-
 .../deduplication/DeduplicationUtils.java     | 126 ++--
 .../DlqDeserializationExceptionHandler.java   |  57 +-
 .../error/DlqExceptionHandler.java            |  49 +-
 .../error/DlqProductionExceptionHandler.java  |  51 +-
 .../error/GenericErrorProcessor.java          |  35 +-
 .../kstreamplify/error/ProcessingError.java   |  21 +-
 .../kstreamplify/error/ProcessingResult.java  |  85 ++-
 .../error/TopologyErrorHandler.java           |  82 +--
 .../initializer/KafkaStreamsInitializer.java  | 101 +--
 .../initializer/KafkaStreamsStarter.java      |  13 +-
 .../michelin/kstreamplify/model/DlqTopic.java |   4 +-
 .../model/RestServiceResponse.java            |   7 +-
 .../model/TopologyExposeJsonModel.java        |  41 --
 .../kstreamplify/model/TopologyObject.java    |  25 -
 .../model/TopologyObjectType.java             |  21 -
 .../kstreamplify/model/TopologyPart.java      |  36 --
 .../properties/PropertiesUtils.java           |  37 +-
 ...{RocksDBConfig.java => RocksDbConfig.java} | 100 +--
 .../rest/DefaultProbeController.java          |  63 +-
 .../services/ConvertTopology.java             | 123 ----
 .../kstreamplify/services/ProbeService.java   |  73 ++-
 .../kstreamplify/utils/SerdesUtils.java       |  25 +-
 .../kstreamplify/utils/TopicUtils.java        |  27 +-
 .../kstreamplify/utils/TopicWithSerde.java    |  38 +-
 .../utils/WindowStateStoreUtils.java          |  18 +-
 .../converter/AvroToJsonConverterTest.java    |  59 +-
 .../SpringKafkaStreamsInitializer.java        |  31 +-
 .../properties/KafkaProperties.java           |  14 +-
 .../rest/SpringProbeController.java           |  19 +-
 pom.xml                                       | 603 +++++++++---------
 49 files changed, 1769 insertions(+), 1260 deletions(-)
 create mode 100644 .checkstyle/checkstyle.xml
 create mode 100644 .readme/contributing/check_style.png
 create mode 100644 .readme/contributing/reformat_code.png
 create mode 100644 .readme/contributing/save_actions.png
 create mode 100644 .readme/contributing/scan.png
 delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyExposeJsonModel.java
 delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObject.java
 delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObjectType.java
 delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyPart.java
 rename kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/{RocksDBConfig.java => RocksDbConfig.java} (52%)
 delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ConvertTopology.java
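[The XML markup of the checkstyle.xml diff below was stripped during extraction; only the hunk header survives. As a purely assumed illustration of the shape such a file takes — not a reconstruction of the actual 382 added lines — a minimal Checkstyle configuration looks like this; the specific modules and the 120-character limit are examples, not the project's real rules:]

<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
    "-//Checkstyle//DTD Checkstyle Configuration 1.3//EN"
    "https://checkstyle.org/dtds/configuration_1_3.dtd">
<!-- Illustrative only: common Checkstyle checks, not the actual
     content of .checkstyle/checkstyle.xml from this patch -->
<module name="Checker">
    <property name="charset" value="UTF-8"/>
    <!-- LineLength is a direct child of Checker since Checkstyle 8.24 -->
    <module name="LineLength">
        <property name="max" value="120"/>
    </module>
    <module name="TreeWalker">
        <module name="UnusedImports"/>
        <module name="MissingJavadocType"/>
    </module>
</module>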
diff --git a/.checkstyle/checkstyle.xml b/.checkstyle/checkstyle.xml
new file mode 100644
index 00000000..13b8e22b
--- /dev/null
+++ b/.checkstyle/checkstyle.xml
@@ -0,0 +1,382 @@
+[382 added lines of Checkstyle XML configuration; the markup was stripped during extraction and is not recoverable here]
diff --git a/.github/workflows/on_pull_request.yml b/.github/workflows/on_pull_request.yml
index bce9442f..02231c54 100644
--- a/.github/workflows/on_pull_request.yml
+++ b/.github/workflows/on_pull_request.yml
@@ -23,6 +23,9 @@ jobs:
       - name: Build
         run: mvn clean compile
 
+      - name: Check Style
+        run: mvn checkstyle:check
+
       - name: Test
         run: mvn test
 
diff --git a/.github/workflows/on_push_main.yml b/.github/workflows/on_push_main.yml
index b6cf53ee..6529192e 100644
--- a/.github/workflows/on_push_main.yml
+++ b/.github/workflows/on_push_main.yml
@@ -31,6 +31,9 @@ jobs:
           mvn clean compile
           echo current_version=$(echo $(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)) >> $GITHUB_OUTPUT
 
+      - name: Check Style
+        run: mvn checkstyle:check
+
       - name: Test
         run: mvn test
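[Both workflows now fail the build when `mvn checkstyle:check` reports violations. For that goal to pick up the new .checkstyle/checkstyle.xml, the pom.xml — whose large diff is not shown in this excerpt — needs a maven-checkstyle-plugin entry along these lines. This is a minimal sketch; the plugin and Checkstyle versions are assumptions, not values taken from this patch:]

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-checkstyle-plugin</artifactId>
    <!-- assumed version -->
    <version>3.3.0</version>
    <configuration>
        <!-- point the plugin at the config file added by this commit -->
        <configLocation>.checkstyle/checkstyle.xml</configLocation>
        <consoleOutput>true</consoleOutput>
        <failsOnError>true</failsOnError>
    </configuration>
    <dependencies>
        <!-- pin the Checkstyle engine used by the plugin (assumed version) -->
        <dependency>
            <groupId>com.puppycrawl.tools</groupId>
            <artifactId>checkstyle</artifactId>
            <version>10.12.3</version>
        </dependency>
    </dependencies>
</plugin>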
diff --git a/.readme/contributing/check_style.png b/.readme/contributing/check_style.png
new file mode 100644
index 0000000000000000000000000000000000000000..9db02b4c7ec75b8c28e8a4c2eb340c8208ba27e3
GIT binary patch
literal 25430
[25430 bytes of base85-encoded PNG data omitted]
literal 0
HcmV?d00001

diff --git a/.readme/contributing/reformat_code.png b/.readme/contributing/reformat_code.png
new file mode 100644
index 0000000000000000000000000000000000000000..31bf239b0315a2a5f83f2a7b99dbe40d4becae31
GIT binary patch
literal 16410
[16410 bytes of base85-encoded PNG data omitted]
literal 0
HcmV?d00001

diff --git a/.readme/contributing/save_actions.png b/.readme/contributing/save_actions.png
new file mode 100644
GIT binary patch
literal 22277
[22277 bytes of base85-encoded PNG data omitted; the index line did not survive extraction]
literal 0
HcmV?d00001

diff --git a/.readme/contributing/scan.png b/.readme/contributing/scan.png
new file mode 100644
GIT binary patch
literal 14729
[14729 bytes of base85-encoded PNG data omitted; the index line did not survive extraction]
literal 0
HcmV?d00001

[The remaining diffs from the stat above — CONTRIBUTING.md, the Java sources, and pom.xml — were swallowed by the same extraction damage; only this Javadoc line, apparently from the KafkaStreamsStarterTest.java diff, survives:]
The main test class to extend to execute unit tests on topology

+ * <p>The main test class to extend to execute unit tests on topology.</p>
  * <p>It provides a {@link TopologyTestDriver} and a {@link TestOutputTopic} for the DLQ</p>
  */
 public abstract class KafkaStreamsStarterTest {
     private static final String STATE_DIR = "/tmp/kafka-streams/";

     /**
-     * The topology test driver
+     * The topology test driver.
      */
     protected TopologyTestDriver testDriver;

     /**
-     * The dlq topic, initialized in {@link #generalSetUp()}
+     * The dlq topic, initialized in {@link #generalSetUp()}.
      */
     protected TestOutputTopic<String, KafkaError> dlqTopic;

     /**
-     * Set up topology test driver
+     * Set up topology test driver.
      */
     @BeforeEach
     void generalSetUp() {
@@ -52,7 +51,8 @@ void generalSetUp() {
         KafkaStreamsExecutionContext.registerProperties(properties);
         KafkaStreamsExecutionContext.setSerdesConfig(Collections
-            .singletonMap(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://" + getClass().getName()));
+            .singletonMap(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+                "mock://" + getClass().getName()));

         var starter = getKafkaStreamsStarter();
@@ -61,20 +61,22 @@ void generalSetUp() {
         StreamsBuilder streamsBuilder = new StreamsBuilder();
         starter.topology(streamsBuilder);

-        testDriver = new TopologyTestDriver(streamsBuilder.build(), properties, getInitialWallClockTime());
+        testDriver =
+            new TopologyTestDriver(streamsBuilder.build(), properties, getInitialWallClockTime());

-        dlqTopic = testDriver.createOutputTopic(KafkaStreamsExecutionContext.getDlqTopicName(), new StringDeserializer(), SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
+        dlqTopic = testDriver.createOutputTopic(KafkaStreamsExecutionContext.getDlqTopicName(),
+            new StringDeserializer(), SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
     }

     /**
-     * Method to override to provide the KafkaStreamsStarter to test
+     * Method to override to provide the KafkaStreamsStarter to test.
      *
      * @return The KafkaStreamsStarter to test
      */
     protected abstract KafkaStreamsStarter getKafkaStreamsStarter();

     /**
-     * Default base wall clock time for topology test driver
+     * Default base wall clock time for topology test driver.
      *
      * @return The default wall clock time as instant
      */
@@ -83,7 +85,7 @@ protected Instant getInitialWallClockTime() {
     }

     /**
-     * Method to close everything properly at the end of the test
+     * Method to close everything properly at the end of the test.
      */
     @AfterEach
     void generalTearDown() throws IOException {
@@ -93,26 +95,31 @@ void generalTearDown() throws IOException {
     }

     /**
-     * Creates an input test topic on the testDriver using the provided topicWithSerde
+     * Creates an input test topic on the testDriver using the provided topicWithSerde.
      *
      * @param topicWithSerde The topic with serde used to create the test topic
      * @param <K> The serializable type of the key
      * @param <V> The serializable type of the value
      * @return The corresponding TestInputTopic
      */
-    protected <K, V> TestInputTopic<K, V> createInputTestTopic(TopicWithSerde<K, V> topicWithSerde) {
-        return this.testDriver.createInputTopic(topicWithSerde.getUnPrefixedName(), topicWithSerde.getKeySerde().serializer(), topicWithSerde.getValueSerde().serializer());
+    protected <K, V> TestInputTopic<K, V> createInputTestTopic(
+        TopicWithSerde<K, V> topicWithSerde) {
+        return this.testDriver.createInputTopic(topicWithSerde.getUnPrefixedName(),
+            topicWithSerde.getKeySerde().serializer(), topicWithSerde.getValueSerde().serializer());
     }

     /**
-     * Creates an output test topic on the testDriver using the provided topicWithSerde
+     * Creates an output test topic on the testDriver using the provided topicWithSerde.
      *
      * @param topicWithSerde The topic with serde used to create the test topic
      * @param <K> The serializable type of the key
      * @param <V> The serializable type of the value
      * @return The corresponding TestOutputTopic
      */
-    protected <K, V> TestOutputTopic<K, V> createOutputTestTopic(TopicWithSerde<K, V> topicWithSerde) {
-        return this.testDriver.createOutputTopic(topicWithSerde.getUnPrefixedName(), topicWithSerde.getKeySerde().deserializer(), topicWithSerde.getValueSerde().deserializer());
+    protected <K, V> TestOutputTopic<K, V> createOutputTestTopic(
+        TopicWithSerde<K, V> topicWithSerde) {
+        return this.testDriver.createOutputTopic(topicWithSerde.getUnPrefixedName(),
+            topicWithSerde.getKeySerde().deserializer(),
+            topicWithSerde.getValueSerde().deserializer());
     }
 }
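Note: for context, a minimal sketch of how a downstream project consumes this class. The topology, topic names and assertion below are illustrative assumptions, not part of this patch.

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.TestInputTopic;
    import org.apache.kafka.streams.TestOutputTopic;
    import org.apache.kafka.streams.kstream.Consumed;
    import org.apache.kafka.streams.kstream.Produced;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    // Hypothetical unit test built on KafkaStreamsStarterTest.
    class UppercaseTopologyTest extends KafkaStreamsStarterTest {
        private TestInputTopic<String, String> inputTopic;
        private TestOutputTopic<String, String> outputTopic;

        @Override
        protected KafkaStreamsStarter getKafkaStreamsStarter() {
            return new KafkaStreamsStarter() {
                @Override
                public String dlqTopic() {
                    return "dlqTopic";
                }

                @Override
                public void topology(StreamsBuilder streamsBuilder) {
                    // Read strings, uppercase them, write them back out
                    streamsBuilder
                        .stream("inputTopic", Consumed.with(Serdes.String(), Serdes.String()))
                        .mapValues(value -> value.toUpperCase())
                        .to("outputTopic", Produced.with(Serdes.String(), Serdes.String()));
                }
            };
        }

        @BeforeEach
        void setUp() {
            // testDriver is initialized by KafkaStreamsStarterTest#generalSetUp()
            inputTopic = testDriver.createInputTopic("inputTopic",
                new StringSerializer(), new StringSerializer());
            outputTopic = testDriver.createOutputTopic("outputTopic",
                new StringDeserializer(), new StringDeserializer());
        }

        @Test
        void shouldUppercaseValues() {
            inputTopic.pipeInput("key", "value");
            assertEquals("VALUE", outputTopic.readValue());
        }
    }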
diff --git a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java b/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
index db52f428..c30c20b1 100644
--- a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
+++ b/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
@@ -1,10 +1,13 @@
 package com.michelin.kstreamplify;

+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.michelin.kstreamplify.avro.KafkaError;
 import com.michelin.kstreamplify.error.ProcessingResult;
 import com.michelin.kstreamplify.error.TopologyErrorHandler;
 import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
 import com.michelin.kstreamplify.utils.SerdesUtils;
+import java.util.List;
 import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
@@ -17,20 +20,17 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.util.List;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
 class TopologyErrorHandlerTest extends KafkaStreamsStarterTest {
-    private final String AVRO_TOPIC = "avroTopic";
+    private static final String AVRO_TOPIC = "avroTopic";
+    private static final String STRING_TOPIC = "stringTopic";
+    private static final String OUTPUT_AVRO_TOPIC = "outputAvroTopic";
+    private static final String OUTPUT_STRING_TOPIC = "outputStringTopic";
+    private static final String DLQ_TOPIC = "dlqTopic";
+
     private TestInputTopic<String, KafkaError> avroInputTopic;
-    private final String STRING_TOPIC = "stringTopic";
     private TestInputTopic<String, String> stringInputTopic;
-    private final String OUTPUT_AVRO_TOPIC = "outputAvroTopic";
     private TestOutputTopic<String, KafkaError> avroOutputTopic;
-    private final String OUTPUT_STRING_TOPIC = "outputStringTopic";
     private TestOutputTopic<String, String> stringOutputTopic;
-    private final String DLQ_TOPIC = "dlqTopic";
     private TestOutputTopic<String, KafkaError> dlqTopic;

     @Override
@@ -44,33 +44,44 @@ public String dlqTopic() {

         @Override
         public void topology(StreamsBuilder streamsBuilder) {
             KStream<String, ProcessingResult<String, String>> stringStream = streamsBuilder
-                .stream(STRING_TOPIC, Consumed.with(Serdes.String(), Serdes.String()))
-                .mapValues(value -> "error".equals(value) ?
-                    ProcessingResult.fail(new NullPointerException(), value) : ProcessingResult.success(value));
+                .stream(STRING_TOPIC, Consumed.with(Serdes.String(), Serdes.String()))
+                .mapValues(value -> "error".equals(value)
+                    ? ProcessingResult.fail(new NullPointerException(), value) :
+                    ProcessingResult.success(value));

             TopologyErrorHandler.catchErrors(stringStream)
-                .to(OUTPUT_STRING_TOPIC, Produced.with(Serdes.String(), Serdes.String()));
+                .to(OUTPUT_STRING_TOPIC, Produced.with(Serdes.String(), Serdes.String()));

-            KStream<String, ProcessingResult<KafkaError, KafkaError>> avroStream = streamsBuilder
-                .stream(AVRO_TOPIC, Consumed.with(Serdes.String(), SerdesUtils.<KafkaError>getSerdesForValue()))
-                .mapValues(value -> value == null ?
-                    ProcessingResult.fail(new NullPointerException(), null) : ProcessingResult.success(value));
+            KStream<String, ProcessingResult<KafkaError, KafkaError>> avroStream =
+                streamsBuilder
+                    .stream(AVRO_TOPIC, Consumed.with(Serdes.String(),
+                        SerdesUtils.<KafkaError>getSerdesForValue()))
+                    .mapValues(value -> value == null
+                        ? ProcessingResult.fail(new NullPointerException(), null) :
+                        ProcessingResult.success(value));

             TopologyErrorHandler.catchErrors(avroStream)
-                .to(OUTPUT_AVRO_TOPIC, Produced.with(Serdes.String(), SerdesUtils.<KafkaError>getSerdesForValue()));
+                .to(OUTPUT_AVRO_TOPIC,
+                    Produced.with(Serdes.String(), SerdesUtils.<KafkaError>getSerdesForValue()));
         }
     };
 }

 @BeforeEach
 void setUp() {
-    stringInputTopic = testDriver.createInputTopic(STRING_TOPIC, new StringSerializer(), new StringSerializer());
-    avroInputTopic = testDriver.createInputTopic(AVRO_TOPIC, new StringSerializer(), SerdesUtils.<KafkaError>getSerdesForValue().serializer());
-
-    stringOutputTopic = testDriver.createOutputTopic(OUTPUT_STRING_TOPIC, new StringDeserializer(), new StringDeserializer());
-    avroOutputTopic = testDriver.createOutputTopic(OUTPUT_AVRO_TOPIC, new StringDeserializer(), SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
-
-    dlqTopic = testDriver.createOutputTopic(DLQ_TOPIC, new StringDeserializer(), SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
+    stringInputTopic = testDriver.createInputTopic(STRING_TOPIC, new StringSerializer(),
+        new StringSerializer());
+    avroInputTopic = testDriver.createInputTopic(AVRO_TOPIC, new StringSerializer(),
+        SerdesUtils.<KafkaError>getSerdesForValue().serializer());
+
+    stringOutputTopic =
+        testDriver.createOutputTopic(OUTPUT_STRING_TOPIC, new StringDeserializer(),
+            new StringDeserializer());
+    avroOutputTopic = testDriver.createOutputTopic(OUTPUT_AVRO_TOPIC, new StringDeserializer(),
+        SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
+
+    dlqTopic = testDriver.createOutputTopic(DLQ_TOPIC, new StringDeserializer(),
+        SerdesUtils.<KafkaError>getSerdesForValue().deserializer());
 }

 @Test
@@ -86,7 +97,7 @@ void shouldContinueWhenProcessingValueIsValid() {
 }

 @Test
-void shouldSendExceptionToDLQWhenProcessingValueIsInvalid() {
+void shouldSendExceptionToDlqWhenProcessingValueIsInvalid() {
     stringInputTopic.pipeInput("key", "error");

     var resultDlq = dlqTopic.readValuesToList();
@@ -100,13 +111,13 @@
 void shouldContinueWhenProcessingValueIsValidAvro() {
     KafkaError avroModel = KafkaError.newBuilder()
-        .setTopic("topic")
-        .setStack("stack")
-        .setPartition(0)
-        .setOffset(0)
-        .setCause("cause")
-        .setValue("value")
-        .build();
+        .setTopic("topic")
+        .setStack("stack")
+        .setPartition(0)
+        .setOffset(0)
+        .setCause("cause")
+        .setValue("value")
+        .build();

     avroInputTopic.pipeInput("key", avroModel);
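Note: the contract this test exercises is: wrap each processing step in a ProcessingResult, then let TopologyErrorHandler.catchErrors branch failures to the DLQ. A minimal production-side sketch, with hypothetical topic names and a stand-in parse() helper:

    // Route processing failures to the DLQ while valid records continue.
    KStream<String, ProcessingResult<String, String>> parsed = streamsBuilder
        .stream("ordersTopic", Consumed.with(Serdes.String(), Serdes.String()))
        .mapValues(value -> {
            try {
                return ProcessingResult.<String, String>success(parse(value)); // parse() is a stand-in
            } catch (Exception e) {
                // The failed record value travels with the exception to the DLQ
                return ProcessingResult.<String, String>fail(e, value);
            }
        });

    // catchErrors() splits the stream: failures go to the configured DLQ topic,
    // successes are unwrapped and returned for further processing.
    TopologyErrorHandler.catchErrors(parsed)
        .to("validatedOrdersTopic", Produced.with(Serdes.String(), Serdes.String()));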
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java
index ebd625df..9ee74b92 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java
@@ -4,37 +4,37 @@
 import lombok.NoArgsConstructor;

 /**
- * HTTP server constants
+ * HTTP server constants.
  */
 @NoArgsConstructor(access = AccessLevel.PRIVATE)
 public final class HttpServerConstants {
     /**
-     * Readiness probe path property name
+     * Readiness probe path property name.
      */
     public static final String READINESS_PROPERTY = "readiness_path";

     /**
-     * Liveness probe path property name
+     * Liveness probe path property name.
      */
     public static final String LIVENESS_PROPERTY = "liveness_path";

     /**
-     * Topology property name
+     * Topology property name.
      */
     public static final String TOPOLOGY_PROPERTY = "expose_topology_path";

     /**
-     * Readiness default path
+     * Readiness default path.
      */
     public static final String READINESS_DEFAULT_PATH = "ready";

     /**
-     * Liveness default path
+     * Liveness default path.
      */
     public static final String LIVENESS_DEFAULT_PATH = "liveness";

     /**
-     * Topology default path
+     * Topology default path.
      */
     public static final String TOPOLOGY_DEFAULT_PATH = "topology";
 }
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java
index 67ed8daf..1d278b7c 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java
@@ -4,27 +4,27 @@
 import lombok.NoArgsConstructor;

 /**
- * Kafka Streams initialization constants
+ * Kafka Streams initialization constants.
  */
 @NoArgsConstructor(access = AccessLevel.PRIVATE)
 public final class InitializerConstants {
     /**
-     * Server port property name
+     * Server port property name.
      */
     public static final String SERVER_PORT_PROPERTY = "server.port";

     /**
-     * Default host
+     * Default host.
      */
     public static final String LOCALHOST = "localhost";

     /**
-     * Name of the property containing of the name of the var env containing the IP
+     * Name of the property holding the name of the environment variable that contains the IP.
      */
     public static final String IP_SYSTEM_VARIABLE_PROPERTY = "ip.env.var.name";

     /**
-     * Default var env name containing the IP
+     * Default environment variable name containing the IP.
      */
     public static final String IP_SYSTEM_VARIABLE_DEFAULT = "MY_POD_IP";
 }
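Note: these keys pick the HTTP probe endpoints at runtime. A minimal sketch of overriding the defaults, assuming the values are read from the registered Kafka Streams properties; the paths are example values only:

    import java.util.Properties;
    import com.michelin.kstreamplify.constants.HttpServerConstants;
    import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;

    // Hypothetical override of the default probe paths ("ready", "liveness",
    // "topology" per the constants above).
    Properties properties = new Properties();
    properties.setProperty(HttpServerConstants.READINESS_PROPERTY, "healthz/ready");
    properties.setProperty(HttpServerConstants.LIVENESS_PROPERTY, "healthz/live");
    properties.setProperty(HttpServerConstants.TOPOLOGY_PROPERTY, "debug/topology");
    KafkaStreamsExecutionContext.registerProperties(properties);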
 */
    public static final String PREFIX_PROPERTY_NAME = "prefix";

    /**
-     * Topic property name
+     * Topic property name.
     */
    public static final String TOPIC_PROPERTY_NAME = "topic";

    /**
-     * Remap property name
+     * Remap property name.
     */
    public static final String REMAP_PROPERTY_NAME = "remap";

    /**
-     * Default prefix property name
+     * Default prefix property name.
     */
    public static final String SELF = "self";
 }
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java
index b1cb1cbe..35f3543f 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java
@@ -1,19 +1,19 @@
 package com.michelin.kstreamplify.context;

-import com.michelin.kstreamplify.constants.PropertyConstants;
+import static com.michelin.kstreamplify.constants.PropertyConstants.PREFIX_PROPERTY_NAME;
+import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR;
+import static com.michelin.kstreamplify.constants.PropertyConstants.SELF;
+
+import java.util.Map;
+import java.util.Properties;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.streams.StreamsConfig;

-import java.util.Map;
-import java.util.Properties;
-
-import static com.michelin.kstreamplify.constants.PropertyConstants.*;
-
 /**
- * The class to represent the context of the KStream
+ * The class to represent the context of the KStream.
  */
 @Slf4j
 public class KafkaStreamsExecutionContext {
@@ -38,7 +38,6 @@ public class KafkaStreamsExecutionContext {
      *     prefix:
      *       self: "myNamespacePrefix."
      * }
-     *
      */
     @Getter
     private static String prefix;

     private KafkaStreamsExecutionContext() {
     }

     /**
-     * Register KStream properties
+     * Register KStream properties.
      *
      * @param properties The Kafka Streams properties
      */
@@ -57,14 +56,17 @@ public static void registerProperties(Properties properties) {
         }

         prefix = properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + SELF, "");
-        if (StringUtils.isNotBlank(prefix) && properties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
+        if (StringUtils.isNotBlank(prefix)
+            && properties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
             properties.setProperty(StreamsConfig.APPLICATION_ID_CONFIG,
-                prefix.concat(properties.getProperty(StreamsConfig.APPLICATION_ID_CONFIG)));
+                prefix.concat(properties.getProperty(StreamsConfig.APPLICATION_ID_CONFIG)));
         }

         KafkaStreamsExecutionContext.properties = properties;

         StringBuilder stringBuilderProperties = new StringBuilder("Kafka Stream properties:\n");
-        properties.forEach((key, value) -> stringBuilderProperties.append("\t").append(key).append(" = ").append(value).append("\n"));
+        properties.forEach(
+            (key, value) -> stringBuilderProperties.append("\t").append(key).append(" = ")
+                .append(value).append("\n"));
         log.info(stringBuilderProperties.toString());
     }
 }
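Note: a small sketch of the prefixing behavior registerProperties implements; the prefix and application id values are hypothetical:

    import java.util.Properties;
    import org.apache.kafka.streams.StreamsConfig;
    import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;

    // With prefix.self set, registerProperties() prefixes the application.id.
    Properties properties = new Properties();
    properties.setProperty("prefix.self", "myNamespacePrefix.");
    properties.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "myApp");

    KafkaStreamsExecutionContext.registerProperties(properties);

    // application.id is now "myNamespacePrefix.myApp"
    assert "myNamespacePrefix.myApp"
        .equals(properties.getProperty(StreamsConfig.APPLICATION_ID_CONFIG));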
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java
index e1ef992c..d74acf2a 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java
@@ -19,27 +19,28 @@
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
-import org.apache.avro.Schema.Field;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.util.Utf8;
-
 import java.time.Instant;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.util.Utf8;

 /**
- * The class to convert Avro to Json
+ * The class to convert Avro to Json.
  */
 public class AvroToJsonConverter {
-    private AvroToJsonConverter() { }
+    private AvroToJsonConverter() {
+    }

     private static final Gson gson = new GsonBuilder()
-        .setPrettyPrinting()
-        .create();
+        .setPrettyPrinting()
+        .create();

     /**
-     * Convert the record from avro format to json format
+     * Convert the record from avro format to json format.
+     *
      * @param inputRecord the record in avro format
      * @return the record in json format
      */
@@ -48,7 +49,8 @@ public static String convertRecord(GenericRecord inputRecord) {
     }

     /**
-     * convert avro to a map for json format
+     * Convert avro to a map for json format.
+     *
      * @param inputRecord record in avro
      * @return map for json format
      */
@@ -64,15 +66,15 @@ private static Map<String, Object> recordAsMap(GenericRecord inputRecord) {

         if (recordValue instanceof List recordValueAsList) {
             recordValue = recordValueAsList
-                .stream()
-                .map(value -> {
-                    if (value instanceof GenericRecord genericRecord) {
-                        return recordAsMap(genericRecord);
-                    } else {
-                        return value.toString();
-                    }
-                })
-                .toList();
+                .stream()
+                .map(value -> {
+                    if (value instanceof GenericRecord genericRecord) {
+                        return recordAsMap(genericRecord);
+                    } else {
+                        return value.toString();
+                    }
+                })
+                .toList();
         }

         if (recordValue instanceof Map recordValueAsMap) {
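Note: a usage sketch for this converter. KafkaError is the project's own Avro model; the field values are illustrative:

    import com.michelin.kstreamplify.avro.KafkaError;
    import com.michelin.kstreamplify.converter.AvroToJsonConverter;

    // Convert an Avro record to pretty-printed JSON.
    KafkaError error = KafkaError.newBuilder()
        .setTopic("topic")
        .setStack("stack")
        .setPartition(0)
        .setOffset(0)
        .setCause("cause")
        .setValue("value")
        .build();

    String json = AvroToJsonConverter.convertRecord(error);
    // json now holds a pretty-printed JSON rendering of the record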
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java
index b3ba9455..df86e144 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java
@@ -4,11 +4,6 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
-import org.apache.avro.LogicalTypes;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.specific.SpecificRecordBase;
-
 import java.math.BigDecimal;
 import java.math.MathContext;
 import java.math.RoundingMode;
@@ -17,15 +12,20 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
+import org.apache.avro.LogicalTypes;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.specific.SpecificRecordBase;

 /**
- * The class to convert Json to Avro
+ * The class to convert Json to Avro.
  */
 public class JsonToAvroConverter {

     /**
-     * convert a file in json to avro
-     * @param file the file in json
+     * Convert a file in json to avro.
+     *
+     * @param file   the file in json
      * @param schema the avro schema to use
      * @return the record in avro
      */
@@ -34,14 +34,17 @@ public static SpecificRecordBase jsonToAvro(String file, Schema schema) {
     }

     /**
-     * convert json to avro
+     * Convert json to avro.
+     *
      * @param jsonEvent the json record
-     * @param schema the avro schema to use
+     * @param schema    the avro schema to use
      * @return the record in avro
      */
     public static SpecificRecordBase jsonToAvro(JsonObject jsonEvent, Schema schema) {
         try {
-            SpecificRecordBase record = baseClass(schema.getNamespace(), schema.getName()).getDeclaredConstructor().newInstance();
+            SpecificRecordBase record =
+                baseClass(schema.getNamespace(), schema.getName()).getDeclaredConstructor()
+                    .newInstance();
             populateGenericRecordFromJson(jsonEvent, record);
             return record;
         } catch (Exception e) {
@@ -50,107 +53,127 @@ public static SpecificRecordBase jsonToAvro(JsonObject jsonEvent, Schema schema)
         }
     }

     /**
-     * populate avro records from json
+     * Populate avro records from json.
+     *
      * @param jsonObject json data to provide to the avro record
-     * @param record the avro record to populate
+     * @param message    the avro record to populate
      */
-    private static void populateGenericRecordFromJson(JsonObject jsonObject, SpecificRecordBase record) {
-        // Iterate over object attributes
-        jsonObject.keySet().forEach(
-            currentKey -> {
-                try {
-                    var currentValue = jsonObject.get(currentKey);
+    private static void populateGenericRecordFromJson(JsonObject jsonObject,
+                                                      SpecificRecordBase message) {
+        // Iterate over object attributes
+        jsonObject.keySet().forEach(
+            currentKey -> {
+                try {
+                    var currentValue = jsonObject.get(currentKey);

-                    // If this is an object, add to prefix and call method again
-                    if (currentValue instanceof JsonObject currentValueJsonObject) {
-                        Schema currentSchema = record.getSchema().getField(currentKey).schema();
+                    // If this is an object, add to prefix and call method again
+                    if (currentValue instanceof JsonObject currentValueJsonObject) {
+                        Schema currentSchema = message.getSchema().getField(currentKey).schema();

-                        // If the current value is a UNION
-                        if (currentSchema.getType().equals(Schema.Type.UNION)) {
-                            // Then research the first NOT NULL sub value
-                            Optional<Schema> notNullSchema = currentSchema.getTypes().stream()
-                                .filter(s -> !s.getType().equals(Schema.Type.NULL))
-                                .findAny();
+                        // If the current value is a UNION
+                        if (currentSchema.getType().equals(Schema.Type.UNION)) {
+                            // Then research the first NOT NULL sub value
+                            Optional<Schema> notNullSchema = currentSchema.getTypes().stream()
+                                .filter(s -> !s.getType().equals(Schema.Type.NULL))
+                                .findAny();

                             if (notNullSchema.isPresent()) {
                                 currentSchema = notNullSchema.get();
                             }
                         }

                         switch (currentSchema.getType()) {
                             case RECORD -> {
-                                SpecificRecordBase currentRecord = baseClass(record.getSchema().getNamespace(), currentSchema.getName()).getDeclaredConstructor().newInstance();
-                                populateGenericRecordFromJson(currentValueJsonObject, currentRecord);
-                                record.put(currentKey, currentRecord);
+                                SpecificRecordBase currentRecord =
+                                    baseClass(message.getSchema().getNamespace(),
+                                        currentSchema.getName()).getDeclaredConstructor()
+                                        .newInstance();
+                                populateGenericRecordFromJson(currentValueJsonObject,
+                                    currentRecord);
+                                message.put(currentKey, currentRecord);
                             }
                             case MAP -> {
                                 Map map = new HashMap<>();
-                                if (!currentSchema.getValueType().getType().equals(Schema.Type.RECORD)) {
-                                    for (String key : currentValueJsonObject.keySet()) {
-                                        Object value = populateFieldWithCorrespondingType(currentValueJsonObject.get(key), currentSchema.getValueType().getType());
-                                        map.put(key, value);
-                                    }
-                                } else {
-                                    for (String key : currentValueJsonObject.keySet()) {
-                                        SpecificRecordBase mapValueRecord = baseClass(record.getSchema().getNamespace(), currentSchema.getValueType().getName()).getDeclaredConstructor().newInstance();
-                                        populateGenericRecordFromJson(currentValueJsonObject.get(key).getAsJsonObject(), mapValueRecord);
-                                        map.put(key, mapValueRecord);
-                                    }
+                                if (!currentSchema.getValueType().getType()
+                                    .equals(Schema.Type.RECORD)) {
+                                    for (String key : currentValueJsonObject.keySet()) {
+                                        Object value = populateFieldWithCorrespondingType(
+                                            currentValueJsonObject.get(key),
+                                            currentSchema.getValueType().getType());
+                                        map.put(key, value);
+                                    }
+                                } else {
+                                    for (String key : currentValueJsonObject.keySet()) {
+                                        SpecificRecordBase mapValueRecord =
+                                            baseClass(message.getSchema().getNamespace(),
+                                                currentSchema.getValueType()
+                                                    .getName()).getDeclaredConstructor()
+                                                .newInstance();
+                                        populateGenericRecordFromJson(
+                                            currentValueJsonObject.get(key).getAsJsonObject(),
+                                            mapValueRecord);
+                                        map.put(key, mapValueRecord);
                                     }
-                                record.put(currentKey, map);
                                 }
-                            default -> record.put(currentKey,
-                                populateFieldWithCorrespondingType(currentValue, currentSchema.getType()));
+                                message.put(currentKey, map);
                             }
+                            default -> message.put(currentKey,
+                                populateFieldWithCorrespondingType(currentValue,
+                                    currentSchema.getType()));
                         }
+                    } else if (currentValue instanceof JsonArray jsonArray) { // If this is an Array, call method for each one of them
+                        var arraySchema = message.getSchema().getField(currentKey).schema();
+                        Schema arrayType = arraySchema.getType() != Schema.Type.UNION
+                            ? arraySchema :
+                            arraySchema.getTypes().stream()
+                                .filter(s -> s.getType() != Schema.Type.NULL)
+                                .findFirst().get();
+                        Schema elementType = arrayType.getElementType();

-                    // If this is an Array, call method for each one of them
-                    else if (currentValue instanceof JsonArray jsonArray) {
-                        var arraySchema = record.getSchema().getField(currentKey).schema();
-                        Schema arrayType = arraySchema.getType() != Schema.Type.UNION ?
-                            arraySchema :
-                            arraySchema.getTypes().stream()
-                                .filter(s -> s.getType() != Schema.Type.NULL)
-                                .findFirst().get();
-                        Schema elementType = arrayType.getElementType();
-
-                        if (elementType != null && Schema.Type.RECORD.equals(elementType.getType())) {
-                            ArrayList recordArray = new ArrayList<>();
-                            for (int i = 0; i < jsonArray.size(); i++) {
-                                SpecificRecordBase currentRecord = baseClass(record.getSchema().getNamespace(), elementType.getName()).getDeclaredConstructor().newInstance();
-                                populateGenericRecordFromJson((JsonObject) jsonArray.get(i), currentRecord);
-                                recordArray.add(currentRecord);
-                            }
-                            record.put(currentKey, recordArray);
-                        } else {
-                            ArrayList objArray = new ArrayList<>();
-                            for (int i = 0; i < ((JsonArray) currentValue).size(); i++) {
-                                Object obj = populateFieldWithCorrespondingType((((JsonArray) currentValue).get(i)), elementType.getType());
-                                objArray.add(obj);
-                            }
-                            record.put(currentKey, objArray);
+                        if (elementType != null
+                            && Schema.Type.RECORD.equals(elementType.getType())) {
+                            ArrayList recordArray = new ArrayList<>();
+                            for (int i = 0; i < jsonArray.size(); i++) {
+                                SpecificRecordBase currentRecord =
+                                    baseClass(message.getSchema().getNamespace(),
+                                        elementType.getName()).getDeclaredConstructor()
+                                        .newInstance();
+                                populateGenericRecordFromJson((JsonObject) jsonArray.get(i),
+                                    currentRecord);
+                                recordArray.add(currentRecord);
                             }
-                        }
-                    // Otherwise, put the value in the record after parsing according to its corresponding schema type
-                    else {
-                        if (!jsonObject.get(currentKey).isJsonNull()) {
-                            populateFieldInRecordWithCorrespondingType(jsonObject, currentKey, record);
+                            message.put(currentKey, recordArray);
+                        } else {
+                            ArrayList objArray = new ArrayList<>();
+                            for (int i = 0; i < ((JsonArray) currentValue).size(); i++) {
+                                Object obj = populateFieldWithCorrespondingType(
+                                    (((JsonArray) currentValue).get(i)), elementType.getType());
+                                objArray.add(obj);
+                            }
+                            message.put(currentKey, objArray);
+                        }
+                    } else { // Otherwise, put the value in the record after parsing according to its corresponding schema type
+                        if (!jsonObject.get(currentKey).isJsonNull()) {
+                            populateFieldInRecordWithCorrespondingType(jsonObject, currentKey,
+                                message);
                         }
                     }
-                } catch (Exception e) {
-                    throw new RuntimeException(e);
                 }
-            );
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        );
     }

     /**
-     * populate field with corresponding type
+     * Populate field with corresponding type.
+     *
      * @param jsonElement the json element to convert
-     * @param type the type of the element
+     * @param type        the type of the element
      * @return the element converted with the corresponding type
      */
-    private static Object populateFieldWithCorrespondingType(JsonElement jsonElement, Schema.Type type){
+    private static Object populateFieldWithCorrespondingType(JsonElement jsonElement,
+                                                             Schema.Type type) {
         return switch (type) {
             case INT -> jsonElement.getAsInt();
             case LONG -> jsonElement.getAsLong();
@@ -162,26 +185,32 @@ private static Object populateFieldWithCorrespondingType(JsonElement jsonElement
     }

     /**
-     * populate field in record with corresponding type
+     * Populate field in record with corresponding type.
+     *
      * @param jsonObject data to provide to the avro record
-     * @param fieldName the name to populate
-     * @param result the avro record populated
+     * @param fieldName  the name to populate
+     * @param result     the avro record populated
      */
-    private static void populateFieldInRecordWithCorrespondingType(JsonObject jsonObject, String fieldName, GenericRecord result) {
+    private static void populateFieldInRecordWithCorrespondingType(JsonObject jsonObject,
+                                                                   String fieldName,
+                                                                   GenericRecord result) {
         Schema fieldSchema = result.getSchema().getField(fieldName).schema();
-        Optional<Schema> optionalFieldType = fieldSchema.getType() != Schema.Type.UNION ? Optional.of(fieldSchema) :
+        Optional<Schema> optionalFieldType =
+            fieldSchema.getType() != Schema.Type.UNION ? Optional.of(fieldSchema) :
                 fieldSchema.getTypes()
-                    .stream()
-                    .filter(s -> s.getType() != Schema.Type.NULL)
-                    .findFirst();
+                    .stream()
+                    .filter(s -> s.getType() != Schema.Type.NULL)
+                    .findFirst();

         if (optionalFieldType.isPresent()) {
             Schema fieldType = optionalFieldType.get();
             switch (fieldType.getType()) {
                 case INT -> result.put(fieldName, jsonObject.get(fieldName).getAsInt());
                 case LONG -> {
-                    if (fieldType.getLogicalType() != null && fieldType.getLogicalType().getName().equals("timestamp-millis")) {
-                        result.put(fieldName, Instant.ofEpochSecond(jsonObject.get(fieldName).getAsLong()));
+                    if (fieldType.getLogicalType() != null
+                        && fieldType.getLogicalType().getName().equals("timestamp-millis")) {
+                        result.put(fieldName,
+                            Instant.ofEpochSecond(jsonObject.get(fieldName).getAsLong()));
                     } else {
                         result.put(fieldName, jsonObject.get(fieldName).getAsLong());
                     }
@@ -190,12 +219,16 @@
                 case DOUBLE -> result.put(fieldName, jsonObject.get(fieldName).getAsDouble());
                 case BOOLEAN -> result.put(fieldName, jsonObject.get(fieldName).getAsBoolean());
                 case BYTES -> {
-                    if (fieldType.getLogicalType() != null && fieldType.getLogicalType().getName().equals("decimal")) {
+                    if (fieldType.getLogicalType() != null
+                        && fieldType.getLogicalType().getName().equals("decimal")) {
                         result.put(
-                            fieldName,
-                            new BigDecimal(jsonObject.get(fieldName).getAsString())
-                                .setScale(((LogicalTypes.Decimal) fieldType.getLogicalType()).getScale(), RoundingMode.HALF_UP)
-                                .round(new MathContext(((LogicalTypes.Decimal) fieldType.getLogicalType()).getPrecision()))
+                            fieldName,
+                            new BigDecimal(jsonObject.get(fieldName).getAsString())
+                                .setScale(
+                                    ((LogicalTypes.Decimal) fieldType.getLogicalType()).getScale(),
+                                    RoundingMode.HALF_UP)
+                                .round(new MathContext(
+                                    ((LogicalTypes.Decimal) fieldType.getLogicalType()).getPrecision()))
                         );
                     } else {
                         // This is not supposed to happen, that would mean that the given field is in Byte format
@@ -208,9 +241,10 @@
     }

     /**
-     * get base class
+     * Get base class.
+     *
      * @param baseNamespace the namespace of the class
-     * @param typeName the class type
+     * @param typeName      the class type
      * @return the base class
      */
     @SuppressWarnings("unchecked")
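Note: a round-trip sketch for this converter, reusing the KafkaError model; the JSON field names are assumed from the builder setters shown earlier, and getClassSchema() is the standard Avro-generated accessor:

    import com.michelin.kstreamplify.avro.KafkaError;
    import com.michelin.kstreamplify.converter.JsonToAvroConverter;
    import org.apache.avro.specific.SpecificRecordBase;

    // Hypothetical: rebuild the KafkaError record from its JSON form.
    String json = """
        {
          "topic": "topic",
          "stack": "stack",
          "partition": 0,
          "offset": 0,
          "cause": "cause",
          "value": "value"
        }""";

    SpecificRecordBase rebuilt =
        JsonToAvroConverter.jsonToAvro(json, KafkaError.getClassSchema());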
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java
index 0bb4d140..43ef3909 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java
@@ -1,6 +1,7 @@
 package com.michelin.kstreamplify.deduplication;

 import com.michelin.kstreamplify.error.ProcessingResult;
+import java.time.Duration;
 import org.apache.avro.specific.SpecificRecord;
 import org.apache.kafka.streams.processor.PunctuationType;
 import org.apache.kafka.streams.processor.api.Processor;
@@ -9,36 +10,36 @@
 import org.apache.kafka.streams.state.TimestampedKeyValueStore;
 import org.apache.kafka.streams.state.ValueAndTimestamp;

-import java.time.Duration;
-
 /**
- * Transformer class for the deduplication mechanism on keys of a given topic
+ * Transformer class for the deduplication mechanism on keys of a given topic.
  *
  * @param <V> The type of the value
  */
-public class DedupKeyProcessor<V extends SpecificRecord> implements Processor<String, V, String, ProcessingResult<V, V>> {
+public class DedupKeyProcessor<V extends SpecificRecord>
+    implements Processor<String, V, String, ProcessingResult<V, V>> {

     /**
-     * Kstream context for this transformer
+     * Kstream context for this transformer.
      */
     private ProcessorContext<String, ProcessingResult<V, V>> processorContext;
+
     /**
-     * Window store containing all the records seen on the given window
+     * Window store containing all the records seen on the given window.
      */
     private TimestampedKeyValueStore<String, String> dedupTimestampedStore;

     /**
-     * Window store name, initialized @ construction
+     * Window store name, initialized @ construction.
      */
     private final String dedupStoreName;

     /**
-     * Retention window for the statestore. Used for fetching data
+     * Retention window for the statestore. Used for fetching data.
      */
     private final Duration retentionWindowDuration;

     /**
-     * Constructor
+     * Constructor.
      *
      * @param dedupStoreName The name of the state store
      * @param retentionWindowDuration The retentionWindow Duration
      */
     public DedupKeyProcessor(String dedupStoreName, Duration retentionWindowDuration) {
@@ -54,32 +55,36 @@ public void init(ProcessorContext<String, ProcessingResult<V, V>> context) {

         dedupTimestampedStore = this.processorContext.getStateStore(dedupStoreName);

-        processorContext.schedule(Duration.ofHours(1), PunctuationType.WALL_CLOCK_TIME, currentTimestamp -> {
-            try (var iterator = dedupTimestampedStore.all()) {
-                while (iterator.hasNext()) {
-                    var currentRecord = iterator.next();
-                    if (currentRecord.value.timestamp() + retentionWindowDuration.toMillis() < currentTimestamp) {
-                        dedupTimestampedStore.delete(currentRecord.key);
+        processorContext.schedule(Duration.ofHours(1), PunctuationType.WALL_CLOCK_TIME,
+            currentTimestamp -> {
+                try (var iterator = dedupTimestampedStore.all()) {
+                    while (iterator.hasNext()) {
+                        var currentRecord = iterator.next();
+                        if (currentRecord.value.timestamp() + retentionWindowDuration.toMillis()
+                            < currentTimestamp) {
+                            dedupTimestampedStore.delete(currentRecord.key);
+                        }
                     }
                 }
-            }
-        });
+            });
     }

     @Override
-    public void process(Record<String, V> record) {
-        String key = record.key();
+    public void process(Record<String, V> message) {
+        String key = message.key();
         try {
             // Retrieve the matching key in the statestore and return null if found (signaling a duplicate)
             if (dedupTimestampedStore.get(key) == null) {
-                // First time we see this record, store entry in the windowstore and forward the record to the output
-                dedupTimestampedStore.put(key, ValueAndTimestamp.make(key, processorContext.currentStreamTimeMs()));
+                // First time we see this record, store entry in the window store and forward the record to the output
+                dedupTimestampedStore.put(key,
+                    ValueAndTimestamp.make(key, processorContext.currentStreamTimeMs()));

-                processorContext.forward(ProcessingResult.wrapRecordSuccess(record));
+                processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
             }
         } catch (Exception e) {
-            processorContext.forward(ProcessingResult.wrapRecordFailure(e, record, "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform"));
+            processorContext.forward(ProcessingResult.wrapRecordFailure(e, message,
+                "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform"));
         }
     }
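Note: this processor is rarely wired by hand; the DeduplicationUtils helpers further down in this patch build the state store and repartition for you. A direct-wiring sketch for clarity, with hypothetical store, topic, and retention values:

    import java.time.Duration;
    import com.michelin.kstreamplify.avro.KafkaError;
    import com.michelin.kstreamplify.deduplication.DedupKeyProcessor;
    import com.michelin.kstreamplify.error.ProcessingResult;
    import com.michelin.kstreamplify.utils.SerdesUtils;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.streams.kstream.KStream;
    import org.apache.kafka.streams.state.Stores;

    // Build the timestamped store the processor expects, then attach it.
    var dedupStore = Stores.timestampedKeyValueStoreBuilder(
        Stores.persistentTimestampedKeyValueStore("dedupStore"),
        Serdes.String(), Serdes.String());
    streamsBuilder.addStateStore(dedupStore);

    // Records whose key was already seen within the window are dropped.
    KStream<String, ProcessingResult<KafkaError, KafkaError>> deduplicated =
        streamsBuilder
            .stream("inputTopic",
                Consumed.with(Serdes.String(), SerdesUtils.<KafkaError>getSerdesForValue()))
            .process(() -> new DedupKeyProcessor<>("dedupStore", Duration.ofDays(1)),
                "dedupStore");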
 }
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java
index 4520d1b2..7ec9e813 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java
@@ -2,42 +2,44 @@

 import com.michelin.kstreamplify.error.ProcessingResult;

+import java.time.Duration;
+import java.time.Instant;
 import org.apache.avro.specific.SpecificRecord;
 import org.apache.kafka.streams.processor.api.Processor;
 import org.apache.kafka.streams.processor.api.ProcessorContext;
 import org.apache.kafka.streams.processor.api.Record;
 import org.apache.kafka.streams.state.WindowStore;

-import java.time.Duration;
-import java.time.Instant;
-
 /**
- * Transformer class for the deduplication mechanism on both keys and values of a given topic
+ * Transformer class for the deduplication mechanism on both keys and values of a given topic.
  *
  * @param <V> The type of the value
  */
-public class DedupKeyValueProcessor<V extends SpecificRecord> implements Processor<String, V, String, ProcessingResult<V, V>> {
+public class DedupKeyValueProcessor<V extends SpecificRecord>
+    implements Processor<String, V, String, ProcessingResult<V, V>> {

     /**
-     * Kstream context for this transformer
+     * Kstream context for this transformer.
      */
     private ProcessorContext<String, ProcessingResult<V, V>> processorContext;
+
     /**
-     * Window store containing all the records seen on the given window
+     * Window store containing all the records seen on the given window.
      */
     private WindowStore<String, V> dedupWindowStore;

     /**
-     * Window store name, initialized @ construction
+     * Window store name, initialized @ construction.
      */
     private final String windowStoreName;
+
     /**
-     * Retention window for the statestore. Used for fetching data
+     * Retention window for the statestore. Used for fetching data.
      */
     private final Duration retentionWindowDuration;

     /**
-     * Constructor method
+     * Constructor.
      *
      * @param windowStoreName The window store name
      * @param retentionWindowHours The retention window duration
      */
@@ -55,26 +57,29 @@ public void init(ProcessorContext<String, ProcessingResult<V, V>> context) {
     }

     @Override
-    public void process(Record<String, V> record) {
+    public void process(Record<String, V> message) {
         try {
             // Get the record timestamp
-            var currentInstant = Instant.ofEpochMilli(record.timestamp());
+            var currentInstant = Instant.ofEpochMilli(message.timestamp());

             // Retrieve all the matching keys in the stateStore and return null if found it (signaling a duplicate)
-            try (var resultIterator = dedupWindowStore.backwardFetch(record.key(), currentInstant.minus(retentionWindowDuration), currentInstant.plus(retentionWindowDuration))) {
+            try (var resultIterator = dedupWindowStore.backwardFetch(message.key(),
+                currentInstant.minus(retentionWindowDuration),
+                currentInstant.plus(retentionWindowDuration))) {
                 while (resultIterator != null && resultIterator.hasNext()) {
                     var currentKeyValue = resultIterator.next();
-                    if (record.value().equals(currentKeyValue.value)) {
+                    if (message.value().equals(currentKeyValue.value)) {
                         return;
                     }
                 }
             }

             // First time we see this record, store entry in the window store and forward the record to the output
-            dedupWindowStore.put(record.key(), record.value(), record.timestamp());
-            processorContext.forward(ProcessingResult.wrapRecordSuccess(record));
+            dedupWindowStore.put(message.key(), message.value(), message.timestamp());
+            processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
         } catch (Exception e) {
-            processorContext.forward(ProcessingResult.wrapRecordFailure(e, record, "Couldn't figure out what to do with the current payload: An unlikely error occured during deduplication transform"));
+            processorContext.forward(ProcessingResult.wrapRecordFailure(e, message,
+                "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform"));
         }
     }
 }
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java
index 0a0a8998..73d3098b 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java
@@ -2,6 +2,8 @@

 import com.michelin.kstreamplify.error.ProcessingResult;

+import java.time.Duration;
+import java.util.function.Function;
 import org.apache.avro.specific.SpecificRecord;
 import org.apache.kafka.streams.processor.PunctuationType;
 import org.apache.kafka.streams.processor.api.Processor;
@@ -10,49 +12,49 @@
 import org.apache.kafka.streams.state.TimestampedKeyValueStore;
 import org.apache.kafka.streams.state.ValueAndTimestamp;

-import java.time.Duration;
-import java.util.function.Function;
-
 /**
- * Transformer class for the deduplication mechanism on keys of a given topic
+ * Transformer class for the deduplication mechanism on keys of a given topic.
  *
  * @param <K> The type of the key
  * @param <V> The type of the value
  */
-public class DedupWithPredicateProcessor<K, V extends SpecificRecord> implements Processor<K, V, K, ProcessingResult<V, V>> {
+public class DedupWithPredicateProcessor<K, V extends SpecificRecord>
+    implements Processor<K, V, K, ProcessingResult<V, V>> {

     /**
-     * Kstream context for this transformer
+     * Kstream context for this transformer.
      */
     private ProcessorContext<K, ProcessingResult<V, V>> processorContext;
+
     /**
-     * Window store containing all the records seen on the given window
+     * Window store containing all the records seen on the given window.
      */
     private TimestampedKeyValueStore<String, V> dedupTimestampedStore;

     /**
-     * Window store name, initialized @ construction
+     * Window store name, initialized @ construction.
      */
     private final String dedupStoreName;

     /**
-     * Retention window for the statestore. Used for fetching data
+     * Retention window for the statestore. Used for fetching data.
      */
     private final Duration retentionWindowDuration;

     /**
-     *
+     * Deduplication key extractor.
      */
     private final Function<V, String> deduplicationKeyExtractor;

     /**
-     * Constructor method
+     * Constructor.
      *
      * @param dedupStoreName Name of the deduplication state store
      * @param retentionWindowDuration Retention window duration
      * @param deduplicationKeyExtractor Deduplication function
      */
-    public DedupWithPredicateProcessor(String dedupStoreName, Duration retentionWindowDuration, Function<V, String> deduplicationKeyExtractor) {
+    public DedupWithPredicateProcessor(String dedupStoreName, Duration retentionWindowDuration,
+                                       Function<V, String> deduplicationKeyExtractor) {
         this.dedupStoreName = dedupStoreName;
         this.retentionWindowDuration = retentionWindowDuration;
         this.deduplicationKeyExtractor = deduplicationKeyExtractor;
@@ -64,31 +66,35 @@ public void init(ProcessorContext<K, ProcessingResult<V, V>> context) {

         dedupTimestampedStore = this.processorContext.getStateStore(dedupStoreName);

-        processorContext.schedule(Duration.ofHours(1), PunctuationType.WALL_CLOCK_TIME, (currentTimestamp) -> {
-            try (var iterator = dedupTimestampedStore.all()) {
-                while (iterator.hasNext()) {
-                    var currentRecord = iterator.next();
-                    if (currentRecord.value.timestamp() + retentionWindowDuration.toMillis() < currentTimestamp) {
-                        dedupTimestampedStore.delete(currentRecord.key);
+        processorContext.schedule(Duration.ofHours(1), PunctuationType.WALL_CLOCK_TIME,
+            (currentTimestamp) -> {
+                try (var iterator = dedupTimestampedStore.all()) {
+                    while (iterator.hasNext()) {
+                        var currentRecord = iterator.next();
+                        if (currentRecord.value.timestamp() + retentionWindowDuration.toMillis()
+                            < currentTimestamp) {
+                            dedupTimestampedStore.delete(currentRecord.key);
+                        }
                     }
                 }
-            }
-        });
+            });
     }

     @Override
-    public void process(Record<K, V> record) {
+    public void process(Record<K, V> message) {
         try {
-            String identifier = deduplicationKeyExtractor.apply(record.value());
+            String identifier = deduplicationKeyExtractor.apply(message.value());

             // Retrieve the matching identifier in the statestore and return null if found it (signaling a duplicate)
             if (dedupTimestampedStore.get(identifier) == null) {
-                // First time we see this record, store entry in the windowstore and forward the record to the output
-                dedupTimestampedStore.put(identifier, ValueAndTimestamp.make(record.value(),
record.timestamp()));
-                processorContext.forward(ProcessingResult.wrapRecordSuccess(record));
+                // First time we see this record, store entry in the window store and forward the record to the output
+                dedupTimestampedStore.put(identifier,
+                    ValueAndTimestamp.make(message.value(), message.timestamp()));
+                processorContext.forward(ProcessingResult.wrapRecordSuccess(message));
             }
         } catch (Exception e) {
-            processorContext.forward(ProcessingResult.wrapRecordFailure(e, record, "Couldn't figure out what to do with the current payload: An unlikely error occured during deduplication transform"));
+            processorContext.forward(ProcessingResult.wrapRecordFailure(e, message,
+                "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform"));
         }
     }
 }
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java
index 14b3bfaa..1d08f202 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java
@@ -2,6 +2,8 @@

 import com.michelin.kstreamplify.error.ProcessingResult;
 import com.michelin.kstreamplify.utils.SerdesUtils;
+import java.time.Duration;
+import java.util.function.Function;
 import org.apache.avro.specific.SpecificRecord;
 import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.streams.StreamsBuilder;
@@ -12,9 +14,6 @@
 import org.apache.kafka.streams.state.TimestampedKeyValueStore;
 import org.apache.kafka.streams.state.WindowStore;

-import java.time.Duration;
-import java.util.function.Function;
-
 /**
  * Deduplication utility class. Only streams with String keys are supported.
  */
@@ -38,34 +37,48 @@ private DeduplicationUtils() {
      * @param streamsBuilder Stream builder instance for topology editing
      * @param initialStream  Stream containing the events that should be deduplicated
      * @param windowDuration Window of time on which we should watch out for duplicates
-     * @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this.
+     * @param <V> Generic Type of the Stream value.
+     *            Key type is not implemented because only String keys are supported.
      * @return KStream with a processingResult
      */
-    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {
-        return deduplicateKeys(streamsBuilder, initialStream, DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION, windowDuration);
+    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(
+        StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {

+        return deduplicateKeys(streamsBuilder, initialStream,
+            DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION,
+            windowDuration);
     }

     /**
-     * Deduplicate the input stream on the input key using a window store for the given period of time
+     * Deduplicate the input stream on the input key using a window store for the given period of time.
      *
      * @param streamsBuilder  Stream builder instance for topology editing
      * @param initialStream   Stream containing the events that should be deduplicated
      * @param storeName       Statestore name
      * @param repartitionName Repartition topic name
      * @param windowDuration  Window of time to keep in the window store
-     * @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this.
+     * @param <V> Generic Type of the Stream value.
+     *            Key type is not implemented because only String keys are supported.
      * @return Resulting de-duplicated Stream
      */
-    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(StreamsBuilder streamsBuilder, KStream<String, V> initialStream, String storeName, String repartitionName, Duration windowDuration) {
-
-        StoreBuilder<TimestampedKeyValueStore<String, String>> dedupStore = Stores.timestampedKeyValueStoreBuilder(
-            Stores.persistentTimestampedKeyValueStore(storeName), Serdes.String(), Serdes.String());
+    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeys(
+        StreamsBuilder streamsBuilder, KStream<String, V> initialStream, String storeName,
+        String repartitionName, Duration windowDuration) {
+
+        StoreBuilder<TimestampedKeyValueStore<String, String>> dedupStore =
+            Stores.timestampedKeyValueStoreBuilder(
+                Stores.persistentTimestampedKeyValueStore(storeName), Serdes.String(),
+                Serdes.String());
         streamsBuilder.addStateStore(dedupStore);

-        var repartitioned = initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()).withName(repartitionName));
-        return repartitioned.process(() -> new DedupKeyProcessor<>(storeName, windowDuration), storeName);
+        var repartitioned = initialStream.repartition(
+            Repartitioned.with(Serdes.String(), SerdesUtils.<V>getSerdesForValue())
+                .withName(repartitionName));
+        return repartitioned.process(() -> new DedupKeyProcessor<>(storeName, windowDuration),
+            storeName);
     }

     /**
@@ -75,12 +88,17 @@
      * @param streamsBuilder Stream builder instance for topology editing
      * @param initialStream  Stream containing the events that should be deduplicated
      * @param windowDuration Window of time on which we should watch out for duplicates
-     * @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this.
+     * @param <V> Generic Type of the Stream value.
+     *            Key type is not implemented because only String keys are supported.
      * @return KStream with a processingResult
      */
-    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {
-        return deduplicateKeyValues(streamsBuilder, initialStream, DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION, windowDuration);
+    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(
+        StreamsBuilder streamsBuilder, KStream<String, V> initialStream, Duration windowDuration) {

+        return deduplicateKeyValues(streamsBuilder, initialStream,
+            DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION,
+            windowDuration);
     }

     /**
@@ -92,38 +110,60 @@
      * @param streamsBuilder  Stream builder instance for topology editing
      * @param initialStream   Stream containing the events that should be deduplicated
      * @param storeName       Statestore name
      * @param repartitionName Repartition topic name
      * @param windowDuration  Window of time to keep in the window store
-     * @param <V> Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this.
+     * @param <V> Generic Type of the Stream value.
+     *            Key type is not implemented because only String keys are supported.
      * @return Resulting de-duplicated Stream
      */
-    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(StreamsBuilder streamsBuilder, KStream<String, V> initialStream, String storeName, String repartitionName, Duration windowDuration) {
-
-        StoreBuilder<WindowStore<String, V>> dedupWindowStore = Stores.windowStoreBuilder(
-            Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), Serdes.String(), SerdesUtils.getSerdesForValue());
+    public static <V extends SpecificRecord> KStream<String, ProcessingResult<V, V>> deduplicateKeyValues(
+        StreamsBuilder streamsBuilder, KStream<String, V> initialStream, String storeName,
+        String repartitionName, Duration windowDuration) {

+        StoreBuilder<WindowStore<String, V>> dedupWindowStore = Stores.windowStoreBuilder(
+            Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false),
+            Serdes.String(), SerdesUtils.<V>getSerdesForValue());
         streamsBuilder.addStateStore(dedupWindowStore);

-        var repartitioned = initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()).withName(repartitionName));
-        return repartitioned.process(() -> new DedupKeyValueProcessor<>(storeName, windowDuration), storeName);
+        var repartitioned = initialStream.repartition(
+            Repartitioned.with(Serdes.String(), SerdesUtils.<V>getSerdesForValue())
+                .withName(repartitionName));
+        return repartitioned.process(() -> new DedupKeyValueProcessor<>(storeName, windowDuration),
+            storeName);
     }
<p>Deduplicate the input stream by applying the deduplicationKeyExtractor function on each record to generate a unique signature for the record.</p>
+ * <p>Deduplicate the input stream by applying the deduplicationKeyExtractor function on each record + * to generate a unique signature for the record.</p>
* <p>Uses a window store for the given period of time.</p>
* <p>The input stream should have a String key.</p>
- * <p>⚠ This constructor should not be used if using the deduplicator multiple times in the same topology. Use {@link DeduplicationUtils#deduplicateWithPredicate(StreamsBuilder, KStream, String storeName, String repartitionName, Duration, Function)} in this scenario.</p>
+ * <p>⚠ This constructor should not be used if using the deduplicator multiple times in the same topology. + * Use {@link + * DeduplicationUtils#deduplicateWithPredicate(StreamsBuilder, KStream, String storeName, String repartitionName, Duration, Function)} + * in this scenario.</p>
* * @param streamsBuilder Stream builder instance for topology editing * @param initialStream Stream containing the events that should be deduplicated * @param windowDuration Window of time to keep in the window store - * @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. This key acts like a comparison vector. A recommended approach is to concatenate all necessary fields in String format to provide a unique identifier for comparison between records. - * @param Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this. + * @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. + * This key acts like a comparison vector. + * A recommended approach is to concatenate all necessary fields in String format + * to provide a unique identifier for comparison between records. + * @param Generic Type of the Stream value. + * Key type is not implemented because using anything other than a String as the key is retarded. + * You can quote me on this. * @return Resulting de-duplicated Stream */ - public static KStream> deduplicateWithPredicate(StreamsBuilder streamsBuilder, KStream initialStream, Duration windowDuration, Function deduplicationKeyExtractor) { - return deduplicateWithPredicate(streamsBuilder, initialStream, DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION, windowDuration, deduplicationKeyExtractor); + public static KStream> deduplicateWithPredicate( + StreamsBuilder streamsBuilder, KStream initialStream, Duration windowDuration, + Function deduplicationKeyExtractor) { + return deduplicateWithPredicate(streamsBuilder, initialStream, + DEFAULT_DEDUP_NAME + DEFAULT_WINDOWSTORE, DEFAULT_DEDUP_NAME + DEFAULT_REPARTITION, + windowDuration, deduplicationKeyExtractor); } /** - *
<p>Deduplicate the input stream by applying the deduplicationKeyExtractor function on each record to generate a unique signature for the record.</p>
+ * <p>Deduplicate the input stream by applying the deduplicationKeyExtractor function + * on each record to generate a unique signature for the record.</p>
* <p>Uses a window store for the given period of time.</p>
* <p>The input stream should have a String key.</p>
* @@ -132,16 +172,30 @@ public static KStream> * @param storeName Statestore name * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store - * @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. This key acts like a comparison vector. A recommended approach is to concatenate all necessary fields in String format to provide a unique identifier for comparison between records. - * @param Generic Type of the Stream value. Key type is not implemented because using anything other than a String as the key is retarded. You can quote me on this. + * @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. + * This key acts like a comparison vector. + * A recommended approach is to concatenate all necessary fields + * in String format to provide a unique identifier for comparison between records. + * @param Generic Type of the Stream value. + * Key type is not implemented because using anything other than a String as the key is retarded. + * You can quote me on this. * @return Resulting de-duplicated Stream */ - public static KStream> deduplicateWithPredicate(StreamsBuilder streamsBuilder, KStream initialStream, String storeName, String repartitionName, Duration windowDuration, Function deduplicationKeyExtractor) { - StoreBuilder> dedupStore = Stores.timestampedKeyValueStoreBuilder( - Stores.persistentTimestampedKeyValueStore(storeName), Serdes.String(), SerdesUtils.getSerdesForValue()); + public static KStream> deduplicateWithPredicate( + StreamsBuilder streamsBuilder, KStream initialStream, String storeName, + String repartitionName, Duration windowDuration, + Function deduplicationKeyExtractor) { + StoreBuilder> dedupStore = + Stores.timestampedKeyValueStoreBuilder( + Stores.persistentTimestampedKeyValueStore(storeName), Serdes.String(), + SerdesUtils.getSerdesForValue()); streamsBuilder.addStateStore(dedupStore); - var repartitioned = initialStream.repartition(Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()).withName(repartitionName)); - return repartitioned.process(() -> new DedupWithPredicateProcessor<>(storeName, windowDuration, deduplicationKeyExtractor), storeName); + var repartitioned = initialStream.repartition( + Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()) + .withName(repartitionName)); + return repartitioned.process( + () -> new DedupWithPredicateProcessor<>(storeName, windowDuration, + deduplicationKeyExtractor), storeName); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java index 3c1cc745..7ceffa03 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java @@ -2,25 +2,25 @@ import com.michelin.kstreamplify.avro.KafkaError; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.KafkaException; import org.apache.kafka.streams.errors.DeserializationExceptionHandler; import 
org.apache.kafka.streams.processor.ProcessorContext; -import org.apache.kafka.common.KafkaException; - -import java.util.Map; /** - * The class managing deserialization exceptions + * The class managing deserialization exceptions. */ @Slf4j -public class DlqDeserializationExceptionHandler extends DlqExceptionHandler implements DeserializationExceptionHandler { +public class DlqDeserializationExceptionHandler extends DlqExceptionHandler + implements DeserializationExceptionHandler { private static final Object GUARD = new Object(); /** - * Manage deserialization exceptions + * Manage deserialization exceptions. * * @param processorContext the processor context * @param consumerRecord the record to deserialize @@ -28,34 +28,49 @@ public class DlqDeserializationExceptionHandler extends DlqExceptionHandler impl * @return FAIL or CONTINUE */ @Override - public DeserializationHandlerResponse handle(ProcessorContext processorContext, ConsumerRecord consumerRecord, Exception consumptionException) { + public DeserializationHandlerResponse handle(ProcessorContext processorContext, + ConsumerRecord consumerRecord, + Exception consumptionException) { if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) { - log.warn("Failed to route deserialization error to the designated DLQ (Dead Letter Queue) topic. Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); + log.warn( + "Failed to route deserialization error to the designated DLQ (Dead Letter Queue) topic. " + + + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); return DeserializationHandlerResponse.FAIL; } try { var builder = KafkaError.newBuilder(); - enrichWithException(builder, consumptionException, consumerRecord.key(), consumerRecord.value()) - .setContextMessage("An exception occurred during the stream internal deserialization") - .setOffset(consumerRecord.offset()) - .setPartition(consumerRecord.partition()) - .setTopic(consumerRecord.topic()); + enrichWithException(builder, consumptionException, consumerRecord.key(), + consumerRecord.value()) + .setContextMessage( + "An exception occurred during the stream internal deserialization") + .setOffset(consumerRecord.offset()) + .setPartition(consumerRecord.partition()) + .setTopic(consumerRecord.topic()); boolean isCausedByKafka = consumptionException.getCause() instanceof KafkaException; - //If the cause of this exception is a KafkaException and if getCause == sourceException (see Throwable.getCause - including SerializationException) - //use to handle poison pill => sent message into dlq and continue our life. - if(isCausedByKafka || consumptionException.getCause() == null) { - producer.send(new ProducerRecord<>(KafkaStreamsExecutionContext.getDlqTopicName(), consumerRecord.key(), builder.build())).get(); + // If the cause of this exception is a KafkaException and if getCause == sourceException + // (see Throwable.getCause - including SerializationException) + // use to handle poison pill => sent message into dlq and continue our life. 
+ if (isCausedByKafka || consumptionException.getCause() == null) { + producer.send(new ProducerRecord<>(KafkaStreamsExecutionContext.getDlqTopicName(), + consumerRecord.key(), builder.build())).get(); return DeserializationHandlerResponse.CONTINUE; } } catch (InterruptedException ie) { - log.error("Interruption while sending the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}", consumptionException, - consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), ie); + log.error( + "Interruption while sending the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}", + consumptionException, + consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(), + KafkaStreamsExecutionContext.getDlqTopicName(), ie); Thread.currentThread().interrupt(); } catch (Exception e) { - log.error("Cannot send the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}", consumptionException, - consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), e); + log.error( + "Cannot send the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}", + consumptionException, + consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(), + KafkaStreamsExecutionContext.getDlqTopicName(), e); } // here we only have exception like UnknownHostException for example or TimeoutException ... diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java index 7d635d25..84e723b8 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqExceptionHandler.java @@ -2,51 +2,56 @@ import com.michelin.kstreamplify.avro.KafkaError; import io.confluent.kafka.serializers.KafkaAvroSerializer; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.errors.RecordTooLargeException; -import org.apache.kafka.common.serialization.ByteArraySerializer; - import java.io.PrintWriter; import java.io.StringWriter; import java.nio.ByteBuffer; import java.util.Map; import java.util.Properties; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.errors.RecordTooLargeException; +import org.apache.kafka.common.serialization.ByteArraySerializer; /** - * The class to manage DLQ exception + * The class to manage DLQ exception. */ @Slf4j public abstract class DlqExceptionHandler { /** - * The DLQ producer + * The DLQ producer. */ protected static KafkaProducer producer; /** - * Create a producer + * Create a producer. 
+ * * @param clientId The producer client id - * @param configs The producer configs + * @param configs The producer configs */ protected static void instantiateProducer(String clientId, Map configs) { Properties properties = new Properties(); properties.putAll(configs); - properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); - properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName()); + properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + ByteArraySerializer.class.getName()); + properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + KafkaAvroSerializer.class.getName()); properties.setProperty(ProducerConfig.CLIENT_ID_CONFIG, clientId); producer = new KafkaProducer<>(properties); } /** - * enrich with exception - * @param builder the error builder + * Enrich with exception. + * + * @param builder the error builder * @param exception the exception to add - * @param key the record key - * @param value the record value + * @param key the record key + * @param value the record value * @return the error enriched by the exception */ - protected KafkaError.Builder enrichWithException(KafkaError.Builder builder, Exception exception, byte[] key, byte[] value) { + protected KafkaError.Builder enrichWithException(KafkaError.Builder builder, + Exception exception, byte[] key, + byte[] value) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); exception.printStackTrace(pw); @@ -54,9 +59,11 @@ protected KafkaError.Builder enrichWithException(KafkaError.Builder builder, Exc boolean tooLarge = exception instanceof RecordTooLargeException; return builder - .setCause(exception.getCause() != null ? exception.getCause().getMessage() : "Unknown cause") - .setValue(tooLarge ? "The record is too large to be set as value (" + value.length + " bytes). The key will be used instead" : null) - .setStack(sw.toString()) - .setByteValue(tooLarge ? ByteBuffer.wrap(key) : ByteBuffer.wrap(value)); + .setCause( + exception.getCause() != null ? exception.getCause().getMessage() : "Unknown cause") + .setValue(tooLarge ? "The record is too large to be set as value (" + value.length + + " bytes). The key will be used instead" : null) + .setStack(sw.toString()) + .setByteValue(tooLarge ? ByteBuffer.wrap(key) : ByteBuffer.wrap(value)); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java index ad685047..b53484f5 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java @@ -2,31 +2,36 @@ import com.michelin.kstreamplify.avro.KafkaError; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.errors.RetriableException; import org.apache.kafka.streams.errors.ProductionExceptionHandler; -import java.util.Map; - /** - * The class managing DLQ production exceptions + * The class managing DLQ production exceptions. 
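Review note: both DLQ handlers are activated through the standard Kafka Streams exception-handler settings; a hedged wiring sketch (the two config keys are standard Kafka Streams settings, the DLQ topic name is invented):

```java
import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;

Properties props = new Properties();
props.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
        DlqDeserializationExceptionHandler.class);
props.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG,
        DlqProductionExceptionHandler.class);

// Both handlers refuse to run (and return FAIL) without a DLQ topic name in the context.
KafkaStreamsExecutionContext.setDlqTopicName("my-app-dlq");
```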
*/ @Slf4j -public class DlqProductionExceptionHandler extends DlqExceptionHandler implements ProductionExceptionHandler { +public class DlqProductionExceptionHandler extends DlqExceptionHandler + implements ProductionExceptionHandler { private static final Object GUARD = new Object(); /** - * Manage production exceptions - * @param producerRecord the record to produce + * Manage production exceptions. + * + * @param producerRecord the record to produce * @param productionException the exception on producing * @return FAIL or CONTINUE */ @Override - public ProductionExceptionHandlerResponse handle(ProducerRecord producerRecord, Exception productionException) { + public ProductionExceptionHandlerResponse handle(ProducerRecord producerRecord, + Exception productionException) { if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) { - log.warn("Failed to route production error to the designated DLQ (Dead Letter Queue) topic. Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); + log.warn( + "Failed to route production error to the designated DLQ (Dead Letter Queue) topic. " + + + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); return ProductionExceptionHandlerResponse.FAIL; } @@ -35,20 +40,30 @@ public ProductionExceptionHandlerResponse handle(ProducerRecord if (!retryable) { try { var builder = KafkaError.newBuilder(); - enrichWithException(builder, productionException, producerRecord.key(), producerRecord.value()) - .setContextMessage("An exception occurred during the stream internal production") - .setOffset(-1) - .setPartition(producerRecord.partition() == null ? -1 : producerRecord.partition()) - .setTopic(producerRecord.topic()); + enrichWithException(builder, productionException, producerRecord.key(), + producerRecord.value()) + .setContextMessage( + "An exception occurred during the stream internal production") + .setOffset(-1) + .setPartition( + producerRecord.partition() == null ? 
-1 : producerRecord.partition()) + .setTopic(producerRecord.topic()); - producer.send(new ProducerRecord<>(KafkaStreamsExecutionContext.getDlqTopicName(), producerRecord.key(), builder.build())).get(); + producer.send(new ProducerRecord<>(KafkaStreamsExecutionContext.getDlqTopicName(), + producerRecord.key(), builder.build())).get(); } catch (InterruptedException ie) { - log.error("Interruption while sending the production exception {} for key {}, value {} and topic {} to DLQ topic {}", productionException, - producerRecord.key(), producerRecord.value(), producerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), ie); + log.error( + "Interruption while sending the production exception {} for key {}, value {} and topic {} to DLQ topic {}", + productionException, + producerRecord.key(), producerRecord.value(), producerRecord.topic(), + KafkaStreamsExecutionContext.getDlqTopicName(), ie); Thread.currentThread().interrupt(); } catch (Exception e) { - log.error("Cannot send the production exception {} for key {}, value {} and topic {} to DLQ topic {}", productionException, - producerRecord.key(), producerRecord.value(), producerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), e); + log.error( + "Cannot send the production exception {} for key {}, value {} and topic {} to DLQ topic {}", + productionException, + producerRecord.key(), producerRecord.value(), producerRecord.topic(), + KafkaStreamsExecutionContext.getDlqTopicName(), e); return ProductionExceptionHandlerResponse.CONTINUE; } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java index 70aeca41..73e60f5f 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java @@ -1,23 +1,25 @@ package com.michelin.kstreamplify.error; import com.michelin.kstreamplify.avro.KafkaError; +import java.io.PrintWriter; +import java.io.StringWriter; import org.apache.kafka.streams.processor.api.FixedKeyProcessor; import org.apache.kafka.streams.processor.api.FixedKeyProcessorContext; import org.apache.kafka.streams.processor.api.FixedKeyRecord; import org.apache.kafka.streams.processor.api.RecordMetadata; -import java.io.PrintWriter; -import java.io.StringWriter; - /** - * Generic error processor + * Generic error processor. + * * @param The type of the failed record */ -public class GenericErrorProcessor implements FixedKeyProcessor, KafkaError> { +public class GenericErrorProcessor + implements FixedKeyProcessor, KafkaError> { private FixedKeyProcessorContext context; /** - * init context + * Init context. + * * @param context the context to init */ @Override @@ -26,7 +28,8 @@ public void init(FixedKeyProcessorContext context) { } /** - * process the error + * Process the error. + * * @param fixedKeyRecord the record to process an error */ @Override @@ -38,14 +41,16 @@ public void process(FixedKeyRecord> fixedKeyRecord) { RecordMetadata recordMetadata = context.recordMetadata().orElse(null); KafkaError error = KafkaError.newBuilder() - .setCause(fixedKeyRecord.value().getException().getMessage()) - .setContextMessage(fixedKeyRecord.value().getContextMessage()) - .setOffset(recordMetadata != null ? recordMetadata.offset() : -1) - .setPartition(recordMetadata != null ? 
recordMetadata.partition() : -1) - .setStack(sw.toString()) - .setTopic(recordMetadata != null && recordMetadata.topic() != null ? recordMetadata.topic() : "Outside topic context") - .setValue(fixedKeyRecord.value().getKafkaRecord()) - .build(); + .setCause(fixedKeyRecord.value().getException().getMessage()) + .setContextMessage(fixedKeyRecord.value().getContextMessage()) + .setOffset(recordMetadata != null ? recordMetadata.offset() : -1) + .setPartition(recordMetadata != null ? recordMetadata.partition() : -1) + .setStack(sw.toString()) + .setTopic( + recordMetadata != null && recordMetadata.topic() != null ? recordMetadata.topic() : + "Outside topic context") + .setValue(fixedKeyRecord.value().getKafkaRecord()) + .build(); context.forward(fixedKeyRecord.withValue(error)); } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java index c4992ecd..0547388c 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingError.java @@ -5,31 +5,33 @@ import org.apache.avro.generic.GenericRecord; /** - * The processing error class + * The processing error class. + * * @param The type of the failed record */ @Getter public class ProcessingError { /** - * The exception that occurred + * The exception that occurred. */ private final Exception exception; /** - * The failed Kafka record + * The failed Kafka record. */ private final String kafkaRecord; /** - * A context message defined when the error is caught + * A context message defined when the error is caught. */ private final String contextMessage; /** - * Constructor - * @param exception The exception + * Constructor. + * + * @param exception The exception * @param contextMessage The context message - * @param kafkaRecord The failed Kafka record + * @param kafkaRecord The failed Kafka record */ public ProcessingError(Exception exception, String contextMessage, V kafkaRecord) { this.exception = exception; @@ -43,8 +45,9 @@ public ProcessingError(Exception exception, String contextMessage, V kafkaRecord } /** - * Constructor - * @param exception The exception + * Constructor. + * + * @param exception The exception * @param kafkaRecord The failed Kafka record */ public ProcessingError(Exception exception, V kafkaRecord) { diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java index 9454c353..607ccbb0 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java @@ -4,7 +4,7 @@ import org.apache.kafka.streams.processor.api.Record; /** - * The processing result class + * The processing result class. * * @param The type of the successful record * @param The type of the failed record @@ -12,17 +12,17 @@ @Getter public class ProcessingResult { /** - * The successful record + * The successful record. */ private V value; /** - * The failed record wrapped in a processing error + * The failed record wrapped in a processing error. */ private ProcessingError error; /** - * Private constructor that sets the success value + * Private constructor that sets the success value. 
* * @param value The success value */ @@ -31,7 +31,7 @@ private ProcessingResult(V value) { } /** - * Private constructor that sets the error value + * Private constructor that sets the error value. * * @param error the ProcessingError containing the */ @@ -40,7 +40,7 @@ private ProcessingResult(ProcessingError error) { } /** - * Create a successful processing result + * Create a successful processing result. * * @param value The successful record value * @param The type of the successful record @@ -54,16 +54,19 @@ public static ProcessingResult success(V value) { /** *
<p>Wraps a record's value with ProcessingResult.success(V value).</p>
- * <p>The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records.</p>
+ * <p>The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) + * for automatic DLQ redirection on failed records.</p>
* - * @param record The resulting successful Record from the processor that needs to be wrapped in a ProcessingResult - * @param The type of the record key - * @param The type of the ProcessingResult successful value - * @param The type of the ProcessingResult error value + * @param message The resulting successful Record from the processor that needs to be wrapped in a ProcessingResult + * @param The type of the record key + * @param The type of the ProcessingResult successful value + * @param The type of the ProcessingResult error value * @return The initial Record, with value wrapped in a ProcessingResult */ - public static Record> wrapRecordSuccess(Record record) { - return new Record<>(record.key(), ProcessingResult.success(record.value()), record.timestamp()); + public static Record> wrapRecordSuccess( + Record message) { + return new Record<>(message.key(), ProcessingResult.success(message.value()), + message.timestamp()); } /** @@ -71,7 +74,8 @@ public static Record> wrapRecordSuccess(Re * Wraps a key, value and timestamp in a Record with ProcessingResult#success(V value) as value. *
</p> * <p> - * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records. + * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) + * for automatic DLQ redirection on failed records. * </p>
* * @param key The key to put in the resulting record @@ -82,13 +86,14 @@ public static Record> wrapRecordSuccess(Re * @param The type of the ProcessingResult error value * @return A Record with value wrapped in a {@link ProcessingResult} */ - public static Record> wrapRecordSuccess(K key, V value, long timestamp) { + public static Record> wrapRecordSuccess(K key, V value, + long timestamp) { return new Record<>(key, ProcessingResult.success(value), timestamp); } /** *
<p> - * Create a failed processing result + * Create a failed processing result. * </p> * <p> * If you are using this in a Processor, refer to {@link ProcessingResult#wrapRecordFailure(Exception, Record)} for easier syntax. @@ -109,7 +114,8 @@ public static ProcessingResult fail(Exception e, V2 failedRecordV * Create a failed processing result. * </p> * <p> - * If you are using this in a Processor, refer to {@link ProcessingResult#wrapRecordFailure(Exception, Record, String)} for easier syntax. + * If you are using this in a Processor, refer to {@link ProcessingResult#wrapRecordFailure(Exception, Record, String)} + * for easier syntax. * </p>
* * @param e The exception @@ -119,7 +125,8 @@ public static ProcessingResult fail(Exception e, V2 failedRecordV * @param The type of the failed record * @return A processing result containing the failed record */ - public static ProcessingResult fail(Exception e, V2 failedRecordValue, String contextMessage) { + public static ProcessingResult fail(Exception e, V2 failedRecordValue, + String contextMessage) { return new ProcessingResult<>(new ProcessingError<>(e, contextMessage, failedRecordValue)); } @@ -128,7 +135,8 @@ public static ProcessingResult fail(Exception e, V2 failedRecordV * Wraps a record's value with {@link ProcessingResult#fail(Exception, Object)} )}. *
</p> * <p> - * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records. + * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic + * DLQ redirection on failed records. * </p>
* * @param e The initial exception @@ -138,8 +146,10 @@ public static ProcessingResult fail(Exception e, V2 failedRecordV * @param The type of the ProcessingResult error value * @return The initial Record, with value wrapped in a {@link ProcessingError} and {@link ProcessingResult} */ - public static Record> wrapRecordFailure(Exception e, Record failedRecord) { - return new Record<>(failedRecord.key(), ProcessingResult.fail(e, failedRecord.value()), failedRecord.timestamp()); + public static Record> wrapRecordFailure(Exception e, + Record failedRecord) { + return new Record<>(failedRecord.key(), ProcessingResult.fail(e, failedRecord.value()), + failedRecord.timestamp()); } /** @@ -147,7 +157,8 @@ public static Record> wrapRecordFailure(Ex * Wraps a record's value with {@link ProcessingResult#fail(Exception, Object, String)}. *
</p> * <p> - * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records. + * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) + * for automatic DLQ redirection on failed records. * </p>
* * @param e The initial exception @@ -158,13 +169,19 @@ public static Record> wrapRecordFailure(Ex * @param The type of the ProcessingResult error value * @return The initial Record, with value wrapped in a {@link ProcessingError} and {@link ProcessingResult} */ - public static Record> wrapRecordFailure(Exception e, Record failedRecord, String contextMessage) { - return new Record<>(failedRecord.key(), ProcessingResult.fail(e, failedRecord.value(), contextMessage), failedRecord.timestamp()); + public static Record> wrapRecordFailure(Exception e, + Record failedRecord, + String contextMessage) { + return new Record<>(failedRecord.key(), + ProcessingResult.fail(e, failedRecord.value(), contextMessage), + failedRecord.timestamp()); } /** - *
<p>Wraps a key, value and timestamp in a Record with {@link ProcessingResult#fail(Exception, Object, String)} as value.</p>
- * <p>The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records.</p>
+ * <p>Wraps a key, value and timestamp in a Record with {@link ProcessingResult#fail(Exception, Object, String)} + * as value.</p>
+ * <p>The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic + * DLQ redirection on failed records.</p>
* * @param e The initial exception * @param key The key to put in the resulting record @@ -175,7 +192,10 @@ public static Record> wrapRecordFailure(Ex * @param The type of the ProcessingResult error value * @return A Record with value wrapped in a {@link ProcessingError} and {@link ProcessingResult} */ - public static Record> wrapRecordFailure(Exception e, K key, V2 failedValue, long timestamp) { + public static Record> wrapRecordFailure(Exception e, + K key, + V2 failedValue, + long timestamp) { return new Record<>(key, ProcessingResult.fail(e, failedValue), timestamp); } @@ -184,7 +204,8 @@ public static Record> wrapRecordFailure(Ex * Wraps a key, value and timestamp in a Record with {@link ProcessingResult#fail(Exception, Object, String)} as value. *
</p> * <p> - * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic DLQ redirection on failed records. + * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic + * DLQ redirection on failed records. * </p>
* * @param e The initial exception @@ -197,12 +218,16 @@ public static Record> wrapRecordFailure(Ex * @param The type of the ProcessingResult error value * @return A Record with value wrapped in a {@link ProcessingError} and {@link ProcessingResult} */ - public static Record> wrapRecordFailure(Exception e, K key, V2 failedValue, long timestamp, String contextMessage) { + public static Record> wrapRecordFailure(Exception e, + K key, + V2 failedValue, + long timestamp, + String contextMessage) { return new Record<>(key, ProcessingResult.fail(e, failedValue, contextMessage), timestamp); } /** - * Is the processing result valid ? + * Is the processing result valid. * Is it valid either if it contains a successful value or an error * * @return true if valid, false otherwise diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java index b3f12ea0..e8e6ee06 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java @@ -2,6 +2,7 @@ import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; import com.michelin.kstreamplify.utils.SerdesUtils; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.common.serialization.Serdes; @@ -11,79 +12,86 @@ import org.apache.kafka.streams.kstream.Named; import org.apache.kafka.streams.kstream.Produced; -import java.util.Map; - /** - * The topology error handler class + * The topology error handler class. */ @Slf4j public class TopologyErrorHandler { private static final String BRANCHING_NAME_NOMINAL = "branch-nominal"; - private TopologyErrorHandler() { } + private TopologyErrorHandler() { + } /** - * Catch the errors from the given stream + * Catch the errors from the given stream. + * * @param stream The stream of processing result that may contain processing errors + * @param The key type + * @param The type of the successful record + * @param The type of the failed record * @return A stream filtered from all processing errors - * @param The key type - * @param The type of the successful record - * @param The type of the failed record */ - public static KStream catchErrors(KStream> stream) { + public static KStream catchErrors(KStream> stream) { return catchErrors(stream, false); } /** - * Catch the errors from the given stream - * @param stream The stream of processing result that may contain processing errors + * Catch the errors from the given stream. 
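Since catchErrors() is the consuming side of the ProcessingResult contract documented above, a sketch of the producing side may help reviewers. Everything here (class name, parsing logic, context message) is illustrative, not part of the patch:

```java
import org.apache.kafka.streams.processor.api.ContextualProcessor;
import org.apache.kafka.streams.processor.api.Record;

// Illustrative processor: parses String payloads into Integers, wrapping each outcome
// so that TopologyErrorHandler.catchErrors() can branch successes from failures.
public class ParseIntProcessor
        extends ContextualProcessor<String, String, String, ProcessingResult<Integer, String>> {
    @Override
    public void process(Record<String, String> message) {
        try {
            // Success path: forward the parsed value wrapped via wrapRecordSuccess().
            context().forward(ProcessingResult.wrapRecordSuccess(
                    message.withValue(Integer.parseInt(message.value()))));
        } catch (NumberFormatException e) {
            // Failure path: catchErrors() will route this record to the DLQ topic.
            context().forward(ProcessingResult.wrapRecordFailure(e, message,
                    "Payload is not a valid integer"));
        }
    }
}
```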
+ * + * @param stream The stream of processing result that may contain processing errors * @param allowTombstone Allow sending tombstone in DLQ topic or to be returned + * @param The key type + * @param The type of the successful record + * @param The type of the failed record * @return A stream filtered from all processing errors - * @param The key type - * @param The type of the successful record - * @param The type of the failed record */ - public static KStream catchErrors(KStream> stream, boolean allowTombstone) { - Map>> branches; + public static KStream catchErrors(KStream> stream, + boolean allowTombstone) { + Map>> branches; String branchNamePrefix = stream.toString().split("@")[1]; if (!allowTombstone) { branches = stream - .filter((key, value) -> value != null) - .filterNot((key, value) -> value.getValue() == null && value.getError() == null) - .split(Named.as(branchNamePrefix)) - .branch((key, value) -> value.isValid(), Branched.as(BRANCHING_NAME_NOMINAL)) - .defaultBranch(Branched.withConsumer(ks -> TopologyErrorHandler.handleErrors(ks - .mapValues(ProcessingResult::getError)))); + .filter((key, value) -> value != null) + .filterNot((key, value) -> value.getValue() == null && value.getError() == null) + .split(Named.as(branchNamePrefix)) + .branch((key, value) -> value.isValid(), Branched.as(BRANCHING_NAME_NOMINAL)) + .defaultBranch(Branched.withConsumer(ks -> TopologyErrorHandler.handleErrors(ks + .mapValues(ProcessingResult::getError)))); } else { branches = stream - .filter((key, value) -> value != null) - .split(Named.as(branchNamePrefix)) - .branch((key, value) -> value.getError() == null, Branched.as(BRANCHING_NAME_NOMINAL)) - .defaultBranch(Branched.withConsumer(ks -> TopologyErrorHandler.handleErrors(ks - .mapValues(ProcessingResult::getError)))); + .filter((key, value) -> value != null) + .split(Named.as(branchNamePrefix)) + .branch((key, value) -> value.getError() == null, + Branched.as(BRANCHING_NAME_NOMINAL)) + .defaultBranch(Branched.withConsumer(ks -> TopologyErrorHandler.handleErrors(ks + .mapValues(ProcessingResult::getError)))); } return branches - .get(branchNamePrefix + BRANCHING_NAME_NOMINAL) - .mapValues(ProcessingResult::getValue); + .get(branchNamePrefix + BRANCHING_NAME_NOMINAL) + .mapValues(ProcessingResult::getValue); } /** - * Process a stream of processing errors and route it to the configured DLQ topic + * Process a stream of processing errors and route it to the configured DLQ topic. + * * @param errorsStream The stream of processing errors - * @param The key type - * @param The value type + * @param The key type + * @param The value type */ private static void handleErrors(KStream> errorsStream) { if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) { - log.warn("Failed to route topology error to the designated DLQ (Dead Letter Queue) topic. Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); + log.warn( + "Failed to route topology error to the designated DLQ (Dead Letter Queue) topic. " + + + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); return; } errorsStream - .map((key, value) -> new KeyValue<>(key == null ? "null" : key.toString(), value)) - .processValues(GenericErrorProcessor::new) - .to(KafkaStreamsExecutionContext.getDlqTopicName(), Produced.with(Serdes.String(), - SerdesUtils.getSerdesForValue())); + .map((key, value) -> new KeyValue<>(key == null ? 
"null" : key.toString(), value)) + .processValues(GenericErrorProcessor::new) + .to(KafkaStreamsExecutionContext.getDlqTopicName(), Produced.with(Serdes.String(), + SerdesUtils.getSerdesForValue())); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java index ffa821ee..e81ec4e4 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java @@ -1,9 +1,14 @@ package com.michelin.kstreamplify.initializer; +import static com.michelin.kstreamplify.constants.InitializerConstants.SERVER_PORT_PROPERTY; + import com.michelin.kstreamplify.constants.InitializerConstants; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; import com.michelin.kstreamplify.properties.PropertiesUtils; import com.michelin.kstreamplify.rest.DefaultProbeController; +import java.util.HashMap; +import java.util.Properties; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; @@ -14,70 +19,65 @@ import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; import org.apache.kafka.streams.state.HostInfo; -import java.util.HashMap; -import java.util.Properties; -import java.util.stream.Collectors; - -import static com.michelin.kstreamplify.constants.InitializerConstants.SERVER_PORT_PROPERTY; - /** - * The Kafka Streams initializer class + * The Kafka Streams initializer class. */ @Slf4j @Getter public class KafkaStreamsInitializer { /** - * The Kafka Streams instance + * The Kafka Streams instance. */ private KafkaStreams kafkaStreams; /** - * The Kafka Streams starter + * The Kafka Streams starter. */ private KafkaStreamsStarter kafkaStreamsStarter; /** - * The topology + * The topology. */ private Topology topology; /** - * The Kafka properties + * The Kafka properties. */ protected Properties kafkaProperties; /** - * The application properties + * The application properties. */ protected Properties properties; /** - * The DLQ topic + * The DLQ topic. */ private String dlq; /** - * The host info + * The host info. */ private HostInfo hostInfo; /** - * The server port + * The server port. */ protected int serverPort; /** - * Init the Kafka Streams - * @param kStreamsStarter The Kafka Streams starter + * Init the Kafka Streams. + * + * @param streamsStarter The Kafka Streams starter */ - public void init(KafkaStreamsStarter kStreamsStarter) { - kafkaStreamsStarter = kStreamsStarter; - + public void init(KafkaStreamsStarter streamsStarter) { + kafkaStreamsStarter = streamsStarter; + initProperties(); - + initSerdesConfig(); - initDLQ(); + initDlq(); initHostInfo(); @@ -103,61 +103,65 @@ public void init(KafkaStreamsStarter kStreamsStarter) { } /** - * Init the Kafka Streams execution context + * Init the Kafka Streams execution context. 
*/ private void initSerdesConfig() { KafkaStreamsExecutionContext.setSerdesConfig( - kafkaProperties.entrySet().stream().collect( - Collectors.toMap( - e -> String.valueOf(e.getKey()), - e -> String.valueOf(e.getValue()), - (prev, next) -> next, HashMap::new - )) + kafkaProperties.entrySet().stream().collect( + Collectors.toMap( + e -> String.valueOf(e.getKey()), + e -> String.valueOf(e.getValue()), + (prev, next) -> next, HashMap::new + )) ); } - + /** - * Init the Kafka Streams default DLQ + * Init the Kafka Streams default DLQ. */ - private void initDLQ() { + private void initDlq() { dlq = kafkaStreamsStarter.dlqTopic(); KafkaStreamsExecutionContext.setDlqTopicName(dlq); } /** - * Init the host information + * Init the host information. */ private void initHostInfo() { - String ipEnvVarName = (String) kafkaProperties.get(InitializerConstants.IP_SYSTEM_VARIABLE_PROPERTY); + String ipEnvVarName = + (String) kafkaProperties.get(InitializerConstants.IP_SYSTEM_VARIABLE_PROPERTY); if (StringUtils.isBlank(ipEnvVarName)) { ipEnvVarName = InitializerConstants.IP_SYSTEM_VARIABLE_DEFAULT; } - String myIP = System.getenv(ipEnvVarName); - String host = StringUtils.isNotBlank(myIP) ? myIP : InitializerConstants.LOCALHOST; + String myIp = System.getenv(ipEnvVarName); + String host = StringUtils.isNotBlank(myIp) ? myIp : InitializerConstants.LOCALHOST; hostInfo = new HostInfo(host, serverPort); - log.info("The Kafka Streams \"{}\" is running on {}:{}", KafkaStreamsExecutionContext.getProperties() - .getProperty(StreamsConfig.APPLICATION_ID_CONFIG), hostInfo.host(), hostInfo.port()); + log.info("The Kafka Streams \"{}\" is running on {}:{}", + KafkaStreamsExecutionContext.getProperties() + .getProperty(StreamsConfig.APPLICATION_ID_CONFIG), hostInfo.host(), + hostInfo.port()); KafkaStreamsExecutionContext.getProperties().put(StreamsConfig.APPLICATION_SERVER_CONFIG, - String.format("%s:%s", hostInfo.host(), hostInfo.port())); + String.format("%s:%s", hostInfo.host(), hostInfo.port())); } /** - * Init the HTTP server + * Init the HTTP server. */ protected void initHttpServer() { new DefaultProbeController(this); } /** - * Init all properties + * Init all properties. */ protected void initProperties() { properties = PropertiesUtils.loadProperties(); - serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY);; + serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY); + ; kafkaProperties = PropertiesUtils.loadKafkaProperties(properties); @@ -165,25 +169,28 @@ protected void initProperties() { } /** - * Default uncaught exception handler + * Default uncaught exception handler. + * * @param exception The exception * @return The execution */ - protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException(Throwable exception) { + protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException( + Throwable exception) { log.error("A not covered exception occurred in {} Kafka Streams. Shutting down...", - kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG), exception); + kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG), exception); return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT; } /** - * Default state change listener + * Default state change listener. 
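For reviewers who want to exercise the init flow above (initProperties, initSerdesConfig, the renamed initDlq, initHostInfo) end to end, a minimal bootstrap sketch; the starter class and topic names are invented:

```java
import org.apache.kafka.streams.StreamsBuilder;

// Minimal starter sketch; "input-topic", "output-topic" and "my-app-dlq" are placeholders.
public class MyStarter extends KafkaStreamsStarter {
    @Override
    public void topology(StreamsBuilder streamsBuilder) {
        streamsBuilder.stream("input-topic").to("output-topic");
    }

    @Override
    public String dlqTopic() {
        return "my-app-dlq";
    }
}

// The initializer loads properties, configures serdes, DLQ and host info, then starts streams.
new KafkaStreamsInitializer().init(new MyStarter());
```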
+ * * @param newState The new state * @param oldState The old state */ protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State oldState) { if (newState.equals(KafkaStreams.State.ERROR)) { log.error("The {} Kafka Streams is in error state...", - kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG)); + kafkaProperties.get(StreamsConfig.APPLICATION_ID_CONFIG)); System.exit(3); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java index c16bbcd6..9648220d 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java @@ -4,17 +4,18 @@ import org.apache.kafka.streams.StreamsBuilder; /** - * The Kafka Streams starter interface + * The Kafka Streams starter interface. */ public abstract class KafkaStreamsStarter { /** - * Define the topology of the Kafka Streams + * Define the topology of the Kafka Streams. + * * @param streamsBuilder The streams builder */ public abstract void topology(StreamsBuilder streamsBuilder); /** - *
<p>Define the dead letter queue (DLQ) topic</p>
+ * <p>Define the dead letter queue (DLQ) topic</p>.
* <p>If you don't want to use the DLQ topic, you can return {@link org.apache.commons.lang3.StringUtils#EMPTY}</p>
* * @return The dead letter queue (DLQ) topic @@ -22,8 +23,10 @@ public abstract class KafkaStreamsStarter { public abstract String dlqTopic(); /** - * Define runnable code after the Kafka Streams startup + * Define runnable code after the Kafka Streams startup. + * * @param kafkaStreams The Kafka Streams instance */ - public void onStart(KafkaStreams kafkaStreams) { } + public void onStart(KafkaStreams kafkaStreams) { + } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java index f92d3e9f..9bd57a1b 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java @@ -4,13 +4,13 @@ import lombok.Getter; /** - * The dead letter queue (DLQ) topic + * The dead letter queue (DLQ) topic. */ @Getter @Builder public class DlqTopic { /** - * The DLQ topic name + * The DLQ topic name. */ private String name; } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java index f4a00688..eb61978f 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java @@ -6,7 +6,8 @@ import lombok.Setter; /** - * Rest service response + * Rest service response. + * * @param The body type */ @Getter @@ -15,12 +16,12 @@ @Builder public class RestServiceResponse { /** - * The HTTP status + * The HTTP status. */ private int status; /** - * The request body + * The request body. */ private T body; } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyExposeJsonModel.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyExposeJsonModel.java deleted file mode 100644 index 3bdd5788..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyExposeJsonModel.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.michelin.kstreamplify.model; - -import lombok.Getter; -import lombok.Setter; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - * The topology expose class - */ -@Getter -@Setter -public class TopologyExposeJsonModel { - /** - * The input topics - */ - private Set inTopicNameList = new HashSet<>(); - - /** - * The output topics - */ - private Set outTopicNameList = new HashSet<>(); - - /** - * The state stores - */ - private Set streamStateStore = new HashSet<>(); - - /** - * The internal stream content - */ - private List internalStreamContent = new ArrayList<>(); - - /** - * The stream name - */ - private String streamName; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObject.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObject.java deleted file mode 100644 index 1a8f1ee4..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObject.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.michelin.kstreamplify.model; - -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -/** - * The topology class - */ -@Getter -@Setter -@NoArgsConstructor -@AllArgsConstructor -public class TopologyObject { - /** - * The topology type - */ 
- private TopologyObjectType type; - - /** - * The topology name - */ - private String objectName; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObjectType.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObjectType.java deleted file mode 100644 index 532247f7..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyObjectType.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.michelin.kstreamplify.model; - -/** - * The topology type enum - */ -public enum TopologyObjectType { - /** - * The input topic type - */ - TOPIC_IN, - - /** - * The output topic type - */ - TOPIC_OUT, - - /** - * The stream type - */ - STREAM; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyPart.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyPart.java deleted file mode 100644 index 37ff73fd..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/TopologyPart.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.michelin.kstreamplify.model; - -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -import java.util.ArrayList; -import java.util.List; - -/** - * The topology part class - */ -@Getter -@Setter -@NoArgsConstructor -public class TopologyPart { - /** - * The input element - */ - TopologyObject inputElement = new TopologyObject(); - - /** - * The sub element name - */ - TopologyObject subElementName = new TopologyObject(); - - /** - * The output element - */ - TopologyObject outputElement = new TopologyObject(); - - /** - * The transformation - */ - List detailedTransformation = new ArrayList<>(); -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java index 98e470a2..879e19e9 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java @@ -1,30 +1,33 @@ package com.michelin.kstreamplify.properties; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import org.apache.commons.lang3.StringUtils; -import org.yaml.snakeyaml.Yaml; +import static com.michelin.kstreamplify.constants.PropertyConstants.DEFAULT_PROPERTY_FILE; +import static com.michelin.kstreamplify.constants.PropertyConstants.KAFKA_PROPERTIES_PREFIX; +import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; import java.io.IOException; import java.io.InputStream; import java.util.LinkedHashMap; import java.util.Properties; - -import static com.michelin.kstreamplify.constants.PropertyConstants.*; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.commons.lang3.StringUtils; +import org.yaml.snakeyaml.Yaml; /** - * Properties utils + * Properties utils. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class PropertiesUtils { /** - * Load the properties from the default properties file + * Load the properties from the default properties file. 
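A quick usage sketch of the two loaders below; note that the `kafka.properties` prefix and `.` separator are my reading of the constants referenced here, so treat both as assumptions:

```java
import java.util.Properties;

// Load the application properties from the default YAML file on the classpath.
Properties all = PropertiesUtils.loadProperties();

// Extract the Kafka subset: assuming KAFKA_PROPERTIES_PREFIX = "kafka.properties",
// a key such as "kafka.properties.bootstrap.servers" comes back as "bootstrap.servers".
Properties kafka = PropertiesUtils.loadKafkaProperties(all);
```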
+ * * @return The properties */ public static Properties loadProperties() { Yaml yaml = new Yaml(); - try (InputStream inputStream = PropertiesUtils.class.getClassLoader().getResourceAsStream(DEFAULT_PROPERTY_FILE)) { + try (InputStream inputStream = PropertiesUtils.class.getClassLoader() + .getResourceAsStream(DEFAULT_PROPERTY_FILE)) { LinkedHashMap propsMap = yaml.load(inputStream); return parsePropertiesMap(propsMap); } catch (IOException e) { @@ -33,7 +36,8 @@ public static Properties loadProperties() { } /** - * Get the Kafka properties only from the given properties + * Get the Kafka properties only from the given properties. + * * @param props The properties * @return The Kafka properties */ @@ -41,14 +45,16 @@ public static Properties loadKafkaProperties(Properties props) { Properties resultProperties = new Properties(); for (var prop : props.entrySet()) { if (StringUtils.contains(prop.getKey().toString(), KAFKA_PROPERTIES_PREFIX)) { - resultProperties.put(StringUtils.remove(prop.getKey().toString(), KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR), prop.getValue()); + resultProperties.put(StringUtils.remove(prop.getKey().toString(), + KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR), prop.getValue()); } } return resultProperties; } /** - * Parse a map into Properties + * Parse a map into Properties. + * * @param map The map * @return The properties */ @@ -57,9 +63,10 @@ private static Properties parsePropertiesMap(LinkedHashMap map) } /** - * Parse a given key - * @param key The key - * @param map The underlying map + * Parse a given key. + * + * @param key The key + * @param map The underlying map * @param props The properties * @return The properties */ diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDBConfig.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java similarity index 52% rename from kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDBConfig.java rename to kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java index 1cfff70f..85ce2884 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDBConfig.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java @@ -1,111 +1,102 @@ package com.michelin.kstreamplify.properties; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import java.util.Map; import org.apache.kafka.streams.state.RocksDBConfigSetter; import org.rocksdb.BlockBasedTableConfig; import org.rocksdb.CompressionType; import org.rocksdb.Options; -import java.util.Map; - /** - * The RockDB configuration class + * The RockDB configuration class. */ -public class RocksDBConfig implements RocksDBConfigSetter { +public class RocksDbConfig implements RocksDBConfigSetter { /** - * The RocksDB cache size config key + * The RocksDB cache size config key. */ public static final String ROCKSDB_CACHE_SIZE_CONFIG = "rocksdb.config.cache.size"; /** - * The RocksDB write buffer size config key + * The RocksDB write buffer size config key. */ - public static final String ROCKSDB_WRITE_BUFFER_SIZE_CONFIG = "rocksdb.config.write.buffer.size"; + public static final String ROCKSDB_WRITE_BUFFER_SIZE_CONFIG = + "rocksdb.config.write.buffer.size"; /** - * The RocksDB block size config key + * The RocksDB block size config key. 
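For context on how the renamed RocksDbConfig is wired in: the setter class goes into the standard `rocksdb.config.setter` Streams property. Whether the tuning keys below are picked up depends on the properties reaching KafkaStreamsExecutionContext, which this sketch assumes; the cache size value is an arbitrary example:

```java
import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;

Properties props = new Properties();
// Register the renamed config setter (standard Kafka Streams setting).
props.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, RocksDbConfig.class);
// 33554432 bytes (32 MB) block cache; read back from the execution context at setConfig time.
props.put(RocksDbConfig.ROCKSDB_CACHE_SIZE_CONFIG, "33554432");
```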
*/ public static final String ROCKSDB_BLOCK_SIZE_CONFIG = "rocksdb.config.block.size"; /** - * The RocksDB max write buffer config + * The RocksDB max write buffer config. */ public static final String ROCKSDB_MAX_WRITE_BUFFER_CONFIG = "rocksdb.config.max.write.buffer"; /** - * The RocksDB compression type config key + * The RocksDB compression type config key. */ public static final String ROCKSDB_COMPRESSION_TYPE_CONFIG = "rocksdb.config.compression.type"; /** - * The RocksDB cache index block enabled config + * The RocksDB cache index block enabled config. */ - public static final String ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG = "rocksdb.config.cache.index.block.enabled"; + public static final String ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG = + "rocksdb.config.cache.index.block.enabled"; /** - * One KB in B + * One KB in B. */ private static final long ONE_KB = 1024L; /** - * The RocksDB default cache size + * The RocksDB default cache size. */ public static final Long ROCKSDB_CACHE_SIZE_DEFAULT = 16 * ONE_KB * ONE_KB; /** - * The RocksDB default write buffer size + * The RocksDB default write buffer size. */ public static final Long ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT = 4 * ONE_KB * ONE_KB; /** - * The RocksDB default block size + * The RocksDB default block size. */ public static final Long ROCKSDB_BLOCK_SIZE_DEFAULT = 4 * ONE_KB; /** - * The RocksDB default max write buffer + * The RocksDB default max write buffer. */ public static final Integer ROCKSDB_MAX_WRITE_BUFFER_DEFAULT = 2; /** - * The RocksDB default compression type + * The RocksDB default compression type. */ public static final String ROCKSDB_COMPRESSION_TYPE_DEFAULT = ""; /** - * The RocksDB default cache index block enabled + * The RocksDB default cache index block enabled. */ public static final Boolean ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT = true; /** - * The RocksDB cache + * The RocksDB cache. */ private org.rocksdb.Cache cache = null; /** - * Set the RocksDB configuration + * Set the RocksDB configuration. + * * @param storeName The store name - * @param options The options - * @param configs The configs + * @param options The options + * @param configs The configs */ @Override - public void setConfig(final String storeName, final Options options, final Map configs) { - long blockCacheSize = KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_CACHE_SIZE_CONFIG) ? - Long.parseLong(KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_CACHE_SIZE_CONFIG)) : ROCKSDB_CACHE_SIZE_DEFAULT; - - long writeBufferSize = KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_WRITE_BUFFER_SIZE_CONFIG) ? - Long.parseLong(KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_WRITE_BUFFER_SIZE_CONFIG)) : ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT; - - long blockSize = KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_BLOCK_SIZE_CONFIG) ? - Long.parseLong(KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_BLOCK_SIZE_CONFIG)) : ROCKSDB_BLOCK_SIZE_DEFAULT; - - int maxWriteBuffer = KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_MAX_WRITE_BUFFER_CONFIG) ? - Integer.parseInt(KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_MAX_WRITE_BUFFER_CONFIG)) : ROCKSDB_MAX_WRITE_BUFFER_DEFAULT; - - boolean cacheIndexBlock = KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG) ? 
- Boolean.parseBoolean(KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_CONFIG)) : ROCKSDB_CACHE_INDEX_BLOCK_ENABLED_DEFAULT; - - String compressionType = KafkaStreamsExecutionContext.getProperties().getProperty(ROCKSDB_COMPRESSION_TYPE_CONFIG, ROCKSDB_COMPRESSION_TYPE_DEFAULT); + public void setConfig(final String storeName, final Options options, + final Map configs) { + long blockCacheSize = + KafkaStreamsExecutionContext.getProperties().containsKey(ROCKSDB_CACHE_SIZE_CONFIG) + ? Long.parseLong(KafkaStreamsExecutionContext.getProperties() + .getProperty(ROCKSDB_CACHE_SIZE_CONFIG)) : ROCKSDB_CACHE_SIZE_DEFAULT; if (cache == null) { cache = new org.rocksdb.LRUCache(blockCacheSize); @@ -113,11 +104,40 @@ public void setConfig(final String storeName, final Options options, final Map { - exchange.sendResponseHeaders(ProbeService.readinessProbe(kafkaStreamsInitializer).getStatus(), 0); + exchange.sendResponseHeaders( + ProbeService.readinessProbe(kafkaStreamsInitializer).getStatus(), 0); var output = exchange.getResponseBody(); output.close(); exchange.close(); @@ -54,11 +67,13 @@ private void readinessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, Str /** - * Kubernetes' liveness probe + * Kubernetes' liveness probe. */ - private void livenessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, String livenessPath) { + private void livenessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, + String livenessPath) { server.createContext(livenessPath, (exchange -> { - exchange.sendResponseHeaders(ProbeService.livenessProbe(kafkaStreamsInitializer).getStatus(), 0); + exchange.sendResponseHeaders( + ProbeService.livenessProbe(kafkaStreamsInitializer).getStatus(), 0); var output = exchange.getResponseBody(); output.close(); exchange.close(); @@ -66,9 +81,10 @@ private void livenessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, Stri } /** - * Get the Kafka Streams topology + * Get the Kafka Streams topology. */ - private void exposeTopology(KafkaStreamsInitializer kafkaStreamsInitializer, String exposeTopologyPath) { + private void exposeTopology(KafkaStreamsInitializer kafkaStreamsInitializer, + String exposeTopologyPath) { server.createContext(exposeTopologyPath, (exchange -> { var restServiceResponse = ProbeService.exposeTopology(kafkaStreamsInitializer); @@ -83,7 +99,8 @@ private void exposeTopology(KafkaStreamsInitializer kafkaStreamsInitializer, Str /** - * Callback to override in case of custom endpoint definition + * Callback to override in case of custom endpoint definition. 
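The endpoint wiring above follows the plain JDK HttpServer pattern; a minimal standalone sketch of that same pattern, with port and path as illustrative assumptions:

    import com.sun.net.httpserver.HttpServer;
    import java.net.InetSocketAddress;

    public class ProbeEndpointSketch {
        public static void main(String[] args) throws Exception {
            HttpServer server = HttpServer.create(new InetSocketAddress(8080), 0);
            server.createContext("/ready", exchange -> {
                // 200 while the streams instance is considered ready
                exchange.sendResponseHeaders(200, 0);
                exchange.getResponseBody().close();
                exchange.close();
            });
            server.start();
        }
    }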
+ * * @param kafkaStreamsInitializer The Kafka Streams initializer */ protected void endpointCaller(KafkaStreamsInitializer kafkaStreamsInitializer) { diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ConvertTopology.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ConvertTopology.java deleted file mode 100644 index 79283dea..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ConvertTopology.java +++ /dev/null @@ -1,123 +0,0 @@ -package com.michelin.kstreamplify.services; - -import com.michelin.kstreamplify.model.TopologyExposeJsonModel; -import com.michelin.kstreamplify.model.TopologyObject; -import com.michelin.kstreamplify.model.TopologyObjectType; -import com.michelin.kstreamplify.model.TopologyPart; -import org.apache.kafka.streams.Topology; -import org.apache.kafka.streams.TopologyDescription; - -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - * The convert topology class - */ -public class ConvertTopology { - private static final String SINK = "-SINK-"; - private static final String SOURCE = "-SOURCE-"; - - private ConvertTopology() { } - - /** - * Convert the Kafka Streams topology as REST JSON response - * @param streamName The Kafka Streams name - * @param topology The Kafka Streams topology - * @return The Kafka Streams topology as REST JSON response - */ - public static TopologyExposeJsonModel convertTopologyForRest(String streamName, Topology topology) { - var result = new TopologyExposeJsonModel(); - result.setStreamName(streamName); - for (TopologyDescription.Subtopology subTopology : topology.describe().subtopologies()) { - handleSubTopology(subTopology, result); - } - return result; - } - - private static void handleSubTopology(TopologyDescription.Subtopology subTopology, TopologyExposeJsonModel obj){ - Set nodeProcessed = new HashSet<>(); - for (TopologyDescription.Node node : subTopology.nodes()) { - if (!nodeProcessed.contains(node.name())){ - handleNode(nodeProcessed, obj, node, new TopologyPart()); - } - } - } - - private static void handleNode(Set nodeProcessed, TopologyExposeJsonModel obj, TopologyDescription.Node node, TopologyPart currentNodeAncestorsPath) { - nodeProcessed.add(node.name()); - List currentElements = currentNodeAncestorsPath.getDetailedTransformation(); - currentElements.add(node.name()); - currentNodeAncestorsPath.setDetailedTransformation(currentElements); - - TopologyObject elementName = currentNodeAncestorsPath.getSubElementName(); - - if (node.successors().size() > 1) { - var t = obj.getInternalStreamContent(); - - elementName.setObjectName(obj.getStreamName()+"\\r\\n"+"Element "+(obj.getInternalStreamContent().size()+1)); - elementName.setType(TopologyObjectType.STREAM); - currentNodeAncestorsPath.setSubElementName(elementName); - t.add(currentNodeAncestorsPath); - - obj.setInternalStreamContent(t); - currentNodeAncestorsPath = new TopologyPart(); - currentNodeAncestorsPath.setInputElement(elementName); - } - - if (node.successors().isEmpty()) { - var t = obj.getInternalStreamContent(); - - elementName.setObjectName(obj.getStreamName()+"\\r\\n"+"Element "+ (obj.getInternalStreamContent().size() + 1)); - elementName.setType(TopologyObjectType.STREAM); - currentNodeAncestorsPath.setSubElementName(elementName); - - TopologyObject elementNameSink = new TopologyObject(); - elementNameSink.setType(TopologyObjectType.TOPIC_OUT); - currentNodeAncestorsPath.setOutputElement(elementNameSink); - - if (node.name().contains(SINK)) { - 
TopologyDescription.Sink sink = (TopologyDescription.Sink)node; - elementNameSink.setObjectName(sink.topic()); - } - - t.add(currentNodeAncestorsPath); - - obj.setInternalStreamContent(t); - currentNodeAncestorsPath = new TopologyPart(); - currentNodeAncestorsPath.setInputElement(elementName); - } - - if (node.name().contains(SOURCE)) { - TopologyDescription.Source source = (TopologyDescription.Source)node; - var t = obj.getInTopicNameList(); - TopologyObject elementNameSource = new TopologyObject(); - elementNameSource.setType(TopologyObjectType.TOPIC_IN); - for (String topic : source.topicSet()){ - elementNameSource.setObjectName(topic); - t.add(topic); - } - currentNodeAncestorsPath.setInputElement(elementNameSource); - - obj.setInTopicNameList(t); - } else { - if (node.name().contains(SINK)) { - TopologyObject elementNameSink = new TopologyObject(); - elementNameSink.setType(TopologyObjectType.TOPIC_OUT); - - TopologyDescription.Sink sink = (TopologyDescription.Sink)node; - elementNameSink.setObjectName(sink.topic()); - var t = obj.getOutTopicNameList(); - t.add(sink.topic()); - obj.setOutTopicNameList(t); - currentNodeAncestorsPath.setOutputElement(elementNameSink); - } - } - - for (TopologyDescription.Node nodeBelow : node.successors()) { - handleNode(nodeProcessed, obj, nodeBelow, currentNodeAncestorsPath); - currentNodeAncestorsPath = new TopologyPart(); - currentNodeAncestorsPath.setInputElement(elementName); - } - } -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java index 23ca9ab2..a52111f7 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java @@ -1,8 +1,9 @@ package com.michelin.kstreamplify.services; +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; import com.michelin.kstreamplify.model.RestServiceResponse; -import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import java.net.HttpURLConnection; import lombok.AccessLevel; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -10,64 +11,86 @@ import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.processor.internals.StreamThread; -import java.net.HttpURLConnection; - /** - * Kafka Streams probe service + * Kafka Streams probe service. */ @Slf4j @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class ProbeService { /** - * Kubernetes' readiness probe + * Kubernetes' readiness probe. 
+ * * @param kafkaStreamsInitializer The Kafka Streams initializer * @return An HTTP response based on the Kafka Streams state */ - public static RestServiceResponse readinessProbe(KafkaStreamsInitializer kafkaStreamsInitializer) { + public static RestServiceResponse readinessProbe( + KafkaStreamsInitializer kafkaStreamsInitializer) { if (kafkaStreamsInitializer.getKafkaStreams() != null) { log.debug("Kafka Stream \"{}\" state: {}", - KafkaStreamsExecutionContext.getProperties().getProperty(StreamsConfig.APPLICATION_ID_CONFIG), - kafkaStreamsInitializer.getKafkaStreams().state()); + KafkaStreamsExecutionContext.getProperties() + .getProperty(StreamsConfig.APPLICATION_ID_CONFIG), + kafkaStreamsInitializer.getKafkaStreams().state()); - if (kafkaStreamsInitializer.getKafkaStreams().state() == KafkaStreams.State.REBALANCING) { - long startingThreadCount = kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads() + if (kafkaStreamsInitializer.getKafkaStreams().state() + == KafkaStreams.State.REBALANCING) { + long startingThreadCount = + kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads() .stream() - .filter(t -> StreamThread.State.STARTING.name().compareToIgnoreCase(t.threadState()) == 0 || StreamThread.State.CREATED.name().compareToIgnoreCase(t.threadState()) == 0) + .filter(t -> StreamThread.State.STARTING.name() + .compareToIgnoreCase(t.threadState()) == 0 + || StreamThread.State.CREATED.name() + .compareToIgnoreCase(t.threadState()) == 0) .count(); - if (startingThreadCount == kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads().size()) { - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT).build(); + if (startingThreadCount + == kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads().size()) { + return RestServiceResponse.builder() + .status(HttpURLConnection.HTTP_NO_CONTENT).build(); } } - return kafkaStreamsInitializer.getKafkaStreams().state().isRunningOrRebalancing() ? - RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() : RestServiceResponse.builder().status(HttpURLConnection.HTTP_UNAVAILABLE).build(); + return kafkaStreamsInitializer.getKafkaStreams().state().isRunningOrRebalancing() + ? RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() : + RestServiceResponse.builder().status(HttpURLConnection.HTTP_UNAVAILABLE) + .build(); } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_BAD_REQUEST).build(); + return RestServiceResponse.builder().status(HttpURLConnection.HTTP_BAD_REQUEST) + .build(); } /** - * Kubernetes' liveness probe + * Kubernetes' liveness probe. + * * @param kafkaStreamsInitializer The Kafka Streams initializer * @return An HTTP response based on the Kafka Streams state */ - public static RestServiceResponse livenessProbe(KafkaStreamsInitializer kafkaStreamsInitializer) { + public static RestServiceResponse livenessProbe( + KafkaStreamsInitializer kafkaStreamsInitializer) { if (kafkaStreamsInitializer.getKafkaStreams() != null) { - return kafkaStreamsInitializer.getKafkaStreams().state() != KafkaStreams.State.NOT_RUNNING ? RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() - : RestServiceResponse.builder().status(HttpURLConnection.HTTP_INTERNAL_ERROR).build(); + return kafkaStreamsInitializer.getKafkaStreams().state() + != KafkaStreams.State.NOT_RUNNING + ? 
RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() + : + RestServiceResponse.builder().status(HttpURLConnection.HTTP_INTERNAL_ERROR) + .build(); } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT).build(); + return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT) + .build(); } /** - * Get the Kafka Streams topology + * Get the Kafka Streams topology. + * * @param kafkaStreamsInitializer The Kafka Streams initializer * @return The Kafka Streams topology */ - public static RestServiceResponse exposeTopology(KafkaStreamsInitializer kafkaStreamsInitializer) { + public static RestServiceResponse exposeTopology( + KafkaStreamsInitializer kafkaStreamsInitializer) { if (kafkaStreamsInitializer.getTopology() != null) { - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).body(kafkaStreamsInitializer.getTopology().describe().toString()).build(); + return RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK) + .body(kafkaStreamsInitializer.getTopology().describe().toString()).build(); } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT).build(); + return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT) + .build(); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java index dc21e384..d7086849 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java @@ -1,40 +1,45 @@ package com.michelin.kstreamplify.utils; -import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; import org.apache.avro.specific.SpecificRecord; /** - * The Serdes utils class + * The Serdes utils class. */ public final class SerdesUtils { - private SerdesUtils() { } + private SerdesUtils() { + } /** - * Return a key serdes for a requested class + * Return a key serdes for a requested class. + * * @param The class of requested serdes * @return a serdes for requested class */ - public static SpecificAvroSerde getSerdesForKey() { + public static SpecificAvroSerde getSerdesForKey() { return getSerdes(true); } /** - * Return a value serdes for a requested class + * Return a value serdes for a requested class. + * * @param The class of requested serdes * @return a serdes for requested class */ - public static SpecificAvroSerde getSerdesForValue() { + public static SpecificAvroSerde getSerdesForValue() { return getSerdes(false); } /** - * Return a serdes for a requested class + * Return a serdes for a requested class. 
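A hedged usage sketch of the two typed entry points above, reusing the KafkaTestAvro class from the test hunks later in this patch; the serdes config (schema registry URL and so on) is assumed to be registered in KafkaStreamsExecutionContext beforehand.

    import com.michelin.kstreamplify.avro.KafkaTestAvro;
    import com.michelin.kstreamplify.utils.SerdesUtils;
    import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;

    public class SerdesUtilsSketch {
        public static void main(String[] args) {
            // Both helpers configure the serde from the execution context
            SpecificAvroSerde<KafkaTestAvro> keySerde = SerdesUtils.getSerdesForKey();
            SpecificAvroSerde<KafkaTestAvro> valueSerde = SerdesUtils.getSerdesForValue();
        }
    }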
+ * * @param isSerdeForKey Is the serdes for a key or a value - * @param The class of requested serdes + * @param The class of requested serdes * @return a serdes for requested class */ - private static SpecificAvroSerde getSerdes(boolean isSerdeForKey) { + private static SpecificAvroSerde getSerdes( + boolean isSerdeForKey) { SpecificAvroSerde serde = new SpecificAvroSerde<>(); serde.configure(KafkaStreamsExecutionContext.getSerdesConfig(), isSerdeForKey); return serde; diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java index 90c6d722..3e56e3a7 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java @@ -1,11 +1,14 @@ package com.michelin.kstreamplify.utils; -import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import static com.michelin.kstreamplify.constants.PropertyConstants.PREFIX_PROPERTY_NAME; +import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; +import static com.michelin.kstreamplify.constants.PropertyConstants.REMAP_PROPERTY_NAME; +import static com.michelin.kstreamplify.constants.PropertyConstants.TOPIC_PROPERTY_NAME; -import static com.michelin.kstreamplify.constants.PropertyConstants.*; +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; /** - * The topic utils class + * The topic utils class. */ public final class TopicUtils { private TopicUtils() { @@ -42,17 +45,17 @@ public static String prefixAndDynamicRemap(String topicName, String prefixProper // Check for dynamic remap in properties String resultTopicName = properties.getProperty( - TOPIC_PROPERTY_NAME - + PROPERTY_SEPARATOR - + REMAP_PROPERTY_NAME - + PROPERTY_SEPARATOR - + topicName, - topicName); + TOPIC_PROPERTY_NAME + + PROPERTY_SEPARATOR + + REMAP_PROPERTY_NAME + + PROPERTY_SEPARATOR + + topicName, + topicName); // check if topic prefix property exists - String prefix = properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + prefixPropertyKey, ""); + String prefix = + properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + prefixPropertyKey, + ""); return prefix.concat(resultTopicName); } - - } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java index 69e3902c..736cbd38 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java @@ -1,6 +1,8 @@ package com.michelin.kstreamplify.utils; +import static com.michelin.kstreamplify.constants.PropertyConstants.SELF; + import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; @@ -15,10 +17,8 @@ import org.apache.kafka.streams.kstream.Produced; import org.apache.kafka.streams.state.KeyValueStore; -import static com.michelin.kstreamplify.constants.PropertyConstants.SELF; - /** - * Wrapper class for simplifying topics interactions and their behaviors + * Wrapper class for simplifying topics interactions and their behaviors. * * @param The model used as the key avro of the topic. Can be String (Recommended) * @param The model used as the value avro of the topic. 
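To make prefixAndDynamicRemap concrete, a self-contained sketch of the two lookups it performs; the literal key parts "topic", "remap" and "prefix" are assumptions about PropertyConstants, which is not shown here.

    import java.util.Properties;

    public class RemapSketch {
        public static void main(String[] args) {
            Properties p = new Properties();
            p.setProperty("topic.remap.myTopic", "otherTopic");
            p.setProperty("prefix.self", "myNamespacePrefix.");
            // First lookup: dynamic remap, falling back to the original name
            String remapped = p.getProperty("topic.remap.myTopic", "myTopic");
            // Second lookup: optional prefix for the given property key
            String prefix = p.getProperty("prefix.self", "");
            System.out.println(prefix.concat(remapped)); // myNamespacePrefix.otherTopic
        }
    }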
@@ -26,7 +26,7 @@ @AllArgsConstructor(access = AccessLevel.PUBLIC) public final class TopicWithSerde { /** - * Name of the topic + * Name of the topic. */ private final String topicName; @@ -39,25 +39,26 @@ public final class TopicWithSerde { * prefix: * nsKey: "myNamespacePrefix." * } - *
<p>If the topic name is {@code myTopic} , at stream initialization the topic name will resolve to {@code myNamespacePrefix.myTopic}</p> + * <p>If the topic name is {@code myTopic} , at stream initialization the topic name will resolve + to {@code myNamespacePrefix.myTopic}</p>
*/ private final String prefixPropertyKey; /** - * Key serde for the topic + * Key serde for the topic. */ @Getter private final Serde keySerde; /** - * Value serde for the topic + * Value serde for the topic. */ @Getter private final Serde valueSerde; /** - *
<p>Additional constructor which uses default parameter "self" for prefixPropertyKey</p> + * <p>Additional constructor which uses default parameter "self" for prefixPropertyKey.</p> * * <p>For instance, with the given following configuration :</p> * <pre>{@code @@ -66,7 +67,8 @@ public final class TopicWithSerde { *     prefix: *       self: "myNamespacePrefix." * }</pre> - * <p>If the topic name is {@code myTopic} , at stream initialization the topic name will resolve to {@code myNamespacePrefix.myTopic}</p> + * <p>If the topic name is {@code myTopic} , at stream initialization the topic name will resolve + to {@code myNamespacePrefix.myTopic}</p>
* * @param topicName Name of the topic * @param keySerde Key serde for the topic @@ -80,16 +82,17 @@ public TopicWithSerde(String topicName, Serde keySerde, Serde valueSerde) } /** - * Get the un-prefixed name of the Topic for specific usage + * Get the un-prefixed name of the Topic for specific usage. * - * @return The name of the topic, as defined during initialization, without ns4kafka prefixing + * @return The name of the topic, as defined during initialization */ public String getUnPrefixedName() { return topicName; } /** - * Override of the toString method, dynamically builds the topicName based on springBoot properties for environment/application + * Override of the toString method, dynamically builds the topicName based on springBoot + * properties for environment/application. * * @return The prefixed name of the topic */ @@ -99,7 +102,8 @@ public String toString() { } /** - * Wrapper for the .stream method of KafkaStreams. Allows simple usage of a topic with type inference + * Wrapper for the .stream method of KafkaStreams. + * Allows simple usage of a topic with type inference * * @param sb The streamsBuilder * @return a Kstream from the given topic @@ -116,7 +120,9 @@ public KStream stream(StreamsBuilder sb) { * @return a Ktable from the given topic */ public KTable table(StreamsBuilder sb, String storeName) { - return sb.table(this.toString(), Consumed.with(keySerde, valueSerde), Materialized.>as(storeName).withKeySerde(keySerde).withValueSerde(valueSerde)); + return sb.table(this.toString(), Consumed.with(keySerde, valueSerde), + Materialized.>as(storeName).withKeySerde(keySerde) + .withValueSerde(valueSerde)); } /** @@ -127,7 +133,9 @@ public KTable table(StreamsBuilder sb, String storeName) { * @return a GlobalKtable from the given topic */ public GlobalKTable globalTable(StreamsBuilder sb, String storeName) { - return sb.globalTable(this.toString(), Consumed.with(keySerde, valueSerde), Materialized.>as(storeName).withKeySerde(keySerde).withValueSerde(valueSerde)); + return sb.globalTable(this.toString(), Consumed.with(keySerde, valueSerde), + Materialized.>as(storeName).withKeySerde(keySerde) + .withValueSerde(valueSerde)); } /** diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java index 9ae54646..2cc7192e 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java @@ -1,18 +1,19 @@ package com.michelin.kstreamplify.utils; -import org.apache.kafka.streams.state.WindowStore; - import java.time.Duration; import java.time.Instant; +import org.apache.kafka.streams.state.WindowStore; /** - * The window state store utils + * The window state store utils. */ public final class WindowStateStoreUtils { - private WindowStateStoreUtils() { } + private WindowStateStoreUtils() { + } /** - * Put the key/value into the state store + * Put the key/value into the state store. + * * @param stateStore The stateStore * @param key The key * @param value The value @@ -24,7 +25,8 @@ public static void put(WindowStore stateStore, K key, V value) { } /** - * Get the value by the key from the state store + * Get the value by the key from the state store. 
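A hedged sketch of the TopicWithSerde wrappers above; the topic name and the Avro value class are illustrative assumptions, while the three-argument constructor is the one shown earlier in this patch.

    import com.michelin.kstreamplify.avro.KafkaTestAvro;
    import com.michelin.kstreamplify.utils.SerdesUtils;
    import com.michelin.kstreamplify.utils.TopicWithSerde;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.kstream.KStream;

    public class TopicWithSerdeSketch {
        public static void main(String[] args) {
            StreamsBuilder builder = new StreamsBuilder();
            TopicWithSerde<String, KafkaTestAvro> topic = new TopicWithSerde<>(
                "myTopic", Serdes.String(), SerdesUtils.getSerdesForValue());
            // toString() applies the resolved prefix, so the subscription may
            // end up on e.g. "myNamespacePrefix.myTopic"
            KStream<String, KafkaTestAvro> stream = topic.stream(builder);
        }
    }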
+ * * @param stateStore The stateStore * @param key The key * @param retentionDays The delay of retention @@ -33,7 +35,9 @@ public static void put(WindowStore stateStore, K key, V value) { * @return The last value inserted in the state store for the key */ public static V get(WindowStore stateStore, K key, int retentionDays) { - var resultIterator = stateStore.backwardFetch(key, Instant.now().minus(Duration.ofDays(retentionDays)), Instant.now()); + var resultIterator = + stateStore.backwardFetch(key, Instant.now().minus(Duration.ofDays(retentionDays)), + Instant.now()); if (resultIterator != null && resultIterator.hasNext()) { return resultIterator.next().value; } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java index d34b894f..6dc908ea 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java @@ -1,20 +1,19 @@ package com.michelin.kstreamplify.converter; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.google.gson.Gson; import com.google.gson.JsonObject; import com.michelin.kstreamplify.avro.KafkaTestAvro; import com.michelin.kstreamplify.avro.MapElement; import com.michelin.kstreamplify.avro.SubKafkaTestAvro; import com.michelin.kstreamplify.avro.SubSubKafkaTestAvro; -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Test; - import java.math.BigDecimal; import java.time.Instant; import java.util.List; import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.Test; @Slf4j class AvroToJsonConverterTest { @@ -26,14 +25,18 @@ void shouldConvertAvroToJson() { var gson = new Gson(); var jsonObject = gson.fromJson(jsonString, JsonObject.class); - + assertEquals("false", jsonObject.get("booleanField").getAsString()); assertEquals("1970-01-01T00:00:00.001Z", jsonObject.get("dateField").getAsString()); assertEquals("10", jsonObject.get("quantityField").getAsString()); assertEquals("test", jsonObject.get("stringField").getAsString()); - assertEquals("1970-01-01T00:00:00.002Z", jsonObject.getAsJsonArray("split").get(0).getAsJsonObject().getAsJsonArray("subSplit").get(0).getAsJsonObject().get("subSubDateField").getAsString()); - assertEquals("1970-01-01T00:00:00.003Z", jsonObject.getAsJsonObject("members").getAsJsonObject("key1").get("mapDateField").getAsString()); + assertEquals("1970-01-01T00:00:00.002Z", + jsonObject.getAsJsonArray("split").get(0).getAsJsonObject().getAsJsonArray("subSplit") + .get(0).getAsJsonObject().get("subSubDateField").getAsString()); + assertEquals("1970-01-01T00:00:00.003Z", + jsonObject.getAsJsonObject("members").getAsJsonObject("key1").get("mapDateField") + .getAsString()); assertEquals("val1", jsonObject.getAsJsonObject("membersString").get("key1").getAsString()); assertEquals("val1", jsonObject.getAsJsonArray("listString").get(0).getAsString()); assertEquals("val2", jsonObject.getAsJsonArray("listString").get(1).getAsString()); @@ -43,25 +46,25 @@ void shouldConvertAvroToJson() { private KafkaTestAvro getKafkaTest() { return KafkaTestAvro.newBuilder() - .setStringField("test") - .setDateField(Instant.ofEpochMilli(1)) - .setQuantityField(BigDecimal.TEN) - .setMembers(Map.of("key1", MapElement.newBuilder() - 
.setMapDateField(Instant.ofEpochMilli(3)) - .setMapQuantityField(BigDecimal.ONE) - .build())) - .setMembersString(Map.of("key1","val1")) - .setListString(List.of("val1","val2")) - .setSplit(List.of( - SubKafkaTestAvro.newBuilder() - .setSubField("subTest") - .setSubSplit(List.of( - SubSubKafkaTestAvro.newBuilder() - .setSubSubField("subSubTest") - .setSubSubDateField(Instant.ofEpochMilli(2)) - .setSubSubIntField(8) - .build())) - .build())) - .build(); + .setStringField("test") + .setDateField(Instant.ofEpochMilli(1)) + .setQuantityField(BigDecimal.TEN) + .setMembers(Map.of("key1", MapElement.newBuilder() + .setMapDateField(Instant.ofEpochMilli(3)) + .setMapQuantityField(BigDecimal.ONE) + .build())) + .setMembersString(Map.of("key1", "val1")) + .setListString(List.of("val1", "val2")) + .setSplit(List.of( + SubKafkaTestAvro.newBuilder() + .setSubField("subTest") + .setSubSplit(List.of( + SubSubKafkaTestAvro.newBuilder() + .setSubSubField("subSubTest") + .setSubSubDateField(Instant.ofEpochMilli(2)) + .setSubSubIntField(8) + .build())) + .build())) + .build(); } } diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java index aa78a951..b3cb5367 100644 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java @@ -13,41 +13,41 @@ import org.springframework.context.ConfigurableApplicationContext; import org.springframework.stereotype.Component; -import java.io.IOException; - /** - * The Kafka Streams initializer class + * The Kafka Streams initializer class. */ @Slf4j @Component @ConditionalOnBean(KafkaStreamsStarter.class) -public class SpringKafkaStreamsInitializer extends KafkaStreamsInitializer implements ApplicationRunner { +public class SpringKafkaStreamsInitializer extends KafkaStreamsInitializer + implements ApplicationRunner { /** - * The application context + * The application context. */ @Autowired private ConfigurableApplicationContext applicationContext; /** - * The server port + * The server port. */ @Value("${server.port:8080}") private int springBootServerPort; /** - * The Kafka properties + * The Kafka properties. */ @Autowired private KafkaProperties springBootKafkaProperties; /** - * The Kafka Streams starter + * The Kafka Streams starter. */ @Autowired private KafkaStreamsStarter kafkaStreamsStarter; /** - * Run method + * Run method. 
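For orientation, a hedged sketch of the bean this initializer autowires via @ConditionalOnBean; the topology() and dlqTopic() hooks are assumptions about KafkaStreamsStarter, which this patch references but does not show.

    import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.springframework.stereotype.Component;

    @Component
    public class MyKafkaStreamsStarter extends KafkaStreamsStarter {
        @Override
        public void topology(StreamsBuilder streamsBuilder) {
            // build the topology here
        }

        @Override
        public String dlqTopic() {
            return "my-dlq-topic"; // illustrative name
        }
    }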
+ * * @param args the program arguments */ @Override @@ -56,7 +56,7 @@ public void run(ApplicationArguments args) { } /** - * ${@inheritDoc} + * {@inheritDoc} */ @Override protected void initHttpServer() { @@ -64,7 +64,7 @@ protected void initHttpServer() { } /** - * ${@inheritDoc} + * {@inheritDoc} */ @Override protected void initProperties() { @@ -74,16 +74,17 @@ protected void initProperties() { } /** - * ${@inheritDoc} + * {@inheritDoc} */ @Override - protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException(Throwable exception) { + protected StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse onStreamsUncaughtException( + Throwable exception) { closeApplicationContext(); return super.onStreamsUncaughtException(exception); } /** - * ${@inheritDoc} + * {@inheritDoc} */ @Override protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State oldState) { @@ -93,7 +94,7 @@ protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State old } /** - * Close the application context + * Close the application context. */ private void closeApplicationContext() { if (applicationContext != null) { diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java index f0544a6d..04057298 100644 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java @@ -1,16 +1,15 @@ package com.michelin.kstreamplify.properties; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; import lombok.Getter; import lombok.Setter; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - /** - * The Kafka properties class + * The Kafka properties class. */ @Getter @Setter @@ -18,12 +17,13 @@ @ConfigurationProperties(prefix = "kafka") public class KafkaProperties { /** - * The Kafka properties + * The Kafka properties. */ private final Map properties = new HashMap<>(); /** - * Return the Kafka properties as {@link java.util.Properties} + * Return the Kafka properties as {@link java.util.Properties}. + * * @return The Kafka properties */ public Properties asProperties() { diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java index 95d4de0d..4bdd79d2 100644 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java @@ -11,19 +11,20 @@ import org.springframework.web.bind.annotation.RestController; /** - * Spring Boot probe controller + * Spring Boot probe controller. */ @RestController @ConditionalOnBean(KafkaStreamsStarter.class) public class SpringProbeController { /** - * The Kafka Streams initializer + * The Kafka Streams initializer. */ @Autowired private SpringKafkaStreamsInitializer kafkaStreamsInitializer; /** - * Readiness Kubernetes probe endpoint + * Readiness Kubernetes probe endpoint. 
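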
+ * * @return An HTTP response based on the Kafka Streams state */ @GetMapping("/${readiness_path:ready}") @@ -32,7 +33,8 @@ public ResponseEntity readinessProbe() { } /** - * Liveness Kubernetes probe endpoint + * Liveness Kubernetes probe endpoint. + * * @return An HTTP response based on the Kafka Streams state */ @GetMapping("/${liveness_path:liveness}") @@ -41,7 +43,8 @@ public ResponseEntity livenessProbe() { } /** - * Get the Kafka Streams topology + * Get the Kafka Streams topology. + * * @return The Kafka Streams topology */ @GetMapping("/${expose_topology_path:topology}") @@ -50,11 +53,13 @@ public ResponseEntity exposeTopology() { } /** - * Convert the probe service response into an HTTP response entity + * Convert the probe service response into an HTTP response entity. + * * @param serviceResponse The probe service response * @return An HTTP response */ - private static ResponseEntity convertToResponseEntity(RestServiceResponse serviceResponse) { + private static ResponseEntity convertToResponseEntity( + RestServiceResponse serviceResponse) { return ResponseEntity.status(serviceResponse.getStatus()).body(serviceResponse.getBody()); } } diff --git a/pom.xml b/pom.xml index 41fe3e30..6d6a768a 100644 --- a/pom.xml +++ b/pom.xml @@ -1,313 +1,344 @@ - 4.0.0 - pom + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 + pom - com.michelin - kstreamplify - 0.1.1-SNAPSHOT - kstreamplify - Kstreamplify is a Java library that brings new features on top of Kafka Streams. - https://github.com/michelin/kstreamplify + com.michelin + kstreamplify + 0.1.1-SNAPSHOT + kstreamplify + Kstreamplify is a Java library that brings new features on top of Kafka Streams. + https://github.com/michelin/kstreamplify - - - The Apache License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - - + + + The Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + - - - ossrh - https://s01.oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ - - + + + ossrh + https://s01.oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ + + - - - sebastienviale - Sebastien Viale - dif-hap-dev@michelin.com - https://github.com/sebastienviale - Michelin - - Developer - - - - clvacher - Clement Vacher - dif-hap-dev@michelin.com - https://github.com/clvacher - CGI - - Developer - - - - alexbosch3000 - Alexandre Bosch - dif-hap-dev@michelin.com - https://github.com/alexbosch3000 - Michelin - - Developer - - - - clvacher - Clement Vacher - dif-hap-dev@michelin.com - https://github.com/clvacher - CGI - - Developer - - - - loicgreffier - Loïc Greffier - dif-hap-dev@michelin.com - https://github.com/loicgreffier - Michelin - - Developer - - - - adriencalime - Adrien Calime - dif-hap-dev@michelin.com - https://github.com/adriencalime - Michelin - - Developer - - - + + + sebastienviale + Sebastien Viale + dif-hap-dev@michelin.com + https://github.com/sebastienviale + Michelin + + Developer + + + + clvacher + Clement Vacher + dif-hap-dev@michelin.com + https://github.com/clvacher + CGI + + Developer + + + + alexbosch3000 + Alexandre Bosch + dif-hap-dev@michelin.com + https://github.com/alexbosch3000 + Michelin + + Developer + + + + clvacher + Clement Vacher + dif-hap-dev@michelin.com + https://github.com/clvacher + CGI + + Developer + + + + loicgreffier + Loïc Greffier + dif-hap-dev@michelin.com + 
https://github.com/loicgreffier + Michelin + + Developer + + + + adriencalime + Adrien Calime + dif-hap-dev@michelin.com + https://github.com/adriencalime + Michelin + + Developer + + + - - - confluent - https://packages.confluent.io/maven/ - - + + + confluent + https://packages.confluent.io/maven/ + + - - scm:git:https://github.com/michelin/kstreamplify.git - scm:git:https://github.com/michelin/kstreamplify.git - scm:git:https://github.com/michelin/kstreamplify.git - + + scm:git:https://github.com/michelin/kstreamplify.git + scm:git:https://github.com/michelin/kstreamplify.git + scm:git:https://github.com/michelin/kstreamplify.git + - + kstreamplify-core - kstreamplify-spring-boot - kstreamplify-core-test - + kstreamplify-spring-boot + kstreamplify-core-test + - - 1.11.2 - 2.13.0 - 3.13.0 - 2.10.1 - 17 - 5.10.0 - 3.4.0 - 7.5.0 - 1.18.28 - 17 - 17 - 3.1.3 - michelin - michelin_kstreamplify - ${project.artifactId} - https://sonarcloud.io - + + 1.11.2 + 2.13.0 + 3.13.0 + 2.10.1 + 17 + 5.10.0 + 3.4.0 + 7.5.0 + 1.18.28 + 17 + 17 + 3.1.3 + michelin + michelin_kstreamplify + ${project.artifactId} + https://sonarcloud.io + - - - org.apache.kafka - kafka-streams - ${kafka.version} - + + + org.apache.kafka + kafka-streams + ${kafka.version} + - - org.apache.kafka - kafka-streams-test-utils - ${kafka.version} - + + org.apache.kafka + kafka-streams-test-utils + ${kafka.version} + - - io.confluent - kafka-streams-avro-serde - ${kafka-streams-avro-serde.version} - + + io.confluent + kafka-streams-avro-serde + ${kafka-streams-avro-serde.version} + - - org.projectlombok - lombok - ${lombok.version} - provided - + + org.projectlombok + lombok + ${lombok.version} + provided + - - commons-io - commons-io - ${commons-io.version} - + + commons-io + commons-io + ${commons-io.version} + - - org.junit.jupiter - junit-jupiter - ${junit-jupiter.version} - + + org.junit.jupiter + junit-jupiter + ${junit-jupiter.version} + - - com.google.code.gson - gson - ${gson.version} - + + com.google.code.gson + gson + ${gson.version} + - - org.apache.commons - commons-lang3 - ${commons-lang3.version} - + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + - + - - - - org.apache.maven.plugins - maven-source-plugin - 3.3.0 - - - attach-sources - verify - - jar-no-fork - - - - + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + attach-sources + verify + + jar-no-fork + + + + - - org.apache.maven.plugins - maven-javadoc-plugin - 3.5.0 - - - attach-javadocs - verify - - jar - - - - + + org.apache.maven.plugins + maven-javadoc-plugin + 3.5.0 + + + attach-javadocs + verify + + jar + + + + - - org.apache.maven.plugins - maven-surefire-plugin - 3.1.2 - - - org.junit.jupiter - junit-jupiter-engine - ${junit-jupiter.version} - - - + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + + org.junit.jupiter + junit-jupiter-engine + ${junit-jupiter.version} + + + - - org.jacoco - jacoco-maven-plugin - 0.8.10 - - - jacoco-initialize - - prepare-agent - - - - report - prepare-package - - report - - - - post-unit-test - test - - report - - - - target/jacoco.exec - - target/jacoco-ut - - - - - + + org.jacoco + jacoco-maven-plugin + 0.8.10 + + + jacoco-initialize + + prepare-agent + + + + report + prepare-package + + report + + + + post-unit-test + test + + report + + + + target/jacoco.exec + + target/jacoco-ut + + + + - - - - org.apache.avro - avro-maven-plugin - ${avro.version} - - - generate-sources - - schema - - - String - true - ${project.basedir}/src/main/avro/ - 
${project.basedir}/target/generated-sources - - - - - - - + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.3.0 + + + com.puppycrawl.tools + checkstyle + 10.12.3 + + + + .checkstyle/checkstyle.xml + + ${project.build.sourceDirectory} + ${project.build.testSourceDirectory} + + info + true + + + + check-style + verify + + check + + + + + - - - sign - - - - org.apache.maven.plugins - maven-gpg-plugin - 3.1.0 - - - sign-artifacts - verify - - sign - - - - - - - - + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + String + true + ${project.basedir}/src/main/avro/ + ${project.basedir}/target/generated-sources + + + + + + + + + + + sign + + + + org.apache.maven.plugins + maven-gpg-plugin + 3.1.0 + + + sign-artifacts + verify + + sign + + + + + + + + From 3162d018bb9697fb398b5cd270c7626f7faaaa47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20GREFFIER?= Date: Wed, 6 Sep 2023 23:41:30 +0200 Subject: [PATCH 2/4] Fix code smells --- .../kstreamplify/converter/JsonToAvroConverter.java | 6 +++--- .../deduplication/DedupWithPredicateProcessor.java | 2 +- .../kstreamplify/initializer/KafkaStreamsInitializer.java | 1 - 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java index df86e144..d80a66fe 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java @@ -42,11 +42,11 @@ public static SpecificRecordBase jsonToAvro(String file, Schema schema) { */ public static SpecificRecordBase jsonToAvro(JsonObject jsonEvent, Schema schema) { try { - SpecificRecordBase record = + SpecificRecordBase message = baseClass(schema.getNamespace(), schema.getName()).getDeclaredConstructor() .newInstance(); - populateGenericRecordFromJson(jsonEvent, record); - return record; + populateGenericRecordFromJson(jsonEvent, message); + return message; } catch (Exception e) { return null; } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java index 73d3098b..47698340 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java @@ -67,7 +67,7 @@ public void init(ProcessorContext> context) { dedupTimestampedStore = this.processorContext.getStateStore(dedupStoreName); processorContext.schedule(Duration.ofHours(1), PunctuationType.WALL_CLOCK_TIME, - (currentTimestamp) -> { + currentTimestamp -> { try (var iterator = dedupTimestampedStore.all()) { while (iterator.hasNext()) { var currentRecord = iterator.next(); diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java index e81ec4e4..a7c311c0 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java @@ -161,7 +161,6 @@ protected void 
initProperties() { properties = PropertiesUtils.loadProperties(); serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY); - ; kafkaProperties = PropertiesUtils.loadKafkaProperties(properties); From ecba134beb5eb8cc476d9afb06e3393f3f47b222 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20GREFFIER?= Date: Wed, 6 Sep 2023 23:53:23 +0200 Subject: [PATCH 3/4] Switch check style and build steps --- .github/workflows/on_pull_request.yml | 6 +++--- .github/workflows/on_push_main.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/on_pull_request.yml b/.github/workflows/on_pull_request.yml index 02231c54..eb4610e8 100644 --- a/.github/workflows/on_pull_request.yml +++ b/.github/workflows/on_pull_request.yml @@ -20,12 +20,12 @@ jobs: distribution: 'temurin' cache: maven - - name: Build - run: mvn clean compile - - name: Check Style run: mvn checkstyle:check + - name: Build + run: mvn clean compile + - name: Test run: mvn test diff --git a/.github/workflows/on_push_main.yml b/.github/workflows/on_push_main.yml index 6529192e..26c81df1 100644 --- a/.github/workflows/on_push_main.yml +++ b/.github/workflows/on_push_main.yml @@ -25,15 +25,15 @@ jobs: gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} gpg-passphrase: MAVEN_GPG_PASSPHRASE + - name: Check Style + run: mvn checkstyle:check + - name: Build id: build run: | mvn clean compile echo current_version=$(echo $(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)) >> $GITHUB_OUTPUT - - name: Check Style - run: mvn checkstyle:check - - name: Test run: mvn test From 6e8bfed69a85ae253a58cadfb4a969aa2fa818db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20Greffier?= Date: Mon, 18 Sep 2023 12:50:26 +0200 Subject: [PATCH 4/4] Fix lint --- .../converter/JsonToAvroConverter.java | 7 +++++-- .../deduplication/DedupKeyProcessor.java | 3 ++- .../deduplication/DedupKeyValueProcessor.java | 3 ++- .../DedupWithPredicateProcessor.java | 3 ++- .../deduplication/DeduplicationUtils.java | 21 ++++++++++++------- .../DlqDeserializationExceptionHandler.java | 3 ++- .../error/DlqProductionExceptionHandler.java | 3 ++- .../kstreamplify/error/ProcessingResult.java | 21 ++++++++++++------- .../kstreamplify/utils/TopicWithSerde.java | 3 ++- 9 files changed, 45 insertions(+), 22 deletions(-) diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java index d80a66fe..d6fc5bb2 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java @@ -121,7 +121,8 @@ private static void populateGenericRecordFromJson(JsonObject jsonObject, populateFieldWithCorrespondingType(currentValue, currentSchema.getType())); } - } else if (currentValue instanceof JsonArray jsonArray) { // If this is an Array, call method for each one of them + } else if (currentValue instanceof JsonArray jsonArray) { + // If this is an Array, call method for each one of them var arraySchema = message.getSchema().getField(currentKey).schema(); Schema arrayType = arraySchema.getType() != Schema.Type.UNION ? 
arraySchema : @@ -152,7 +153,9 @@ private static void populateGenericRecordFromJson(JsonObject jsonObject, } message.put(currentKey, objArray); } - } else { // Otherwise, put the value in the record after parsing according to its corresponding schema type + } else { + // Otherwise, put the value in the record after parsing according to its + // corresponding schema type if (!jsonObject.get(currentKey).isJsonNull()) { populateFieldInRecordWithCorrespondingType(jsonObject, currentKey, message); diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java index 43ef3909..a3494088 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java @@ -84,7 +84,8 @@ public void process(Record message) { } } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform")); + "Couldn't figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform")); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java index 7ec9e813..cc8a48da 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java @@ -79,7 +79,8 @@ public void process(Record message) { processorContext.forward(ProcessingResult.wrapRecordSuccess(message)); } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform")); + "Couldn't figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform")); } } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java index 47698340..3df508f8 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java @@ -94,7 +94,8 @@ public void process(Record message) { } } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: An unlikely error occurred during deduplication transform")); + "Couldn't figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform")); } } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java index 1d08f202..3924c491 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java +++ 
b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java @@ -38,7 +38,8 @@ private DeduplicationUtils() { * @param initialStream Stream containing the events that should be deduplicated * @param windowDuration Window of time on which we should watch out for duplicates * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other than + * a String as the key is retarded. * You can quote me on this. * @return KStream with a processingResult */ @@ -59,7 +60,8 @@ public static KStream> * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other than + * a String as the key is retarded. * You can quote me on this. * @return Resulting de-duplicated Stream */ @@ -89,7 +91,8 @@ public static KStream> * @param initialStream Stream containing the events that should be deduplicated * @param windowDuration Window of time on which we should watch out for duplicates * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other than + * a String as the key is retarded. * You can quote me on this. * @return KStream with a processingResult */ @@ -111,7 +114,8 @@ public static KStream> * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other + * than a String as the key is retarded. * You can quote me on this. * @return Resulting de-duplicated Stream */ @@ -138,7 +142,8 @@ public static KStream> *
<p>The input stream should have a String key.</p> * <p>⚠ This constructor should not be used if using the deduplicator multiple times in the same topology. * Use {@link - * DeduplicationUtils#deduplicateWithPredicate(StreamsBuilder, KStream, String storeName, String repartitionName, Duration, Function)} + * DeduplicationUtils#deduplicateWithPredicate(StreamsBuilder, KStream, String storeName, + * String repartitionName, Duration, Function)} * in this scenario.</p>
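A hedged sketch of the key deduplicator described above, with type parameters restored (the rendering of this patch drops generics); the method name deduplicateKeys and its exact signature are assumptions about the library's public API, KafkaTestAvro stands in for any generated Avro value class, and catchErrors is the DLQ routing named later in this patch.

    import com.michelin.kstreamplify.avro.KafkaTestAvro;
    import com.michelin.kstreamplify.deduplication.DeduplicationUtils;
    import com.michelin.kstreamplify.error.ProcessingResult;
    import com.michelin.kstreamplify.error.TopologyErrorHandler;
    import java.time.Duration;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.kstream.KStream;

    public class DeduplicationSketch {
        public void build(StreamsBuilder builder, KStream<String, KafkaTestAvro> input) {
            // Drop events whose key was already seen within the window
            KStream<String, ProcessingResult<KafkaTestAvro, KafkaTestAvro>> deduplicated =
                DeduplicationUtils.deduplicateKeys(builder, input, Duration.ofHours(1));
            // Route failed records to the DLQ, keep the successful ones
            KStream<String, KafkaTestAvro> valid = TopologyErrorHandler.catchErrors(deduplicated);
        }
    }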
* * @param streamsBuilder Stream builder instance for topology editing @@ -149,7 +154,8 @@ public static KStream> * A recommended approach is to concatenate all necessary fields in String format * to provide a unique identifier for comparison between records. * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other + * than a String as the key is retarded. * You can quote me on this. * @return Resulting de-duplicated Stream */ @@ -177,7 +183,8 @@ public static KStream> * A recommended approach is to concatenate all necessary fields * in String format to provide a unique identifier for comparison between records. * @param Generic Type of the Stream value. - * Key type is not implemented because using anything other than a String as the key is retarded. + * Key type is not implemented because using anything other than + * a String as the key is retarded. * You can quote me on this. * @return Resulting de-duplicated Stream */ diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java index 7ceffa03..75abc4e8 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java @@ -60,7 +60,8 @@ public DeserializationHandlerResponse handle(ProcessorContext processorContext, } } catch (InterruptedException ie) { log.error( - "Interruption while sending the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}", + "Interruption while sending the deserialization exception {} for key {}, " + + "value {} and topic {} to DLQ topic {}", consumptionException, consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), ie); diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java index b53484f5..7e376adc 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java @@ -53,7 +53,8 @@ public ProductionExceptionHandlerResponse handle(ProducerRecord producerRecord.key(), builder.build())).get(); } catch (InterruptedException ie) { log.error( - "Interruption while sending the production exception {} for key {}, value {} and topic {} to DLQ topic {}", + "Interruption while sending the production exception {} for key {}, value {} " + + "and topic {} to DLQ topic {}", productionException, producerRecord.key(), producerRecord.value(), producerRecord.topic(), KafkaStreamsExecutionContext.getDlqTopicName(), ie); diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java index 607ccbb0..858ccc63 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java @@ -96,7 +96,8 @@ public static Record> wrapRecordSuccess(K * 
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java
index 7ceffa03..75abc4e8 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java
@@ -60,7 +60,8 @@ public DeserializationHandlerResponse handle(ProcessorContext processorContext,
             }
         } catch (InterruptedException ie) {
             log.error(
-                "Interruption while sending the deserialization exception {} for key {}, value {} and topic {} to DLQ topic {}",
+                "Interruption while sending the deserialization exception {} for key {}, "
+                    + "value {} and topic {} to DLQ topic {}",
                 consumptionException,
                 consumerRecord.key(), consumerRecord.value(), consumerRecord.topic(),
                 KafkaStreamsExecutionContext.getDlqTopicName(), ie);
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java
index b53484f5..7e376adc 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java
@@ -53,7 +53,8 @@ public ProductionExceptionHandlerResponse handle(ProducerRecord<byte[], byte[]>
                 producerRecord.key(), builder.build())).get();
         } catch (InterruptedException ie) {
             log.error(
-                "Interruption while sending the production exception {} for key {}, value {} and topic {} to DLQ topic {}",
+                "Interruption while sending the production exception {} for key {}, value {} "
+                    + "and topic {} to DLQ topic {}",
                 productionException,
                 producerRecord.key(), producerRecord.value(), producerRecord.topic(),
                 KafkaStreamsExecutionContext.getDlqTopicName(), ie);
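Neither handler above is invoked directly; both are registered through standard Kafka Streams properties. A minimal sketch, assuming they are not already wired up by the initializer in your setup (the two StreamsConfig constants are standard Kafka Streams API; only the wrapping class is illustrative):

import com.michelin.kstreamplify.error.DlqDeserializationExceptionHandler;
import com.michelin.kstreamplify.error.DlqProductionExceptionHandler;
import java.util.Properties;
import org.apache.kafka.streams.StreamsConfig;

public class DlqHandlerConfigSketch {
    public static Properties dlqProperties() {
        Properties props = new Properties();
        // Records that cannot be deserialized are shipped to the DLQ topic
        // instead of crashing the stream.
        props.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
            DlqDeserializationExceptionHandler.class);
        // Records that cannot be produced are shipped to the DLQ topic as well.
        props.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG,
            DlqProductionExceptionHandler.class);
        return props;
    }
}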

diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java
index 607ccbb0..858ccc63 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java
@@ -96,7 +96,8 @@ public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordSuccess(K
      * <p>Create a failed processing result.</p>
      *
      * <p>
-     * If you are using this in a Processor, refer to {@link ProcessingResult#wrapRecordFailure(Exception, Record)} for easier syntax.
+     * If you are using this in a Processor, refer to
+     * {@link ProcessingResult#wrapRecordFailure(Exception, Record)} for easier syntax.
      * </p>
      *
      * @param e The exception
@@ -114,7 +115,8 @@ public static <V, V2> ProcessingResult<V, V2> fail(Exception e, V2 failedRecordV
      * Create a failed processing result.
      *
      * <p>
-     * If you are using this in a Processor, refer to {@link ProcessingResult#wrapRecordFailure(Exception, Record, String)}
+     * If you are using this in a Processor, refer to
+     * {@link ProcessingResult#wrapRecordFailure(Exception, Record, String)}
      * for easier syntax.
      * </p>
      *
@@ -140,7 +142,8 @@ public static <V, V2> ProcessingResult<V, V2> fail(Exception e, V2 failedRecordV
      * </p>
      *
      * @param e The initial exception
-     * @param failedRecord The resulting failed Record from the processor that needs to be wrapped in a {@link ProcessingResult}
+     * @param failedRecord The resulting failed Record from
+     *                     the processor that needs to be wrapped in a {@link ProcessingResult}
      * @param <K> The type of the record key
      * @param <V> The type of the ProcessingResult successful value
      * @param <V2> The type of the ProcessingResult error value
@@ -162,7 +165,8 @@ public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(Ex
      * </p>
      *
      * @param e The initial exception
-     * @param failedRecord The resulting failed Record from the processor that needs to be wrapped in a {@link ProcessingResult}
+     * @param failedRecord The resulting failed Record from
+     *                     the processor that needs to be wrapped in a {@link ProcessingResult}
      * @param contextMessage The custom context message that will be added in the stack trace
      * @param <K> The type of the record key
      * @param <V> The type of the ProcessingResult successful value
@@ -185,7 +189,8 @@ public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(Ex
      *
      * @param e The initial exception
      * @param key The key to put in the resulting record
-     * @param failedValue The resulting failed value from the processor that needs to be wrapped in a {@link ProcessingResult}
+     * @param failedValue The resulting failed value from
+     *                    the processor that needs to be wrapped in a {@link ProcessingResult}
      * @param timestamp The timestamp to apply on the resulting record
      * @param <K> The type of the record key
      * @param <V> The type of the ProcessingResult successful value
@@ -201,7 +206,8 @@ public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(Ex
     /**
      * <p>
-     * Wraps a key, value and timestamp in a Record with {@link ProcessingResult#fail(Exception, Object, String)} as value.
+     * Wraps a key, value and timestamp in a Record
+     * with {@link ProcessingResult#fail(Exception, Object, String)} as value.
      * </p>
      *
      * <p>
      * The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) for automatic
@@ -210,7 +216,8 @@ public static <K, V, V2> Record<K, ProcessingResult<V, V2>> wrapRecordFailure(Ex
      *
      * @param e The initial exception
      * @param key The key to put in the resulting record
-     * @param failedValue The resulting failed value from the processor that needs to be wrapped in a {@link ProcessingResult}
+     * @param failedValue The resulting failed value from the processor
+     *                    that needs to be wrapped in a {@link ProcessingResult}
      * @param timestamp The timestamp to apply on the resulting record
      * @param contextMessage The custom context message that will be added in the stack trace
      * @param <K> The type of the record key
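To make the wrapRecordSuccess/wrapRecordFailure contract above concrete, here is a processor sketch. It is an illustration, not part of the patch: KafkaPersonInput and KafkaPersonOutput are hypothetical Avro types and transform() is a placeholder for business logic; only the ProcessingResult calls and TopologyErrorHandler#catchErrors come from the API documented here.

import com.michelin.kstreamplify.error.ProcessingResult;
import org.apache.kafka.streams.processor.api.ContextualProcessor;
import org.apache.kafka.streams.processor.api.Record;

public class MapPersonProcessor extends ContextualProcessor<String, KafkaPersonInput,
        String, ProcessingResult<KafkaPersonOutput, KafkaPersonInput>> {

    @Override
    public void process(Record<String, KafkaPersonInput> message) {
        try {
            KafkaPersonOutput output = transform(message.value());
            // Wrap the transformed record as a success.
            context().forward(ProcessingResult.wrapRecordSuccess(message.withValue(output)));
        } catch (Exception e) {
            // Wrap the failed record; catchErrors() downstream routes it to the DLQ.
            context().forward(ProcessingResult.wrapRecordFailure(e, message));
        }
    }

    private KafkaPersonOutput transform(KafkaPersonInput input) {
        // Placeholder for the real mapping logic; may throw on invalid input.
        throw new UnsupportedOperationException("hypothetical mapping");
    }
}

Downstream, passing the resulting stream through TopologyErrorHandler.catchErrors(...) splits it: successful values continue typed as KafkaPersonOutput, while failures are forwarded to the configured DLQ topic.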

diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java
index 736cbd38..3c0bab83 100644
--- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java
@@ -31,7 +31,8 @@ public final class TopicWithSerde<K, V> {
     private final String topicName;
 
     /**
-     * <p>Name of the property key defined under kafka.properties.prefix. Used to prefix the topicName dynamically at runtime.</p>
+     * <p>Name of the property key defined under kafka.properties.prefix.
+     * Used to prefix the topicName dynamically at runtime.</p>
      * <p>For instance, given the following configuration:</p>
      * <pre>{@code
      * kafka: