From 6a6df1976332f55b5292081dc32e147254dace6e Mon Sep 17 00:00:00 2001 From: Greenmask CI Date: Sat, 7 Dec 2024 18:50:01 +0000 Subject: [PATCH] Deployed cbd4b9b to dev with MkDocs 1.6.1 and mike 2.1.3 --- dev/404.html | 27 +- dev/architecture/index.html | 27 +- .../social/release_notes/greenmask_0_2_6.png | Bin 0 -> 32759 bytes .../core_functions/index.html | 27 +- .../faker_function/index.html | 27 +- .../custom_functions/index.html | 27 +- .../advanced_transformers/index.html | 27 +- .../advanced_transformers/json/index.html | 27 +- .../advanced_transformers/template/index.html | 27 +- .../template_record/index.html | 27 +- .../dynamic_parameters/index.html | 27 +- dev/built_in_transformers/index.html | 27 +- .../parameters_templating/index.html | 27 +- .../standard_transformers/cmd/index.html | 27 +- .../standard_transformers/dict/index.html | 27 +- .../standard_transformers/hash/index.html | 27 +- .../standard_transformers/index.html | 27 +- .../standard_transformers/masking/index.html | 27 +- .../noise_date/index.html | 27 +- .../noise_float/index.html | 27 +- .../noise_int/index.html | 27 +- .../noise_numeric/index.html | 27 +- .../random_amount_with_currency/index.html | 27 +- .../random_bool/index.html | 27 +- .../random_cc_number/index.html | 27 +- .../random_cc_type/index.html | 27 +- .../random_century/index.html | 27 +- .../random_choice/index.html | 27 +- .../random_currency/index.html | 27 +- .../random_date/index.html | 27 +- .../random_day_of_month/index.html | 27 +- .../random_day_of_week/index.html | 27 +- .../random_domain_name/index.html | 27 +- .../random_e164_phone_number/index.html | 27 +- .../random_email/index.html | 27 +- .../random_float/index.html | 27 +- .../random_int/index.html | 27 +- .../random_ip/index.html | 27 +- .../random_latitude/index.html | 27 +- .../random_longitude/index.html | 27 +- .../random_mac/index.html | 27 +- .../random_month_name/index.html | 27 +- .../random_numeric/index.html | 27 +- 
.../random_paragraph/index.html | 27 +- .../random_password/index.html | 27 +- .../random_person/index.html | 27 +- .../random_phone_number/index.html | 27 +- .../random_sentence/index.html | 27 +- .../random_string/index.html | 27 +- .../random_timezone/index.html | 27 +- .../random_toll_free_phone_number/index.html | 27 +- .../random_unix_timestamp/index.html | 27 +- .../random_url/index.html | 27 +- .../random_username/index.html | 27 +- .../random_uuid/index.html | 27 +- .../random_word/index.html | 27 +- .../random_year_string/index.html | 27 +- .../real_address/index.html | 27 +- .../regexp_replace/index.html | 27 +- .../standard_transformers/replace/index.html | 27 +- .../standard_transformers/set_null/index.html | 27 +- .../transformation_condition/index.html | 27 +- .../transformation_engines/index.html | 27 +- .../transformation_inheritance/index.html | 27 +- dev/commands/delete/index.html | 27 +- dev/commands/dump/index.html | 27 +- dev/commands/index.html | 27 +- dev/commands/list-dumps/index.html | 27 +- dev/commands/list-transformers/index.html | 27 +- dev/commands/restore/index.html | 27 +- dev/commands/show-dump/index.html | 27 +- dev/commands/show-transformer/index.html | 27 +- dev/commands/validate/index.html | 27 +- dev/configuration/index.html | 27 +- dev/database_subset/index.html | 27 +- dev/index.html | 29 +- dev/installation/index.html | 27 +- dev/overrides/main.html | 2 +- dev/playground/index.html | 27 +- dev/release_notes/greenmask_0_1_0/index.html | 27 +- .../greenmask_0_1_0_beta/index.html | 27 +- dev/release_notes/greenmask_0_1_1/index.html | 27 +- dev/release_notes/greenmask_0_1_10/index.html | 27 +- dev/release_notes/greenmask_0_1_11/index.html | 27 +- dev/release_notes/greenmask_0_1_12/index.html | 27 +- dev/release_notes/greenmask_0_1_13/index.html | 27 +- dev/release_notes/greenmask_0_1_14/index.html | 27 +- dev/release_notes/greenmask_0_1_2/index.html | 27 +- dev/release_notes/greenmask_0_1_3/index.html | 27 +- 
dev/release_notes/greenmask_0_1_4/index.html | 27 +- dev/release_notes/greenmask_0_1_5/index.html | 27 +- dev/release_notes/greenmask_0_1_6/index.html | 27 +- dev/release_notes/greenmask_0_1_7/index.html | 27 +- dev/release_notes/greenmask_0_1_8/index.html | 27 +- dev/release_notes/greenmask_0_1_9/index.html | 27 +- dev/release_notes/greenmask_0_2_0/index.html | 27 +- .../greenmask_0_2_0_b1/index.html | 27 +- .../greenmask_0_2_0_b2/index.html | 27 +- dev/release_notes/greenmask_0_2_1/index.html | 27 +- dev/release_notes/greenmask_0_2_2/index.html | 27 +- dev/release_notes/greenmask_0_2_3/index.html | 27 +- dev/release_notes/greenmask_0_2_4/index.html | 27 +- dev/release_notes/greenmask_0_2_5/index.html | 29 +- dev/release_notes/greenmask_0_2_6/index.html | 3360 +++++++++++++++++ dev/search/search_index.json | 2 +- dev/sitemap.xml | 4 + dev/sitemap.xml.gz | Bin 840 -> 842 bytes 107 files changed, 5893 insertions(+), 206 deletions(-) create mode 100644 dev/assets/images/social/release_notes/greenmask_0_2_6.png create mode 100644 dev/release_notes/greenmask_0_2_6/index.html diff --git a/dev/404.html b/dev/404.html index b0df2f06..0ae275fd 100644 --- a/dev/404.html +++ b/dev/404.html @@ -87,7 +87,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -247,7 +247,7 @@
  • - + @@ -2349,6 +2349,8 @@ + + @@ -2389,6 +2391,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/architecture/index.html b/dev/architecture/index.html index c45a9075..b066b83a 100644 --- a/dev/architecture/index.html +++ b/dev/architecture/index.html @@ -122,7 +122,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -284,7 +284,7 @@
  • - + @@ -2494,6 +2494,8 @@ + + @@ -2534,6 +2536,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/assets/images/social/release_notes/greenmask_0_2_6.png b/dev/assets/images/social/release_notes/greenmask_0_2_6.png new file mode 100644 index 0000000000000000000000000000000000000000..286967a685ddf1c1f962d96d269b54e559fbc9cd GIT binary patch literal 32759 zcmeFZXHb({)HaM=K~NOrNIe!%x`=>)fQo?hPUxugUPCp2N>Pv^hzLmUgb;cQ0R;sF z>4eZiq(gvELrEy_&N;_to_T+NGvANzVM_OI9p#P?7{YWK#ITc)2CZJ{;M2s z=*55hejV_R<5qO)MC!!!P0d5z=;@o))2wa7zr%Q2pQ){)A=)7)&)$4hZvcmxw_nUL zOSp>2`LG*UqT2Y&uIE=04zo-|&+kX82T`gZ;6uSru);_R>cqd#A3C1}7exJeW`EM~ z-)E)wjtl(z%#iiim4BZJMA6Xw`|Oiq@Tq^FsowZ{{GVsRxs1<_{qyW=|NsAX|6f`= zTTh>VV}V~~udAX9cdGJPlALdO9jdqJ&hh`31Ezb9NE2MIQ2YAhQK&1$>B2$Vs{RzO zYZ&J-JNoKw*c16n@o=Yl_dN1^Q?;Z8kVf7lX<)R7qa}JkTOBOPonF$CQ7(9PfZJmyh zZewszXgycQ&v$-h%(})=F`t+`z^yJnc9UQ1fzES{L?Q6j!?O>Z@26z?DKXd`jJ)?> z>A`Pq3!|zRmK^282Zh0A{PtH2*4K<7aTZuaf6mA$bxX(pI30dl{QeV}7gJugHRj}& zg`fU5I|c_(?YW*Y97g8IB5uV8(Hyp?hwFX&_3v~ zlhW+u?F`1`{>J#chNIKjWi)gPS2*AbqQw3faYR3|rUYElU(JL0DjYYmtDJ^p2#l8@ ztKzBIFDk9(aAeF7l)Rp&FE-K5vzWB5OR{RZMw-~E{kT2)a(;emxI@8zrFX)=1~sw% z5+vc^t4!|5md5yEvhNVyZN5{B&O=X-x^%J^9Gcf4j&%eIS#*aVP_c`5U{mALR|MJ&3%A@ zrKv5CjQq%crR3N-W%A3_R}o&P^y^j%X7ptGeBK^b+dAq(*=kD~JojVM#!%=E@%*m5 zZf%kJ`~a>fRr}R}xgJZ`!840WUUkKlh4t@FlXdXd2yBT96*BOGhdnDJC1xx=yfu|i zIId?wJ==t+|CcU&;qbmq`Gj1cp^?HV{c1c8UDG&oYrJU9ziMVs1o7*BVMF<)+zo7Tcepl+Kl|(0mENZs^AXtFjeb@wVk;~68`0L0!gl9c4`qCe9s#qR$JjH zTty$bHgdQYap52Yk|6YnX{j*=4^gN}=&o02Bfqq~kYT@XH=RG462DS`mW5w!mLh-W z-gTl3l&E8x;YWKnr$Z)W{lD5$LI$Jt4-cO81gf0dK@Z^@tsX_e&mY28#SdO|vZk+8 zpum+5`!+d)&!)L|{zj?8ZzK`(Y17DG?!XPP!Z1H;XJ!!AQ<`kMnT~5!o9C19y6=~9!0jqgpuFREopCEcrTy3nvkfL7W zvr;&Q+8*o`cgBbdJoZjeXHki$+<8&`$u?){#{uf2iLuW7I@?C=Dtcaov@YsQjF@U` z3LqW1n(yY}6Sl*u?cr_y7Y6B9qJT^yu)b=$>Pm|2G^>)=){AN8XvyI?$KCE*M z=RETIi3u{9^q(!=3&iWp6&9>-$5k$GxsxK1{zuFPbPb%u5qv(5vKU^)x2cDq9OAa= z{Tq6or6<_SVLgKzYbN83$Ha3-%={NJD-T&m^AMvUVZ6xVCu}`7ggW#X0e#mJYJ+_D z%2)Ob!Qa%ZX;+*)^p!FB&Ibo82x6;BE$GrE6hOeKQ|tZtb_rz}Tnnl6Z+T(@%Ha z&dCHXTOIROhw}C|^(AubQi>M@EeaBBHHQ4hAFgT^eaWKc=`Bj1#I67|ot3jt8b#_) 
z#rZE%*bE3hShe{=Whb`+MV?%T!>$@$u?OWHt03y&mR6NS!jOq($>GzZ(Hi+3(gHk4 z?MvQU2eF@0Nz2HXAd>cq9i=K8^VQW zz4w?-lU@4}eRiq2RH=zD#v`!o+cj1L=Z>12X!986Q<0sWr0ud3WRIpy{c6pX-XE8a z8HDgZbyZKzPLcLTl%qlD+dzpWMyaT!y#r~uT3NB`-^h20VsmBb%MwqT*kJMrlzP z`~c~V+*!*8>&~smMb8Yr_MHaUH{zwR0%HIf!Do11mI zR4k618*unCdX?)^k){nkKYah?U+cDDvt&~}oACDemZQ}J5hFX~gF90_Fd4GL>@tOE z&I_}&4N-H!D)*>=o~wXO*NOT++dFetoHG-;cx5F3pDrx4H6mW2>*HL%Iq9!{BGT*B zqVTlK;AXtbI)#$wKkm3O{yl{hb*#nF>=fpC#bV#Y3AKN=XO&%Uro#E@i*oOAQo)?} zlJ)JkbuYADl;bJ3&YxD>wSxAe{$628FNkGr?#r7h9jSC9r^-(1L#ZI?)j4I-sO>+S zuw8b9|NOH`HfU7oiTxg8hflEe2>PuE$r42(GtkiVnw~A)UtnUM_M!BtN=EsQgJYKd z!6Z)4Tzl`;o$=Xz6W#e;HKRiH*p^I4{;w^z{geJdSdWpVoxyTtGVvLnQ68gE|9U?x z;xL-B^x?AOL2qDai_JjInW!}iYW@~3>u}xqWwk9cvz^kmWE0-ICgyGbsuKRU=X>yW z8+&f-}3X%^>i(ehpzKB>6jC_9jtFQ^Sb*A!yB`XBy7q-Dj1^p;<7Da z)w2no7Mwn~)-LwE8jqA6TNY2Lf6_LHx)3$FbRx|0MU}AyJ@Z?~tdV)ojFL8b#>?zk zEG}zY1}5vIED;MlDBj6*x@QmG(r$2lG!@#EQ_n5a^F^1BIc>Z&;GbU9jvuTVJtGu@ zh3D5GuqH|u+NVhlhAGu#O#3^ zS#3Ek*@^t^-Wl)InewsOk^*#^dSP33ehm`aymf;QUeKEna`V7u@H;Es>Bte!fc9{U z+2~Liloo%~bXBr%-ee_3dN*2Yf$1t@#H{~iwOqAFnfZ8=uDO0=Uyw`lJ(+$uVIiRo zB+7UCs>my!I1~FHnH8PNqIi^x5mT`xLz*B664DbkI2Xkp;!Tc}Q*Y#ha^TqazAl1+ zwxnDpKgUo)_@(a14`|Hc<}FD9Q=@cBa$z+|%x)r*RH9(>i^^#PiJS$BwI zyK1*RwP!58#F^fvT=a#Hgm`sh3p~8;9>tlsrT=krUV)m5PilZ7C@~U@zdDz65s2yk^SbDbvD%K4g;QKn{0ZjDLR_QnS+a zbEt%FzG`(sSTD(jamua8(W{t&e6`CBfl5TfccPDwZx7QV=b@Q7Pa|!{XFbQ{mWK;g zOO7l!k`E+Vy!JiPs=Hu09V?*S8Wg&&1Fj2FL$9i`o^-lgkWy?u+_Xt(>U_WnbM9)Z zuP{OK-Dm+TNUx$cMChS?2)Q7Y)A4!=8~Aivg*^MI%D>h&DBWIQ{o3;?4LN$yp&V{W z57v!_r|HBfRV4n}p*X`FH>-}dY=Ou5s=bB5b-tc1kYoTp?3S%bsqk(5l&aB0UcKQb zN#6CU2UqrFm2XFNY0AHVxMFu zPYuTu?`k1AM?FdS@J)}(>(e3P_&v$=A6!JuB@^ziesY(Qd>5~e~Yvi>wf(x4u8C#+O-CfpW|R%m4#0sv8TCt95-#@Te$H| z$A>!*GZ%a1gy7#1C+b)}>Kp#9aDp|Yf4GPdlvzYw$2iD|+1=ceiCG!3)&DKDc{S=X z3%fD*SAWXFv5{!bf(p4)#Je2boqe$m(8^2lM>~@uJw0~Opdsu_QQC%F_0yZ;g%CP} z5SyPd2hRI?>DtH5l`kL-7iwfL@V!YnlPFwV?hKkT*%}jXDYS|&RKeT*Ltqs&^-yws zj&E$n5GT7HQyxB@nDsY``P=34iSv($$>R 
z3!J$icgWV%HCxv#y=#>BG68Vha`8B#v!h@mASE4+ZgLj<3A&J%)q=8qmEaS9Onh-# z>8$(hNe>tLZ~0yKMuOb^>JCd`+2%=iD>}9rm3gCvnjV@-uu@1Ipp~!FI?)-U6VtZ3 z-OF?P7phrv;f8Wc{7XoOu{XgtO4Xgh)^+c9GAqZciFC%pyvopI0aG)udKBLT>{{3Q z5C7=qJ0AOl62wsz*xR1lQJ|+7b;o9pitjKU*e^5T2GB7*l^@7dnAlUz1=h`#vk)JV zgiQez(P1Bj++%X%BG_`*bKi==)TV>!3&m-PImNB&w2`G8vFhsY{M$Pq*B_IM9`wS( z2@lMXhh;vf`o?XaF5|80E9}ySj;S>>rEY>vB=abh64vc2l&j9nb45p01$m*@BM#mV zIrCwZ&MWt~1->3`)xnCDadV7Wh#XCnoy*(7>LFRW_Wr>MpZ|I_+m_4vDLa~hVY@Kb zy|8%80hhvm>2Wi`6xLHfP#sY1XCa^OXF1R0(CGb9H)97IhY_E;E^<$;2C6&&D%7B~ z&?D!T>r7XfOjj=`P*W!%+1h_SY*Ds}?aOCVYv%9Si5{fUOd0ANM;_{1)cjIV4)&Oe$d`I-cF|EyQVvQENr8UV1zMplHO(eL#1k)DAFaIVwoicg@Jl5$}L<<75g|>rA?0$)kk6GmreT1G+9iyN3P7c z-Tt+>A&nX2L$Wl0lNvk}LDLzNEUUNkUcKNhMlCK7<%|XoSb}q>D;g3%2Bo!2Z>E1w zTcA8B78-K`-89q+z~`-&R)X{!MOO2KlKLR7;oO%#Nr$q}i!_~+9ifrYn4a($Y|ZKD z-V>d2y=Qooy7t``{(`wpmGg1CLx_o)5aG;UAcm*?1rG zHC@98y*C~&U0PV`1oN|GT&_|=I+!_#RpS!l5p4J%`2eA`ojqJ-^pT05SHQMWQ_Rnh zkI^K;^k*x)zwQ|i;X%W3H194Q{P_Cr8FKRQrRbINv4h4qy`YzfZU4SeD(3r2iexkl z1UQe13^bq52h{KMA*)N^q%JE<$<=(9!CO~voyUlexNQe1yrc$z6{r^VNF~gu4l_8w zrIy)L2m&y#*+KBy;ui``(Qh!PcV$QvRYOR^nJ@#8+4r-M6`(OuDd=XQ$#m%mwYjzH zZD3g%Uj=h2)(>Bdt)cM1b>}Uxs9P;j=|gZ=EQ%mjc5|_(3+ix?#h@5>dvv zlWe-SS%Z_|>E1g0b#{w1+a}*eJ{b@+R-tcUgf4E~CPvV9$$4uX6k#^%suw6f;$Ap+ z%EF&x95Sfg?qPtH1>%6t`@WY_O`7ajVyZ*EzwV*w?QIx?cTsLH40dlwtdG#F4+x|7N@rCtP>ED_I2*rS*Z^}%6_@r#xFNMUrB$EmoHvCXod?|l9QM)wl>-V zT@V$8mhq(A6qNGQs}VNJ07H^~(P`JB4b&<SkDWlCNtWy%js zD-a#kKUbv8QpN2RLBA%rIGnxzzEs)}k@khzLfDrI2@%Y?XC2pw>+*q)&kwAZqmsuE zkf@a@`Nx=R*ZF=Q7=fGbIaBpPxsW{ZTU)I99Dq!3u4Nx+KxwxQi}S8ka7iibMH>u% zT46gra`VSYjQN`1Qfkb?SF}X~xne%8ZU4$vDj2nG&s$NPG->IO(4(D~TWKmXoCjbW zn0|Z~%%_lQNsBd&my0%PrhdQIz=?@qeE5vKv`Pc-d%a(k)i`kFdKEa17E_bmfY|L_ zmg7IcpsBKcmHWEBY`D_RQ-Ka%V4=9u)(~_=@hvYav;y zWP6|{2HVUVVI3Hf_!^sYrk_lJP+`0U7{9-9>=FCel(6ZtI;wl((o0Cf(&a=$q(14m zpZdr5GU+E^PM{gi0Ry?M)JHd9)vi*BBX$RLgV5|MDsl>5Vo7&$)H2u3L?RCiSMeUU z2`i5n!so#JqK(2Zw4Z$jy6jOh&djVrOOI`zVZ!0FkyEVCNofP85=rDqv@BP98mE{t 
z$lLv%0X0in6AH|AI4XT|j+bN07yk_mB@u-j^$T;Mn?-5?XSWWGF9(T*G)nt^r1BqS zJrS}ORMh%=!lHI4nQO*yQO(6UM+f2)FsNJ(u_qG<))1worwPa8V?Gz*8&UjxFAZvo zyAMX};%JeO^dqY>?+KD!!YBm&;__A}U@KBd{0_JtlFK}xZ2Z8`)9kY0$W6g7f%$=T zP=xFQcRdA{ny^ZHxW##{)r$8p&<=tjU>$k&p&SuZM{t|6TsB9p%SR%=%-nT+E;Nz|jsb|P#kj7IqWwNr=WjrBz@XPNZe zOpKz+fz_~~x;VW|MPQ8%Uc4gmjs@dshM9M?#JRoC|bZiG4FJPzhtP2f&3 zfH*0VykPg7%t&p@zx2NVGwohPYJi=oNq+j}^xj~*v*y@c49rbN%(RV;A_k)2X>xykrqTEN~+RIij+Yd=?I(Zr?{%DsaKoTt>mbPlv zb>U8~fEWbRAf44iGyYiYcYXrprOSIu$b97(X)w0WzbLvXs%!o(X)!5YzuJgIdQBY( z_OgnEmNMXtY9514`RPRgtuLqP;j0NL0wwcjL2=e`^&_`5{Gt%l=!1Lne3xCujx9I6 zQRo_&LPNJIT)W|sbmtJBNn4Ievc=sGa*@S{?epSb;+ou7m;$;hxI6C(O!j?_MfU&} zDf)B=_^Iq%pA;765 z;y}M{8!6GugwXi>2JIHfI;i`Uy5AAKH8b;IZ;>~s-;2??tAoBc@Vu!=OmE!=T#~U(#xoz-HcNry6f+4XE74lkb=xg*u`D4H#v^A;B4vqz9QGBO zo%Oi7+yl>5({`j;r&PT`JMw^8Clulk7TU%dh2_`xkQ|qw`=Pg@?FL^%_DkMF67R++ zQPY*?K|J-Gt+XqL?f9VmK=Y^`ctp?fv$Jrs8YG~`rsuhw#*MwM3h5vtQ87j*eaI!e zA*C#BqVZ9h@dRK8KEkmld|^dGz1Tz$6qwbfo~6}*8jQ5vR#AC3=539|wh4A^bPNXV zy4z)R_lPivB$tjkv3`uzvnIHB!q@!80@la=VXA7z{AJvex}!$e0$QL!o5BtJ6^ zzo};ZA>K#aXdV3rrP&c-Q0=~U7e4kB13$dla&GfG^wExJV?OPzEgkoxXnOwG^QzCE zD$0O?dUzv$@=MImXNjxU{?zZ#u0SfKcp;73k2uOspA4!8PEPLz86qQ z6TJeX$cdq^AvbYj*)cphURwDDc7P=>`f8R}p>S%{|FHN<+WoB1MWv#_Zo7zss?+%! 
z!j9kf6TMtxWb07@T=W`hI;L&~8#wDx&-DfX>zb&uz=Nr=T{V%^p-gzQSqa~O-x|?eX65lSlj$X2u#^9eX4b%ZsPKwpTPdsKOq5V58EB50SNg5qWl1U zB5V;i9S$*KwY3#bFfCptw`CaIm+8yfe5;}<%^I)8xF{|W~U zd;jX?IOr?t_geg@OcZV)y9ThDeB+Bc=0@)rX6JU*bhnE<_$C`}P)ML2C-;}c>onic zCn9`|6us98dODu=67^zmAvru9OLmsvV3H<;(V z3QNZkU*cYXDM1F*D4&$xLID#&9W;e9vED*n`>stDJ9yUT*n=6Z1J~7DAfW)AtjEYo z#JLjV_mp)&J?5yJ;L5ySY0P4UeeIln%+4mGeH(%5qNImJKr$*9QVE~gavf^5rS@GA z2I3H)qV`Pr_s?yK^|fzG_O>++O*TZ5I|$bPv%Q>DDkw@tL;vy9&l_?S%1gO}Qg8?n zLs(D#rhkf;;74^2Q@%Kq*J4lQ-9pIO#GPRsvrsVnzjZV;BghSK;6<;nN3iXWKR&yJU+!|^r0)*q5l{&7OO zuu`e~!TN`|PIs}-@jD%w9F&SNZj^dvN(?UnC+J}=5R1jUyfG^pv}RSoff9Rh@(NAm zV{v69?!Iaez5?BP{Lpf&8p|YiDiv#+YCkhkWjY^8C$@JWv6Ww@yEPlAuOvjptJD^q zIx&Tu1tNkwKV0ImM6Kc0Y+5PzM~OuC%(!D5d`g$~2VgXf{kl7a!x?XABK3_cpN{g!Y`R!8AReUYy(q)zt*zmlOJA72oG zcDaboqdm*?MVVu4}h(*lZ+`IHA;d{z-aCiA;u{M&~Hc0{KXP5|b@3$Wf+0;aQaW#8z)a;l5=u=o%|m0s+W`fw5FQq=wJB*3 z%4aPtVG)Sn4Wa4nc(0;?AxK_mx8FgyYR#<=8oG@G@`D_{`T4l*SE{zAyUWL}?`O8Z z2d3XRat)qS|K0;uZPn(F zJ^(7y06LTpEUNdShQil?0P`L*a^RXtos#ydv;JD9fLXm~&N#;uw~rt0RJWrJ@q?4j z`dSI!(x{^LrUbuJ=lWMBza71&-lz>%&WwGN(;aNAdL!5{Hz_NAPyQF{8VW*VH%yYG zOJA`GL@HgSG0uEb#{%7KmJlljIc=gUpQmOi2h1;l_DLVW+HPhRp#3mH2kghnNNITl$q02?+0T0;1^l#C7Q#S` znZZ2fA**WAZ7b#@0runKGxJ3G>a4pz8p_PSow*BLjGo-MsUYFxke2_rd_?b#b8ts| z7W3w)vE+v8dCjhh1iu;LU&!(BTYAwK9iOJ>bX-In*BnUoPRycQM?(Fp%CFU(6=Q3r9UVjhmRH&6?i)tWp{ef!`mzu0Cz${@AM zjt=!ikcb3yZSf=t-qE2yBxb~t{yA&o`|S~ew!nmeK}lTW%E!$gYkn z&Ltu~v6_hc)TspI-QMX_E;>v1sY88ey8aVR2n))Cslc<2zyDLb+A%xDVJBm|9KDqv z%XM_j5 z8$?d-6g{I`OQPbMp%2b6cEGH%9*9i7y6Oj|Pq`KRoZa}3GFJ=L@4+0ioVTAs{^^}t ztse?u{w6g0;GX$xp}UR@!RnaJpW9OzE-8LE{8t6Bga4x(ELMeF8ILG76-Po6UARih z%?s%*oG%uf_)q4GP`2!<-}IlQ_gLsR%hqFL%=!n+HRjK_d|xZNUaEVk&qKSFi@F~) zgPMM!9b>Jx=Rcg|(|m_uU%`B~E;?=$%PIIrGz{L&R(6&%VcXH-s=3HxTJ?~BHN?RN z--nnob%G&m#qYZ0ABL$6{-+}z`hNcL%dr1jX#CJ|Qs3ISXy`So$?p08xll*rIwW%{ zE$g3`>si-`z2vHom-@ zmQ~?a{Cx4tr?cMto&UI+6Y?vC&5EDjb?vOcZ`%I^)LNku?U}#d*fMRl)^{Y(Ea_h6 zZ!i1}yBYz5M(GjHwJ-3RNa~)|e)r$y3^nr?Uca+RYMWJF;8U%4+OUe*+DCFm 
zD$u;_$ecUi9RMTNj_UAhStEaTIQiba`x}IcwVyo5&ce6M0nBAqkS| zEe}(nxE+=Cov^{F`can0cl7?;ioV&;C=j#HA%kben5qfEOnQjhsLpHiBdQUUd0NSe z%~_~a$%qn7b3RQJk9?<6FjWso^N{}qyg=(AJb%n7&M6|PMmv)&=1};%ht$k{A0zi8 zLm_OEsh?SNQ5P$#tRFgol5z3DlmGs6ssFDxpO(c7vmT2ek^1e`UcC37M$_#@9L>|# zb%Gaotdd)0Bm0_Hi-j)2uU&dJDf(YPW-yo4vHe9hw4Q7N;EUVE=HlZ^e*!Q9 z{T)}zp{I8;k0wCg`+3Dr5Nqao+ar8$YbXBfOvEsqY6^-jfsfBh>PNZqcJrkXVU|wS z;8TBI;6Lo-F~hWLp-{^04urh{%F)d_KQ)3+{TDvQ^hg}f{9b3dm=z(F+2PrU6Rp$- zLGAAx>DT^^#*C`cvDtRsSblzp$7f2r>!!c8Bk>}9Jp18<8wcgqOE8$1HYbgPNS)Ndv;(AIh#fo3TD7^DIWbKt``FvvK z_(g_e|Fiv5(BavRUkOW3s@Ls9#8YNS28Z{QyshGAwr@KvtzBDi>i9(MX{0R*HX{Z!a`DPCLNTNE6r>oE;oS}VA$oAxw zB0Hm=7ykr;mIO*Yy5VssLVDcexIn&q$<`*Q5<2F$vXNT)bbeR<^)uXP&h+J zlxZ7QNAx&66O%+HysG@4gEkM9WNw_xXg1*b_*y{6vg3K84DzYSk&^Bb_o+ zHNu2`R`Ghr`d-jm>zbbiMD1TojR|^PA_86c zZy`r($h0A z5jipnTW_dN>(MD}8yL?&PV*2x2t>uxx>9yebRvKXyHHi{{nX>fOvLj#Kr;tsmb5q* zATnOqoiznhje;tp{3;=S`8{I!95JxMePo4otQs9wsce9+6(?{cW`x9QDZsW572{MM z%<_RjpwsiryOA5vJ3Cp_Q6$vpDsVB>B=uy>F0gXIC!KYub9#Fbca^BOdLD)$FW$&A z5xn?@CqJt91MMiZt=}gZg#v59dc2s%b95znyd)k>g>xm%`kC0+b(U5*7{Y-FTH(wo zexMuH(MiCwBqll@jGRZO2O7F^jv$w(vJ{tP010IQ#t)o?5lz4e4+MvZS51ibGv@$- zNnA^_*zNToWFH_21!bUWNRClBP&$2VTXJ4@k&s4psGUdVd$nuPwA zy_Ik%&pb)%(Ax;a0n}*>^|BUnO%GmnvZ4d+T!Z}`1OY4bkC2W1jw#M^>Uen%=yk~C z;7RJkIMq+3X}H{g|E|VKE4(0vD%sV+f2&70CO3a6nIi%Y;gftY<{@~G#&OqzMmb-- z>>&BM$}uQ#jsTIkrr-njCl)4~W&OhQVg!+6Gc}pJh?+a5k7LyY4;Elu|UU+wrc+ z_5p+oEYQ}@x~^C*Y<`XiI#I#n@QcY?cB{vLD7ViB2zSmxrASzJiA6-AClldjRgOYsKZ$H6dZtecWRsyWX->`HZ$PR7djPk_2a(pnEN|2gn;in~ z>aT~v*3H6-@3Fwj!9Q*tMLtO#8*+=T8d#lJ@&i;s^1jW?6RLqFo=akQve!h{bzr!9 z1>8J6RU9Yg$=RDT7~M4#{w!RcE)O?xeYL6nW@so7Dx$W=L+l6Zj?;Yo`?$cHKv9pZ zzz1GVo<1#YAs-q1gUG2KBYsbh(bNL#iHw+)R|SXM;Nl>vX1t{OIb+w*nQSol6igO@ zpr=`p6Z)`IlI+HAo6ahBSZ(SCLtLji7RYX1+eg0K5EXsLEq6L}+ zB=JX|eLX#Wz+}S)1CmIO`j??`oEA7~Kp$T5Gk-FP)J$JvXB7vfeOGgi9&6Et8yv@- zeG7@Kg+Bu?PDm8?`>R`rkG&C6_`_OT9z=FFFU7i*R zEC*Upoj;1H#D!7a9k11aaon0njfrs7Qx$un%e{7B+(CSSrH_VGr}bHZtZ{}f2rEBU 
zdKMixEimHzio#IUyJFCYQi}u0BkNiCjc1fVpb~|}xj$x6jW|)Q33#;EKx^&gIM10k zHjnD86yL18OS=gztkeVIf5Ih>Y(P`z;m_=Q&)h_NF2_c6;PNP#&p_Z23YBIB8Mf#VCD5VWuFL#E(<6$V5HLnWgioMSk*5D13D zQo5g!AVu@t=GWEss}oP}IlmwR4g*Mg{<}LfSLqLj1c2FuI&aO*kG5|M^#nQHaCV2W z<3CfstyZ?<&tC#UOp`dyyx}4vCP}=4f#CHL)Fdr=W_(TG-08^nju6lNUQp$ z=EO}3&`|i7#ex39bd5k=khR7oi4L1V1AEYg{s;=AZSY0*dr$vDKd=6l_NzF*`(@m{;B2J9YcRjxw7k~C4b=lvrzMBLYcN*`tGwowT+ zZ=3Xi52}*D<8-1Iq(NhbN>QeGlBS!CrCxv_G^qhS=fRl++?@3+tZu*De1 z`xyrfmuY9m>5>2iCw5mC?{Z#}KsmmS460vHaixd+>I|H*~a)oao zI$yy62y)1yF;%~ZAGNf_S@~}nc>3Kzq;uXeqWt1t1Wv8OO?VFhP;bKqf{n$(jMgh$|vH=HBfcYaU$?7 z4vl1yLm?0WutYs8IGbHVPr=&u4l`WT&f;U`51i)fnShE98w*??;O6sUL!J@`BeoXj zWYruhr|V3l>($*$6gy1CZY|u3{HU*Y@psxWn%;88XP7DegJ1N8a-e3`2nA9cK5U1P z=jvxioulCg%s}JRoCm?o}%}bce@J|k#sp+Eotv(uwoaC)vKZ8lR zjgM0`x|RpcsCDS~cPrri0^ChMpg>ajg$AoPZAnxbG)Vl$-)*Xq@37!_03wP?V{wOu z^7Gs`6nT^|aGkajSFdMKbUAU;Ygs1^obSQ<;`1w=KN;HBPvlm8QBntnoQ1sYRIxng z0Mh#?Nch{^3a3x;4o~X2*O>r@Mk*;tnQAoy|3%dTC3dQ$pvD5J?im$|1}nDu4G=lN zfuRR@LIHrHvTU8!xl)0=ngO2WB<0p~0NR+I;*oa-cm@bWlSeZn$!#^-z@We#62{lh zED+w62cTXz3kwh(rg>YX4e=m?~2&HGDN*%TLBPPgT%l@^x_?7 zzNx7Y8X+^=Att+u@;Q=F;qHps77w%zPfWE>wP)epWRohwy!F=_W zmnEe;Y*bxk#c*{@YQ?**X$vM#Ne5gj%!{y#2^w=2hZU|qqiRQ*_$7eHfluhB9?-W` z3g1FKI%a@+`&1@&NE6g2MBsXrCh0uzQh|D-@u4sI^Qd;rHs{~UR4eah^fFd%+Zm$x zXp^a-hTx~R)T^LsGhwzmmgxV0cCT&qW%bxi;J&R%Tb9TfawXp|eH7t#su1?%BM5vX zxJzx&Ce%c8_PSOh06!jSqahzxH^2S6#(Xf^34b98uzZW}0H0R}n`c!z@udDW(;}@7 z*n!sEN^ByT!QfrKsW1_ri+9HVLxy9*#WN@Uv+&P=yX3cVQ&$YoOX_Krf{uXDx}8cg zi_!yyQNXauSH8bkhLb`8Mu7AUEG;V~5*6~mk50g`ts zy(OS(W=5o?Dz5nVd`nx3d6zJ-(q_*!0m;VPkxkcMmM?Dh#p$-Odi??~KQFv>7f^N9 z#|1>`ehA2=PW)z^V^bgU0!wBj`<3c1qxHCdzG}p8^2?ln(L>);G!Hp|bqMh8^}>jl z!s@2+a`CU|z4SzeWpVarq<5-%Kp!R~)_?nxBV7|uNQnK!;0Q@b$Al;*@i-G0tQd8E z?_jq0)V=|r{&WeBLhTH>0^3YaO|gABbHkK;kr1z3rMk%htohym)DSF-%!un-YS)4{ zMVa=j;*F-AQ@v4*qd}hmWd3s9>``7c3+OZ;lnVpd=+lJo6j?DdRirF+hcAS`?;AW_ zJnWAEI|3_#1W_MAS>6k3xr4gRw0fa3&?gBisr8<)KbR?(cxeLzse5O8A)6|Q2Oh@* zhb3cbV-b0-oVYmA=H;tDw{F)xwWrQ$uH66hO?F;IWMt)1^WJHWfnRz^Mjj;}iLgfZ 
zcVS~Euj@6M8UPP*(&Unfyahj-2>^tXQnrAW>N;338@S;G;Ak3El@4&AO2PJImF$pJ z+7L04K0wqo<$?yg_yYtG6x+wXb2VJz=ecdDV~s{=&X3#Lh30P1TF$?S-vVYxujN;_ zs6EjMvZ8qVWG^_!gWo8P1%RXGDm@?%7*vfM_4*%cOu=M3!Fla?(DO*H0J*aD zr$CVxFTX;*eVHgrnP{-rh-cbR-(5B}PkW#XJXs>YCwnALpT~m0Dk!IB|LXC*Iz=O) zL*Ozet+LsibPA%!`^ zM)gf0d}OoE43NdhWxuFm#hrY7=Ed+j#{p3KF^=J^JJ3=ddy)09F?QOAcE8vCp}^3S zDK293!yUp@d>WCBwmW`CG4mrI!XX^+AEL zmgk+F7x7Fghtu8aQHS4fZ-LHm=5GO8(mS~ z*x$u1ux$ZVv6P8biQ*{LT7WEyVs}&lW~oZMRx?(SBWvA(cfgPhI+AFf3&;}En07q) zq6RhKj6;l&-2?&T0g?8(DEy|rMtaapyuvYJf=HF1ht}n6i_@5yFYo-{IJY1ep8h^wx=j6#Bs3 z#wl=xHo{5xn>QXfTH4GE&z65>>JqPbsxnskHkciiTVLgQp~YD|ReIp8%sKXKY{JgK z1&GmB4j2@IdZs_mxmQn}e^ABLF2rGda1=pX`=QeX0^mSxtQIL+wU>Gm;EOvd54Xd; z_CewXIawVYbPDj(*S;3znY>@OKO0whgW?n%&`-vQ9bWPbCp~d*x+S6pKP15 zs=kC{9>fmyxb@k<@9H+?Uy}uH{>ckc+Mt=k=9nbE9DSw^M3IiQ=Fh1v5HRYvL2CcZ zPr@8GGwCV-s?R>4;u6DbzfdmA3%ai<$^Gi;&%ul5GFy?SUh$nZpAnZubS-Sv4c!Fe z5g1QiQj^cV+l?)rJ3v)Mk3DAQPxzeEp>w|HeUozd>QP+Ug7o)6&umMpN{|g3qUF{? zsK2?3wFloL6-5JQt$5VZM}z6FO&hGHodil^JB`Fjo@MneU}_gzw9)<1Ytrdws9i8n zC~5XSC^DvlELuT7;e+RTpi(&bAuq-ch?+Y!@|U7oC|l%Aw6Vv==SBPXkp1!B1Z66wz7P^dWGRY| zYDi-@mMF`N8OlBkty-nUmL;U@j4;+v2-#&8`_f{YY$2lIcfb05uj`yY&Y$O8=eo|R z>*}9Wv%KH$*XzEY_w)I9sj%hP5Tkh}No9alJ@ki|T{>w;4`^)5M<#S}Bxqhif#^R% zqbBYXydKH#%!n$jTnH5N*dUVOpWFX;m96IX4-IX7Jegvh=HxNE0OMiUmBiwD4E!eo z&)QC?DDAk#{j9A$_w8mHSaHcXzC~W1oD$Bwgt1V`sTix*@@2eyw`gX{*qsm7t(~)o zYf6P2V^@GCZup+y*Zl7AtzyX+NaxQAgdsaPk{fPc`^1K)f7PJf#HsOOUmH*o;O~h_ z{F7<(dU|YAq%%b@-B?xVD^Mg>b+nhD{NBX<;c#pG9(aB`2L7JA@0OI`v7q~twRFWI ztsu7LW7@9<=^sVte*pPYjJ5C8390K&J_hYF0`m+`0T6ot4wJnw!nn-6!m)&kXliK* zTB61ARQ+VrAc@G2HsBa^Zh2o~<~08IwpSptifh)OWYV-@jYb9zaB}^y$74T{2aHJs&UXJbnb6 zUmiZZZ=L?iNr(N~G2Pnx?uv*?ss$GJSCE=(=IE&CeFI4*PFyoL}ND#Jo3> z#dhzQ;(~bmX*uo++v9x#EFq47GMeq_BLQk5OF8?8D9{2>i~#CI1<{%ZAd+Iz+U~de zzp>$Z^m}vt*qg!n3ivZHc>lIsU;mJhQZn1%x7e0!V(I=Zq5ttNBs-twr2M`yms^U$ z7W;jWu%m1}v&D+VDn8Pi?C~u6`?5I7%As^@Rc%qJ|0IRaJnvY_HsLeF5}h>vh%&Qb zYM6z2!n6B*AFsO9vD5nKw%tsWSC3t8GCLK`j|lYao4uwDRuU7U`rN=m*~ywp~M16Wg(4)l;vf 
z-`G(7=JcYc2d(IVQ5vz4Ndgf;38h*T_SEcS_>!WHS~hHl8@f>q6pr0P6)$O7KZ{te z#(XEdM!vQs=reY@gIeyM4WH5XW2sAI{14~Y#)m;s-yZKi@h``*{xAFG9N`Rj+*S<|I)KHG#FBl(CJNNgWhlV;B9(M<6oKQL#IXWSSGDVcBW+0 zG*aMZG0!++$W%L(RmquK%Z2hqlB3u%?U6Ca z){QOamnU~g)|Dl6M;vmVO{;aLLLR;;My+;=aYBFb!w@qV*hVFsk5rZhXZ+PfKfr!ydboZP4a`^LH{8)GVFm|)BP2l3qH%BzFBPc&n zdD8t;c4}*ilfl@JYz-lLhFep`vRk=bk)b2pGF|acL7E}0rvrJ#WSHRlPS`=-=kCs- zv7O1o!%U)aH(f3~B1$b{1DevrhaN5B)c;rJ!*5!gtSx|X-Zsi#XR z9e(pf+N?mR~`0{&aZ`5k^{=GV}H@T@kfbA-yXr?-)O!$ zV!OQ@o@ZEw$L!rb&2^H!CW@m-@KeQIFJ8yVNE>V2Sk1Q!?`6o9bC296ZK+v?Qvu3J z*rc2yBT}Msm@z&#yq@mj3T^1@MrD3wOS+F`HSh_?mJ7 zS0^V2ON{cDsxH1wGKsE&^!dFO8yKt7m{cnDESK^*SNAmkz62*Th3PTh$(1uP!O8 z!JOJ~Qn!E4JndCHT%_nTfzMvI$vv4)yMTS)NbnT8Fj4NIq-_{&L)B~~X-2yERv_1X zFG+;pnOpo~*mSD$rPcZ^ITfE^Ofl!pQ|W~df{98iN2QNIFkXUMWdtf7u|%_wq(oj8 z(5#rB=YmFBN-=CTtP1Q)w&umZ$t3T;4iRX5ti7^3u8H&TG~MD^7twkQ^KMGz-qIDh z)K*eJzOI+;^6EtEZ-MMSYZesZq)qU zc9qX)x!fma=+JW-e)SwUWWDftTPjF|IgWee8Pqcs0gu4+LQ~?prn<&^*H2_!{71)Q z%*9?b-g?VwTy0`UwL>39y>hZH57sniNv!#Gnh}hW2);1|V_W)lVPeZrJz`0!tVC6$ zd&-%e!cI?ZRm>nRT^{QSik%WO(D(ek;Qg;gI@jXp^82lbc}Ma=i-zR2%Q>!^4AQg9 z^9e36u#`Oq6@hG=SK*8GjF{3-um_W^a*ywob-m=2$IgAfKXkMqL-UBq^$VS&p4Y5kKuU&H<(l1zgk3UeOHexj{` z_l?lD*yrYS24h4R-(Pa_{m5sFdFgl6g3nPNgFZrvQ+h2(JcCAMyW;~o5*8EDY##x> zgdf+KYCM1RF_BjfZeG6)b|wzVWGr069k-KCZCNCdN!pwW%5zbM(xn7?tU&bbtXtU8 zJWxRk^`xjCZ~qd?9eBaRM8ke0WP<9#=UD-`$Y^G9DL}I1^wA&!&`vK^Lcgh)_-7Pz zy%~FYi)Z72$7uNofL>@Y4tmH|GD?a8+>afc$8NFbg*t%b+ZE&mh7>MwMVL#dUZXP( z+Ol-dSHXWr!Fd!drksa57Vv%T2|>((?t+H;~uv zys)^y@@I4oq-U4~P{(U2^9zY@z3V^8h4VW;4viGvD(~%9*vaVrQVG*AC0LanmfYQX z@TFfCY%~38pMrK_84b<`%2@$y%jVUocY7V%-2ia|VbD|`e8bY;zz<YIl!jYiji@U0|vxta9}j$QeN`LuFC6S z(fcU$h?(OvIg|}PDZSZ0+sW!j8ZWhKejX}qanCm~K@_M!E7A##6_htzUDSO{6?AIY3kzBra-fB=>BqPY3 zMuW?ERZHIRc_R3B;MI5sAzBHU{xOJC&4S0m#rO*0>GpY0y~d>abycWSwsG=gbD`K8 zZ>vb9UI1hQwy>hO^nA*TE1zZ9s;fQUVHCaIbetS;+zY)&U)fjT5SQ*|<0YWjCAwu< zZ5Xgo4z=IZqM?}Rgf)md5FjPdldF4wHIF+Vuc5^!9(BfyWD%*gc*n$YuyzGY%dpIZ>-%&-r;M#0J2KV8NLb 
z+4idK!;t19afKa;ArZ}Qte*DyTSuN{S-VOjJey2QtgHLG(V7S=OV=XoO_>8bYB727 z>-N?x5k(=D?gkDQ75b!WIv1QnV`t*!*1}h8nAp8A#vE*Z%WP;=G+KP|DrB<&&w!xK zyYvsTvTem+>AJNogR3B|YJy2Sab}loOJl$dms7CTw5n&82ws3F8-%?-7plodmxA8PgQRa`qw#0&tHfk(Mw4u?Pg)Nhef?ayt87d zvTo(>xuhdzjK!b@DCW-Ok zywe&NRPKN{5bpHCdIuxl@)A*1WWW-@c3q6yvtJ=X2eIDd%3DLZoJv0wn54pUD;6G` z?jlk0d-VCF12Z7Awy!fQ(ov!>L*!%V8qbPr7~llM&_ZuTBbNqmKIzb=mekAb&QOw`<1E+Mg%aE?U90V2T1AhfRWyTSuodjou3h#m?qhoqE79Z1e(*`VCydb7S zUQltnoqqV7_Fp-Ts@8VDga#mo`cVWED7Mh7bR50qd!W9G%zcA_G|)f^%};(xyIshr zvNi%x6_7?*moAOe#+V)iaVZCL5Wp8U4o|vWvjF5MT|SL4YxSKfZ3I*>);o%HP!j1Z%x~=9Q!J zDetj~ODMO!JYbaEMLz1Uo42_vYoj42);`|K@&!b4h3NYC?}~KXX>K;ExzQ2(WN6!* z5YKU)#{~-r4+YrPt-qm}{-yISHznzT{_>9EWJT#>>$mr&_)q`740qEcU@snhD`)Nc z?o>J_iSfA&LVKJ+(RJb9zuO`O+~newOT{s2o@?DF1Gug;VnB?w)c;t>k~IUK(;&5R z;-k*|kDMVQ9wI`6E`DIG6vKOE+Xjet3hirI!rmk`jRMYf7GQx`dhcVZL1DmAL%ytl z`mTrHJc7xSx!uY;b{^bOD@nHBurqJnX0Sc<{FOtP0v#{8i|e^?^j1#E{R>bv)sx)w zbz!Ikcsk|tp^YxG!@T%5mGY25%o*%Mdr}$#OM`$ySm>6Q$*A*{O|G%|IR$?(Hf+Vh zbLW9#LUkCwf?J|E_FzwAit6-noxVRFBzEfw@MQ#DpB|p4L)__NQ4{|SuB(Aa@?5*( z=agtdsn+my^5Q^6vgrq+3(GMM(=&v5dTWZt)ZA4+^$$ys770G_R{O{KjDg>DunP`C zBGVHaMUxP`Zj!X{@-c;_7VcY5{~C}Cl+gaINAXZ7I6O-`{oiX)k>CZVOo{ zX&7n{>X+O`wC_$o4wP*}@pj z^3>qolPffwZ?fVvNI%10We&mav|d1q`g!AAc%aT)ignEDu1JLMesH~EakyL}xVs=? 
zhG);gdONO+enhlf*k*B>o6vTn$!_qVa-fZkT0U3&y-^<|O9kVXs+xNkU)l&}W78x1 zV*)^@DW4tPI9@uxF8F?1e^>GC2~L~YimEI5|tiXK3%h> zbzW*%Y|NX8w)fi`Z?3uKzH8-t7Oxp;S#3hM0lT*ctj7@)C*$+Z3n#hVbj zVsQuej=Bvo(p?I?#ArJQ8)9(xb6IJ|m-+tI55LZYv?>z)nbn{ELexjk&b#Pd^xl_E zmx?{=I_CR(kv5e@TMU>>qY2^@S6q-W@}ic<83Qx)`H~VH!tCAYNfBQF0xzpD-DC&S zyGTk4Godta*gU5eu`tl0{8h|wwb_+sX90xT!TpQMTYAWyV18LtCX2v14` z-2MFYyC|9s?lDXLv!qsqO*&QUk%8@JME=T5b`vzlEca4q61g6qWJ6k$g8eunyj7&< z+k&1B?yv(s2sk`*1|6ET9Nz?)?YVu60}ws}*Z>hsjTEzVXR|?k#^idjZ8?W814dLc zR)Ia0s-Bix&=oe{e|Y;YOn6QC3P5hpi1Jr`0Kty(p<1a47`9Bdzke#A!x zS1`B%Ent4!4kXruS%G*4PDfW|L_-@?JE{_gd@3di6Ofi^jMG}6{rGC0vfL|*^;&@^ z<03(Apcep3Xu4eVe0N_m5WV@@$tucffa_PW3S6^;hlifXZz=(>ua8pRu1FJ5E(-+* zCz@QrfGW06t;80}ItGCFDt3zlOjYfaZU*cPA%-qAK#;;vIGNX}H8&H;Fko2XfOAHu zQ0Vf}Ihuaq$QkPGk2o1Ab$c62G1e+S67d^!Lq(+Rs16R;8B`XUxt)O z){3%v^&kTK^4qd?s&Kgyl03O4f<2ROEOA2+qU*!vb>O%nhjIE+E_Hau+^@i7Fw#DbP)=gYd+ zhqgX`32uQ`&6av#=Rqc9WM^9^vqfA}cQ|z`>IwJW+$yf&`@a6cQ}Fr@^K&J{6+jE3 z2()7_uUJptmnC|cLFrS~gk}+dJe(c(lF3*9e6xR8AE?;B>HaLIM`;PNU=A9dA6IJI z$iu^d&gizSn^g*onaS?d7a%>EB;SscmopBucn16@SVe$1ad4+nX6IO<`FjgJ)IHjqU8V%E z-l}VcUW3_PDaW_~u+>oZ%}?C0uJ?rlos-?dImXdygWA8&x>hjJ`i1>0+$B-zROefK zT8NQKo{Uaz=C3#m!-6)g{k@q$#~{Z(d^Xh}OH?j}k!(u_a%^#HEYQk@SW1A9Gc|B| z4dj9hoN+$+-|>smMNg!OuelDi>Gw)7qj5?-H}6m;Kmh%%%sX|N?+31Ocm5}qfA~h5 ziVe=oQ2JC&2|f~fJyTf>;~8{i5H5;Knm463RnL|q99vtaKN{08pQU&vAGZB@=@KGI zf$vAGx4`;_?j5cpN6lKASUy@0r|Ll3K_kph9WgCob#p#`pH;9{$SkI{M*cc+NW6at z{Bl-Wefg&iL!%i9TsUGP`q9S|o6f7Pd1+hq^gOyiO*X&0%Yp}B*Lgcf0&H--Pvk*R z)Qd=Pk(Lira?x)91E5Cjrc51!5!uBsq3s8KEU35aRKHS=Sfa7&tNHih$(+KZePcIf zcXtKI;Zg+UIOko`@WQ1KH(|(NPM|skG8oCtL@3q0ZC#g5M#0%l`PII6(V3+Y+>MG3 z$c-p-6SZ0qM=erhg0;JhKZ+nFzTMt;jhS^|!KIKirfSoJ<>k?w%fXWOX(C(6M~hq! 
zMt@q)n9z$cbuoxk^1Acj2vc*fdL=`!gWehMaUw{`q6>V8GO*xiT-v;Or1{n4LY%4H zCBT|+=DSstd{oviaeoM*13ZwQAJASpi4L$UDGA9uEITCa!}&?A(R*kN3?cyPZJe4q zeZH*Re4%qlIXHQ>4FdXW_WLKGr(QPBuMpES{m8V%?>O+oQVV$x z&Nt7JWsi;?f%%FDrD=W2FQY5xRWcbCH)PIXB0%t6jJ#Ws2#~=s!i&vT>=AQdv9p(c z2ubj|n83nnVg@plMnfO&*{MGH5|}Om&)DnXE;QZW@%D%vxNn=xXCu-~ilcROPn5$c z2PFpiZ@*@iJsAR6Pxsl(hfH(0hDtu8rNM-<{NK+PObEhoda@Zqj5Nt4Js`S7EN%P* zI|luG;|K8ni}stBzG*JGv|J4$uiQiyMm){`X!{-#t11ZiRiA!N%f2v$z;NyTg6LTlC{$_?T#}BlDHKW+g1^kK*BRkzDn>_qgO^_$fu#P9WhU{ z;4g1~WKwB~zVP26AUIkF1}J&=1eFNyWP;7=JUhpOP47}?>H>Qgmc^5a_R7^k!jsJE zc^k9m@AalN%L!@s8bSnI6*kA5K48MCTCC~%iuLZ5^(^(`yeJgMGXnK~Sm1Cz9;ffo ztF+r$G51X-Ds0`XQ-Ddu`Ru@QSbqxIk9oK2Xk;mdpL??WhM2dnVkP*axSu6=$m5yV zcUA>`WW(pVG5OLEftsK``YRLa-AfVS6BST469&Lpr8Z-HeBP=Tre^8=@Pcrr ziP+pxkV3aW`mQ5_YKmCsI{k}G_90vQsu76{8PaimR_Uq@S!Ss2RpGgrXu511@-q`%eM*DEGk-W1h6e0zx};c}+ydIDL3_ z=rK3M{btP!nC#9PhOf+Yh=+?wZX1wPZVVTh*XqyJjXZnSB&!MFyBehr*40j z<9Ds7=r3qmFEgSSvWO8D0#JYE?FGx(| zC9G_!+x!bdEfGdk^~{hAfZ_8>Uyec_iX7hAJcq=CVD=3b0s^coFkCt5FTQiu`5=tK z5Wya4C}@;W*1P0vIy`YtWUVq?3n1%R!+Ep)yO(Ci-f3V>6DpwC&Nc*?D|>{G<_ygj z%B)29l;}@3O)g^;^U6d)*<<QxtLB(wM3o)2KnAxqCFm&aF~!Ls zu{$fCWRs*d*@Y~aqoC2EGgH#2CNQv_ZPZ7fXGo&A|9}i0$cJ7Hm3|bL@9UdJpO|t; z{ThM)##>EaxKAxt=FjL2vN18(9XL~`+4QZ)8e)QMS&35Vy0`R+dmvM9jysyp7MWze z*rXRjeUF8Cz;!qrPnqTyazOfkIf}Pxm4__u&o^p`@`ahu0ks1sjZT~p^x^wpJ!k~{ zYuK;r(ILeojQH7W`DZQ$*(h*j=^(a z9e}f%Br3vEdZ9u$89FGddD-}PGS)i;Uc>o3K@X{{k_yvfAfQAI*g+x;(itDdr&D2& zx(+aCR>lbcji!SpgNbiIBTLI_r4z=dg+duO8Ny=V4h9t6*mlt@5Mys0L#KS9f2dt( z1;c1B$dL$twPt{`0!5{^U>W8|i1q|MjgQ2u5D-ZDl2fn!6L6E3FV%HFOdU4m=x!L%bk%gWec$^MM_9=G7}mbFd&!06ShRY7<~J*kHPfC;Qlc zBG(YnB>XP|$^YZ0|4G2}{5h!b=mZWO5S;TOc3|9?7y^3R9;zkJT-pVj!!8M*(x z^87gv_&>m<|6R0}KmXmIo$S9Ktv{!3|A#Tf^FNb`{%k$}Pj5X7E8`un(f$8+M-a#5 z+kpo>JS^Z5!!P3R!^N*EUqeX3x6!|8cy`#|9YD^$--s>qP3Gt?X0H9cg8ZNRXPX^~ W*?ZEp`v=jh>7Oz - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2589,6 +2589,8 @@ + + @@ -2629,6 +2631,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/custom_functions/faker_function/index.html b/dev/built_in_transformers/advanced_transformers/custom_functions/faker_function/index.html index 7c01838d..9801965c 100644 --- a/dev/built_in_transformers/advanced_transformers/custom_functions/faker_function/index.html +++ b/dev/built_in_transformers/advanced_transformers/custom_functions/faker_function/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2511,6 +2511,8 @@ + + @@ -2551,6 +2553,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/custom_functions/index.html b/dev/built_in_transformers/advanced_transformers/custom_functions/index.html index d5e0d6fb..980b02eb 100644 --- a/dev/built_in_transformers/advanced_transformers/custom_functions/index.html +++ b/dev/built_in_transformers/advanced_transformers/custom_functions/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2399,6 +2399,8 @@ + + @@ -2439,6 +2441,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/index.html b/dev/built_in_transformers/advanced_transformers/index.html index b9e59123..019631e5 100644 --- a/dev/built_in_transformers/advanced_transformers/index.html +++ b/dev/built_in_transformers/advanced_transformers/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2397,6 +2397,8 @@ + + @@ -2437,6 +2439,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/json/index.html b/dev/built_in_transformers/advanced_transformers/json/index.html index e87432da..738bb923 100644 --- a/dev/built_in_transformers/advanced_transformers/json/index.html +++ b/dev/built_in_transformers/advanced_transformers/json/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2484,6 +2484,8 @@ + + @@ -2524,6 +2526,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/template/index.html b/dev/built_in_transformers/advanced_transformers/template/index.html index e74eaf54..3c1cde62 100644 --- a/dev/built_in_transformers/advanced_transformers/template/index.html +++ b/dev/built_in_transformers/advanced_transformers/template/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2499,6 +2499,8 @@ + + @@ -2539,6 +2541,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/advanced_transformers/template_record/index.html b/dev/built_in_transformers/advanced_transformers/template_record/index.html index e670faee..fa7c59c5 100644 --- a/dev/built_in_transformers/advanced_transformers/template_record/index.html +++ b/dev/built_in_transformers/advanced_transformers/template_record/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2475,6 +2475,8 @@ + + @@ -2515,6 +2517,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/dynamic_parameters/index.html b/dev/built_in_transformers/dynamic_parameters/index.html index 2d73303c..1aa69db9 100644 --- a/dev/built_in_transformers/dynamic_parameters/index.html +++ b/dev/built_in_transformers/dynamic_parameters/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2480,6 +2480,8 @@ + + @@ -2520,6 +2522,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/index.html b/dev/built_in_transformers/index.html index b25d3fee..f811aa53 100644 --- a/dev/built_in_transformers/index.html +++ b/dev/built_in_transformers/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2395,6 +2395,8 @@ + + @@ -2435,6 +2437,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/parameters_templating/index.html b/dev/built_in_transformers/parameters_templating/index.html index 091867df..3108b9a5 100644 --- a/dev/built_in_transformers/parameters_templating/index.html +++ b/dev/built_in_transformers/parameters_templating/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2468,6 +2468,8 @@ + + @@ -2508,6 +2510,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/cmd/index.html b/dev/built_in_transformers/standard_transformers/cmd/index.html index cb73cef7..08f5395b 100644 --- a/dev/built_in_transformers/standard_transformers/cmd/index.html +++ b/dev/built_in_transformers/standard_transformers/cmd/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2526,6 +2526,8 @@ + + @@ -2566,6 +2568,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/dict/index.html b/dev/built_in_transformers/standard_transformers/dict/index.html index 38a9a639..dc6feaa3 100644 --- a/dev/built_in_transformers/standard_transformers/dict/index.html +++ b/dev/built_in_transformers/standard_transformers/dict/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/hash/index.html b/dev/built_in_transformers/standard_transformers/hash/index.html index 873d2c4a..e57fac52 100644 --- a/dev/built_in_transformers/standard_transformers/hash/index.html +++ b/dev/built_in_transformers/standard_transformers/hash/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2451,6 +2451,8 @@ + + @@ -2491,6 +2493,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/index.html b/dev/built_in_transformers/standard_transformers/index.html index 1d708391..cc9f71d5 100644 --- a/dev/built_in_transformers/standard_transformers/index.html +++ b/dev/built_in_transformers/standard_transformers/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2397,6 +2397,8 @@ + + @@ -2437,6 +2439,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/masking/index.html b/dev/built_in_transformers/standard_transformers/masking/index.html index 5806d478..802cbe59 100644 --- a/dev/built_in_transformers/standard_transformers/masking/index.html +++ b/dev/built_in_transformers/standard_transformers/masking/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/noise_date/index.html b/dev/built_in_transformers/standard_transformers/noise_date/index.html index 5fc9dbf8..e4a875f2 100644 --- a/dev/built_in_transformers/standard_transformers/noise_date/index.html +++ b/dev/built_in_transformers/standard_transformers/noise_date/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/noise_float/index.html b/dev/built_in_transformers/standard_transformers/noise_float/index.html index 4143a694..9702d70e 100644 --- a/dev/built_in_transformers/standard_transformers/noise_float/index.html +++ b/dev/built_in_transformers/standard_transformers/noise_float/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/noise_int/index.html b/dev/built_in_transformers/standard_transformers/noise_int/index.html index 897ed673..81de95ff 100644 --- a/dev/built_in_transformers/standard_transformers/noise_int/index.html +++ b/dev/built_in_transformers/standard_transformers/noise_int/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/noise_numeric/index.html b/dev/built_in_transformers/standard_transformers/noise_numeric/index.html index 92f71209..9e8f9959 100644 --- a/dev/built_in_transformers/standard_transformers/noise_numeric/index.html +++ b/dev/built_in_transformers/standard_transformers/noise_numeric/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_amount_with_currency/index.html b/dev/built_in_transformers/standard_transformers/random_amount_with_currency/index.html index 03c3d7fc..405f972d 100644 --- a/dev/built_in_transformers/standard_transformers/random_amount_with_currency/index.html +++ b/dev/built_in_transformers/standard_transformers/random_amount_with_currency/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_bool/index.html b/dev/built_in_transformers/standard_transformers/random_bool/index.html index 3c9ff2fb..70a2e290 100644 --- a/dev/built_in_transformers/standard_transformers/random_bool/index.html +++ b/dev/built_in_transformers/standard_transformers/random_bool/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_cc_number/index.html b/dev/built_in_transformers/standard_transformers/random_cc_number/index.html index eb5539d4..efc0e6a2 100644 --- a/dev/built_in_transformers/standard_transformers/random_cc_number/index.html +++ b/dev/built_in_transformers/standard_transformers/random_cc_number/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_cc_type/index.html b/dev/built_in_transformers/standard_transformers/random_cc_type/index.html index 95ff7c69..061f09ee 100644 --- a/dev/built_in_transformers/standard_transformers/random_cc_type/index.html +++ b/dev/built_in_transformers/standard_transformers/random_cc_type/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_century/index.html b/dev/built_in_transformers/standard_transformers/random_century/index.html index b8bffb4c..d92e02fe 100644 --- a/dev/built_in_transformers/standard_transformers/random_century/index.html +++ b/dev/built_in_transformers/standard_transformers/random_century/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_choice/index.html b/dev/built_in_transformers/standard_transformers/random_choice/index.html index 8f87ed1b..e84b1841 100644 --- a/dev/built_in_transformers/standard_transformers/random_choice/index.html +++ b/dev/built_in_transformers/standard_transformers/random_choice/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_currency/index.html b/dev/built_in_transformers/standard_transformers/random_currency/index.html index ff301397..9ee0870c 100644 --- a/dev/built_in_transformers/standard_transformers/random_currency/index.html +++ b/dev/built_in_transformers/standard_transformers/random_currency/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_date/index.html b/dev/built_in_transformers/standard_transformers/random_date/index.html index 75d04058..3856c72b 100644 --- a/dev/built_in_transformers/standard_transformers/random_date/index.html +++ b/dev/built_in_transformers/standard_transformers/random_date/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_day_of_month/index.html b/dev/built_in_transformers/standard_transformers/random_day_of_month/index.html index 6268d763..9f67cff2 100644 --- a/dev/built_in_transformers/standard_transformers/random_day_of_month/index.html +++ b/dev/built_in_transformers/standard_transformers/random_day_of_month/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_day_of_week/index.html b/dev/built_in_transformers/standard_transformers/random_day_of_week/index.html index 693a4359..f9fa440a 100644 --- a/dev/built_in_transformers/standard_transformers/random_day_of_week/index.html +++ b/dev/built_in_transformers/standard_transformers/random_day_of_week/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_domain_name/index.html b/dev/built_in_transformers/standard_transformers/random_domain_name/index.html index 753b9741..09f8add3 100644 --- a/dev/built_in_transformers/standard_transformers/random_domain_name/index.html +++ b/dev/built_in_transformers/standard_transformers/random_domain_name/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_e164_phone_number/index.html b/dev/built_in_transformers/standard_transformers/random_e164_phone_number/index.html index 1e003c35..f0010bb3 100644 --- a/dev/built_in_transformers/standard_transformers/random_e164_phone_number/index.html +++ b/dev/built_in_transformers/standard_transformers/random_e164_phone_number/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_email/index.html b/dev/built_in_transformers/standard_transformers/random_email/index.html index 8cca6b31..014d085a 100644 --- a/dev/built_in_transformers/standard_transformers/random_email/index.html +++ b/dev/built_in_transformers/standard_transformers/random_email/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_float/index.html b/dev/built_in_transformers/standard_transformers/random_float/index.html index 608359ce..5ac09aef 100644 --- a/dev/built_in_transformers/standard_transformers/random_float/index.html +++ b/dev/built_in_transformers/standard_transformers/random_float/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_int/index.html b/dev/built_in_transformers/standard_transformers/random_int/index.html index b867da51..db1b4a15 100644 --- a/dev/built_in_transformers/standard_transformers/random_int/index.html +++ b/dev/built_in_transformers/standard_transformers/random_int/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_ip/index.html b/dev/built_in_transformers/standard_transformers/random_ip/index.html index 73b5441d..ffe5872c 100644 --- a/dev/built_in_transformers/standard_transformers/random_ip/index.html +++ b/dev/built_in_transformers/standard_transformers/random_ip/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_latitude/index.html b/dev/built_in_transformers/standard_transformers/random_latitude/index.html index 242d7e53..6000a39a 100644 --- a/dev/built_in_transformers/standard_transformers/random_latitude/index.html +++ b/dev/built_in_transformers/standard_transformers/random_latitude/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_longitude/index.html b/dev/built_in_transformers/standard_transformers/random_longitude/index.html index af702bff..acde1854 100644 --- a/dev/built_in_transformers/standard_transformers/random_longitude/index.html +++ b/dev/built_in_transformers/standard_transformers/random_longitude/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_mac/index.html b/dev/built_in_transformers/standard_transformers/random_mac/index.html index bc4fd19d..c8c580a7 100644 --- a/dev/built_in_transformers/standard_transformers/random_mac/index.html +++ b/dev/built_in_transformers/standard_transformers/random_mac/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_month_name/index.html b/dev/built_in_transformers/standard_transformers/random_month_name/index.html index 8734d6bf..f1da01c2 100644 --- a/dev/built_in_transformers/standard_transformers/random_month_name/index.html +++ b/dev/built_in_transformers/standard_transformers/random_month_name/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_numeric/index.html b/dev/built_in_transformers/standard_transformers/random_numeric/index.html index 43e2e56c..9dcca06b 100644 --- a/dev/built_in_transformers/standard_transformers/random_numeric/index.html +++ b/dev/built_in_transformers/standard_transformers/random_numeric/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_paragraph/index.html b/dev/built_in_transformers/standard_transformers/random_paragraph/index.html index 29a661e3..7def77cf 100644 --- a/dev/built_in_transformers/standard_transformers/random_paragraph/index.html +++ b/dev/built_in_transformers/standard_transformers/random_paragraph/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_password/index.html b/dev/built_in_transformers/standard_transformers/random_password/index.html index fa79eba5..0b42f3e8 100644 --- a/dev/built_in_transformers/standard_transformers/random_password/index.html +++ b/dev/built_in_transformers/standard_transformers/random_password/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_person/index.html b/dev/built_in_transformers/standard_transformers/random_person/index.html index fbc3ccae..6e72ff44 100644 --- a/dev/built_in_transformers/standard_transformers/random_person/index.html +++ b/dev/built_in_transformers/standard_transformers/random_person/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2502,6 +2502,8 @@ + + @@ -2542,6 +2544,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_phone_number/index.html b/dev/built_in_transformers/standard_transformers/random_phone_number/index.html index eeb2a245..a48ca240 100644 --- a/dev/built_in_transformers/standard_transformers/random_phone_number/index.html +++ b/dev/built_in_transformers/standard_transformers/random_phone_number/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_sentence/index.html b/dev/built_in_transformers/standard_transformers/random_sentence/index.html index 963594d0..41e6a355 100644 --- a/dev/built_in_transformers/standard_transformers/random_sentence/index.html +++ b/dev/built_in_transformers/standard_transformers/random_sentence/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_string/index.html b/dev/built_in_transformers/standard_transformers/random_string/index.html index 59e4735d..13798556 100644 --- a/dev/built_in_transformers/standard_transformers/random_string/index.html +++ b/dev/built_in_transformers/standard_transformers/random_string/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_timezone/index.html b/dev/built_in_transformers/standard_transformers/random_timezone/index.html index 62d81006..903f2fb8 100644 --- a/dev/built_in_transformers/standard_transformers/random_timezone/index.html +++ b/dev/built_in_transformers/standard_transformers/random_timezone/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_toll_free_phone_number/index.html b/dev/built_in_transformers/standard_transformers/random_toll_free_phone_number/index.html index 2c7c10ad..3760b635 100644 --- a/dev/built_in_transformers/standard_transformers/random_toll_free_phone_number/index.html +++ b/dev/built_in_transformers/standard_transformers/random_toll_free_phone_number/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_unix_timestamp/index.html b/dev/built_in_transformers/standard_transformers/random_unix_timestamp/index.html index 5a18732f..21a03420 100644 --- a/dev/built_in_transformers/standard_transformers/random_unix_timestamp/index.html +++ b/dev/built_in_transformers/standard_transformers/random_unix_timestamp/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2469,6 +2469,8 @@ + + @@ -2509,6 +2511,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_url/index.html b/dev/built_in_transformers/standard_transformers/random_url/index.html index 5620415d..a6d6709b 100644 --- a/dev/built_in_transformers/standard_transformers/random_url/index.html +++ b/dev/built_in_transformers/standard_transformers/random_url/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_username/index.html b/dev/built_in_transformers/standard_transformers/random_username/index.html index bfb01c29..e60ca817 100644 --- a/dev/built_in_transformers/standard_transformers/random_username/index.html +++ b/dev/built_in_transformers/standard_transformers/random_username/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_uuid/index.html b/dev/built_in_transformers/standard_transformers/random_uuid/index.html index 524981ee..17d074a5 100644 --- a/dev/built_in_transformers/standard_transformers/random_uuid/index.html +++ b/dev/built_in_transformers/standard_transformers/random_uuid/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_word/index.html b/dev/built_in_transformers/standard_transformers/random_word/index.html index 71ec142c..6dadc6f0 100644 --- a/dev/built_in_transformers/standard_transformers/random_word/index.html +++ b/dev/built_in_transformers/standard_transformers/random_word/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/random_year_string/index.html b/dev/built_in_transformers/standard_transformers/random_year_string/index.html index 5019f932..83055a24 100644 --- a/dev/built_in_transformers/standard_transformers/random_year_string/index.html +++ b/dev/built_in_transformers/standard_transformers/random_year_string/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/real_address/index.html b/dev/built_in_transformers/standard_transformers/real_address/index.html index 6c5b4ed8..dbdadab6 100644 --- a/dev/built_in_transformers/standard_transformers/real_address/index.html +++ b/dev/built_in_transformers/standard_transformers/real_address/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2475,6 +2475,8 @@ + + @@ -2515,6 +2517,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/regexp_replace/index.html b/dev/built_in_transformers/standard_transformers/regexp_replace/index.html index 82ddcd2f..395e4bab 100644 --- a/dev/built_in_transformers/standard_transformers/regexp_replace/index.html +++ b/dev/built_in_transformers/standard_transformers/regexp_replace/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/replace/index.html b/dev/built_in_transformers/standard_transformers/replace/index.html index 4cf09d11..7dbf3550 100644 --- a/dev/built_in_transformers/standard_transformers/replace/index.html +++ b/dev/built_in_transformers/standard_transformers/replace/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/standard_transformers/set_null/index.html b/dev/built_in_transformers/standard_transformers/set_null/index.html index c4915d67..0266560e 100644 --- a/dev/built_in_transformers/standard_transformers/set_null/index.html +++ b/dev/built_in_transformers/standard_transformers/set_null/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/transformation_condition/index.html b/dev/built_in_transformers/transformation_condition/index.html index cdd2b0df..81386d5f 100644 --- a/dev/built_in_transformers/transformation_condition/index.html +++ b/dev/built_in_transformers/transformation_condition/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2516,6 +2516,8 @@ + + @@ -2556,6 +2558,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/transformation_engines/index.html b/dev/built_in_transformers/transformation_engines/index.html index a5d9e190..e755076b 100644 --- a/dev/built_in_transformers/transformation_engines/index.html +++ b/dev/built_in_transformers/transformation_engines/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2477,6 +2477,8 @@ + + @@ -2517,6 +2519,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/built_in_transformers/transformation_inheritance/index.html b/dev/built_in_transformers/transformation_inheritance/index.html index 0565a9f3..5094d1c7 100644 --- a/dev/built_in_transformers/transformation_inheritance/index.html +++ b/dev/built_in_transformers/transformation_inheritance/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2537,6 +2537,8 @@ + + @@ -2577,6 +2579,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/delete/index.html b/dev/commands/delete/index.html index dbe48448..8c85908f 100644 --- a/dev/commands/delete/index.html +++ b/dev/commands/delete/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2405,6 +2405,8 @@ + + @@ -2445,6 +2447,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/dump/index.html b/dev/commands/dump/index.html index 24fb2ce0..2e786370 100644 --- a/dev/commands/dump/index.html +++ b/dev/commands/dump/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2455,6 +2455,8 @@ + + @@ -2495,6 +2497,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/index.html b/dev/commands/index.html index c2a4de69..567a882b 100644 --- a/dev/commands/index.html +++ b/dev/commands/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2395,6 +2395,8 @@ + + @@ -2435,6 +2437,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/list-dumps/index.html b/dev/commands/list-dumps/index.html index b6d5377a..b7f3129e 100644 --- a/dev/commands/list-dumps/index.html +++ b/dev/commands/list-dumps/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2440,6 +2440,8 @@ + + @@ -2480,6 +2482,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/list-transformers/index.html b/dev/commands/list-transformers/index.html index 3ce3de2a..f575ea05 100644 --- a/dev/commands/list-transformers/index.html +++ b/dev/commands/list-transformers/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2440,6 +2440,8 @@ + + @@ -2480,6 +2482,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/restore/index.html b/dev/commands/restore/index.html index ff23aafa..ab5c0f29 100644 --- a/dev/commands/restore/index.html +++ b/dev/commands/restore/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2491,6 +2491,8 @@ + + @@ -2531,6 +2533,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/show-dump/index.html b/dev/commands/show-dump/index.html index 4312a35b..18729a63 100644 --- a/dev/commands/show-dump/index.html +++ b/dev/commands/show-dump/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2440,6 +2440,8 @@ + + @@ -2480,6 +2482,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/show-transformer/index.html b/dev/commands/show-transformer/index.html index 73616e9d..469986e3 100644 --- a/dev/commands/show-transformer/index.html +++ b/dev/commands/show-transformer/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2440,6 +2440,8 @@ + + @@ -2480,6 +2482,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/commands/validate/index.html b/dev/commands/validate/index.html index 658e6138..a13b2c80 100644 --- a/dev/commands/validate/index.html +++ b/dev/commands/validate/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2405,6 +2405,8 @@ + + @@ -2445,6 +2447,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/configuration/index.html b/dev/configuration/index.html index 9a03155d..2f5f29fc 100644 --- a/dev/configuration/index.html +++ b/dev/configuration/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2531,6 +2531,8 @@ + + @@ -2571,6 +2573,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/database_subset/index.html b/dev/database_subset/index.html index d33b0943..1467f8d0 100644 --- a/dev/database_subset/index.html +++ b/dev/database_subset/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2583,6 +2583,8 @@ + + @@ -2623,6 +2625,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/index.html b/dev/index.html index d76b5178..57bb2874 100644 --- a/dev/index.html +++ b/dev/index.html @@ -14,7 +14,7 @@ - + @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2395,6 +2395,8 @@ + + @@ -2435,6 +2437,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/installation/index.html b/dev/installation/index.html index 76d3b22d..0fe82073 100644 --- a/dev/installation/index.html +++ b/dev/installation/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2478,6 +2478,8 @@ + + @@ -2518,6 +2520,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/overrides/main.html b/dev/overrides/main.html index b8053afc..f0af0f73 100644 --- a/dev/overrides/main.html +++ b/dev/overrides/main.html @@ -1,7 +1,7 @@ {% extends "base.html" %} {% block announce %} - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released {% endblock %} {% block outdated %} diff --git a/dev/playground/index.html b/dev/playground/index.html index 0735653d..ebf54b79 100644 --- a/dev/playground/index.html +++ b/dev/playground/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2460,6 +2460,8 @@ + + @@ -2500,6 +2502,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_0/index.html b/dev/release_notes/greenmask_0_1_0/index.html index c37e1425..e7d12a59 100644 --- a/dev/release_notes/greenmask_0_1_0/index.html +++ b/dev/release_notes/greenmask_0_1_0/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_0_beta/index.html b/dev/release_notes/greenmask_0_1_0_beta/index.html index a2e638ab..9e6dd94c 100644 --- a/dev/release_notes/greenmask_0_1_0_beta/index.html +++ b/dev/release_notes/greenmask_0_1_0_beta/index.html @@ -122,7 +122,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -284,7 +284,7 @@
  • - + @@ -2388,6 +2388,8 @@ + + @@ -2431,6 +2433,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_1/index.html b/dev/release_notes/greenmask_0_1_1/index.html index 4e1ae710..871aedea 100644 --- a/dev/release_notes/greenmask_0_1_1/index.html +++ b/dev/release_notes/greenmask_0_1_1/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_10/index.html b/dev/release_notes/greenmask_0_1_10/index.html index ed195675..b2084f6d 100644 --- a/dev/release_notes/greenmask_0_1_10/index.html +++ b/dev/release_notes/greenmask_0_1_10/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_11/index.html b/dev/release_notes/greenmask_0_1_11/index.html index b6207b73..d6dafebb 100644 --- a/dev/release_notes/greenmask_0_1_11/index.html +++ b/dev/release_notes/greenmask_0_1_11/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_12/index.html b/dev/release_notes/greenmask_0_1_12/index.html index 6a9a8fe4..522060a0 100644 --- a/dev/release_notes/greenmask_0_1_12/index.html +++ b/dev/release_notes/greenmask_0_1_12/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_13/index.html b/dev/release_notes/greenmask_0_1_13/index.html index 3230cfe9..0c1e986b 100644 --- a/dev/release_notes/greenmask_0_1_13/index.html +++ b/dev/release_notes/greenmask_0_1_13/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_14/index.html b/dev/release_notes/greenmask_0_1_14/index.html index 21da5445..369c3271 100644 --- a/dev/release_notes/greenmask_0_1_14/index.html +++ b/dev/release_notes/greenmask_0_1_14/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_2/index.html b/dev/release_notes/greenmask_0_1_2/index.html index e8bd3c6e..a446bd51 100644 --- a/dev/release_notes/greenmask_0_1_2/index.html +++ b/dev/release_notes/greenmask_0_1_2/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_3/index.html b/dev/release_notes/greenmask_0_1_3/index.html index 477f0939..92504e36 100644 --- a/dev/release_notes/greenmask_0_1_3/index.html +++ b/dev/release_notes/greenmask_0_1_3/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_4/index.html b/dev/release_notes/greenmask_0_1_4/index.html index 667b5f1a..f7e66101 100644 --- a/dev/release_notes/greenmask_0_1_4/index.html +++ b/dev/release_notes/greenmask_0_1_4/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_5/index.html b/dev/release_notes/greenmask_0_1_5/index.html index b36dffff..552fd474 100644 --- a/dev/release_notes/greenmask_0_1_5/index.html +++ b/dev/release_notes/greenmask_0_1_5/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_6/index.html b/dev/release_notes/greenmask_0_1_6/index.html index 82675288..277b045e 100644 --- a/dev/release_notes/greenmask_0_1_6/index.html +++ b/dev/release_notes/greenmask_0_1_6/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_7/index.html b/dev/release_notes/greenmask_0_1_7/index.html index 2136ee99..b947bcae 100644 --- a/dev/release_notes/greenmask_0_1_7/index.html +++ b/dev/release_notes/greenmask_0_1_7/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_8/index.html b/dev/release_notes/greenmask_0_1_8/index.html index 435c79cd..5950338c 100644 --- a/dev/release_notes/greenmask_0_1_8/index.html +++ b/dev/release_notes/greenmask_0_1_8/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_1_9/index.html b/dev/release_notes/greenmask_0_1_9/index.html index 9f13d5b6..b7578844 100644 --- a/dev/release_notes/greenmask_0_1_9/index.html +++ b/dev/release_notes/greenmask_0_1_9/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_0/index.html b/dev/release_notes/greenmask_0_2_0/index.html index 08cbfd5b..42b5af49 100644 --- a/dev/release_notes/greenmask_0_2_0/index.html +++ b/dev/release_notes/greenmask_0_2_0/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_0_b1/index.html b/dev/release_notes/greenmask_0_2_0_b1/index.html index 6b52c215..ac592953 100644 --- a/dev/release_notes/greenmask_0_2_0_b1/index.html +++ b/dev/release_notes/greenmask_0_2_0_b1/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_0_b2/index.html b/dev/release_notes/greenmask_0_2_0_b2/index.html index 0b29de68..299160db 100644 --- a/dev/release_notes/greenmask_0_2_0_b2/index.html +++ b/dev/release_notes/greenmask_0_2_0_b2/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_1/index.html b/dev/release_notes/greenmask_0_2_1/index.html index 9ff06e9f..3e4a4e4e 100644 --- a/dev/release_notes/greenmask_0_2_1/index.html +++ b/dev/release_notes/greenmask_0_2_1/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_2/index.html b/dev/release_notes/greenmask_0_2_2/index.html index 0121686d..62bee8c4 100644 --- a/dev/release_notes/greenmask_0_2_2/index.html +++ b/dev/release_notes/greenmask_0_2_2/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_3/index.html b/dev/release_notes/greenmask_0_2_3/index.html index f5e7845b..6c47ea4b 100644 --- a/dev/release_notes/greenmask_0_2_3/index.html +++ b/dev/release_notes/greenmask_0_2_3/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_4/index.html b/dev/release_notes/greenmask_0_2_4/index.html index 7c23c3b6..4f1cd8ff 100644 --- a/dev/release_notes/greenmask_0_2_4/index.html +++ b/dev/release_notes/greenmask_0_2_4/index.html @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2433,6 +2435,27 @@ +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + + +
  • diff --git a/dev/release_notes/greenmask_0_2_5/index.html b/dev/release_notes/greenmask_0_2_5/index.html index fe4338c2..9666ef5e 100644 --- a/dev/release_notes/greenmask_0_2_5/index.html +++ b/dev/release_notes/greenmask_0_2_5/index.html @@ -11,7 +11,7 @@ - + @@ -124,7 +124,7 @@ - A new version 0.2.5 (2024.11.16) is released + A new version 0.2.6 (2024.12.07) is released @@ -286,7 +286,7 @@
  • - + @@ -2390,6 +2390,8 @@ + + @@ -2431,6 +2433,27 @@ + + +
  • + + + + + Greenmask 0.2.6 + + + + +
  • + + + + + + + + diff --git a/dev/release_notes/greenmask_0_2_6/index.html b/dev/release_notes/greenmask_0_2_6/index.html new file mode 100644 index 00000000..5d1a2740 --- /dev/null +++ b/dev/release_notes/greenmask_0_2_6/index.html @@ -0,0 +1,3360 @@ + + + + + + + + + + + + + + + + + + + + + + + + + Greenmask 0.2.6 - Greenmask — PostgreSQL database anonymization and synthetic data generation tool + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + + + + Skip to content + + +
    +
    + + + +
    + + + + + + +
    + + +
    + +
    + + + + + + + + + +
    +
    + + + +
    +
    +
    + + + + + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    +
    + + + +
    +
    + + + + + + + +

    Greenmask 0.2.6

    +

    This release introduces new features and bug fixes.

    +

    Changes

    +
      +
    • Introduces --disable-trigers, --use-session-replication-role-replica and --superuser options +for restore command. It allows to disable triggers during data section restore #248. +Closes feature request #228
    • +
    • Fix skipping unknown type when silent is true #251
    • +
    • Added sonar qube quality gate badge #250
    • +
    +

    Full Changelog: v0.2.5...v0.2.6

    + +

    Feel free to reach out to us if you have any questions or need assistance:

    + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + +
    + + + +
    + + + +
    +
    +
    +
    + + + + + + + + + + + + + \ No newline at end of file diff --git a/dev/search/search_index.json b/dev/search/search_index.json index a9439856..f591437d 100644 --- a/dev/search/search_index.json +++ b/dev/search/search_index.json @@ -1 +1 @@ -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"About Greenmask","text":""},{"location":"#dump-anonymization-and-synthetic-data-generation-tool","title":"Dump anonymization and synthetic data generation tool","text":"

    Greenmask is a powerful open-source utility that is designed for logical database backup dumping, anonymization, synthetic data generation and restoration. It has ported PostgreSQL libraries, making it reliable. It is stateless and does not require any changes to your database schema. It is designed to be highly customizable and backward-compatible with existing PostgreSQL utilities, fast and reliable.

    "},{"location":"#key-features","title":"Key features","text":""},{"location":"#use-cases","title":"Use cases","text":"

    Greenmask is ideal for various scenarios, including:

    "},{"location":"#links","title":"Links","text":""},{"location":"architecture/","title":"Architecture","text":""},{"location":"architecture/#introduction","title":"Introduction","text":"

    It is evident that the most appropriate approach for executing logical backup dumping and restoration is by leveraging the core PostgreSQL utilities, specifically pg_dump and pg_restore. Greenmask has been purposefully designed to align with PostgreSQL's native utilities, ensuring compatibility. Greenmask primarily handles data dumping operations independently and delegates the responsibilities of schema dumping and restoration to pg_dump and pg_restore respectively, maintaining seamless integration with PostgreSQL's standard tools.

    "},{"location":"architecture/#backup-process","title":"Backup process","text":"

    The process of backing up PostgreSQL databases is divided into three distinct sections:

    Greenmask focuses exclusively on the data section during runtime. It delegates the handling of the pre-data and post-data sections to the core PostgreSQL utilities, pg_dump and pg_restore.

    Greenmask employs the directory format of pg_dump and pg_restore. This format is particularly suitable for parallel execution and partial restoration, and it includes clear metadata files that aid in determining the backup and restoration steps. Greenmask has been optimized to work seamlessly with remote storage systems and anonymization procedures.

    When performing data dumping, Greenmask utilizes the COPY command in TEXT format, maintaining reliability and compatibility with the vanilla PostgreSQL utilities.

    Additionally, Greenmask supports parallel execution, significantly reducing the time required for the dumping process.

    "},{"location":"architecture/#storage-options","title":"Storage options","text":"

    The core PostgreSQL utilities, pg_dump and pg_restore, traditionally operate with files in a directory format, offering no alternative methods. To meet modern backup requirements and provide flexible approaches, Greenmask introduces the concept of storages.

    "},{"location":"architecture/#restoration-process","title":"Restoration process","text":"

    In the restoration process, Greenmask combines the capabilities of different tools:

    Greenmask also supports parallel restoration, which can significantly reduce the time required to complete the restoration process. This parallel execution enhances the efficiency of restoring large datasets.

    "},{"location":"architecture/#data-anonymization-and-validation","title":"Data anonymization and validation","text":"

    Greenmask works with COPY lines, collects schema metadata using the Golang driver, and employs this driver in the encoding and decoding process. The validate command offers a way to assess the impact on both schema (validation warnings) and data (transformation and displaying differences). This command allows you to validate the schema and data transformations, ensuring the desired outcomes during the anonymization process.

    "},{"location":"architecture/#customization","title":"Customization","text":"

    If your table schema relies on functional dependencies between columns, you can address this challenge using the TemplateRecord transformer. This transformer enables you to define transformation logic for entire tables, offering type-safe operations when assigning new values.

    Greenmask provides a framework for creating your custom transformers, which can be reused efficiently. These transformers can be seamlessly integrated without requiring recompilation, thanks to the PIPE (stdin/stdout) interaction.

    Note

    Furthermore, Greenmask's architecture is designed to be highly extensible, making it possible to introduce other interaction protocols, such as HTTP or Socket, for conducting anonymization procedures.

    "},{"location":"architecture/#postgresql-version-compatibility","title":"PostgreSQL version compatibility","text":"

    Greenmask is compatible with PostgreSQL versions 11 and higher.

    "},{"location":"configuration/","title":"Configuration","text":"
    # Configuration\n

    The configuration is organized into six sections:

    "},{"location":"configuration/#common-section","title":"common section","text":"

    In the common section of the configuration, you can specify the following settings:

    Note

    Greenmask exclusively manages data dumping and data restoration processes, delegating schema dumping to the pg_dumputility and schema restoration to the pg_restore utility. Both pg_dump and pg_restore rely on a toc.dat file located in a specific directory, which contains metadata and object definitions. Therefore, the tmp_dir parameter is essential for storing the toc.dat file during the dumping or restoration procedure. It is important to note that all artifacts in this directory will be automatically deleted once the Greenmask command is completed.

    "},{"location":"configuration/#log-section","title":"log section","text":"

    In the log section of the configuration, you can specify the following settings:

    "},{"location":"configuration/#storage-section","title":"storage section","text":"

    In the storage section, you can configure the storage driver for storing the dumped data. Currently, two storage type options are supported: directory and s3.

    directory options3 option

    The directory storage option refers to a filesystem directory where the dump data will be stored.

    Parameters include path which specifies the path to the directory in the filesystem where the dumps will be stored.

    directory storage config example
    storage:\n  type: \"directory\"\n  directory:\n    path: \"/home/user_name/storage_dir\" # (1)\n

    By choosing the s3 storage option, you can store dump data in an S3-like remote storage service, such as Amazon S3 or Azure Blob Storage. Here are the parameters you can configure for S3 storage:

    s3 storage config example for Minio running in Docker
    storage:  \n  type: \"s3\"\n  s3:\n    endpoint: \"http://localhost:9000\"\n    bucket: \"testbucket\"\n    region: \"us-east-1\"\n    access_key_id: \"Q3AM3UQ867SPQQA43P2F\"\n    secret_access_key: \"zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG\"\n
    "},{"location":"configuration/#dump-section","title":"dump section","text":"

    In the dump section of the configuration, you configure the greenmask dump command. It includes the following parameters:

    Here is an example configuration for the dump section:

    dump section config example

    dump:\n  pg_dump_options:\n    dbname: \"host=/run/postgresql user=postgres dbname=demo\"\n    jobs: 10\n    exclude-schema: \"(\\\"teSt\\\"*|test*)\"\n    table: \"bookings.flights\"\n    load-via-partition-root: true\n\n  transformation:\n    - schema: \"bookings\"\n      name: \"flights\"\n      query: \"select * from bookings.flights3 limit 1000000\"\n      columns_type_override:\n        post_code: \"int4\" # (1)\n      transformers:\n        - name: \"RandomDate\"\n          params:\n            min: \"2023-01-01 00:00:00.0+03\"\n            max: \"2023-01-02 00:00:00.0+03\"\n            column: \"scheduled_departure\"\n\n        - name: \"NoiseDate\"\n          params:\n            ratio: \"01:00:00\"\n            column: \"scheduled_arrival\"\n\n        - name: \"RegexpReplace\"\n          params:\n            column: \"status\"\n            regexp: \"On Time\"\n            replace: \"Delayed\"\n\n        - name: \"RandomInt\" # (2)\n          params:\n            column: \"post_code\"\n            min: \"11\"\n            max: \"99\"\n\n    - schema: \"bookings\"\n      name: \"aircrafts_data\"\n      subset_conds: # (3)\n        - \"bookings.aircrafts_data.model = 'Boeing 777-300-2023'\"\n      transformers:\n        - name: \"Json\"\n          params:\n            column: \"model\"\n            operations:\n              - operation: \"set\"\n                path: \"en\"\n                value: \"Boeing 777-300-2023\"\n              - operation: \"set\"\n                path: \"crewSize\"\n                value: 10\n\n        - name: \"NoiseInt\"\n          params:\n            ratio: 0.9\n            column: \"range\"\n

    1. Override the post_code column type to int4 (INTEGER). This is necessary because the post_code column originally has a TEXT type, but it contains values that resemble integers. By explicitly overriding the type to int4, we ensure compatibility with transformers that work with integer types, such as RandomInt.
    2. After the type is overridden, we can apply a compatible transformer.
    3. Database subset condition applied to the aircrafts_data table. The subset condition filters the data based on the model column.
    "},{"location":"configuration/#validate-section","title":"validate section","text":"

    In the validate section of the configuration, you can specify parameters for the greenmask validate command. Here is an example of the validate section configuration:

    validate section config example

    validate:\n  tables: # (1)\n    - \"orders\"\n    - \"public.cart\"\n  data: true # (2)\n  diff: true # (3)\n  rows_limit: 10 # (4)\n  resolved_warnings: # (5)\n    - \"8d436fae67b2b82b36bd3afeb0c93f30\"\n  table_format: \"horizontal\" # (7)\n  format: \"text\" # (6)\n  schema: true # (8)\n  transformed_only: true # (9)\n  warnings: true # (10)\n

    1. A list of tables to validate. If this list is not empty, the validation operation will only be performed for the specified tables. Tables can be written with or without the schema name (e. g., \"public.cart\" or \"orders\").
    2. Specifies whether to perform data transformation for a limited set of rows. If set to true, data transformation will be performed, and the number of rows transformed will be limited to the value specified in the rows_limit parameter (default is 10).
    3. Specifies whether to perform diff operations for the transformed data. If set to true, the validation process will find the differences between the original and transformed data. See more details in the validate command documentation.
    4. Limits the number of rows to be transformed during validation. The default limit is 10 rows, but you can change it by modifying this parameter.
    5. A hash list of resolved warnings. These warnings have been addressed and resolved in a previous validation run.
    6. The output format (json or text). See more details in the validate command documentation.
    7. Specifies the format of the transformation output. Possible values are [horizontal|vertical]. The default format is horizontal. You can choose the format that suits your needs.
    8. Specifies whether to compare the current schema with the schema from the previous dump and print the differences, if any.
    9. If set to true, transformation output will be only with the transformed columns and primary keys
    10. If set to true, all the warnings will be printed
    "},{"location":"configuration/#restore-section","title":"restore section","text":"

    In the restore section of the configuration, you can specify parameters for the greenmask restore command. It contains pg_restore settings and custom script execution settings. Below you can find the available parameters:

    As mentioned in the architecture, a backup contains three sections: pre-data, data, and post-data. The custom script execution allows you to customize and control the restoration process by executing scripts or commands at specific stages. The available restoration stages and their corresponding execution conditions are as follows:

    Each stage can have a \"when\" condition with one of the following possible values:

    Below you can find one of the possible versions for the scripts part of the restore section:

    scripts definition example

    scripts:\n  pre-data: # (1)\n    - name: \"pre-data before script [1] with query\"\n      when: \"before\"\n      query: \"create table script_test(stage text)\"\n    - name: \"pre-data before script [2]\"\n      when: \"before\"\n      query: \"insert into script_test values('pre-data before')\"\n    - name: \"pre-data after test script [1]\"\n      when: \"after\"\n      query: \"insert into script_test values('pre-data after')\"\n    - name: \"pre-data after script with query_file [1]\"\n      when: \"after\"\n      query_file: \"pre-data-after.sql\"\n  data: # (2)\n    - name: \"data before script with command [1]\"\n      when: \"before\"\n      command: # (4)\n        - \"data-after.sh\"\n        - \"param1\"\n        - \"param2\"\n    - name: \"data after script [1]\"\n      when: \"after\"\n      query_file: \"data-after.sql\"\n  post-data: # (3)\n    - name: \"post-data before script [1]\"\n      when: \"before\"\n      query: \"insert into script_test values('post-data before')\"\n    - name: \"post-data after script with query_file [1]\"\n      when: \"after\"\n      query_file: \"post-data-after.sql\"\n

    1. List of pre-data stage scripts. This section contains scripts that are executed before or after the restoration of the pre-data section. The scripts include SQL queries and query files.
    2. List of data stage scripts. This section contains scripts that are executed before or after the restoration of the data section. The scripts include shell commands with parameters and SQL query files.
    3. List of post-data stage scripts. This section contains scripts that are executed before or after the restoration of the post-data section. The scripts include SQL queries and query files.
    4. Command in the first argument and the parameters in the rest of the list. When specifying a command to be executed in the scripts section, you provide the command name as the first item in a list, followed by any parameters or arguments for that command. The command and its parameters are provided as a list within the script configuration.
    "},{"location":"configuration/#restoration-error-exclusion","title":"restoration error exclusion","text":"

    You can configure which errors to ignore during the restoration process by setting the insert_error_exclusions parameter. This parameter can be applied globally or per table. If both global and table-specific settings are defined, the table-specific settings will take precedence. Below is an example of how to configure the insert_error_exclusions parameter. You can specify constraint names from your database schema or the error codes returned by PostgreSQL. See the list of error codes in the PostgreSQL documentation.

    parameter definition
    insert_error_exclusions:\n\n  global:\n    error_codes: [\"23505\"] # (1)\n    constraints: [\"PK_ProductReview_ProductReviewID\"] # (2)\n  tables: # (3)\n    - schema: \"production\"\n      name: \"productreview\"\n      constraints: [\"PK_ProductReview_ProductReviewID\"]\n      error_codes: [\"23505\"]\n
    1. List of strings that contains postgresql error codes
    2. List of strings that contains constraint names (globally)
    3. List of tables with their schema, name, constraints, and error codes

    Here is an example configuration for the restore section:

    restore:\n  scripts:\n      pre-data: # (1)\n        - name: \"pre-data before script [1] with query\"\n          when: \"before\"\n          query: \"create table script_test(stage text)\"\n\n  insert_error_exclusions:\n    tables:\n      - schema: \"production\"\n        name: \"productreview\"\n        constraints:\n          - \"PK_ProductReview_ProductReviewID\"\n        error_codes:\n          - \"23505\"\n    global:\n      error_codes:\n        - \"23505\"\n\n  pg_restore_options:\n    jobs: 10\n    exit-on-error: false\n    dbname: \"postgresql://postgres:example@localhost:54316/transformed\"\n    table: \n      - \"productreview\"\n    pgzip: true\n    inserts: true\n    on-conflict-do-nothing: true\n    restore-in-order: true\n
    "},{"location":"configuration/#environment-variable-configuration","title":"Environment variable configuration","text":"

    It's also possible to configure Greenmask through environment variables.

    Greenmask will automatically parse any environment variable that matches the configuration in the config file by substituting the dot (.) separator for an underscore (_) and uppercasing it. As an example, the config file below would apply the same configuration as defining the LOG_LEVEL=debug environment variable

    config.yaml
    log:\n  level: debug\n
    "},{"location":"configuration/#global-configuration-variables","title":"Global configuration variables","text":""},{"location":"configuration/#postgres-connection-variables","title":"Postgres connection variables","text":"

    Additionally, there are some environment variables exposed by the dump and restore commands to facilitate the connection configuration with a Postgres database

    "},{"location":"database_subset/","title":"Database subset","text":"

    Greenmask allows you to define a subset condition for filtering data during the dump process. This feature is useful when you need to dump only a part of the database, such as a specific table or a set of tables. It automatically ensures data consistency by including all related data from other tables that are required to maintain the integrity of the subset. The subset condition can be defined using subset_conds attribute that can be defined on the table in the transformation section (see examples).

    Info

    Greenmask generates queries for subset conditions based on the introspected schema using joins and recursive queries. It cannot be responsible for query optimization. The subset queries might be slow due to the complexity of the queries and/or lack of indexes. Circular references are resolved using recursive queries.

    "},{"location":"database_subset/#detail","title":"Detail","text":"

    The subset is a list of SQL conditions that are applied to the table. The conditions are combined with the AND operator. You need to specify the schema, table, and column name when pointing out the column to filter by to avoid ambiguity. The subset condition must be a valid SQL condition.

    Subset condition example
    subset_conds:\n  - 'person.businessentity.businessentityid IN (274, 290, 721, 852)'\n
    "},{"location":"database_subset/#use-cases","title":"Use cases","text":""},{"location":"database_subset/#references-with-null-values","title":"References with NULL values","text":"

    For references that do not have NOT NULL constraints, Greenmask will automatically generate LEFT JOIN queries with the appropriate conditions to ensure integrity checks. You can rely on Greenmask to handle such cases correctly\u2014no special configuration is needed, as it performs this automatically based on the introspected schema.

    "},{"location":"database_subset/#circular-reference","title":"Circular reference","text":"

    Greenmask supports circular references between tables. You can define a subset condition for any table, and Greenmask will automatically generate the appropriate queries for the table subset using recursive queries. The subset system ensures data consistency by validating all records found through the recursive queries. If a record does not meet the subset condition, it will be excluded along with its parent records, preventing constraint violations.

    Warning

    Currently (v0.2b2), Greenmask can resolve multi-cycles in one strongly connected component, but only for one group of vertexes. If you have an SCC that contains 2 groups of vertexes, Greenmask will not be able to resolve it. For instance, if we have 2 cycles with tables A, B, C (first group) and B, C, E (second group), Greenmask will not be able to resolve it. But if you have only one group of vertexes with one or more cycles in the same group of tables (for instance A, B, C), Greenmask works with it. This will be fixed in the future. See the second example below. In practice, this is a quite rare situation and 99% of people will not face this issue.

    You can read the Wikipedia article about Circular reference here.

    "},{"location":"database_subset/#virtual-references","title":"Virtual references","text":"

    During the development process, there are situations where foreign keys need to be removed. The reasons can vary\u2014from improving performance to simplifying the database structure. Additionally, some foreign keys may exist within loosely structured data, such as JSON, where PostgreSQL cannot create foreign keys at all. These limitations could significantly hinder the capabilities of a subset system. Greenmask offers a flexible solution to this problem by allowing the declaration of virtual references in the configuration, enabling the preservation and management of logical relationships between tables, even in the absence of explicit foreign keys. Virtual reference can be called virtual foreign key as well.

    The virtual_references can be defined in the dump section. It contains the list of virtual references. First, you set the table where you want to define the virtual reference. In the references attribute, define the list of tables that are referenced by the table. In the columns attribute, define the list of columns that are used in the foreign key reference. The not_null attribute is optional and defines whether the FK has a not null constraint. If true, Greenmask will generate an INNER JOIN instead of a LEFT JOIN; by default, it is false. The expression needs to be used when you want to use some expression to get the value of the column in the referencing table. For instance, if you have a JSONB column in the audit_logs table that contains an order_id field, you can use this field as an FK reference.

    Info

    You do not need to define the primary key of the referenced table. Greenmask will automatically resolve it and use it in the join condition.

    Virtual references example
    dump:\n  virtual_references:\n    - schema: \"public\" # (1)\n      name: \"orders\" # (2)\n      references: # (3)\n        - schema: \"public\" # (4) \n          name: \"customers\" # (5)\n          columns: # (6)\n            - name: \"customer_id\"\n          not_null: false # (7)\n\n    - schema: \"public\"\n      name: \"audit_logs\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\" # (8)\n
    1. The schema name of table that has foreign key reference (table that own FK reference)
    2. The table name that has foreign key reference (table that own FK reference)
    3. List of virtual references
    4. The schema name of the table that has foreign key reference (referencing table)
    5. The table name that has foreign key reference (referencing table)
    6. List of columns that are used in the foreign key reference. Each column must have exactly one of the following properties defined at the same time:

      • name - column name in the referencing table
      • expression - expression that is used to get the value of the column in the referencing table
    7. not_null - whether the FK has a not null constraint. If true, an INNER JOIN is generated. By default, it is false

    8. expression - expression that is used to get the value of the column in the referencing table
    "},{"location":"database_subset/#polymorphic-references","title":"Polymorphic references","text":"

    Greenmask supports polymorphic references. You can define a virtual reference for a table with polymorphic references using polymorphic_exprs attribute. The polymorphic_exprs attribute is a list of expressions that are used to make a polymorphic reference. For instance we might have a table comments that has polymorphic reference to posts and videos. The table comments might have commentable_id and commentable_type columns. The commentable_type column contains the type of the table that is referenced by the commentable_id column. The example of the config:

    Polymorphic references example
    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"comments\"\n      references:\n        - schema: \"public\"\n          name: \"videos\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'video'\"\n          columns:\n            - name: \"commentable_id\"\n        - schema: \"public\"\n          name: \"posts\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'post'\"\n          columns:\n            - name: \"commentable_id\"\n

    Warning

    The polymorphic references cannot be not_null because the commentable_id column can be NULL if the commentable_type is not set or is different from the values defined in the polymorphic_exprs attribute.

    "},{"location":"database_subset/#troubleshooting","title":"Troubleshooting","text":""},{"location":"database_subset/#exclude-the-records-that-has-null-values-in-the-referenced-column","title":"Exclude the records that has NULL values in the referenced column","text":"

    If you want to exclude records that have NULL values in the referenced column, you can manually add this condition to the subset condition for the table. Greenmask does not automatically exclude records with NULL values because it applies a LEFT OUTER JOIN on nullable foreign keys.

    "},{"location":"database_subset/#some-table-is-not-filtered-by-the-subset-condition","title":"Some table is not filtered by the subset condition","text":"

    Greenmask builds a table dependency graph based on the introspected schema and existing foreign keys. If a table is not filtered by the subset condition, it means that the table either does not reference another table that is filtered by the subset condition or the table itself does not have a subset condition applied.

    If you have a table with a removed foreign key and want to filter it by the subset condition, you need to define a virtual reference. For more information on virtual references, refer to the Virtual References section.

    Info

    If you find any issues related to the code or greenmask is not working as expected, do not hesitate to contact us directly or by creating an issue in the repository.

    "},{"location":"database_subset/#error-column-reference-id-is-ambiguous","title":"ERROR: column reference \"id\" is ambiguous","text":"

    If you see the error message ERROR: column reference \"{column name}\" is ambiguous, you have specified the column name without the table and/or schema name. To avoid ambiguity, always specify the schema and table name when pointing out the column to filter by. For instance if you want to filter employees by employee_id column, you should use public.employees.employee_id instead of employee_id.

    Valid subset condition
    public.employees.employee_id IN (1, 2, 3)\n
    "},{"location":"database_subset/#the-subset-condition-is-not-working-correctly-how-can-i-verify-it","title":"The subset condition is not working correctly. How can I verify it?","text":"

    Run greenmask with --log-level=debug to see the generated SQL queries. You will find the generated SQL queries in the log output. Validate this query in your database client to ensure that the subset condition is working as expected.

    For example:

    $ greenmask dump --config config.yaml --log-level=debug\n\n2024-08-29T19:06:18+03:00 DBG internal/db/postgres/context/context.go:202 > Debug query Schema=person Table=businessentitycontact pid=1638339\n2024-08-29T19:06:18+03:00 DBG internal/db/postgres/context/context.go:203 > SELECT \"person\".\"businessentitycontact\".* FROM \"person\".\"businessentitycontact\"  INNER JOIN \"person\".\"businessentity\" ON \"person\".\"businessentitycontact\".\"businessentityid\" = \"person\".\"businessentity\".\"businessentityid\" AND ( person.businessentity.businessentityid between 400 and 800 OR person.businessentity.businessentityid between 800 and 900 ) INNER JOIN \"person\".\"person\" ON \"person\".\"businessentitycontact\".\"personid\" = \"person\".\"person\".\"businessentityid\" WHERE TRUE AND ((\"person\".\"person\".\"businessentityid\") IN (SELECT \"person\".\"businessentity\".\"businessentityid\" FROM \"person\".\"businessentity\"   WHERE ( ( person.businessentity.businessentityid between 400 and 800 OR person.businessentity.businessentityid between 800 and 900 ) )))\n pid=1638339\n
    "},{"location":"database_subset/#dump-is-too-slow","title":"Dump is too slow","text":"

    If the dump process is too slow the generated query might be too complex. In this case you can:

    "},{"location":"database_subset/#example-dump-a-subset-of-the-database","title":"Example: Dump a subset of the database","text":"

    Info

    All examples based on playground database. Read more about the playground database in the Playground section.

    The following example demonstrates how to dump a subset of the person schema. The subset condition is applied to the businessentity and password tables. The subset condition filters the data based on the businessentityid and passwordsalt columns, respectively.

    Subset configuration example
    transformation:\n  - schema: \"person\"\n    name: \"businessentity\"\n    subset_conds:\n      - 'person.businessentity.businessentityid IN (274, 290, 721, 852)'\n    transformers:\n      - name: \"RandomDate\"\n        params:\n          column: \"modifieddate\"\n          min: \"2020-01-01 00:00:00\"\n          max: \"2024-06-26 00:00:00\"\n          truncate: \"day\"\n          keep_null: false\n\n  - schema: \"person\"\n    name: \"password\"\n    subset_conds:\n      - >\n        person.password.passwordsalt = '329eacbe-c883-4f48-b8b6-17aa4627efff'\n
    "},{"location":"database_subset/#example-dump-a-subset-with-circular-reference","title":"Example: Dump a subset with circular reference","text":"Create tables with multi-cycles
    -- Step 1: Create tables without foreign keys\nDROP TABLE IF EXISTS employees CASCADE;\nCREATE TABLE employees\n(\n    employee_id   SERIAL PRIMARY KEY,\n    name          VARCHAR(100) NOT NULL,\n    department_id INT -- Will reference departments(department_id)\n);\n\nDROP TABLE IF EXISTS departments CASCADE;\nCREATE TABLE departments\n(\n    department_id SERIAL PRIMARY KEY,\n    name          VARCHAR(100) NOT NULL,\n    project_id    INT -- Will reference projects(project_id)\n);\n\nDROP TABLE IF EXISTS projects CASCADE;\nCREATE TABLE projects\n(\n    project_id       SERIAL PRIMARY KEY,\n    name             VARCHAR(100) NOT NULL,\n    lead_employee_id INT, -- Will reference employees(employee_id)\n    head_employee_id INT  -- Will reference employees(employee_id)\n);\n\n-- Step 2: Alter tables to add foreign key constraints\nALTER TABLE employees\n    ADD CONSTRAINT fk_department\n        FOREIGN KEY (department_id) REFERENCES departments (department_id);\n\nALTER TABLE departments\n    ADD CONSTRAINT fk_project\n        FOREIGN KEY (project_id) REFERENCES projects (project_id);\n\nALTER TABLE projects\n    ADD CONSTRAINT fk_lead_employee\n        FOREIGN KEY (lead_employee_id) REFERENCES employees (employee_id);\n\nALTER TABLE projects\n    ADD CONSTRAINT fk_lead_employee2\n        FOREIGN KEY (head_employee_id) REFERENCES employees (employee_id);\n\n-- Insert projects\nINSERT INTO projects (name, lead_employee_id)\nSELECT 'Project ' || i, NULL\nFROM generate_series(1, 10) AS s(i);\n\n-- Insert departments\nINSERT INTO departments (name, project_id)\nSELECT 'Department ' || i, i\nFROM generate_series(1, 10) AS s(i);\n\n-- Insert employees and assign 10 of them as project leads\nINSERT INTO employees (name, department_id)\nSELECT 'Employee ' || i, (i / 10) + 1\nFROM generate_series(1, 99) AS s(i);\n\n-- Assign 10 employees as project leads\nUPDATE projects\nSET lead_employee_id = (SELECT employee_id\n                        FROM employees\n                 
       WHERE employees.department_id = projects.project_id\n                        LIMIT 1),\n    head_employee_id = 3\nWHERE project_id <= 10;\n

    This schema has two cycles:

    Greenmask can simply resolve it by generating a recursive query with integrity checks for subset and join conditions.

    The example below will fetch the data for the 3 employees and the related departments and projects.

    Subset configuration example
    transformation:\n  - schema: \"public\"\n    name: \"employees\"\n    subset_conds:\n      - \"public.employees.employee_id in (1, 2, 3)\"\n

    But this will return an empty result, because the subset condition is not met for all related tables: the project with project_id=1 has a reference to the employee with employee_id=3, which is invalid for the subset condition.

    Subset configuration example
    transformation:\n  - schema: \"public\"\n    name: \"employees\"\n    subset_conds:\n      - \"public.employees.employee_id in (1, 2)\"\n
    "},{"location":"database_subset/#example-dump-a-subset-with-virtual-references","title":"Example: Dump a subset with virtual references","text":"

    In this example, we will create a subset of the tables with virtual references. The subset will include the orders table and its related tables customers and audit_logs. The orders table has a virtual reference to the customers table, and the audit_logs table has a virtual reference to the orders table.

    Create tables with virtual references
    -- Create customers table\nCREATE TABLE customers\n(\n    customer_id   SERIAL PRIMARY KEY,\n    customer_name VARCHAR(100)\n);\n\n-- Create orders table\nCREATE TABLE orders\n(\n    order_id    SERIAL PRIMARY KEY,\n    customer_id INT, -- This should reference customers.customer_id, but no FK constraint is defined\n    order_date  DATE\n);\n\n-- Create payments table\nCREATE TABLE payments\n(\n    payment_id     SERIAL PRIMARY KEY,\n    order_id       INT, -- This should reference orders.order_id, but no FK constraint is defined\n    payment_amount DECIMAL(10, 2),\n    payment_date   DATE\n);\n\n-- Insert test data into customers table\nINSERT INTO customers (customer_name)\nVALUES ('John Doe'),\n       ('Jane Smith'),\n       ('Alice Johnson');\n\n-- Insert test data into orders table\nINSERT INTO orders (customer_id, order_date)\nVALUES (1, '2023-08-01'), -- Related to customer John Doe\n       (2, '2023-08-05'), -- Related to customer Jane Smith\n       (3, '2023-08-07');\n-- Related to customer Alice Johnson\n\n-- Insert test data into payments table\nINSERT INTO payments (order_id, payment_amount, payment_date)\nVALUES (1, 100.00, '2023-08-02'), -- Related to order 1 (John Doe's order)\n       (2, 200.50, '2023-08-06'), -- Related to order 2 (Jane Smith's order)\n       (3, 300.75, '2023-08-08');\n-- Related to order 3 (Alice Johnson's order)\n\n\n-- Create a table with a multi-key reference (composite key reference)\nCREATE TABLE order_items\n(\n    order_id     INT,               -- Should logically reference orders.order_id\n    item_id      INT,               -- Composite part of the key\n    product_name VARCHAR(100),\n    quantity     INT,\n    PRIMARY KEY (order_id, item_id) -- Composite primary key\n);\n\n-- Create a table with a JSONB column that contains a reference value\nCREATE TABLE audit_logs\n(\n    log_id   SERIAL PRIMARY KEY,\n    log_data JSONB -- This JSONB field will contain references to other tables\n);\n\n-- Insert data into 
order_items table with multi-key reference\nINSERT INTO order_items (order_id, item_id, product_name, quantity)\nVALUES (1, 1, 'Product A', 3), -- Related to order_id = 1 from orders table\n       (1, 2, 'Product B', 5), -- Related to order_id = 1 from orders table\n       (2, 1, 'Product C', 2), -- Related to order_id = 2 from orders table\n       (3, 1, 'Product D', 1);\n-- Related to order_id = 3 from orders table\n\n-- Insert data into audit_logs table with JSONB reference value\nINSERT INTO audit_logs (log_data)\nVALUES ('{\n  \"event\": \"order_created\",\n  \"order_id\": 1,\n  \"details\": {\n    \"customer_name\": \"John Doe\",\n    \"total\": 100.00\n  }\n}'),\n       ('{\n         \"event\": \"payment_received\",\n         \"order_id\": 2,\n         \"details\": {\n           \"payment_amount\": 200.50,\n           \"payment_date\": \"2023-08-06\"\n         }\n       }'),\n       ('{\n         \"event\": \"item_added\",\n         \"order_id\": 1,\n         \"item\": {\n           \"item_id\": 2,\n           \"product_name\": \"Product B\",\n           \"quantity\": 5\n         }\n       }');\n

    The following example demonstrates how to make a subset for keys that do not have FK constraints but for which a data relationship exists.

    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"orders\"\n      references:\n        - schema: \"public\"\n          name: \"customers\"\n          columns:\n            - name: \"customer_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"payments\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - name: \"order_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"order_items\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - name: \"order_id\"\n          not_null: true\n        - schema: \"public\"\n          name: \"products\"\n          columns:\n            - name: \"product_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"audit_logs\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\"\n          not_null: false\n        - schema: \"public\"\n          name: \"order_items\"\n          columns:\n            - expression: \"(public.audit_logs.log_data -> 'item' ->> 'item_id')::INT\"\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\"\n          not_null: false\n\n  transformation:\n\n    - schema: \"public\"\n      name: \"customers\"\n      subset_conds:\n        - \"public.customers.customer_id in (1)\"\n

    As a result, the customers table will be dumped with the orders table and its related tables payments, order_items, and audit_logs. The subset condition will be applied to the customers table, and the data will be filtered based on the customer_id column.

    "},{"location":"database_subset/#example-dump-a-subset-with-polymorphic-references","title":"Example: Dump a subset with polymorphic references","text":"

    In this example, we will create a subset of the tables with polymorphic references. This example includes the comments table and its related tables posts and videos.

    Create tables with polymorphic references and insert data
    -- Create the Posts table\nCREATE TABLE posts\n(\n    id      SERIAL PRIMARY KEY,\n    title   VARCHAR(255) NOT NULL,\n    content TEXT         NOT NULL\n);\n\n-- Create the Videos table\nCREATE TABLE videos\n(\n    id    SERIAL PRIMARY KEY,\n    title VARCHAR(255) NOT NULL,\n    url   VARCHAR(255) NOT NULL\n);\n\n-- Create the Comments table with a polymorphic reference\nCREATE TABLE comments\n(\n    id               SERIAL PRIMARY KEY,\n    commentable_id   INT         NOT NULL, -- Will refer to either posts.id or videos.id\n    commentable_type VARCHAR(50) NOT NULL, -- Will store the type of the associated record\n    body             TEXT        NOT NULL,\n    created_at       TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n);\n\n\n-- Insert data into the Posts table\nINSERT INTO posts (title, content)\nVALUES ('First Post', 'This is the content of the first post.'),\n       ('Second Post', 'This is the content of the second post.');\n\n-- Insert data into the Videos table\nINSERT INTO videos (title, url)\nVALUES ('First Video', 'https://example.com/video1'),\n       ('Second Video', 'https://example.com/video2');\n\n-- Insert data into the Comments table, associating some comments with posts and others with videos\n-- For posts:\nINSERT INTO comments (commentable_id, commentable_type, body)\nVALUES (1, 'post', 'This is a comment on the first post.'),\n       (2, 'post', 'This is a comment on the second post.');\n\n-- For videos:\nINSERT INTO comments (commentable_id, commentable_type, body)\nVALUES (1, 'video', 'This is a comment on the first video.'),\n       (2, 'video', 'This is a comment on the second video.');\n

    The comments table has a polymorphic reference to the posts and videos tables. Depending on the value of the commentable_type column, the commentable_id column will reference either the posts.id or videos.id column.

    The following example demonstrates how to make a subset for tables with polymorphic references.

    Subset configuration example
    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"comments\"\n      references:\n        - schema: \"public\"\n          name: \"posts\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'post'\"\n          columns:\n            - name: \"commentable_id\"\n        - schema: \"public\"\n          name: \"videos\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'video'\"\n          columns:\n            - name: \"commentable_id\"\n\n  transformation:\n    - schema: \"public\"\n      name: \"posts\"\n      subset_conds:\n        - \"public.posts.id in (1)\"\n

    This example selects only the first post from the posts table and its related comments from the comments table. The comments associated with videos are included without filtering because the subset condition is applied only to the posts table and its related comments.

    The resulted records will be:

    transformed=# select * from comments;\n id | commentable_id | commentable_type |                 body                  |         created_at         \n----+----------------+------------------+---------------------------------------+----------------------------\n  1 |              1 | post             | This is a comment on the first post.  | 2024-09-18 05:27:54.217405\n  2 |              2 | post             | This is a comment on the second post. | 2024-09-18 05:27:54.217405\n  3 |              1 | video            | This is a comment on the first video. | 2024-09-18 05:27:54.229794\n(3 rows)\n
    "},{"location":"installation/","title":"Installation","text":""},{"location":"installation/#prerequisites","title":"Prerequisites","text":""},{"location":"installation/#via-docker","title":"Via docker","text":"

    You can find the docker images in the:

    1. Docker-hub page

    To run the greenmask container from DockerHub, use the following command:

    docker run -it greenmask/greenmask:latest\n

    1. GitHub container registry

    To run the greenmask container from Github registry, use the following command:

    docker run -it ghcr.io/greenmaskio/greenmask:latest\n

    Info

    For pre-releases (rc, beta, etc.), use explicit tags like v0.2.0b2.

    "},{"location":"installation/#via-brew","title":"Via brew","text":"

    The greenmask build is available in brew, but only a production build is available. To install the greenmask via brew, use the following command:

    brew install greenmask\n
    "},{"location":"installation/#from-source","title":"From source","text":"
    1. Clone the Greenmask repository by using the following command:

      git clone git@github.com:GreenmaskIO/greenmask.git\n
    2. Once the repository is cloned, execute the following command to build Greenmask:

      make build\n

    After completing the build process, you will find the binary named greenmask in the root directory of the repository. Execute the binary to start using Greenmask.

    "},{"location":"installation/#playground","title":"Playground","text":"

    Greenmask Playground is a sandbox environment for your experiments in Docker with sample databases included to help you try Greenmask without any additional actions. Read the Playground guide to learn more.

    "},{"location":"playground/","title":"Greenmask Playground","text":"

    Greenmask Playground is a sandbox environment in Docker with sample databases included to help you try Greenmask without any additional actions. It includes the following components:

    Warning

    To complete this guide, you must have Docker and docker-compose installed.

    "},{"location":"playground/#setting-up-greenmask-playground","title":"Setting up Greenmask Playground","text":"
    1. Clone the greenmask repository and navigate to its directory by running the following commands:

      git clone git@github.com:GreenmaskIO/greenmask.git && cd greenmask\n
    2. Once you have cloned the repository, start the environment by running Docker Compose:

      docker-compose run greenmask\n

    Tip

    If you're experiencing problems with pulling images from Docker Hub, you can build the Greenmask image from source by running the following command:

    docker-compose run greenmask-from-source\n

    Now you have Greenmask Playground up and running with a shell prompt inside the container. All further operations will be carried out within this container's shell.

    "},{"location":"playground/#commands","title":"Commands","text":"

    Below you can see Greenmask commands:

    To learn more about them, see Commands.

    "},{"location":"playground/#transformers","title":"Transformers","text":"

    A configuration file is mandatory for Greenmask functioning. The pre-defined configuration file is stored at the repository root directory (./playground/config.yml). It also serves to define transformers which you can update to your liking in order to use Greenmask Playground more effectively and to get better understanding of the tool itself. To learn how to customize a configuration file, see Configuration

    The pre-defined configuration file uses the NoiseDate transformer as an example. To learn more about other transformers and how to use them, see Transformers.

    "},{"location":"built_in_transformers/","title":"About transformers","text":"

    Transformers in Greenmask are methods which are applied to anonymize sensitive data. All Greenmask transformers are split into the following groups:

    "},{"location":"built_in_transformers/dynamic_parameters/","title":"Dynamic parameters","text":""},{"location":"built_in_transformers/dynamic_parameters/#description","title":"Description","text":"

    Most transformers in Greenmask have dynamic parameters. This functionality is possible because Greenmask utilizes a database driver that can encode and decode raw values into their actual type representations.

    This allows you to retrieve parameter values directly from the records. This capability is particularly beneficial when you need to resolve functional dependencies between fields or satisfy constraints. Greenmask processes transformations sequentially. Therefore, when you reference a field that was transformed in a previous step, you will access the transformed value.

    "},{"location":"built_in_transformers/dynamic_parameters/#definition","title":"Definition","text":"
    dynamic_params:\n  - column: \"column_name\" # (1)\n    cast_to: \"cast_function\" # (2)\n    template: \"template_function\" # (3)\n    default_value: any # (4)\n
    1. Name of the column from which the value is retrieved.
    2. Function used to cast the column value to the desired type.
    3. Template used for casting the column value to the desired type.
    4. Default value used if the column's value is NULL.
    "},{"location":"built_in_transformers/dynamic_parameters/#dynamic-parameter-options","title":"Dynamic parameter options","text":""},{"location":"built_in_transformers/dynamic_parameters/#cast-functions","title":"Cast functions","text":"name description input type output type UnixNanoToDate Cast int value as Unix Timestamp in Nano Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixMicroToDate Cast int value as Unix Timestamp in Micro Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixMilliToDate Cast int value as Unix Timestamp in Milli Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixSecToDate Cast int value as Unix Timestamp in Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixNanoToTimestamp Cast int value as Unix Timestamp in Nano Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixMicroToTimestamp Cast int value as Unix Timestamp in Micro Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixMilliToTimestamp Cast int value as Unix Timestamp in Milli Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixSecToTimestamp Cast int value as Unix Timestamp in Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixNanoToTimestampTz Cast int value as Unix Timestamp in Nano Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixMicroToTimestampTz Cast int value as Unix Timestamp in Micro Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixMilliToTimestampTz Cast int value as Unix Timestamp in Milli Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixSecToTimestampTz Cast int value as Unix Timestamp in Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz DateToUnixNano Cast date value to int value as a Unix Timestamp in Nano Seconds date int2, 
int4, int8, numeric, float4, float8 DateToUnixMicro Cast date value to int value as a Unix Timestamp in Micro Seconds date int2, int4, int8, numeric, float4, float8 DateToUnixMilli Cast date value to int value as a Unix Timestamp in Milli Seconds date int2, int4, int8, numeric, float4, float8 DateToUnixSec Cast date value to int value as a Unix Timestamp in Seconds date int2, int4, int8, numeric, float4, float8 TimestampToUnixNano Cast timestamp value to int value as a Unix Timestamp in Nano Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixMicro Cast timestamp value to int value as a Unix Timestamp in Micro Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixMilli Cast timestamp value to int value as a Unix Timestamp in Milli Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixSec Cast timestamp value to int value as a Unix Timestamp in Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampTzToUnixNano Cast timestamptz value to int value as a Unix Timestamp in Nano Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixMicro Cast timestamptz value to int value as a Unix Timestamp in Micro Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixMilli Cast timestamptz value to int value as a Unix Timestamp in Milli Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixSec Cast timestamptz value to int value as a Unix Timestamp in Seconds timestamptz int2, int4, int8, numeric, float4, float8 FloatToInt Cast float value to one of integer type. The fractional part will be discarded numeric, float4, float8 int2, int4, int8, numeric IntToFloat Cast int value to one of integer type int2, int4, int8, numeric numeric, float4, float8 IntToBool Cast int value to boolean. The value with 0 is false, 1 is true int2, int4, int8, numeric, float4, float8 bool BoolToInt Cast boolean value to int. 
The value false is 0, true is 1 bool int2, int4, int8, numeric, float4, float8"},{"location":"built_in_transformers/dynamic_parameters/#example-functional-dependency-resolution-between-columns","title":"Example: Functional dependency resolution between columns","text":"

    There is simplified schema of the table humanresources.employee from the playground:

           Column      |            Type                      \n------------------+-----------------------------\n businessentityid | integer                      \n jobtitle         | character varying(50)        \n birthdate        | date                        \n hiredate         | date                         \nCheck constraints:\n    CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\n

    As you can see, there is a functional dependency between the birthdate and hiredate columns. Logically, the hiredate should be later than the birthdate. Additionally, the birthdate should range from 1930-01-01 to 18 years prior to the current date.

    Imagine that you need to generate random birthdate and hiredate columns. To ensure these dates satisfy the constraints, you can use dynamic parameters in the RandomDate transformer:

    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n\n    - name: \"RandomDate\" # (1)\n      params:\n        column: \"birthdate\"\n        min: '{{ now | tsModify \"-30 years\" | .EncodeValue }}' # (2)\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # (3)\n\n    - name: \"RandomDate\" # (4)\n      params:\n        column: \"hiredate\"\n        max: \"{{ now | .EncodeValue }}\" # (5)\n      dynamic_params:\n        min:\n          column: \"birthdate\" # (6)\n          template: '{{ .GetValue | tsModify \"18 years\" | .EncodeValue }}' # (7)\n
    1. Firstly, we generate the RandomDate for the birthdate column. The result of the transformation will be used as the minimum value for the next transformation for the hiredate column.
    2. Apply the template for the static parameter. It calculates the now date and subtracts 30 years from it. The result is 1994. The tsModify function returns not raw data but a time.Time object. To get a raw value suitable for the birthdate type, we need to pass this value to the .EncodeValue function. This value is used as the minimum value for the birthdate column.
    3. The same as the previous step, but we subtract 18 years from the now date. The result is 2002.
    4. Generate the RandomDate for the hiredate column based on the value from the birthdate.
    5. Set the maximum value for the hiredate column. The value is the current date.
    6. The min parameter is set to the value of the birthdate column from the previous step.
    7. The template gets the value of the randomly generated birthdate value and adds 18 years to it.

    Below is the result of the transformation:

    From the result, you can see that all functional dependencies and constraints are satisfied.

    "},{"location":"built_in_transformers/parameters_templating/","title":"Parameters templating","text":""},{"location":"built_in_transformers/parameters_templating/#description","title":"Description","text":"

    It is allowed to generate parameter values from templates. It is useful when you don't want to write values manually, but instead want to generate and initialize them dynamically.

    Here you can find the list of template functions that can be used in the template Custom functions.

    You can encode and decode objects using the driver functions below.

    "},{"location":"built_in_transformers/parameters_templating/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error) .DecodeValue Decodes a value from its raw string representation to a Golang type using the data type assigned to the table column specified in the column parameter. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(value any) (res any, err error) .EncodeValue Encodes a value of any type into its string representation using the type assigned to the table column specified in the column parameter. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValue(value any) (res any, err error)

    Warning

    If column parameter is not linked to column parameter, then functions .DecodeValue and .EncodeValue will return an error. You can use .DecodeValueByType and .EncodeValueByType or .DecodeValueByColumn and .EncodeValueByColumn instead.

    "},{"location":"built_in_transformers/parameters_templating/#example","title":"Example","text":"

    In the example below, the min and max values for the birth_date column are generated dynamically using the now template function. The function returns the current date and time. The tsModify function is then used to subtract 30 (and 18) years. But because the parameter type is mapped on the column parameter type, the EncodeValue function is used to encode the value into the column type.

    For example, if we have the now date as 2021-01-01, the dynamically calculated min value will be 1994-01-01 and the max value will be 2006-01-01.

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birth_date\"\n        min: '{{ now | tsModify \"-30 years\" | .EncodeValue }}' # 1994\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # 2006\n

    Result

    ColumnOriginalValueTransformedValue birth_date1980-01-011995-09-06"},{"location":"built_in_transformers/transformation_condition/","title":"Transformation Condition","text":""},{"location":"built_in_transformers/transformation_condition/#description","title":"Description","text":"

    The transformation condition feature allows you to execute a defined transformation only if a specified condition is met. The condition must be defined as a boolean expression that evaluates to true or false. Greenmask uses expr-lang/expr under the hood. You can use all functions and syntax provided by the expr library.

    You can use the same functions that are described in the built-in transformers

    The transformers are executed one by one - this helps you create complex transformation pipelines. For instance, depending on the value chosen by the previous transformer, you can decide whether or not to execute the next transformer.

    "},{"location":"built_in_transformers/transformation_condition/#record-descriptors","title":"Record descriptors","text":"

    To improve the user experience, Greenmask offers special namespaces for accessing values in different formats: either the driver-encoded value in its real type or as a raw string.

    You can access a specific column\u2019s value using record.column_name for the real type or raw_record.column_name for the raw string value.

    Warning

    A record may always be modified by previous transformers before the condition is evaluated. This means Greenmask does not retain the original record value and instead provides the current modified value for condition evaluation.

    "},{"location":"built_in_transformers/transformation_condition/#null-values-condition","title":"Null values condition","text":"

    To check if the value is null, you can use the null value for the comparison. This operation works compatibly with the SQL operators IS NULL and IS NOT NULL.

    Is null cond example
    record.accountnumber == null && record.date > now()\n
    Is not null cond example
    record.accountnumber != null && record.date <= now()\n
    "},{"location":"built_in_transformers/transformation_condition/#expression-scope","title":"Expression scope","text":"

    Expression scope can be set at the table level or for a specific transformer. If you define the condition at the table scope, the condition will be evaluated before any transformer is executed. If you define the condition at the transformer scope, the condition will be evaluated before the specified transformer is executed.

    Table scope
    - schema: \"purchasing\"\n  name: \"vendor\"\n  when: 'record.accountnumber == null || record.accountnumber == \"ALLENSON0001\"'\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    Transformer scope
    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomString\"\n      when: 'record.accountnumber != null || record.accountnumber == \"ALLENSON0001\"'\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_condition/#int-and-float-value-definition","title":"Int and float value definition","text":"

    It is important to create the integer or float value in the correct format. If you want to define the integer value you must write a number without dot (1, 2, etc.). If you want to define the float value you must write a number with dot (1.0, 2.0, etc.).

    Warning

    You may see a wrong comparison result if you compare int and float, for example 1 == 1.0 will return false.

    "},{"location":"built_in_transformers/transformation_condition/#architecture","title":"Architecture","text":"

    Greenmask decodes values only when evaluating the condition - this allows optimizing the performance of the transformation if you have a lot of conditions that use the or (||) or and (&&) operators.

    "},{"location":"built_in_transformers/transformation_condition/#example-chose-random-value-and-execute-one-of","title":"Example: Choose a random value and execute one of","text":"

    In the following example, the RandomChoice transformer is used to choose a random value from the list of values. Depending on the chosen value, the Replace transformer is executed to set the activeflag column to true or false.

    In this case the condition scope is on the transformer level.

    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomChoice\"\n      params:\n        column: \"name\"\n        values:\n          - \"test1\"\n          - \"test2\"\n\n    - name: \"Replace\"\n      when: 'record.name == \"test1\"'\n      params:\n        column: \"activeflag\"\n        value: \"false\"\n\n    - name: \"Replace\"\n      when: 'record.name == \"test2\"'\n      params:\n        column: \"activeflag\"\n        value: \"true\"\n
    "},{"location":"built_in_transformers/transformation_condition/#example-do-not-transform-specific-columns","title":"Example: Do not transform specific columns","text":"

    In the following example, the RandomString transformer is executed only if the businessentityid column value is not equal to 1492 or 1.

      - schema: \"purchasing\"\n    name: \"vendor\"\n    when: '!(record.businessentityid | has([1492, 1]))'\n    transformers:\n      - name: \"RandomString\"\n        params:\n          column: \"accountnumber\"\n          min_length: 9\n          max_length: 12\n          symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_condition/#example-check-the-json-attribute-value","title":"Example: Check the json attribute value","text":"

    In the following example, the RandomString transformer is executed only if the a attribute in the json_data column is equal to 1.

    - schema: \"public\"\n  name: \"jsondata\"\n  when: 'raw_record.json_data | jsonGet(\"a\") == 1'\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_engines/","title":"Transformation engine","text":"

    Greenmask provides two engines: random and hash. Most of the transformers have an engine parameter that is set to random by default. Use the hash engine when you need to generate deterministic data - the same input will always produce the same output.

    Info

    Greenmask employs the SHA-3 algorithm to hash input values. While this function is cryptographically secure, it does exhibit lower performance. We plan to introduce additional hash functions in the future to offer a balance between security and performance. For example, SipHash, which provides a good trade-off between security and performance, is currently in development and is expected to be included in the stable v0.2 release of Greenmask.

    Warning

    The hash engine does not guarantee the uniqueness of generated values. Although transformers such as Hash, RandomEmail, and RandomUuid typically have a low probability of producing duplicate values. The feature to ensure uniqueness is currently under development at Greenmask and is expected to be released in future updates. For the latest status, please visit the Greenmask roadmap.

    "},{"location":"built_in_transformers/transformation_engines/#details","title":"Details","text":""},{"location":"built_in_transformers/transformation_engines/#example-schema","title":"Example schema","text":"

    The next examples will be run on the following schema and sample data:

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n\nCREATE TABLE orders\n(\n    id          SERIAL PRIMARY KEY,\n    account_id  INTEGER REFERENCES account (id),\n    total_price NUMERIC(10, 2),\n    created_at  TIMESTAMP NOT NULL DEFAULT NOW(),\n    paid_at     TIMESTAMP\n);\n\nINSERT INTO orders (account_id, total_price, created_at, paid_at)\nVALUES (1, 100.50, '2024-05-01', '2024-05-02'),\n       (1, 200.75, '2024-05-03', NULL);\n
    "},{"location":"built_in_transformers/transformation_engines/#random-engine","title":"Random engine","text":"

    The random engine serves as the default engine for the greenmask. It operates using a pseudo-random number generator, which is initialized with a random seed sourced from a cryptographically secure random number generator. Employ the random engine when you need to generate random data and do not require reproducibility of the same transformation results with the same input.

    The following example demonstrates how to configure the RandomDate transformer to generate random dates.

    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birth_date\"\n        engine: \"random\" # (1)\n        min: '1970-01-01'\n        max: '2000-01-01'\n
    1. random engine is explicitly specified, although it is the default value.

    Results:

    ColumnOriginalValueTransformedValue birth_date1980-01-011970-02-23

    Keep in mind that the random engine always generates different values for the same input. For instance, if we run the previous example multiple times, we will get different results.

    "},{"location":"built_in_transformers/transformation_engines/#hash-engine","title":"Hash engine","text":"

    The hash engine is designed to generate deterministic data. It uses the SHA-3 algorithm to hash the input value. The hash engine is particularly useful when you need to generate the same output for the same input. For example, when you want to transform values that are used as primary or foreign keys in a database.

    For security reasons, it is suggested to set a global greenmask salt via the GREENMASK_GLOBAL_SALT environment variable. The salt is added to the hash input to prevent the possibility of reverse engineering the original value from the hashed output. The value is hex encoded with variadic length. For example, GREENMASK_GLOBAL_SALT=a5eddc84e762e810. Generate a strong random salt and keep it secret.

    The following example demonstrates how to configure the RandomInt transformer to generate deterministic data using the hash engine. The public.account.id and public.orders.account_id columns will have the same values.

    - schema: \"public\"\n  name: \"account\"\n  transformers:\n\n    - name: \"RandomInt\"\n      params:\n        column: \"id\"\n        engine: hash\n        min: 1\n        max: 2147483647\n\n- schema: \"public\"\n  name: \"orders\"\n  transformers:\n\n    - name: \"RandomInt\"\n      params:\n        column: \"account_id\"\n        engine: hash\n        min: 1\n        max: 2147483647\n

    Result:

    ColumnOriginalValueTransformedValue id1130162079 ColumnOriginalValueTransformedValue account_id1130162079"},{"location":"built_in_transformers/transformation_inheritance/","title":"Transformation Inheritance","text":""},{"location":"built_in_transformers/transformation_inheritance/#description","title":"Description","text":"

    If you have partitioned tables or want to apply a transformation to a primary key and propagate it to all tables referencing that column, you can do so with Greenmask.

    "},{"location":"built_in_transformers/transformation_inheritance/#apply-for-inherited","title":"Apply for inherited","text":"

    Using apply_for_inherited, you can apply transformations to all partitions of a partitioned table, including any subpartitions.

    "},{"location":"built_in_transformers/transformation_inheritance/#configuration-conflicts","title":"Configuration conflicts","text":"

    When a partition has a transformation defined manually via config, and apply_for_inherited is set on the parent table, Greenmask will merge both the inherited and manually defined configurations. The manually defined transformation will execute last, giving it higher priority.

    If this situation occurs, you will see the following information in the log:

    {\n  \"level\": \"info\",\n  \"ParentTableSchema\": \"public\",\n  \"ParentTableName\": \"sales\",\n  \"ChildTableSchema\": \"public\",\n  \"ChildTableName\": \"sales_2022_feb\",\n  \"ChildTableConfig\": [\n    {\n      \"name\": \"RandomDate\",\n      \"params\": {\n        \"column\": \"sale_date\",\n        \"engine\": \"random\",\n        \"max\": \"2005-01-01\",\n        \"min\": \"2001-01-01\"\n      }\n    }\n  ],\n  \"time\": \"2024-11-03T22:14:01+02:00\",\n  \"message\": \"config will be merged: found manually defined transformers on the partitioned table\"\n}\n
    "},{"location":"built_in_transformers/transformation_inheritance/#apply-for-references","title":"Apply for references","text":"

    Using apply_for_references, you can apply transformations to columns involved in a primary key or in tables with a foreign key that references that column. This simplifies the transformation process by requiring you to define the transformation only on the primary key column, which will then be applied to all tables referencing that column.

    The transformer must be deterministic or support the hash engine, and the hash engine must be set in the configuration file.

    List of transformers that supports apply_for_references:

    "},{"location":"built_in_transformers/transformation_inheritance/#end-to-end-identifiers","title":"End-to-End Identifiers","text":"

    End-to-end identifiers in databases are unique identifiers that are consistently used across multiple tables in a relational database schema, allowing for a seamless chain of references from one table to another. These identifiers typically serve as primary keys in one table and are propagated as foreign keys in other tables, creating a direct, traceable link from one end of a data relationship to the other.

    Greenmask can detect end-to-end identifiers and apply transformations across the entire sequence of tables. These identifiers are detected when the following condition is met: the foreign key serves as both a primary key and a foreign key in the referenced table.

    "},{"location":"built_in_transformers/transformation_inheritance/#configuration-conflicts_1","title":"Configuration conflicts","text":"

    When a transformation is manually defined via config on the referenced column, and apply_for_references is set on the parent table, the manually defined transformation will be chosen and the inherited transformation will be ignored. You will receive an INFO message in the logs.

    {\n  \"level\": \"info\",\n  \"TransformerName\": \"RandomInt\",\n  \"ParentTableSchema\": \"public\",\n  \"ParentTableName\": \"tablea\",\n  \"ChildTableSchema\": \"public\",\n  \"ChildTableName\": \"tablec\",\n  \"ChildColumnName\": \"id2\",\n  \"TransformerConfig\": {\n    \"name\": \"RandomInt\",\n    \"apply_for_references\": true\n  },\n  \"time\": \"2024-11-03T21:28:10+02:00\",\n  \"message\": \"skipping apply transformer for reference: found manually configured transformer\"\n}\n
    "},{"location":"built_in_transformers/transformation_inheritance/#limitations","title":"Limitations","text":"

    Warning

    We do not recommend using apply_for_references with transformation conditions, as these conditions are not inherited by transformers on the referenced columns. This may lead to inconsistencies in the data.

    "},{"location":"built_in_transformers/transformation_inheritance/#example-1-partitioned-tables","title":"Example 1. Partitioned tables","text":"

    In this example, we have a partitioned table sales that is partitioned by year and then by month. Each partition contains a subset of data based on the year and month of the sale. The sales table has a primary key sale_id and is partitioned by sale_date. The sale_date column is transformed using the RandomDate transformer.

    CREATE TABLE sales\n(\n    sale_id   SERIAL         NOT NULL,\n    sale_date DATE           NOT NULL,\n    amount    NUMERIC(10, 2) NOT NULL\n) PARTITION BY RANGE (EXTRACT(YEAR FROM sale_date));\n\n-- Step 2: Create first-level partitions by year\nCREATE TABLE sales_2022 PARTITION OF sales\n    FOR VALUES FROM (2022) TO (2023)\n    PARTITION BY LIST (EXTRACT(MONTH FROM sale_date));\n\nCREATE TABLE sales_2023 PARTITION OF sales\n    FOR VALUES FROM (2023) TO (2024)\n    PARTITION BY LIST (EXTRACT(MONTH FROM sale_date));\n\n-- Step 3: Create second-level partitions by month for each year, adding PRIMARY KEY on each partition\n\n-- Monthly partitions for 2022\nCREATE TABLE sales_2022_jan PARTITION OF sales_2022 FOR VALUES IN (1)\n    WITH (fillfactor = 70);\nCREATE TABLE sales_2022_feb PARTITION OF sales_2022 FOR VALUES IN (2);\nCREATE TABLE sales_2022_mar PARTITION OF sales_2022 FOR VALUES IN (3);\n-- Continue adding monthly partitions for 2022...\n\n-- Monthly partitions for 2023\nCREATE TABLE sales_2023_jan PARTITION OF sales_2023 FOR VALUES IN (1);\nCREATE TABLE sales_2023_feb PARTITION OF sales_2023 FOR VALUES IN (2);\nCREATE TABLE sales_2023_mar PARTITION OF sales_2023 FOR VALUES IN (3);\n-- Continue adding monthly partitions for 2023...\n\n-- Step 4: Insert sample data\nINSERT INTO sales (sale_date, amount)\nVALUES ('2022-01-15', 100.00);\nINSERT INTO sales (sale_date, amount)\nVALUES ('2022-02-20', 150.00);\nINSERT INTO sales (sale_date, amount)\nVALUES ('2023-03-10', 200.00);\n

    To transform the sale_date column in the sales table and all its partitions, you can use the following configuration:

    - schema: public\n  name: sales\n  apply_for_inherited: true\n  transformers:\n    - name: RandomDate\n      params:\n        min: \"2022-01-01\"\n        max: \"2022-03-01\"\n        column: \"sale_date\"\n        engine: \"random\"\n
    "},{"location":"built_in_transformers/transformation_inheritance/#example-2-simple-table-references","title":"Example 2. Simple table references","text":"

    This is an ordinary table reference where the primary key of the users table is referenced in the orders table.

    -- Enable the extension for UUID generation (if not enabled)\nCREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\n\nCREATE TABLE users\n(\n    user_id  UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n    username VARCHAR(50) NOT NULL\n);\n\nCREATE TABLE orders\n(\n    order_id   UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n    user_id    UUID REFERENCES users (user_id),\n    order_date DATE NOT NULL\n);\n\nINSERT INTO users (username)\nVALUES ('john_doe');\nINSERT INTO users (username)\nVALUES ('jane_smith');\n\nINSERT INTO orders (user_id, order_date)\nVALUES ((SELECT user_id FROM users WHERE username = 'john_doe'), '2024-10-31'),\n       ((SELECT user_id FROM users WHERE username = 'jane_smith'), '2024-10-30');\n

    To transform the username column in the users table, you can use the following configuration:

    - schema: public\n  name: users\n  apply_for_inherited: true\n  transformers:\n    - name: RandomUuid\n      apply_for_references: true\n      params:\n        column: \"user_id\"\n        engine: \"hash\"\n

    This will apply the RandomUuid transformation to the user_id column in the orders table automatically.

    "},{"location":"built_in_transformers/transformation_inheritance/#example-3-references-on-tables-with-end-to-end-identifiers","title":"Example 3. References on tables with end-to-end identifiers","text":"

    In this example, we have three tables: tablea, tableb, and tablec. All tables have a composite primary key. In the tables tableb and tablec, the primary key is also a foreign key that references the primary key of tablea. This means that all PKs are end-to-end identifiers.

    CREATE TABLE tablea\n(\n    id1  INT,\n    id2  INT,\n    data VARCHAR(50),\n    PRIMARY KEY (id1, id2)\n);\n\nCREATE TABLE tableb\n(\n    id1    INT,\n    id2    INT,\n    detail VARCHAR(50),\n    PRIMARY KEY (id1, id2),\n    FOREIGN KEY (id1, id2) REFERENCES tablea (id1, id2) ON DELETE CASCADE\n);\n\nCREATE TABLE tablec\n(\n    id1         INT,\n    id2         INT,\n    description VARCHAR(50),\n    PRIMARY KEY (id1, id2),\n    FOREIGN KEY (id1, id2) REFERENCES tableb (id1, id2) ON DELETE CASCADE\n);\n\nINSERT INTO tablea (id1, id2, data)\nVALUES (1, 1, 'Data A1'),\n       (2, 1, 'Data A2'),\n       (3, 1, 'Data A3');\n\nINSERT INTO tableb (id1, id2, detail)\nVALUES (1, 1, 'Detail B1'),\n       (2, 1, 'Detail B2'),\n       (3, 1, 'Detail B3');\n\nINSERT INTO tablec (id1, id2, description)\nVALUES (1, 1, 'Description C1'),\n       (2, 1, 'Description C2'),\n       (3, 1, 'Description C3');\n

    To transform the data column in tablea, you can use the following configuration:

    - schema: public\n  name: \"tablea\"\n  apply_for_inherited: true\n  transformers:\n    - name: RandomInt\n      apply_for_references: true\n      params:\n        min: 0\n        max: 100\n        column: \"id1\"\n        engine: \"hash\"\n    - name: RandomInt\n      apply_for_references: true\n      params:\n        min: 0\n        max: 100\n        column: \"id2\"\n        engine: \"hash\"\n

    This will apply the RandomInt transformation to the id1 and id2 columns in tableb and tablec automatically.

    "},{"location":"built_in_transformers/advanced_transformers/","title":"Advanced transformers","text":"

    Advanced transformers are modifiable anonymization methods that users can adjust based on their needs by using custom functions.

    Below you can find an index of all advanced transformers currently available in Greenmask.

    1. Json \u2014 changes JSON content by using delete and set operations.
    2. Template \u2014 executes a Go template of your choice and applies the result to a specified column.
    3. TemplateRecord \u2014 modifies records by using a Go template of your choice and applies the changes via the PostgreSQL driver.
    "},{"location":"built_in_transformers/advanced_transformers/json/","title":"Json","text":"

    Change a JSON document using delete and set operations. NULL values are kept.

    "},{"location":"built_in_transformers/advanced_transformers/json/#parameters","title":"Parameters","text":"Name Properties Description Default Required Supported DB types column The name of the column to be affected Yes json, jsonb operations A list of operations that contains editing delete and set Yes - \u221f operation Specifies the operation type: set or delete Yes - \u221f path The path to an object to be modified. See path syntax below. Yes - \u221f value A value to be assigned to the provided path No - \u221f value_template A Golang template to be assigned to the provided path. See the list of template functions below. No - \u221f error_not_exist Throws an error if the key does not exist by the provided path. Disabled by default. false No -"},{"location":"built_in_transformers/advanced_transformers/json/#description","title":"Description","text":"

    The Json transformer applies a sequence of changing operations (set and/or delete) to a JSON document. The value can be static or dynamic. For the set operation type, a static value is provided in the value parameter, while a dynamic value is provided in the value_template parameter, taking the data received after template execution as a result. Both the value and value_template parameters are mandatory for the set operation.

    "},{"location":"built_in_transformers/advanced_transformers/json/#path-syntax","title":"Path syntax","text":"

    The Json transformer is based on tidwall/sjson and supports the same path syntax. See their documentation for syntax rules.

    "},{"location":"built_in_transformers/advanced_transformers/json/#template-functions","title":"Template functions","text":"Function Description Signature .GetPath Returns the current path to which the operation is being applied .GetPath() (path string) .GetOriginalValue Returns the original value to which the current operation path is pointing. If the value at the specified path does not exist, it returns nil. .GetOriginalValue() (value any) .OriginalValueExists Returns a boolean value indicating whether the specified path exists or not. .OriginalValueExists() (exists bool) .GetColumnValue Returns an encoded into Golang type value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. 
Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/json/#example-changing-json-document","title":"Example: Changing JSON document","text":"Json transformer example
    - schema: \"bookings\"\n  name: \"aircrafts_data\"\n  transformers:\n    - name: \"Json\"\n      params:\n        column: \"model\"\n        operations:\n          - operation: \"set\"\n            path: \"en\"\n            value: \"Boeing 777-300-2023\"\n          - operation: \"set\"\n            path: \"seats\"\n            error_not_exist: True\n            value_template: \"{{ randomInt 100 400 }}\"\n          - operation: \"set\"\n            path: \"details.preperties.1\"\n            value: {\"name\": \"somename\", \"description\": null}\n          - operation: \"delete\"\n            path: \"values.:2\"\n
    "},{"location":"built_in_transformers/advanced_transformers/template/","title":"Template","text":"

    Execute a Go template and automatically apply the result to a specified column.

    "},{"location":"built_in_transformers/advanced_transformers/template/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any template A Go template string Yes - validate Validates the template result using the PostgreSQL driver decoding procedure. Throws an error if a custom type does not have an encode-decoder implementation. false No -"},{"location":"built_in_transformers/advanced_transformers/template/#description","title":"Description","text":"

    The Template transformer executes Go templates and automatically applies the template result to a specified column. Go template system is designed to be extensible, enabling developers to access data objects and incorporate custom functions programmatically. For more information, you can refer to the official Go Template documentation.

    With the Template transformer, you can implement complicated transformation logic using basic or custom template functions. Below you can get familiar with the basic template functions for the Template transformer. For more information about available custom template functions, see Custom functions.

    Warning

    Pay attention to the whitespaces in templates. Use dash-wrapped - brackets {{- -}} for trimming the spaces. For example, the value \"2023-12-19\" is not the same as \" 2023-12-19 \" and it may throw an error when restoring.

    "},{"location":"built_in_transformers/advanced_transformers/template/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .GetValue Returns the column value for column assigned in the column parameter, encoded by the PostgreSQL driver into any type along with any associated error. Supported types include int, float, time, string, bool, as well as slice or map of any type. .GetValue() (value any, err error) .GetRawValue Returns a raw value as a string for column assigned in the column parameter. .GetRawValue() (value string, err error) .GetColumnValue Returns an encoded value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .EncodeValue Encodes a value of any type into its string representation using the type assigned to the table column specified in the column parameter. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValue(value any) (res any, err error) .DecodeValue Decodes a value from its raw string representation to a Golang type using the data type assigned to the table column specified in the column parameter. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValue(value any) (res any, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. 
.EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/template/#example-update-the-firstname-column","title":"Example: Update the firstname column","text":"

    Below you can see the table structure:

    "},{"location":"built_in_transformers/advanced_transformers/template/#change-rule","title":"Change rule","text":"

    The goal is to modify the firstname column based on the following conditions:

    "},{"location":"built_in_transformers/advanced_transformers/template/#using-a-template-function","title":"Using a template function","text":"

    To generate random names, you can use the fakerFirstName template function, which is designed to create synthetic names.

    Template transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformation:\n    - name: \"Template\"\n      params:\n        column: \"firstname\"\n        template: >\n          {{- if eq .GetValue \"Terri\" -}}\n            Mary\n          {{- else -}}\n            {{- fakerFirstName -}} Jr\n          {{- end -}}\n\n        validate: true\n

    Expected result:

    Value = TerryValue != Terri column name original value transformed firstname Terri Mary column name original value transformed firstname Ken Jr Mike"},{"location":"built_in_transformers/advanced_transformers/template_record/","title":"TemplateRecord","text":"

    Modify records using a Go template and apply changes by using the PostgreSQL driver functions. This transformer provides a way to implement custom transformation logic.

    "},{"location":"built_in_transformers/advanced_transformers/template_record/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns A list of columns to be affected by the template. The list of columns will be checked for constraint violations. No any template A Go template string Yes - validate Validate the template result via PostgreSQL driver decoding procedure. Throws an error if a custom type does not have an encode-decoder implementation. false No -"},{"location":"built_in_transformers/advanced_transformers/template_record/#description","title":"Description","text":"

    TemplateRecord uses Go templates to change data. However, while the Template transformer operates with a single column and automatically applies results, the TemplateRecord transformer can make changes to a set of columns in the record, and using driver functions .SetColumnValue or .SetRawColumnValue is mandatory to do that.

    With the TemplateRecord transformer, you can implement complicated transformation logic using basic or custom template functions. Below you can get familiar with the basic template functions for the TemplateRecord transformer. For more information about available custom template functions, see Custom functions.

    "},{"location":"built_in_transformers/advanced_transformers/template_record/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .GetColumnValue Returns an encoded value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .SetColumnValue Sets a new value of a specific data type to the column. The value assigned must be compatible with the PostgreSQL data type of the column. For example, it is allowed to assign an int value to an INTEGER column, but you cannot assign a float value to a timestamptz column. SetColumnValue(name string, v any) (bool, error) .SetRawColumnValue Sets a new raw value for a column, inheriting the column's existing data type, without performing data type validation. This can lead to errors when restoring the dump if the assigned value is not compatible with the column type. To ensure compatibility, consider using the .DecodeValueByColumn function followed by .SetColumnValue, for example, {{ \"13\" \\| .DecodeValueByColumn \"items_amount\" \\| .SetColumnValue \"items_amount\" }}. .SetRawColumnValue(name string, value any) (err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. 
.DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/template_record/#example-generate-a-random-created_at-and-updated_at-dates","title":"Example: Generate a random created_at and updated_at dates","text":"

    Below you can see the table structure:

    The goal is to modify the \"created_at\" and \"updated_at\" columns based on the following rules:

    Template transformer example
    - name: \"TemplateRecord\"\n  params:\n    columns:\n      - \"created_at\"\n      - \"updated_at\"\n    template: >\n      {{ $val := .GetColumnValue \"created_at\" }}\n      {{ if isNotNull $val }}\n          {{ $createdAtValue := now }}\n          {{ $maxUpdatedDate := date_modify \"24h\" $createdAtValue }}\n          {{ $updatedAtValue := randomDate $createdAtValue $maxUpdatedDate }}\n          {{ .SetColumnValue \"created_at\" $createdAtValue }}\n          {{ .SetColumnValue \"updated_at\" $updatedAtValue }}\n      {{ end }}\n    validate: true\n

    Expected result:

    column name original value transformed created_at 2021-01-20 07:01:00.513325+00 2023-12-17 19:37:29.910054Z updated_at 2021-08-09 21:27:00.513325+00 2023-12-18 10:05:25.828498Z"},{"location":"built_in_transformers/advanced_transformers/custom_functions/","title":"Template custom functions","text":"

    Within Greenmask, custom functions play a crucial role, providing a wide array of options for implementing diverse logic. Under the hood, the custom functions are based on the sprig Go's template functions. Greenmask enhances this capability by introducing additional functions and transformation functions. These extensions mirror the logic found in the standard transformers but offer you the flexibility to implement intricate and comprehensive logic tailored to your specific needs.

    Currently, you can use template custom functions for the advanced transformers:

    and for the Transformation condition feature as well.

    Custom functions are arbitrarily divided into 2 groups:

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/","title":"Core functions","text":"

    Below you can find custom core functions which are divided into categories based on the transformation purpose.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#postgresql-driver-functions","title":"PostgreSQL driver functions","text":"Function Description null Returns the NULL value that can be used for the driver encoding-decoding operations isNull Returns true if the checked value is NULL isNotNull Returns true if the checked value is not NULL sqlCoalesce Works as a standard SQL coalesce function. It allows you to choose the first non-NULL argument from the list."},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#json-output-function","title":"JSON output function","text":"Function Description jsonExists Checks if the path value exists in JSON. Returns true if the path exists. mustJsonGet Gets the JSON attribute value by path and throws an error if the path does not exist mustJsonGetRaw Gets the JSON attribute raw value by path and throws an error if the path does not exist jsonGet Gets the JSON attribute value by path and returns nil if the path does not exist jsonGetRaw Gets the JSON attribute raw value by path and returns nil if the path does not exist jsonSet Sets the value for the JSON document by path jsonSetRaw Sets the raw value for the JSON document by path jsonDelete Deletes an attribute from the JSON document by path jsonValidate Validates the JSON document syntax and throws an error if there are any issues jsonIsValid Checks the JSON document for validity and returns true if it is valid toJsonRawValue Casts any type of value to the raw JSON value"},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#testing-functions","title":"Testing functions","text":"Function Description isInt Checks if the value of an integer type isFloat Checks if the value of a float type isNil Checks if the value is nil isString Checks if the value of a string type isMap Checks if the value of a map type isSlice Checks if the value of a slice type isBool Checks 
if the value of a boolean type"},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#transformation-and-generators","title":"Transformation and generators","text":""},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#masking","title":"masking","text":"

    Replaces characters with asterisk * symbols depending on the provided masking rule. If the value is NULL, it is kept unchanged. This function is based on ggwhite/go-masker.

    Masking rulesSignatureParametersReturn values Rule Description Example input Example output default Returns the sequence of * symbols of the same length test1234 ******** name Masks the second and the third letters ABCD A**D password Always returns a sequence of * address Keeps first 6 letters, masks the rest Larnaca, makarios st Larnac************* email Keeps a domain and the first 3 letters, masks the rest ggw.chang@gmail.com ggw****@gmail.com mobile Masks 3 digits starting from the 4th digit 0987654321 0987***321 telephone Removes (, ), , - symbols, masks last 4 digits of a telephone number, and formats it to (??)????-???? 0227993078 (02)2799-**** id Masks last 4 digits of an ID A123456789 A12345**** credit_card Masks 6 digits starting from the 7th digit 1234567890123456 123456******3456 url Masks the password part of the URL (if applicable) http://admin:mysecretpassword@localhost:1234/uri http://admin:xxxxx@localhost:1234/uri

    masking(dataType string, value string) (res string, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#truncatedate","title":"truncateDate","text":"

    Truncates datetime up to the provided part.

    SignatureParametersReturn values

    truncateDate(part string, original time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noisedatepginterval","title":"noiseDatePgInterval","text":"

    Adds or subtracts a random duration in the provided interval to or from the original date value.

    SignatureParametersReturn values

    noiseDate(interval string, original time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noisefloat","title":"noiseFloat","text":"

    Adds or subtracts a random fraction to or from the original float value. Multiplies the original float value by a provided random value that is not higher than the ratio parameter and adds it to the original value with the option to specify the decimal via the decimal parameter.

    SignatureParametersReturn values

    noiseFloat(ratio float, decimal int, value float) (res float64, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noiseint","title":"noiseInt","text":"

    Adds or subtracts a random fraction to or from the original integer value. Multiplies the original integer value by a provided random value that is not higher than the ratio parameter and adds it to the original value.

    SignatureParametersReturn values

    noiseInt(ratio float, value float) (res int, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randombool","title":"randomBool","text":"

    Generates a random boolean value.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomdate","title":"randomDate","text":"

    Generates a random date within the provided interval.

    SignatureParametersReturn values

    randomDate(min time.Time, max time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomfloat","title":"randomFloat","text":"

    Generates a random float value within the provided interval.

    SignatureParametersReturn values

    randomFloat(min any, max any, decimal int) (res float, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomint","title":"randomInt","text":"

    Generates a random integer value within the provided interval.

    SignatureParametersReturn values

    randomInt(min int, max int) (res int, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomstring","title":"randomString","text":"

    Generates a random string using the provided characters within the specified length range.

    SignatureParametersReturn values

    randomString(minLength int, maxLength int, symbols string) (res string, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#roundfloat","title":"roundFloat","text":"

    Rounds a float value up to provided decimal.

    SignatureParametersReturn values

    roundFloat(decimal int, original float) (res float, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#tsmodify","title":"tsModify","text":"

    Modifies the original time value by adding or subtracting the provided interval. The interval is a string in the format of the PostgreSQL interval.

    SignatureParametersReturn values

    tsModify(interval string, val time.Time) (time.Time, error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/","title":"Faker functions","text":"

    Greenmask uses go-faker/faker under the hood for generating synthetic data.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-address","title":"Faker functions: Address","text":"Function Description Signature fakerRealAddress Generates a random real-world address that includes: city, state, postal code, latitude, and longitude fakerRealAddress() (res RealAddress) fakerLatitude Generates random fake latitude fakerLatitude() (res float64) fakerLongitude Generates random fake longitude fakerLongitude() (res float64)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-datetime","title":"Faker functions: Datetime","text":"Function Description Signature fakerUnixTime Generates random Unix time in seconds fakerUnixTime() (res int64) fakerDate Generates random date with the pattern of YYYY-MM-DD fakerDate() (res string) fakerTimeString Generates random time fakerTimeString() (res string) fakerMonthName Generates a random month fakerMonthName() (res string) fakerYearString Generates a random year fakerYearString() (res string) fakerDayOfWeek Generates a random day of a week fakerDayOfWeek() (res string) fakerDayOfMonth Generates a random day of a month fakerDayOfMonth() (res string) fakerTimestamp Generates a random timestamp with the pattern of YYYY-MM-DD HH:MM:SS fakerTimestamp() (res string) fakerCentury Generates a random century fakerCentury() (res string) fakerTimezone Generates a random timezone name fakerTimezone() (res string) fakerTimeperiod Generates a random time period with the pattern of either AM or PM fakerTimeperiod() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-internet","title":"Faker functions: Internet","text":"Function Description Signature fakerEmail Generates a random email fakerEmail() (res string) fakerMacAddress Generates a random MAC address fakerMacAddress() (res string) fakerDomainName Generates a random domain name
fakerDomainName() (res string) fakerURL Generates a random URL with the pattern of https://www.domainname.some/somepath fakerURL() (res string) fakerUsername Generates a random username fakerUsername() (res string) fakerIPv4 Generates a random IPv4 address fakerIPv4() (res string) fakerIPv6 Generates a random IPv6 address fakerIPv6() (res string) fakerPassword Generates a random password fakerPassword() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-words-and-sentences","title":"Faker functions: words and sentences","text":"Function Description Signature fakerWord Generates a random word fakerWord() (res string) fakerSentence Generates a random sentence fakerSentence() (res string) fakerParagraph Generates a random sequence of sentences as a paragraph fakerParagraph() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-payment","title":"Faker functions: Payment","text":"Function Description Signature fakerCCType Generates a random credit card type, e.g. VISA, MasterCard, etc. 
    fakerCCType() (res string) fakerCCNumber Generates a random credit card number fakerCCNumber() (res string) fakerCurrency Generates a random currency name fakerCurrency() (res string) fakerAmountWithCurrency Generates random amount preceded with random currency fakerAmountWithCurrency() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-person","title":"Faker functions: Person","text":"Function Description Signature fakerTitleMale Generates a random male title from the predefined list fakerTitleMale() (res string) fakerTitleFemale Generates a random female title from the predefined list fakerTitleFemale() (res string) fakerFirstName Generates a random first name fakerFirstName() (res string) fakerFirstNameMale Generates a random male first name fakerFirstNameMale() (res string) fakerFirstNameFemale Generates a random female first name fakerFirstNameFemale() (res string) fakerFirstLastName Generates a random last name fakerFirstLastName() (res string) fakerName Generates a random full name preceded with a title fakerName() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-phone","title":"Faker functions: Phone","text":"Function Description Signature fakerPhoneNumber Generates a random phone number fakerPhoneNumber() (res string) fakerTollFreePhoneNumber Generates a random phone number with the pattern of (123) 456-7890 fakerTollFreePhoneNumber() (res string) fakerE164PhoneNumber Generates a random phone number with the pattern of +12345678900 fakerE164PhoneNumber() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-uuid","title":"Faker functions: UUID","text":"Function Description Signature fakerUUIDHyphenated Generates a random unique user ID separated by hyphens fakerUUIDHyphenated() (res string) fakerUUIDDigit Generates a random unique user ID in the HEX format 
fakerUUIDDigit() (res string)"},{"location":"built_in_transformers/standard_transformers/","title":"Standard transformers","text":"

    Standard transformers are ready-to-use methods that require no customization and work with minimal parameter input. Below you can find an index of all standard transformers currently available in Greenmask.

    1. Cmd \u2014 transforms data via external program using stdin and stdout interaction.
    2. Dict \u2014 replaces values matched by dictionary keys.
    3. Hash \u2014 generates a hash of the text value.
    4. Masking \u2014 masks a value using one of the masking behaviors depending on your domain.
    5. NoiseDate \u2014 randomly adds or subtracts a duration within the provided ratio interval to the original date value.
    6. NoiseFloat \u2014 adds or subtracts a random fraction to the original float value.
    7. NoiseNumeric \u2014 adds or subtracts a random fraction to the original numeric value.
    8. NoiseInt \u2014 adds or subtracts a random fraction to the original integer value.
    9. RandomBool \u2014 generates random boolean values.
    10. RandomChoice \u2014 replaces values randomly chosen from a provided list.
    11. RandomDate \u2014 generates a random date in a specified interval.
    12. RandomFloat \u2014 generates a random float within the provided interval.
    13. RandomInt \u2014 generates a random integer within the provided interval.
    14. RandomString \u2014 generates a random string using the provided characters within the specified length range.
    15. RandomUuid \u2014 generates a random unique user ID.
    16. RandomLatitude \u2014 generates a random latitude value.
    17. RandomLongitude \u2014 generates a random longitude value.
    18. RandomUnixTimestamp \u2014 generates a random Unix timestamp.
    19. RandomDayOfWeek \u2014 generates a random day of the week.
    20. RandomDayOfMonth \u2014 generates a random day of the month.
    21. RandomMonthName \u2014 generates the name of a random month.
    22. RandomYearString \u2014 generates a random year as a string.
    23. RandomCentury \u2014 generates a random century.
    24. RandomTimezone \u2014 generates a random timezone.
    25. RandomEmail \u2014 generates a random email address.
    26. RandomUsername \u2014 generates a random username.
    27. RandomPassword \u2014 generates a random password.
    28. RandomDomainName \u2014 generates a random domain name.
    29. RandomURL \u2014 generates a random URL.
    30. RandomMac \u2014 generates a random MAC address.
    31. RandomIP \u2014 generates a random IPv4 or IPv6 address.
    32. RandomWord \u2014 generates a random word.
    33. RandomSentence \u2014 generates a random sentence.
    34. RandomParagraph \u2014 generates a random paragraph.
    35. RandomCCType \u2014 generates a random credit card type.
    36. RandomCCNumber \u2014 generates a random credit card number.
    37. RandomCurrency \u2014 generates a random currency code.
    38. RandomAmountWithCurrency \u2014 generates a random monetary amount with currency.
    39. RandomPerson \u2014 generates random person data (first name, last name, etc.)
    40. RandomPhoneNumber \u2014 generates a random phone number.
    41. RandomTollFreePhoneNumber \u2014 generates a random toll-free phone number.
    42. RandomE164PhoneNumber \u2014 generates a random phone number in E.164 format.
    43. RealAddress \u2014 generates a real address.
    44. RegexpReplace \u2014 replaces a string using a regular expression.
    45. Replace \u2014 replaces an original value by the provided one.
    46. SetNull \u2014 sets NULL value to the column.
    "},{"location":"built_in_transformers/standard_transformers/cmd/","title":"Cmd","text":"

    Transform data via external program using stdin and stdout interaction.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns A list of column names to be affected. If empty, the entire tuple is used. Read about the structure further. Yes Any executable The path to the executable parameter file Yes - args A list of parameters for the executable No - driver The row driver with parameters that is used for interacting with cmd. See details below. {\"name\": \"csv\"} No - validate Performs a decoding operation using the PostgreSQL driver for data received from the command to ensure the data format is correct false No - timeout Timeout for sending and receiving data from the external command 2s No - expected_exit_code The expected exit code on SIGTERM signal. If the exit code is unexpected, the transformation exits with an error. 0 No - skip_on_behaviour Skips transformation call if one of the provided columns has a null value (any) or each of the provided columns has null values (all). This option works together with the skip_on_null_input parameter on columns. Possible values: all, any. all No -

    Warning

    The parameter validate=true may cause an error if the type does not have a PostgreSQL driver decoder implementation. Most of the types, such as int, float, text, varchar, date, timestamp, etc., have encoders and decoders, as well as inherited types like domain types based on them.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#description","title":"Description","text":"

    The Cmd transformer allows you to send original data to an external program via stdin and receive transformed data from stdout. It supports various interaction formats such as json, csv, or plain text for one-column transformations. The interaction is performed line by line, so at the end of each sent data, a new line symbol \\n must be included.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#types-of-interaction-modes","title":"Types of interaction modes","text":""},{"location":"built_in_transformers/standard_transformers/cmd/#text","title":"text","text":"

    Textual driver that is used only for one-column transformations, thus you cannot provide more than one column here. The value is encoded into a string literally. For example, 2023-01-03 01:00:00.0+03.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#json","title":"json","text":"

    JSON line driver. It has two formats that can be passed through driver.json_data_format: [text|bytes]. Use the bytes format for binary datatypes. Use the text format for non-binary datatypes and for those that can be represented as string literals. The default json_data_format is text.

    Text format with indexesBytes format with indexes
    {\n  \"column1\": {\n    \"d\": \"some_value1\",\n    \"n\": false\n  },\n  \"column2\": {\n    \"d\": \"some_value2\",\n    \"n\": false\n  }\n}\n
    {\n  \"column1\": {\n    \"d\": \"aGVsbG8gd29ybHNeODcxMjE5MCUlJSUlJQ==\",\n    \"n\": false\n  },\n  \"column2\": {\n    \"d\": \"aGVsbG8gd29ybHNeODcxMjE5MCUlJSUlJQ==\",\n    \"n\": false\n  }\n}\n

    where:

    "},{"location":"built_in_transformers/standard_transformers/cmd/#csv","title":"csv","text":"

    CSV driver (comma-separated). The number of attributes is the same as the number of table columns, but the columns that were not mentioned in the columns list are empty. The NULL value is represented as \\N. Each attribute is escaped by a quote (\"). For example, if the transformed table has attributes id, title, and created_at, and only id and created_at require transformation, then the CSV line will look as follows:

    csv line example
    \"123\",\"\",\"2023-01-03 01:00:00.0+03\"\n
    "},{"location":"built_in_transformers/standard_transformers/cmd/#column-object-attributes","title":"Column object attributes","text":""},{"location":"built_in_transformers/standard_transformers/cmd/#example-apply-transformation-performed-by-external-command-in-text-format","title":"Example: Apply transformation performed by external command in TEXT format","text":"

    In the following example, the jobtitle column is transformed via an external command transformer.

    External transformer in python example
    #!/usr/bin/env python3\nimport signal\nimport sys\n\nsignal.signal(signal.SIGTERM, lambda sig, frame: exit(0))\n\n\n# If we want to implement a simple generator, we need read the line from stdin and write any result to stdout\nfor _ in sys.stdin:\n    # Writing the result to stdout with new line and flushing the buffer\n    sys.stdout.write(\"New Job Title\")\n    sys.stdout.write(\"\\n\")\n    sys.stdout.flush()\n
    Cmd transformer config example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Cmd\"\n      params:\n        driver:\n          name: \"text\"\n        expected_exit_code: -1\n        skip_on_null_input: true\n        validate: true\n        skip_on_behaviour: \"any\"\n        timeout: 60s\n        executable: \"/var/lib/playground/test.py\"\n        columns:\n          - name: \"jobtitle\"\n            skip_original_data: true\n            skip_on_null_input: true \n
    "},{"location":"built_in_transformers/standard_transformers/cmd/#example-apply-transformation-performed-by-external-command-in-json-format","title":"Example: Apply transformation performed by external command in JSON format","text":"

    In the following example, jobtitle and loginid columns are transformed via external command transformer.

    External transformer in python example
    #!/usr/bin/env python3\nimport json\nimport signal\nimport sys\n\nsignal.signal(signal.SIGTERM, lambda sig, frame: exit(0))\n\nfor line in sys.stdin:\n    res = json.loads(line)\n    # Setting dummy values\n    res[\"jobtitle\"] = {\"d\": \"New Job Title\", \"n\": False}\n    res[\"loginid\"][\"d\"] = \"123\"\n\n    # Writing the result to stdout with new line and flushing the buffer\n    sys.stdout.write(json.dumps(res))\n    sys.stdout.write(\"\\n\")\n    sys.stdout.flush()\n
    Cmd transformer config example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Cmd\"\n      params:\n        driver:\n          name: \"json\" # (1)\n          json_data_format: \"text\" # (4)\n        expected_exit_code: -1\n        skip_on_null_input: true\n        validate: true\n        skip_on_behaviour: \"any\" # (2)\n        timeout: 60s\n        executable: \"/var/lib/playground/test.py\"\n        columns:\n          - name: \"jobtitle\"\n            skip_original_data: true\n            skip_on_null_input: true # (3)\n          - name: \"loginid\"\n            skip_original_data: false # (5)\n            skip_on_null_input: true # (3)\n

    { .annotate }

    1. Validate the received data via decode procedure using the PostgreSQL driver. Note that this may cause an error if the type is not supported in the PostgreSQL driver.
    2. Skip transformation (keep the values) if one of the affected columns (not_affected=false) has a null value.
    3. If a column has a null value, then skip it. This works in conjunction with skip_on_behaviour. Since it has the value any, if one of the columns (jobtitle or loginid) has a null value, then skip the transformation call.
    4. The format of JSON can be either text or bytes. The default value is text.
    5. If the skip_original_data attribute is set to true, the data will not be transferred to the command. This column will contain empty original data.
    "},{"location":"built_in_transformers/standard_transformers/dict/","title":"Dict","text":"

    Replace values matched by dictionary keys.

    "},{"location":"built_in_transformers/standard_transformers/dict/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any values Value replace mapping as in: {\"string\": \"string\"}. The string with value \"\\N\" is considered NULL. No - default Shown if no value has been matched with dict. The string with value \"\\N\" is considered NULL. By default is empty. No - fail_not_matched When no value is matched with the dict, fails the replacement process if set to true, or keeps the current value, if set to false. true No - validate Performs the encode-decode procedure using column type to ensure that values have correct type true No -"},{"location":"built_in_transformers/standard_transformers/dict/#description","title":"Description","text":"

    The Dict transformer uses a user-provided key-value dictionary to replace values based on matches specified in the values parameter mapping. These provided values must align with the PostgreSQL type format. To validate the values format before application, you can utilize the validate parameter, triggering a decoding procedure via the PostgreSQL driver.

    If there are no matches by key, an error will be raised according to a default fail_not_matched: true parameter. You can change this behaviour by providing the default parameter, value from which will be shown in case of a missing match.

    In certain cases where the driver type does not support the validation operation, an error may occur. For setting or matching a NULL value, use a string with the \\N sequence.

    "},{"location":"built_in_transformers/standard_transformers/dict/#example-replace-marital-status","title":"Example: Replace marital status","text":"

    The following example replaces marital status from S to M or from M to S and raises an error if there is no match:

    Dict transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Dict\"\n      params:\n        column: \"maritalstatus\"\n        values:\n          \"S\": \"M\"\n          \"M\": \"S\"\n        validate: true\n        fail_not_matched: true\n

    Result

    ColumnOriginalValueTransformedValue maritalstatusSM"},{"location":"built_in_transformers/standard_transformers/hash/","title":"Hash","text":"

    Generate a hash of the text value using the Scrypt hash function under the hood. NULL values are kept.

    "},{"location":"built_in_transformers/standard_transformers/hash/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar salt Hex encoded salt string. This value may be provided via environment variable GREENMASK_GLOBAL_SALT Yes text, varchar function Hash algorithm to anonymize data. Can be any of md5, sha1, sha256, sha512, sha3-224, sha3-256, sha3-384, sha3-512. sha1 No - max_length Indicates whether to truncate the hash tail and specifies at what length. Can be any integer number, where 0 means \"no truncation\". 0 No -"},{"location":"built_in_transformers/standard_transformers/hash/#example-generate-hash-from-job-title","title":"Example: Generate hash from job title","text":"

    The following example generates a hash from the jobtitle into sha1 and truncates the results after the 10th character.

    We can set the salt via the environment variable GREENMASK_GLOBAL_SALT:

    export GREENMASK_GLOBAL_SALT=\"12343567baaa\"\n
    Hash transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Hash\"\n      params:\n        column: \"jobtitle\"\n        function: \"sha1\"\n        max_length: 10\n
    Expected result
    | column name | original value                   | transformed |\n|-------------|----------------------------------|-------------|\n| jobtitle    | Research and Development Manager | 3a456da5c5  |\n
    "},{"location":"built_in_transformers/standard_transformers/masking/","title":"Masking","text":"

    Mask a value using one of the masking rules depending on your domain. NULL values are kept.

    "},{"location":"built_in_transformers/standard_transformers/masking/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar type Data type of attribute (default, password, name, addr, email, mobile, tel, id, credit, url) default No -"},{"location":"built_in_transformers/standard_transformers/masking/#description","title":"Description","text":"

    The Masking transformer replaces characters with asterisk * symbols depending on the provided data type. If the value is NULL, it is kept unchanged. It is based on ggwhite/go-masker and supports the following masking rules:

    Type Description default Returns * symbols with the same length, e.g. input: test1234 output: ******** name Masks the second and the third letters in a word, e. g. input: ABCD output: A**D password Always returns ************ address Keeps first 6 letters, masks the rest, e. g. input: Larnaca, makarios st output: Larnac************* email Keeps a domain and the first 3 letters, masks the rest, e. g. input: ggw.chang@gmail.com output: ggw****@gmail.com mobile Masks 3 digits starting from the 4th digit, e. g. input: 0987654321 output: 0987***321 telephone Removes (, ), , - characters, and masks last 4 digits of telephone number, then formats it to (??)????-????, e. g. input: 0227993078 output: (02)2799-**** id Masks last 4 digits of ID number, e. g. input: A123456789 output: A12345**** credit_card Masks 6 digits starting from the 7th digit, e. g. input 1234567890123456 output 123456******3456 url Masks the password part of the URL, if applicable, e. g. http://admin:mysecretpassword@localhost:1234/uri output: http://admin:xxxxx@localhost:1234/uri"},{"location":"built_in_transformers/standard_transformers/masking/#example-masking-employee-national-id-number","title":"Example: Masking employee national ID number","text":"

    In the following example, the national ID number of an employee is masked.

    Masking transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Masking\"\n      params:\n        column: \"nationalidnumber\"\n        type: \"id\"\n
    Expected result
    | column name      | original value | transformed |\n|------------------|----------------|-------------|\n| nationalidnumber | 295847284      | 295847****  |\n
    "},{"location":"built_in_transformers/standard_transformers/noise_date/","title":"NoiseDate","text":"

    Randomly add or subtract a duration within the provided ratio interval to the original date value.

    "},{"location":"built_in_transformers/standard_transformers/noise_date/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes date, timestamp, timestamptz min_ratio The minimum random value for noise. The value must be in PostgreSQL interval format, e. g. 1 year 2 mons 3 day 04:05:06.07 5% from max_ratio parameter No - max_ratio The maximum random value for noise. The value must be in PostgreSQL interval format, e. g. 1 year 2 mons 3 day 04:05:06.07 Yes - min Min threshold date (and/or time) of value. The value has the same format as column parameter No - max Max threshold date (and/or time) of value. The value has the same format as column parameter No - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_date/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min date, timestamp, timestamptz max date, timestamp, timestamptz"},{"location":"built_in_transformers/standard_transformers/noise_date/#description","title":"Description","text":"

    The NoiseDate transformer randomly generates duration between min_ratio and max_ratio parameter and adds it to or subtracts it from the original date value. The min_ratio or max_ratio parameters must be written in the PostgreSQL interval format. You can also truncate the resulted date up to a specified part by setting the truncate parameter.

    In case you have constraints on the date range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode.

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_date/#example-adding-noise-to-the-modified-date","title":"Example: Adding noise to the modified date","text":"

    In the following example, the original timestamp value of modifieddate will be noised up to 1 year 2 months 3 days 4 hours 5 minutes 6 seconds and 7 milliseconds with truncation up to the month part.

    NoiseDate transformer example
    - schema: \"humanresources\"\n  name: \"jobcandidate\"\n  transformers:\n    - name: \"NoiseDate\"\n      params:\n        column: \"hiredate\"\n        max_ratio: \"1 year 2 mons 3 day 04:05:06.07\"\n        truncate: \"month\"\n        max: \"2020-01-01 00:00:00\"\n
    "},{"location":"built_in_transformers/standard_transformers/noise_date/#example-adding-noise-to-the-modified-date-with-dynamic-min-parameter-with-hash-engine","title":"Example: Adding noise to the modified date with dynamic min parameter with hash engine","text":"

    In the following example, the original timestamp value of hiredate will be noised up to 1 year 2 months 3 days 4 hours 5 minutes 6 seconds and 7 milliseconds with truncation up to the month part. The max threshold is set to 2020-01-01 00:00:00, and the min threshold is set to the birthdate column. If the birthdate column is NULL, the default value 1990-01-01 will be used. The hash engine is used for deterministic generation - the same input will always produce the same output.

    NoiseDate transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"NoiseDate\"\n      params:\n        column: \"hiredate\"\n        max_ratio: \"1 year 2 mons 3 day 04:05:06.07\"\n        truncate: \"month\"\n        max: \"2020-01-01 00:00:00\"\n        engine: \"hash\"\n      dynamic_params:\n        min:\n          column: \"birthdate\"\n          default: \"1990-01-01\"\n

    Result

    ColumnOriginalValueTransformedValue hiredate2009-01-142010-08-01"},{"location":"built_in_transformers/standard_transformers/noise_float/","title":"NoiseFloat","text":"

    Add or subtract a random fraction to the original float value.

    "},{"location":"built_in_transformers/standard_transformers/noise_float/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8 decimal The decimal of the noised float value (number of digits after the decimal point) 4 No - min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Max threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_float/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min float4, float8, int2, int4, int8 max float4, float8, int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_float/#description","title":"Description","text":"

    The NoiseFloat transformer multiplies the original float value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value. Additionally, you can specify the number of decimal digits by using the decimal parameter.

    In case you have constraints on the float range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode. Engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_float/#example-adding-noise-to-the-purchase-price","title":"Example: Adding noise to the purchase price","text":"

    In this example, the original value of standardprice will be noised up to 50% and rounded up to 2 decimals.

    NoiseFloat transformer example
    - schema: \"purchasing\"\n  name: \"productvendor\"\n  columns_type_override: # (1)\n    lastreceiptcost: \"float8\"\n    standardprice: \"float8\"\n  transformers:\n    - name: \"NoiseFloat\"\n      params:\n        column: \"lastreceiptcost\"\n        max_ratio: 0.15\n        decimal: 2\n      dynamic_params:\n        min:\n          column: \"standardprice\"\n
    1. The type overrides applied for example because the playground database does not contain any tables with float columns.

    Result

    ColumnOriginalValueTransformedValue lastreceiptcost50.263547.87"},{"location":"built_in_transformers/standard_transformers/noise_int/","title":"NoiseInt","text":"

    Add or subtract a random fraction to the original integer value.

    "},{"location":"built_in_transformers/standard_transformers/noise_int/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Max threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_int/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8 max int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_int/#description","title":"Description","text":"

    The NoiseInt transformer multiplies the original integer value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value.

    In case you have constraints on the integer range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode.

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_int/#example-noise-vacation-hours-of-an-employee","title":"Example: Noise vacation hours of an employee","text":"

    In the following example, the original value of vacationhours will be noised up to 40%. The transformer will set the value to 10 if the noised value is lower than 10 and to 1000 if the noised value exceeds 1000.

    NoiseInt transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"NoiseInt\"\n      params:\n        column: \"vacationhours\"\n        max_ratio: 0.4\n        min: 10\n        max: 1000\n

    Result

    ColumnOriginalValueTransformedValue vacationhours9969"},{"location":"built_in_transformers/standard_transformers/noise_numeric/","title":"NoiseNumeric","text":"

    Add or subtract a random fraction to the original numeric value.

    "},{"location":"built_in_transformers/standard_transformers/noise_numeric/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes numeric, decimal decimal The decimal of the noised float value (number of digits after the decimal point) 4 No - min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Max threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_numeric/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min numeric, decimal, float4, float8, int2, int4, int8 max numeric, decimal, float4, float8, int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_numeric/#description","title":"Description","text":"

    The NoiseNumeric transformer multiplies the original numeric (or decimal) value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value. Additionally, you can specify the number of decimal digits by using the decimal parameter.

    In case you have constraints on the numeric range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode. Engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    Warning

    Greenmask cannot parse the numeric type setting. For instance NUMERIC(10, 2). You should set min and max thresholds manually as well as the allowed decimal. This behaviour will be changed in the later versions. Greenmask will be able to determine the decimal and scale of the column and set the min and max thresholds automatically if they were not set.

    "},{"location":"built_in_transformers/standard_transformers/noise_numeric/#example-adding-noise-to-the-purchase-price","title":"Example: Adding noise to the purchase price","text":"

    In this example, the original value of lastreceiptcost will be noised up to 15% and rounded up to 2 decimals.

    NoiseNumeric transformer example
    - schema: \"purchasing\"\n  name: \"productvendor\"\n  transformers:\n    - name: \"NoiseNumeric\"\n      params:\n        column: \"lastreceiptcost\"\n        max_ratio: 0.15\n        decimal: 2\n        max: 10000\n      dynamic_params:\n        min:\n          column: \"standardprice\"\n

    Result

    ColumnOriginalValueTransformedValue lastreceiptcost50.263557.33"},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/","title":"RandomAmountWithCurrency","text":"

    The RandomAmountWithCurrency transformer is specifically designed to populate specified database columns with random financial amounts accompanied by currency codes. Ideal for applications requiring the simulation of financial transactions, this utility enhances the realism of financial datasets by introducing variability in amounts and currencies.

    "},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#description","title":"Description","text":"

    This transformer automatically generates random financial amounts along with corresponding global currency codes (e. g., 250.00 USD, 300.00 EUR), injecting them into the designated database column. It provides a straightforward solution for populating financial records with varied and realistic data, suitable for testing payment systems, data anonymization, and simulation of economic models.

    "},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#example-populate-the-payments-table-with-random-amounts-and-currencies","title":"Example: Populate the payments table with random amounts and currencies","text":"

    This example shows how to configure the RandomAmountWithCurrency transformer to populate the payment_details column in the payments table with random amounts and currencies. It is an effective approach to simulating a diverse range of payment transactions.

    RandomAmountWithCurrency transformer example
    - schema: \"public\"\n  name: \"payments\"\n  transformers:\n    - name: \"RandomAmountWithCurrency\"\n      params:\n        column: \"payment_details\"\n        keep_null: false\n

    In this setup, the payment_details column will be updated with random financial amounts and currency codes for each entry, replacing any existing non-NULL values. The keep_null parameter, when set to true, ensures that existing NULL values in the column remain unchanged, preserving the integrity of records without specified payment details.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/","title":"RandomBool","text":"

    Generate random boolean values.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes bool keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_bool/#description","title":"Description","text":"

    The RandomBool transformer generates a random boolean value. The behaviour for NULL values can be configured using the keep_null parameter. The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/#example-generate-a-random-boolean-for-a-column","title":"Example: Generate a random boolean for a column","text":"

    In the following example, the RandomBool transformer generates a random boolean value for the salariedflag column.

    RandomBool transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomBool\"\n      params:\n        column: \"salariedflag\"\n

    Result

    ColumnOriginalValueTransformedValue salariedflagtf"},{"location":"built_in_transformers/standard_transformers/random_cc_number/","title":"RandomCCNumber","text":"

    The RandomCCNumber transformer is specifically designed to populate specified database columns with random credit card numbers. This utility is crucial for applications that involve simulating financial data, testing payment systems, or anonymizing real credit card numbers in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_cc_number/#description","title":"Description","text":"

    By leveraging algorithms capable of generating plausible credit card numbers that adhere to standard credit card validation rules (such as the Luhn algorithm), the RandomCCNumber transformer injects random credit card numbers into the designated database column. This approach ensures the generation of credit card numbers that are realistic for testing and development purposes, without compromising real-world applicability and security.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_number/#example-populate-random-credit-card-numbers-for-the-payment_information-table","title":"Example: Populate random credit card numbers for the payment_information table","text":"

    This example demonstrates configuring the RandomCCNumber transformer to populate the cc_number column in the payment_information table with random credit card numbers. It is an effective strategy for creating a realistic set of payment data for application testing or data anonymization.

    RandomCCNumber transformer example
    - schema: \"public\"\n  name: \"payment_information\"\n  transformers:\n    - name: \"RandomCCNumber\"\n      params:\n        column: \"cc_number\"\n        keep_null: false\n

    With this setup, the cc_number column will be updated with random credit card numbers for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, it will ensure that existing NULL values in the column are preserved, maintaining the integrity of records where credit card information is not applicable or available.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/","title":"RandomCCType","text":"

    The RandomCCType transformer is designed to populate specified database columns with random credit card types. This tool is essential for applications that require the simulation of financial transaction data, testing payment processing systems, or anonymizing credit card type information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_cc_type/#description","title":"Description","text":"

    Utilizing a predefined list of credit card types (e.g., VISA, MasterCard, American Express, Discover), the RandomCCType transformer injects random credit card type names into the designated database column. This feature allows for the creation of realistic and varied financial transaction datasets by simulating a range of credit card types without using real card data.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/#example-populate-random-credit-card-types-for-the-transactions-table","title":"Example: Populate random credit card types for the transactions table","text":"

    This example shows how to configure the RandomCCType transformer to populate the card_type column in the transactions table with random credit card types. It is a straightforward method for simulating diverse payment methods across transactions.

    RandomCCType transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomCCType\"\n      params:\n        column: \"card_type\"\n        keep_null: false\n

    In this configuration, the card_type column will be updated with random credit card types for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, maintaining the integrity of records where card type information is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_century/","title":"RandomCentury","text":"

    The RandomCentury transformer is crafted to populate specified database columns with random century values. It is ideal for applications that require historical data simulation, such as generating random years within specific centuries for historical databases, testing datasets with temporal dimensions, or anonymizing dates in historical research data.

    "},{"location":"built_in_transformers/standard_transformers/random_century/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_century/#description","title":"Description","text":"

    The RandomCentury transformer utilizes an algorithm or a library function (hypothetical in this context) to generate random century values. Each value represents a century (e.g., 19th, 20th, 21st), providing a broad temporal range that can be used to enhance datasets requiring a distribution across different historical periods without the need for precise date information.

    "},{"location":"built_in_transformers/standard_transformers/random_century/#example-populate-random-centuries-for-the-historical_artifacts-table","title":"Example: Populate random centuries for the historical_artifacts table","text":"

    This example shows how to configure the RandomCentury transformer to populate the century column in a historical_artifacts table with random century values, adding an element of variability and historical context to the dataset.

    RandomCentury transformer example
    - schema: \"public\"\n  name: \"historical_artifacts\"\n  transformers:\n    - name: \"RandomCentury\"\n      params:\n        column: \"century\"\n        keep_null: false\n

    In this setup, the century column will be filled with random century values, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values in the column will remain untouched, preserving the original dataset's integrity where no temporal data is available.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/","title":"RandomChoice","text":"

    Replace values randomly chosen from a provided list.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any values A list of values in any format. The string with value \\N is considered NULL. Yes - validate Performs a decoding procedure via the PostgreSQL driver using the column type to ensure that values have correct type true No keep_null Indicates whether NULL values should be replaced with transformed values or not true No engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_choice/#description","title":"Description","text":"

    The RandomChoice transformer replaces one randomly chosen value from the list provided in the values parameter. You can use the validate parameter to ensure that values are correct before applying the transformation. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/#example-choosing-randomly-from-provided-dates","title":"Example: Choosing randomly from provided dates","text":"

    In this example, the provided values undergo validation through PostgreSQL driver decoding, and one value is randomly chosen from the list.

    RandomChoice transformer example
    - schema: \"humanresources\"\n  name: \"jobcandidate\"\n  transformers:\n    - name: \"RandomChoice\"\n      params:\n        column: \"modifieddate\"\n        validate: true\n        engine: hash\n        values:\n          - \"2023-12-21 07:41:06.891\"\n          - \"2023-12-21 07:41:06.896\"\n

    Result

    ColumnOriginalValueTransformedValue modifieddate2007-06-23 00:00:002023-12-21 07:41:06.891"},{"location":"built_in_transformers/standard_transformers/random_currency/","title":"RandomCurrency","text":"

    The RandomCurrency transformer is tailored to populate specified database columns with random currency codes. This tool is highly beneficial for applications involving the simulation of international financial data, testing currency conversion features, or anonymizing currency information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_currency/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_currency/#description","title":"Description","text":"

    Utilizing a comprehensive list of global currency codes (e.g., USD, EUR, JPY), the RandomCurrency transformer injects random currency codes into the designated database column. This feature allows for the creation of diverse and realistic financial transaction datasets by simulating a variety of currencies without relying on actual financial data.

    "},{"location":"built_in_transformers/standard_transformers/random_currency/#example-populate-random-currency-codes-for-the-transactions-table","title":"Example: Populate random currency codes for the transactions table","text":"

    This example outlines configuring the RandomCurrency transformer to populate the currency_code column in a transactions table with random currency codes. It is an effective way to simulate international transactions across multiple currencies.

    RandomCurrency transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomCurrency\"\n      params:\n        column: \"currency_code\"\n        keep_null: false\n

    In this configuration, the currency_code column will be updated with random currency codes for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where currency data may not be applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_date/","title":"RandomDate","text":"

    Generate a random date in a specified interval.

    "},{"location":"built_in_transformers/standard_transformers/random_date/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column Name of the column to be affected Yes date, timestamp, timestamptz min The minimum threshold date for the random value. The format depends on the column type. Yes - max The maximum threshold date for the random value. The format depends on the column type. Yes - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_date/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min date, timestamp, timestamptz max date, timestamp, timestamptz"},{"location":"built_in_transformers/standard_transformers/random_date/#description","title":"Description","text":"

    The RandomDate transformer generates a random date within the provided interval, starting from min to max. It can also perform date truncation up to the specified part of the date. The format of dates in the min and max parameters must adhere to PostgreSQL types, including DATE, TIMESTAMP WITHOUT TIMEZONE, or TIMESTAMP WITH TIMEZONE.

    Note

    The value of min and max parameters depends on the column type. For example, for the date column, the value should be in the format YYYY-MM-DD, while for the timestamp column, the value should be in the format YYYY-MM-DD HH:MM:SS or YYYY-MM-DD HH:MM:SS.SSSSSS. The timestamptz column requires the value to be in the format YYYY-MM-DD HH:MM:SS.SSSSSS+HH:MM. Read more about date/time formats in the PostgreSQL documentation.

    The behaviour for NULL values can be configured using the keep_null parameter. The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_date/#example-generate-modifieddate","title":"Example: Generate modifieddate","text":"

    In the following example, a random timestamp without timezone is generated for the modifieddate column within the range from 2011-05-31 00:00:00 to 2013-05-31 00:00:00, and the part of the random value after day is truncated.

    RandomDate transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"modifieddate\"\n        keep_null: false\n        min: \"2011-05-31 00:00:00\"\n        max: \"2013-05-31 00:00:00\"\n        truncate: \"day\"\n

    Result

    ColumnOriginalValueTransformedValue modifieddate2014-06-30 00:00:002012-07-27 00:00:00"},{"location":"built_in_transformers/standard_transformers/random_date/#example-generate-hiredate-based-on-birthdate-using-two-transformations","title":"Example: Generate hiredate based on birthdate using two transformations","text":"

    In this example, the RandomDate transformer generates a random date for the birthdate column within the range now - 50 years to now - 18 years. The hire date is generated based on the birthdate, ensuring that the employee is at least 18 years old when hired.

    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birthdate\"\n        min: '{{ now | tsModify \"-50 years\" | .EncodeValue }}' # 1994\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # 2006\n\n    - name: \"RandomDate\"\n      params:\n        column: \"hiredate\"\n        truncate: \"month\"\n        max: \"{{ now | .EncodeValue }}\"\n      dynamic_params:\n        min:\n          column: \"birthdate\"\n          template: '{{ .GetValue | tsModify \"18 years\" | .EncodeValue }}' # min age 18 years\n

    Result:

    ColumnOriginalValueTransformedValue birthdate1969-01-291985-10-29 hiredate2009-01-142023-01-01"},{"location":"built_in_transformers/standard_transformers/random_day_of_month/","title":"RandomDayOfMonth","text":"

    The RandomDayOfMonth transformer is designed to populate specified database columns with random day-of-the-month values. It is particularly useful for scenarios requiring the simulation of dates, such as generating random event dates, user sign-up dates, or any situation where the specific day of the month is needed without reference to the actual month or year.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, int2, int4, int8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#description","title":"Description","text":"

    Utilizing the faker library, the RandomDayOfMonth transformer generates random numerical values representing days of the month, ranging from 1 to 31. This allows for the easy insertion of random but plausible day-of-the-month data into a database, enhancing realism or anonymizing actual dates.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#example-populate-random-days-of-the-month-for-the-events-table","title":"Example: Populate random days of the month for the events table","text":"

    This example illustrates how to configure the RandomDayOfMonth transformer to fill the event_day column in the events table with random day-of-the-month values, facilitating the simulation of varied event scheduling.

    RandomDayOfMonth transformer example
    - schema: \"public\"\n  name: \"events\"\n  transformers:\n    - name: \"RandomDayOfMonth\"\n      params:\n        column: \"event_day\"\n        keep_null: false\n

    With this setup, the event_day column will be updated with random day-of-the-month values, replacing any existing non-NULL values. Setting keep_null to true ensures that NULL values in the column are left unchanged, maintaining any existing gaps in the data.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/","title":"RandomDayOfWeek","text":"

    The RandomDayOfWeek transformer is specifically designed to fill specified database columns with random day-of-the-week names. It is particularly useful for applications that require simulated weekly schedules, random event planning, or any scenario where the day of the week is relevant but the specific date is not.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#description","title":"Description","text":"

    Utilizing the faker library, the RandomDayOfWeek transformer generates names of days (e. g., Monday, Tuesday) at random. This transformer can be applied to any text or varchar column in a database, introducing variability and realism into data sets that need to represent days of the week in a non-specific manner.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#example-populate-random-days-of-the-week-for-the-work_schedule-table","title":"Example: Populate random days of the week for the work_schedule table","text":"

    This example demonstrates configuring the RandomDayOfWeek transformer to populate the work_day column in the work_schedule table with random days of the week. This setup can help simulate a diverse range of work schedules without tying them to specific dates.

    RandomDayOfWeek transformer example
    - schema: \"public\"\n  name: \"work_schedule\"\n  transformers:\n    - name: \"RandomDayOfWeek\"\n      params:\n        column: \"work_day\"\n        keep_null: false\n

    In this configuration, every entry in the work_day column will be updated with a random day of the week, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values within the column will remain unchanged.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/","title":"RandomDomainName","text":"

    The RandomDomainName transformer is designed to populate specified database columns with random domain names. This tool is invaluable for simulating web data, testing applications that interact with domain names, or anonymizing real domain information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_domain_name/#description","title":"Description","text":"

    By leveraging an algorithm or library capable of generating believable domain names, the RandomDomainName transformer introduces random domain names into the specified database column. Each generated domain name includes a second-level domain (SLD) and a top-level domain (TLD), such as \"example.com\" or \"website.org,\" providing a wide range of plausible web addresses for database enrichment.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/#example-populate-random-domain-names-for-the-websites-table","title":"Example: Populate random domain names for the websites table","text":"

    This example demonstrates configuring the RandomDomainName transformer to populate the domain column in the websites table with random domain names. This approach facilitates the creation of a diverse and realistic set of web addresses for testing, simulation, or data anonymization purposes.

    RandomDomainName transformer example
    - schema: \"public\"\n  name: \"websites\"\n  transformers:\n    - name: \"RandomDomainName\"\n      params:\n        column: \"domain\"\n        keep_null: false\n

    In this setup, the domain column will be updated with random domain names for each entry, replacing any existing non-NULL values. If keep_null is set to true, the transformer will preserve existing NULL values in the column, maintaining the integrity of data where domain information is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/","title":"RandomE164PhoneNumber","text":"

    The RandomE164PhoneNumber transformer is developed to populate specified database columns with random E.164 phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets while focusing on E.164 numbers.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#description","title":"Description","text":"

    The RandomE164PhoneNumber transformer utilizes algorithms capable of generating random E.164 phone numbers with the standard international format and injects them into the designated database column. This feature allows for the creation of diverse and realistic contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#example-populate-random-e164-phone-numbers-for-the-contact_information-table","title":"Example: Populate random E.164 phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomE164PhoneNumber transformer to populate the phone_number column in the contact_information table with random E.164 phone numbers. It is an effective method for simulating a variety of contact information entries with E.164 numbers.

    RandomE164PhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomE164PhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random E.164 phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where E.164 phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_email/","title":"RandomEmail","text":"

    Generate email addresses for a specified column.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_original_domain Keep the original domain part of the email address false No - local_part_template The template for local part of email No - domain_part_template The template for domain part of email No - domains List of domains for new email [\"gmail.com\", \"yahoo.com\", \"outlook.com\", \"hotmail.com\", \"aol.com\", \"icloud.com\", \"mail.com\", \"zoho.com\", \"yandex.com\", \"protonmail.com\", \"gmx.com\", \"fastmail.com\"] No - validate Validate generated email if using template false No - max_random_length Max length of randomly generated part of the email 32 No - keep_null Indicates whether NULL values should be preserved false No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_email/#description","title":"Description","text":"

    The RandomEmail transformer generates random email addresses for the specified database column. By default, the transformer generates random email addresses with a maximum length of 32 characters. The keep_original_domain parameter allows you to preserve the original domain part of the email address. The local_part_template and domain_part_template parameters enable you to specify templates for the local and domain parts of the email address, respectively. If the validate parameter is set to true, the transformer will validate the generated email addresses against the specified templates. The keep_null parameter allows you to preserve existing NULL values in the column.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#templates-parameters","title":"Templates parameters","text":"

    In each template you have access to the columns of the table by using the {{ .column_name }} syntax. Note that all values are strings. For example, you can assemble the email address by accessing the first_name and last_name columns: {{ .first_name | lower }}.{{ .last_name | lower }}.

    The transformer always generates a random sequence for the email, and you can use it by accessing the {{ .random_string }} variable. For example, we can add a random string at the end of the local part: {{ .first_name | lower }}.{{ .last_name | lower }}.{{ .random_string }}.

    Read more about template functions in the Template functions section.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#random-email-generation-using-first-name-and-last-name","title":"Random email generation using first name and last name","text":"

    In this example, the RandomEmail transformer generates random email addresses for the email column in the account table. The transformer generates email addresses using the first_name and last_name columns as the local part of the email address and adds a random string to the end of the local part with length 10 characters. The original domain part of the email address is preserved.

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n
    RandomEmail transformer example
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomEmail\"\n      params:\n        column: \"email\"\n        engine: \"hash\"\n        keep_original_domain: true\n        local_part_template: \"{{ first_name | lower }}.{{ last_name | lower }}.{{ .random_string | trunc 10 }}\"\n

    Result:

    ColumnOriginalValueTransformedValue emailjohn.smith@gmail.comjohn.smith.a075d99e2d@gmail.com"},{"location":"built_in_transformers/standard_transformers/random_email/#simple-random-email-generation","title":"Simple random email generation","text":"

    In this example, the RandomEmail transformer generates random email addresses for the email column in the account table. The transformer generates random email addresses whose randomly generated part has a maximum length of 10 characters.

    RandomEmail transformer example
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomEmail\"\n      params:\n        column: \"email\"\n        max_random_length: 10\n

    Result:

    ColumnOriginalValueTransformedValue emailjohn.smith@gmail.comjohn.smith.a075d99e2d@gmail.com"},{"location":"built_in_transformers/standard_transformers/random_float/","title":"RandomFloat","text":"

    Generate a random float within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_float/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8 min The minimum threshold for the random value. The value range depends on the column type. Yes - max The maximum threshold for the random value. The value range depends on the column type. Yes - decimal The decimal of the random float value (number of digits after the decimal point) 4 No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_float/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min float4, float8 max float4, float8"},{"location":"built_in_transformers/standard_transformers/random_float/#description","title":"Description","text":"

    The RandomFloat transformer generates a random float value within the provided interval, starting from min to max, with the option to specify the number of decimal digits by using the decimal parameter. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_float/#example-generate-random-price","title":"Example: Generate random price","text":"

    In this example, the RandomFloat transformer generates random prices in the range from 0.1 to 7000 while maintaining a decimal of up to 2 digits.

    RandomFloat transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  columns_type_override:  # (1)\n    \"unitprice\": \"float8\"\n  transformers:\n    - name: \"RandomFloat\"\n      params:\n        column: \"unitprice\"\n        min: 0.1\n        max: 7000\n        decimal: 2\n
    1. The type overrides are applied in this example because the playground database does not contain any tables with float columns.

    Result:

    ColumnOriginalValueTransformedValue unitprice2024.9944449.7"},{"location":"built_in_transformers/standard_transformers/random_int/","title":"RandomInt","text":"

    Generate a random integer within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_int/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min The minimum threshold for the random value Yes - max The maximum threshold for the random value Yes - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_int/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8 max int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/random_int/#description","title":"Description","text":"

    The RandomInt transformer generates a random integer within the specified min and max thresholds. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_int/#example-generate-random-item-quantity","title":"Example: Generate random item quantity","text":"

    In the following example, the RandomInt transformer generates a random value in the range from 1 to 30 and assigns it to the orderqty column.

    generate random orderqty in the range from 1 to 30
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomInt\"\n      params:\n        column: \"orderqty\"\n        min: 1\n        max: 30\n

    Result

    ColumnOriginalValueTransformedValue orderqty129"},{"location":"built_in_transformers/standard_transformers/random_int/#example-generate-random-sick-leave-hours-based-on-vacation-hours","title":"Example: Generate random sick leave hours based on vacation hours","text":"

    In the following example, the RandomInt transformer generates a random value in the range from 1 to the value of the vacationhours column and assigns it to the sickleavehours column. This configuration allows for the simulation of sick leave hours based on the number of vacation hours.

    RandomInt transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomInt\"\n      params:\n        column: \"sickleavehours\"\n        max: 100\n      dynamic_params:\n        min:\n          column: \"vacationhours\"\n

    Result

    ColumnOriginalValueTransformedValue sickleavehours6999"},{"location":"built_in_transformers/standard_transformers/random_ip/","title":"RandomIP","text":"

    The RandomIp transformer is designed to populate specified database columns with random IPv4 or IPv6 addresses. This utility is essential for applications requiring the simulation of network data, testing systems that utilize IP addresses, or anonymizing real IP addresses in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_ip/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, inet subnet Subnet for generating a random IP address in IPv4 or IPv6 format Yes - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_ip/#dynamic-parameters","title":"Dynamic parameters","text":"Name Supported types subnet cidr, text, varchar"},{"location":"built_in_transformers/standard_transformers/random_ip/#description","title":"Description","text":"

    Utilizing a robust algorithm or library for generating IP addresses, the RandomIp transformer injects random IPv4 or IPv6 addresses into the designated database column, depending on the provided subnet. The transformer automatically detects whether to generate an IPv4 or IPv6 address based on the subnet version specified.

    "},{"location":"built_in_transformers/standard_transformers/random_ip/#example-generate-a-random-ipv4-address-for-a-1921681024-subnet","title":"Example: Generate a Random IPv4 Address for a 192.168.1.0/24 Subnet","text":"

    This example demonstrates how to configure the RandomIp transformer to inject a random IPv4 address into the ip_address column for entries in the 192.168.1.0/24 subnet:

    Create table ip_networks and insert data
    CREATE TABLE ip_networks\n(\n    id         SERIAL PRIMARY KEY,\n    ip_address INET,\n    network    CIDR\n);\n\nINSERT INTO ip_networks (ip_address, network)\nVALUES ('192.168.1.10', '192.168.1.0/24'),\n       ('10.0.0.5', '10.0.0.0/16'),\n       ('172.16.254.3', '172.16.0.0/12'),\n       ('192.168.100.14', '192.168.100.0/24'),\n       ('2001:0db8:85a3:0000:0000:8a2e:0370:7334', '2001:0db8:85a3::/64'); -- An IPv6 address and network\n
    RandomIp transformer example
    - schema: public\n  name: ip_networks\n  transformers:\n    - name: \"RandomIp\"\n      params:\n        subnet: \"192.168.1.0/24\"\n        column: \"ip_address\"\n        engine: \"random\"\n

    Result:

    ColumnOriginalValueTransformedValue ip_address192.168.1.10192.168.1.28"},{"location":"built_in_transformers/standard_transformers/random_ip/#example-generate-a-random-ip-based-on-the-dynamic-subnet-parameter","title":"Example: Generate a Random IP Based on the Dynamic Subnet Parameter","text":"

    This configuration illustrates how to use the RandomIp transformer dynamically, where it reads the subnet information from the network column of the database and generates a corresponding random IP address:

    RandomIp transformer example with dynamic mode
    - schema: public\n  name: ip_networks\n  transformers:\n    - name: \"RandomIp\"\n      params:\n        column: \"ip_address\"\n        engine: \"random\"\n      dynamic_params:\n        subnet:\n          column: \"network\"\n

    Result:

    ColumnOriginalValueTransformedValue ip_address192.168.1.10192.168.1.111"},{"location":"built_in_transformers/standard_transformers/random_latitude/","title":"RandomLatitude","text":"

    The RandomLatitude transformer generates random latitude values for specified database columns. It is designed to support geographical data enhancements, particularly useful for applications requiring randomized but plausible geographical coordinates.

    "},{"location":"built_in_transformers/standard_transformers/random_latitude/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_latitude/#description","title":"Description","text":"

    The RandomLatitude transformer utilizes the faker library to produce random latitude values within the range of -90 to +90 degrees. This transformer can be applied to columns designated to store geographical latitude information, enhancing data sets with randomized latitude coordinates.

    "},{"location":"built_in_transformers/standard_transformers/random_latitude/#example-populate-random-latitude-for-the-locations-table","title":"Example: Populate random latitude for the locations table","text":"

    This example demonstrates configuring the RandomLatitude transformer to populate the latitude column in the locations table with random latitude values.

    RandomLatitude transformer example
    - schema: \"public\"\n  name: \"locations\"\n  transformers:\n    - name: \"RandomLatitude\"\n      params:\n        column: \"latitude\"\n        keep_null: false\n

    With this configuration, the latitude column will be filled with random latitude values, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values will be preserved.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/","title":"RandomLongitude","text":"

    The RandomLongitude transformer is designed to generate random longitude values for specified database columns, enhancing datasets with realistic geographic coordinates suitable for a wide range of applications, from testing location-based services to anonymizing real geographic data.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_longitude/#description","title":"Description","text":"

    The RandomLongitude transformer leverages the faker library to produce random longitude values within the globally accepted range of -180 to +180 degrees. This flexibility allows the transformer to be applied to any column intended for storing longitude data, providing a simple yet powerful tool for introducing randomized longitude coordinates into a database.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/#example-populate-random-longitude-for-the-locations-table","title":"Example: Populate random longitude for the locations table","text":"

    This example shows how to use the RandomLongitude transformer to fill the longitude column in the locations table with random longitude values.

    RandomLongitude transformer example
    - schema: \"public\"\n  name: \"locations\"\n  transformers:\n    - name: \"RandomLongitude\"\n      params:\n        column: \"longitude\"\n        keep_null: false\n

    This setup ensures that all entries in the longitude column receive a random longitude value, replacing any existing non-NULL values. If keep_null is set to true, then existing NULL values in the column will remain unchanged.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/","title":"RandomMac","text":"

    The RandomMac transformer is designed to populate specified database columns with random MAC addresses.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, macaddr keep_original_vendor Should the Individual/Group (I/G) and Universal/Local (U/L) bits be preserved from the original MAC address. false No - cast_type Parameter that allows setting the Individual/Group (I/G) bit in the MAC address. Allowed values [any, individual, group]. If this value is individual, the address is meant for a single device (unicast). If it is group, the address is for a group of devices, which can include multicast and broadcast addresses. any No management_type Parameter that allows setting the Universal/Local (U/L) bit in the MAC address. Allowed values [any, universal, local]. If this bit is universal, the address is universally administered (globally unique). If it is local, the address is locally administered (such as when set manually or programmatically on a network device). any No engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_mac/#description","title":"Description","text":"

    The RandomMac transformer generates a random MAC address and injects it into the specified database column. The transformer can be configured to preserve the Individual/Group (I/G) and Universal/Local (U/L) bits from the original MAC address. You can also keep the original vendor bits in the generated MAC address by setting the keep_original_vendor parameter to true.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/#example-generate-a-random-mac-address","title":"Example: Generate a Random MAC Address","text":"

    This example demonstrates how to configure the RandomMac transformer to inject a random MAC address into the mac_address column:

    Create table mac_addresses and insert data
    CREATE TABLE mac_addresses\n(\n    id          SERIAL PRIMARY KEY,\n    device_name VARCHAR(50),\n    mac_address MACADDR,\n    description TEXT\n);\n\nINSERT INTO mac_addresses (device_name, mac_address, description)\nVALUES ('Device A', '00:1A:2B:3C:4D:5E', 'Description for Device A'),\n       ('Device B', '01:2B:3C:4D:5E:6F', 'Description for Device B'),\n       ('Device C', '02:3C:4D:5E:6F:70', 'Description for Device C'),\n       ('Device D', '03:4D:5E:6F:70:71', 'Description for Device D'),\n       ('Device E', '04:5E:6F:70:71:72', 'Description for Device E');\n
    RandomMac transformer example
    - schema: public\n  name: mac_addresses\n  transformers:\n    - name: \"RandomMac\"\n      params:\n        column: \"mac_address\"\n        engine: \"random\"\n        cast_type: \"any\"\n        management_type: \"any\"\n

    Result:

    ColumnOriginalValueTransformedValue mac_address00:1a:2b:3c:4d:5eac:7f:a8:11:4e:0d"},{"location":"built_in_transformers/standard_transformers/random_month_name/","title":"RandomMonthName","text":"

    The RandomMonthName transformer is crafted to populate specified database columns with random month names. This transformer is especially useful for scenarios requiring the simulation of time-related data, such as user birth months or event months, without relying on specific date values.

    "},{"location":"built_in_transformers/standard_transformers/random_month_name/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_month_name/#description","title":"Description","text":"

    The RandomMonthName transformer utilizes the faker library to generate the names of months at random. It can be applied to any textual column in a database to introduce variety and realism into data sets that require representations of months without the need for specific calendar dates.

    "},{"location":"built_in_transformers/standard_transformers/random_month_name/#example-populate-random-month-names-for-the-user_profiles-table","title":"Example: Populate random month names for the user_profiles table","text":"

    This example demonstrates how to configure the RandomMonthName transformer to fill the birth_month column in the user_profiles table with random month names, adding a layer of diversity to user data without using actual birthdates.

    RandomMonthName transformer example
    - schema: \"public\"\n  name: \"user_profiles\"\n  transformers:\n    - name: \"RandomMonthName\"\n      params:\n        column: \"birth_month\"\n        keep_null: false\n

    With this setup, the birth_month column will be updated with random month names, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values within the column will remain untouched.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/","title":"RandomNumeric","text":"

    Generate a random numeric within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes numeric, decimal min The minimum threshold for the random value. The value range depends on the column type. Yes - max The maximum threshold for the random value. The value range depends on the column type. Yes - decimal The decimal of the random numeric value (number of digits after the decimal point) 4 No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_numeric/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8, float4, float8, numeric, decimal max int2, int4, int8, float4, float8, numeric, decimal"},{"location":"built_in_transformers/standard_transformers/random_numeric/#description","title":"Description","text":"

    The RandomNumeric transformer generates a random numeric value within the provided interval, starting from min to max, with the option to specify the number of decimal digits by using the decimal parameter. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/#example-generate-random-price","title":"Example: Generate random price","text":"

    In this example, the RandomNumeric transformer generates random prices in the range from 0.1 to 7000 while maintaining a decimal of up to 2 digits.

    RandomNumeric transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomNumeric\"\n      params:\n        column: \"unitprice\"\n        min: 0.1\n        max: 7000\n        decimal: 2\n
    1. The type overrides are applied in this example because the playground database does not contain any tables with numeric columns.

    Result:

    ColumnOriginalValueTransformedValue unitprice2024.9944449.7"},{"location":"built_in_transformers/standard_transformers/random_paragraph/","title":"RandomParagraph","text":"

    The RandomParagraph transformer is crafted to populate specified database columns with random paragraphs. This utility is indispensable for applications that require the generation of extensive textual content, such as simulating articles, enhancing textual datasets for NLP systems, or anonymizing textual content in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_paragraph/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_paragraph/#description","title":"Description","text":"

    Employing sophisticated text generation algorithms or libraries, the RandomParagraph transformer generates random paragraphs, injecting them into the designated database column. This transformer is designed to create varied and plausible paragraphs that simulate real-world textual content, providing a valuable tool for database enrichment, testing, and anonymization.

    "},{"location":"built_in_transformers/standard_transformers/random_paragraph/#example-populate-random-paragraphs-for-the-articles-table","title":"Example: Populate random paragraphs for the articles table","text":"

    This example illustrates configuring the RandomParagraph transformer to populate the body column in an articles table with random paragraphs. It is an effective way to simulate diverse article content for development, testing, or demonstration purposes.

    RandomParagraph transformer example
    - schema: \"public\"\n  name: \"articles\"\n  transformers:\n    - name: \"RandomParagraph\"\n      params:\n        column: \"body\"\n        keep_null: false\n

    With this setup, the body column will receive random paragraphs for each entry, replacing any existing non-NULL values. Setting the keep_null parameter to true allows for the preservation of existing NULL values within the column, maintaining the integrity of records where article content is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_password/","title":"RandomPassword","text":"

    The RandomPassword transformer is designed to populate specified database columns with random passwords. This utility is vital for applications that require the simulation of secure user data, testing systems with authentication mechanisms, or anonymizing real passwords in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_password/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_password/#description","title":"Description","text":"

    Employing sophisticated password generation algorithms or libraries, the RandomPassword transformer injects random passwords into the designated database column. This feature is particularly useful for creating realistic and secure user password datasets for development, testing, or demonstration purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_password/#example-populate-random-passwords-for-the-user_accounts-table","title":"Example: Populate random passwords for the user_accounts table","text":"

    This example demonstrates how to configure the RandomPassword transformer to populate the password column in the user_accounts table with random passwords.

    RandomPassword transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomPassword\"\n      params:\n        column: \"password\"\n        keep_null: false\n

    In this configuration, every entry in the password column will be updated with a random password. Setting the keep_null parameter to true will preserve existing NULL values in the column, accommodating scenarios where password data may not be applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_person/","title":"RandomPerson","text":"

    The RandomPerson transformer is designed to populate specified database columns with personal attributes such as first name, last name, title and gender.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns The list of columns to be affected Yes text, varchar gender set specific gender (possible values: Male, Female, Any) Any No - gender_mapping Specify gender name to possible values when using dynamic mode in \"gender\" parameter Any No - fallback_gender Specify fallback gender if not mapped when using dynamic mode in \"gender\" parameter Any No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_person/#description","title":"Description","text":"

    The RandomPerson transformer utilizes a comprehensive list of first names to inject random first names into the designated database column. This feature allows for the creation of diverse and realistic user profiles by simulating a variety of first names without using real user data.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#column-object-attributes","title":"column object attributes","text":""},{"location":"built_in_transformers/standard_transformers/random_person/#gender_mapping-object-attributes","title":"gender_mapping object attributes","text":"

    gender_mapping - a dictionary that maps the gender value when the gender parameter works in dynamic mode. The default value is:

    {\n  \"Male\": [\n    \"male\",\n    \"M\",\n    \"m\",\n    \"man\",\n    \"Man\"\n  ],\n  \"Female\": [\n    \"female\",\n    \"F\",\n    \"f\",\n    \"w\",\n    \"woman\",\n    \"Woman\"\n  ]\n}\n
    "},{"location":"built_in_transformers/standard_transformers/random_person/#fallback_gender","title":"fallback_gender","text":"

    Gender that will be used if a gender_mapping match was not found. This parameter is optional and applies only when the gender parameter is in dynamic mode. The default value is Any.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#example-populate-random-first-name-and-last-name-for-table-user_profiles-in-static-mode","title":"Example: Populate random first name and last name for table user_profiles in static mode","text":"

    This example demonstrates how to use the RandomPerson transformer to populate the name and surname columns in the user_profiles table with random first names and last names, respectively.

    Create table user_profiles and insert data
    CREATE TABLE personal_data\n(\n    id      SERIAL PRIMARY KEY,\n    name    VARCHAR(100),\n    surname VARCHAR(100),\n    sex     CHAR(1) CHECK (sex IN ('M', 'F'))\n);\n\n-- Insert sample data into the table\nINSERT INTO personal_data (name, surname, sex)\nVALUES ('John', 'Doe', 'M'),\n       ('Jane', 'Smith', 'F'),\n       ('Alice', 'Johnson', 'F'),\n       ('Bob', 'Lee', 'M');\n
    RandomPerson transformer example
    - schema: public\n  name: personal_data\n  transformers:\n    - name: \"RandomPerson\"\n      params:\n        gender: \"Any\"\n        columns:\n          - name: \"name\"\n            template: \"{{ .FirstName }}\"\n          - name: \"surname\"\n            template: \"{{ .LastName }}\"\n        engine: \"hash\"\n

    Result

    ColumnOriginalValueTransformedValue nameJohnZane surnameDoeMcCullough"},{"location":"built_in_transformers/standard_transformers/random_person/#example-populate-random-first-name-and-last-name-for-table-user_profiles-in-dynamic-mode","title":"Example: Populate random first name and last name for table user_profiles in dynamic mode","text":"

    This example demonstrates how to use the RandomPerson transformer to populate the name and surname columns using a dynamic gender value.

    RandomPerson transformer example with dynamic mode
    - schema: public\n  name: personal_data\n  transformers:\n    - name: \"RandomPerson\"\n      params:\n        columns:\n          - name: \"name\"\n            template: \"{{ .FirstName }}\"\n          - name: \"surname\"\n            template: \"{{ .LastName }}\"\n        engine: \"random\"\n      dynamic_params:\n        gender:\n          column: sex\n

    Result:

    ColumnOriginalValueTransformedValue nameJohnMartin surnameDoeMueller"},{"location":"built_in_transformers/standard_transformers/random_phone_number/","title":"RandomPhoneNumber","text":"

    The RandomPhoneNumber transformer is developed to populate specified database columns with random phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_phone_number/#description","title":"Description","text":"

    The RandomPhoneNumber transformer utilizes algorithms capable of generating random phone numbers with various formats and injects them into the designated database column. This feature allows for the creation of diverse and realistic contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_phone_number/#example-populate-random-phone-numbers-for-the-contact_information-table","title":"Example: Populate random phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomPhoneNumber transformer to populate the phone_number column in the contact_information table with random phone numbers. It is an effective method for simulating a variety of contact information entries.

    RandomPhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomPhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/","title":"RandomSentence","text":"

    The RandomSentence transformer is designed to populate specified database columns with random sentences. Ideal for simulating natural language text for user comments, testing NLP systems, or anonymizing textual data in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_sentence/#description","title":"Description","text":"

    The RandomSentence transformer employs complex text generation algorithms or libraries to generate random sentences, injecting them into a designated database column without the need for specifying sentence length. This flexibility ensures the creation of varied and plausible text for a wide range of applications.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/#example-populate-random-sentences-for-the-comments-table","title":"Example: Populate random sentences for the comments table","text":"

    This example shows how to configure the RandomSentence transformer to populate the comment column in the comments table with random sentences. It is a straightforward method for simulating diverse user-generated content.

    RandomSentence transformer example
    - schema: \"public\"\n  name: \"comments\"\n  transformers:\n    - name: \"RandomSentence\"\n      params:\n        column: \"comment\"\n        keep_null: false\n

    In this configuration, the comment column will be updated with random sentences for each entry, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values in the column will be preserved, maintaining the integrity of records where comments are not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_string/","title":"RandomString","text":"

    Generate a random string using the provided characters within the specified length range.

    "},{"location":"built_in_transformers/standard_transformers/random_string/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar min_length The minimum length of the generated string Yes - max_length The maximum length of the generated string Yes - symbols The range of characters that can be used in the random string abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_string/#description","title":"Description","text":"

    The RandomString transformer generates a random string with a length between min_length and max_length using the characters specified in the symbols string as the possible set of characters. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_string/#example-generate-a-random-string-for-accountnumber","title":"Example: Generate a random string for accountnumber","text":"

    In the following example, a random string is generated for the accountnumber column with a length range from 9 to 12. The character set used for generation includes 1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ.

    RandomString transformer example
    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n

    Result

    ColumnOriginalValueTransformedValue accountnumberAUSTRALI00014VUI6P2OZ"},{"location":"built_in_transformers/standard_transformers/random_timezone/","title":"RandomTimezone","text":"

    The RandomTimezone transformer is designed to populate specified database columns with random timezone strings. This transformer is particularly useful for applications that require the simulation of global user data, testing of timezone-related functionalities, or anonymizing real user timezone information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_timezone/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_timezone/#description","title":"Description","text":"

    Utilizing a comprehensive library or algorithm for generating timezone data, the RandomTimezone transformer provides random timezone strings (e.g., \"America/New_York\", \"Europe/London\") for database columns. This feature enables the creation of diverse and realistic datasets by simulating timezone information for user profiles, event timings, or any other data requiring timezone context.

    "},{"location":"built_in_transformers/standard_transformers/random_timezone/#example-populate-random-timezone-strings-for-the-user_accounts-table","title":"Example: Populate random timezone strings for the user_accounts table","text":"

    This example demonstrates how to configure the RandomTimezone transformer to populate the timezone column in the user_accounts table with random timezone strings, enhancing the dataset with varied global user representations.

    RandomTimezone transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomTimezone\"\n      params:\n        column: \"timezone\"\n        keep_null: false\n

    With this configuration, every entry in the timezone column will be updated with a random timezone string, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values within the column will remain unchanged, preserving the integrity of rows without specified timezone data.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/","title":"RandomTollFreePhoneNumber","text":"

    The RandomTollFreePhoneNumber transformer is designed to populate specified database columns with random toll-free phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets while focusing on toll-free numbers.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#description","title":"Description","text":"

    The RandomTollFreePhoneNumber transformer utilizes algorithms capable of generating random toll-free phone numbers with various formats and injects them into the designated database column. This feature allows for the creation of diverse and realistic toll-free contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#example-populate-random-toll-free-phone-numbers-for-the-contact_information-table","title":"Example: Populate random toll-free phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomTollFreePhoneNumber transformer to populate the phone_number column in the contact_information table with random toll-free phone numbers. It is an effective method for simulating a variety of contact information entries with toll-free numbers.

    RandomTollFreePhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomTollFreePhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random toll-free phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where toll-free phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/","title":"RandomUnixTimestamp","text":"

    The RandomUnixTimestamp transformer generates random Unix time values (timestamps) for specified database columns. It is particularly useful for populating columns with timestamp data, simulating time-related data, or anonymizing actual timestamps in a dataset.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min The minimum threshold date for the random value in unix timestamp format (integer) with sec unit by default Yes - max The maximum threshold date for the random value in unix timestamp format (integer) with sec unit by default Yes - unit Generated unix timestamp value unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - min_unit Min unix timestamp threshold date unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - max_unit Max unix timestamp threshold date unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - keep_null Indicates whether NULL values should be preserved false No - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#description","title":"Description","text":"

    The RandomUnixTimestamp transformer generates random Unix timestamps within the provided interval, starting from min to max. The min and max parameters are expected to be in Unix timestamp format. The min_unit and max_unit parameters specify the unit of the Unix timestamp threshold date. The truncate parameter allows you to truncate the date to the specified part of the date. The keep_null parameter allows you to specify whether NULL values should be preserved or replaced with transformed values.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#example-generate-random-unix-timestamps-with-dynamic-parameters","title":"Example: Generate random Unix timestamps with dynamic parameters","text":"

    In this example, the RandomUnixTimestamp transformer generates random Unix timestamps using dynamic parameters. The min parameter is set to the created_at column, which is converted to Unix seconds using the TimestampToUnixSec. The max parameter is set to a fixed value. The paid_at column is populated with random Unix timestamps in the range from created_at to 1715934239 (Unix timestamp for 2024-05-17 12:03:59). The unit parameter is set to millisecond because the paid_at column stores timestamps in milliseconds.

    CREATE TABLE transactions\n(\n    id         SERIAL PRIMARY KEY,\n    kind       VARCHAR(255),\n    total      DECIMAL(10, 2),\n    created_at TIMESTAMP,\n    paid_at    BIGINT -- stores milliseconds since the epoch\n);\n\n-- Inserting data with milliseconds timestamp\nINSERT INTO transactions (kind, total, created_at, paid_at)\nVALUES ('Sale', 199.99, '2023-05-17 12:00:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-17 12:05:00') * 1000)),\n       ('Refund', 50.00, '2023-05-18 15:00:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-18 15:10:00') * 1000)),\n       ('Sale', 129.99, '2023-05-19 10:30:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-19 10:35:00') * 1000));\n
    RandomUnixTimestamp transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomUnixTimestamp\"\n      params:\n        column: \"paid_at\"\n        max: 1715934239\n        unit: \"millisecond\"\n        min_unit: \"second\"\n        max_unit: \"second\"\n      dynamic_params:\n        min:\n          column: \"created_at\"\n          cast_to: \"TimestampToUnixSec\"\n

    Result:

    ColumnOriginalValueTransformedValue paid_at16843251000001708919030732"},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#example-generate-simple-random-unix-timestamps","title":"Example: Generate simple random Unix timestamps","text":"

    In this example, the RandomUnixTimestamp transformer generates random Unix timestamps for the paid_at column in the range from 1615934239 (Unix timestamp for 2021-03-16 12:03:59) to 1715934239 (Unix timestamp for 2024-05-17 12:03:59). The unit parameter is set to millisecond because the paid_at column stores timestamps in milliseconds.

    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomUnixTimestamp\"\n      params:\n        column: \"paid_at\"\n        min: 1615934239\n        max: 1715934239\n        unit: \"millisecond\"\n

    Result:

    ColumnOriginalValueTransformedValue paid_at16843251000001655768292548"},{"location":"built_in_transformers/standard_transformers/random_url/","title":"RandomURL","text":"

    The RandomURL transformer is designed to populate specified database columns with random URL (Uniform Resource Locator) addresses. This tool is highly beneficial for simulating web content, testing applications that require URL input, or anonymizing real web addresses in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_url/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_url/#description","title":"Description","text":"

    Utilizing advanced algorithms or libraries for generating URL strings, the RandomURL transformer injects random, plausible URLs into the designated database column. Each generated URL is structured to include the protocol (e.g., \"http://\", \"https://\"), domain name, and path, offering a realistic range of web addresses for various applications.

    "},{"location":"built_in_transformers/standard_transformers/random_url/#example-populate-random-urls-for-the-webpages-table","title":"Example: Populate random URLs for the webpages table","text":"

    This example illustrates how to configure the RandomURL transformer to populate the page_url column in a webpages table with random URLs, providing a broad spectrum of web addresses for testing or data simulation purposes.

    RandomURL transformer example
    - schema: \"public\"\n  name: \"webpages\"\n  transformers:\n    - name: \"RandomURL\"\n      params:\n        column: \"page_url\"\n        keep_null: false\n

    With this configuration, the page_url column will be filled with random URLs for each entry, replacing any existing non-NULL values. Setting the keep_null parameter to true allows for the preservation of existing NULL values within the column, accommodating scenarios where URL data may be intentionally omitted.

    "},{"location":"built_in_transformers/standard_transformers/random_username/","title":"RandomUsername","text":"

    The RandomUsername transformer is crafted to populate specified database columns with random usernames. This utility is crucial for applications that require the simulation of user data, testing systems with user login functionality, or anonymizing real usernames in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_username/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_username/#description","title":"Description","text":"

    By employing sophisticated algorithms or libraries capable of generating believable usernames, the RandomUsername transformer introduces random usernames into the specified database column. Each generated username is designed to be unique and plausible, incorporating a mix of letters, numbers, and possibly special characters, depending on the generation logic used.

    "},{"location":"built_in_transformers/standard_transformers/random_username/#example-populate-random-usernames-for-the-user_accounts-table","title":"Example: Populate random usernames for the user_accounts table","text":"

    This example demonstrates configuring the RandomUsername transformer to populate the username column in a user_accounts table with random usernames. This setup is ideal for creating a diverse and realistic user base for development, testing, or demonstration purposes.

    RandomUsername transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomUsername\"\n      params:\n        column: \"username\"\n        keep_null: false\n

    In this configuration, every entry in the username column will be updated with a random username, replacing any existing non-NULL values. If the keep_null parameter is set to true, then the transformer will preserve existing NULL values within the column, maintaining data integrity where usernames are not applicable or available.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/","title":"RandomUuid","text":"

    Generate a random universally unique identifier (UUID) using version 4.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, uuid keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_uuid/#description","title":"Description","text":"

    The RandomUuid transformer generates a random UUID. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/#example-updating-the-rowguid-column","title":"Example: Updating the rowguid column","text":"

    The following example replaces original UUID values of the rowguid column to randomly generated ones.

    RandomUuid transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"RandomUuid\"\n    params:\n      column: \"rowguid\"\n      keep_null: false\n

    Result

    ColumnOriginalValueTransformedValue rowguidf01251e5-96a3-448d-981e-0f99d789110d8ed8c4b2-7e7a-1e8d-f0f0-768e0e8ed0d0"},{"location":"built_in_transformers/standard_transformers/random_word/","title":"RandomWord","text":"

    The RandomWord transformer populates specified database columns with random words. Ideal for simulating textual content, enhancing linguistic datasets, or anonymizing text in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_word/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_word/#description","title":"Description","text":"

    The RandomWord transformer employs a mechanism to inject random words into a designated database column, supporting the generation of linguistically plausible and contextually diverse text. This transformer is particularly beneficial for creating rich text datasets for development, testing, or educational purposes without specifying the language, focusing on versatility and ease of use.

    "},{"location":"built_in_transformers/standard_transformers/random_word/#example-populate-random-words-for-the-content-table","title":"Example: Populate random words for the content table","text":"

    This example demonstrates configuring the RandomWord transformer to populate the tag column in the content table with random words. It is a straightforward approach to adding varied textual data for tagging or content categorization.

    RandomWord transformer example
    - schema: \"public\"\n  name: \"content\"\n  transformers:\n    - name: \"RandomWord\"\n      params:\n        column: \"tag\"\n        keep_null: false\n

    In this setup, the tag column will be updated with random words for each entry, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values in the column will remain unchanged, maintaining data integrity for records where textual data is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/","title":"RandomYearString","text":"

    The RandomYearString transformer is designed to populate specified database columns with random year strings. It is ideal for scenarios that require the representation of years without specific dates, such as manufacturing years of products, birth years of users, or any other context where only the year is relevant.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, int2, int4, int8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_year_string/#description","title":"Description","text":"

    The RandomYearString transformer leverages the faker library to generate strings representing random years. This allows for the easy generation of year data in a string format, adding versatility and realism to datasets that need to simulate or anonymize year-related information.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/#example-populate-random-year-strings-for-the-products-table","title":"Example: Populate random year strings for the products table","text":"

    This example shows how to use the RandomYearString transformer to fill the manufacturing_year column in the products table with random year strings, simulating the diversity of manufacturing dates.

    RandomYearString transformer example
    - schema: \"public\"\n  name: \"products\"\n  transformers:\n    - name: \"RandomYearString\"\n      params:\n        column: \"manufacturing_year\"\n        keep_null: false\n

    In this configuration, the manufacturing_year column will be populated with random year strings, replacing any existing non-NULL values. If keep_null is set to true, then existing NULL values in the column will be preserved.

    "},{"location":"built_in_transformers/standard_transformers/real_address/","title":"RealAddress","text":"

    Generates real addresses for specified database columns using the faker library. It supports customization of the generated address format through Go templates.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#parameters","title":"Parameters","text":"Name Properties Description Default Required Supported DB types columns Specifies the affected column names along with additional properties for each column Yes Various \u221f name The name of the column to be affected Yes string \u221f template A Go template string for formatting real address attributes Yes string \u221f keep_null Indicates whether NULL values should be preserved No bool"},{"location":"built_in_transformers/standard_transformers/real_address/#template-value-descriptions","title":"Template value descriptions","text":"

    The template parameter allows for the injection of real address attributes into a customizable template. The following values can be included in your template:

    These placeholders can be combined and formatted as desired within the template string to generate custom address formats.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#description","title":"Description","text":"

    The RealAddress transformer uses the faker library to generate realistic addresses, which can then be formatted according to a specified template and applied to selected columns in a database. It allows for the generated addresses to replace existing values or to preserve NULL values, based on the transformer's configuration.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#example-generate-real-addresses-for-the-employee-table","title":"Example: Generate Real addresses for the employee table","text":"

    This example shows how to configure the RealAddress transformer to generate real addresses for the address column in the employee table, using a custom format.

    RealAddress transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RealAddress\"\n      params:\n        columns:\n          - name: \"address\"\n            template: \"{{.Address}}, {{.City}}, {{.State}} {{.PostalCode}}\"\n            keep_null: false\n

    This configuration will generate real addresses with the format \"Street address, city, state postal code\" and apply them to the address column, replacing any existing non-NULL values.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/","title":"RegexpReplace","text":"

    Replace a string using a regular expression.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar regexp The regular expression pattern to search for in the column's value Yes - replace The replacement value. This value may be replaced with a captured group from the regexp parameter. Yes -"},{"location":"built_in_transformers/standard_transformers/regexp_replace/#description","title":"Description","text":"

    The RegexpReplace transformer replaces a string according to the applied regular expression. The valid regular expressions syntax is the same as the general syntax used by Perl, Python, and other languages. To be precise, it is the syntax accepted by RE2 and described in the Golang documentation, except for \\C.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/#example-removing-leading-prefix-from-loginid-column-value","title":"Example: Removing leading prefix from loginid column value","text":"

    In the following example, the original values from loginid matching the adventure-works\\{{ id_name }} format are replaced with {{ id_name }}.

    RegexpReplace transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"RegexpReplace\"\n    params:\n      column: \"loginid\"\n      regexp: \"adventure-works\\\\\\\\(.*)\"\n      replace: \"$1\"\n
    Expected result
    | column name | original value       | transformed |\n|-------------|----------------------|-------------|\n| loginid     | adventure-works\\ken0 | ken0        |\n

    Note

    YAML has control symbols, and using them without escaping may result in an error. In the example above, the prefix of id is separated by the \\ symbol. Since this symbol is a control symbol, we must escape it using \\\\. However, the '\\' symbol is also a control symbol for regular expressions, which is why we need to double-escape it as \\\\\\\\.

    "},{"location":"built_in_transformers/standard_transformers/replace/","title":"Replace","text":"

    Replace an original value by the provided one.

    "},{"location":"built_in_transformers/standard_transformers/replace/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any replace The value to replace Yes - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - validate Performs a decoding procedure via the PostgreSQL driver using the column type to ensure that values have correct type true No -"},{"location":"built_in_transformers/standard_transformers/replace/#description","title":"Description","text":"

    The Replace transformer replaces an original value from the specified column with the provided one. It can optionally run a validation check with the validate parameter to ensure that the values are of a correct type before starting transformation. The behaviour for NULL values can be configured using the keep_null parameter.

    "},{"location":"built_in_transformers/standard_transformers/replace/#example-updating-the-jobtitle-column","title":"Example: Updating the jobtitle column","text":"

    In the following example, the provided value: \"programmer\" is first validated through driver decoding. If the current value of the jobtitle column is not NULL, it will be replaced with programmer. If the current value is NULL, it will remain NULL.

    Replace transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"Replace\"\n    params:\n      column: \"jobtitle\"\n      value: \"programmer\"\n      keep_null: false\n      validate: true\n
    Expected result
    | column name | original value          | transformed |\n|-------------|-------------------------|-------------|\n| jobtitle    | Chief Executive Officer | programmer  |\n
    "},{"location":"built_in_transformers/standard_transformers/set_null/","title":"SetNull","text":"

    Set NULL value to a column.

    "},{"location":"built_in_transformers/standard_transformers/set_null/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any"},{"location":"built_in_transformers/standard_transformers/set_null/#description","title":"Description","text":"

    The SetNull transformer assigns a NULL value to a column. This transformer generates a warning if the affected column has a NOT NULL constraint.

    NULL constraint violation warning
    {\n  \"hash\": \"5a229ee964a4ba674a41a4d63dab5a8c\",\n  \"meta\": {\n    \"ColumnName\": \"jobtitle\",\n    \"ConstraintType\": \"NotNull\",\n    \"ParameterName\": \"column\",\n    \"SchemaName\": \"humanresources\",\n    \"TableName\": \"employee\",\n    \"TransformerName\": \"SetNull\"\n  },\n  \"msg\": \"transformer may produce NULL values but column has NOT NULL constraint\",\n  \"severity\": \"warning\"\n}\n
    "},{"location":"built_in_transformers/standard_transformers/set_null/#example-set-null-value-to-updated_at-column","title":"Example: Set NULL value to updated_at column","text":"SetNull transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformation:\n    - name: \"SetNull\"\n      params:\n        column: \"jobtitle\"\n
    Expected result
    | column name | original value          | transformed |\n|-------------|-------------------------|-------------|\n| jobtitle    | Chief Executive Officer | NULL        |\n
    "},{"location":"commands/","title":"Commands","text":""},{"location":"commands/#introduction","title":"Introduction","text":"Greenmask available commands
    greenmask \\\n--log-format=[json|text] \\\n--log-level=[debug|info|error] \\\n--config=config.yml \\\n[dump|list-dumps|delete|list-transformers|show-transformer|restore|show-dump]`\n

    You can use the following commands within Greenmask:

    For any of the commands mentioned above, you can include the following common flags:

    "},{"location":"commands/delete/","title":"delete command","text":"

    Delete dump from the storage with a specific ID

    Supported flags
    Usage:\n  greenmask delete [flags] [dumpId]\n\nFlags:\n      --before-date string   delete dumps older than the specified date in RFC3339Nano format: 2021-01-01T00:00:00.0Z\n      --dry-run              do not delete anything, just show what would be deleted\n      --prune-failed         prune failed dumps\n      --prune-unsafe         prune dumps with \"unknown-or-failed\" statuses. Works only with --prune-failed\n      --retain-for string    retain dumps for the specified duration in format: 1w2d3h4m5s6ms7us8ns\n      --retain-recent int    retain the most recent N completed dumps (default -1)\n
    delete dump by id
    greenmask --config config.yml delete 1723643249862\n
    delete dumps older than the specified date
    greenmask --config config.yml delete --before-date 2021-01-01T00:00:00.0Z --dry-run \n
    prune failed dumps
    greenmask --config config.yml delete --prune-failed --dry-run \n
    prune dumps with 'unknown-or-failed' statuses
    greenmask --config config.yml delete --prune-failed --prune-unsafe --dry-run\n
    retain dumps for the specified duration
    greenmask --config config.yml delete --retain-for 1w5d --dry-run\n
    retain the most recent N completed dumps
    greenmask --config config.yml delete --retain-recent 5 --dry-run\n
    "},{"location":"commands/dump/","title":"dump","text":""},{"location":"commands/dump/#dump-command","title":"dump command","text":"

    The dump command operates in the following way:

    1. Dumps the data from the source database.
    2. Validates the data for potential issues.
    3. Applies the defined transformations.
    4. Stores the transformed data in the specified storage location.

    Note that the dump command shares the same parameters and environment variables as pg_dump, allowing you to configure the restoration process as needed.

    Mostly it supports the same flags as the pg_dump utility, with some extra flags for Greenmask-specific features.

    Supported flags
      -b, --blobs                           include large objects in dump\n  -c, --clean                           clean (drop) database objects before recreating\n  -Z, --compress int                    compression level for compressed formats (default -1)\n  -C, --create                          include commands to create database in dump\n  -a, --data-only                       dump only the data, not the schema\n  -d, --dbname string                   database to dump (default \"postgres\")\n      --disable-dollar-quoting          disable dollar quoting, use SQL standard quoting\n      --enable-row-security             enable row security (dump only content user has access to)\n  -E, --encoding string                 dump the data in encoding ENCODING\n  -N, --exclude-schema strings          dump the specified schema(s) only\n  -T, --exclude-table strings           do NOT dump the specified table(s)\n      --exclude-table-data strings      do NOT dump data for the specified table(s)\n  -e, --extension strings               dump the specified extension(s) only\n      --extra-float-digits string       override default setting for extra_float_digits\n  -f, --file string                     output file or directory name\n  -h, --host string                     database server host or socket directory (default \"/var/run/postgres\")\n      --if-exists                       use IF EXISTS when dropping objects\n      --include-foreign-data strings    use IF EXISTS when dropping objects\n  -j, --jobs int                        use this many parallel jobs to dump (default 1)\n      --load-via-partition-root         load partitions via the root table\n      --lock-wait-timeout int           fail after waiting TIMEOUT for a table lock (default -1)\n  -B, --no-blobs                        exclude large objects in dump\n      --no-comments                     do not dump comments\n  -O, --no-owner                        skip restoration of object ownership in plain-text 
format\n  -X, --no-privileges                   do not dump privileges (grant/revoke)\n      --no-publications                 do not dump publications\n      --no-security-labels              do not dump security label assignments\n      --no-subscriptions                do not dump subscriptions\n      --no-sync                         do not wait for changes to be written safely to dis\n      --no-synchronized-snapshots       do not use synchronized snapshots in parallel jobs\n      --no-tablespaces                  do not dump tablespace assignments\n      --no-toast-compression            do not dump TOAST compression methods\n      --no-unlogged-table-data          do not dump unlogged table data\n      --pgzip                           use pgzip compression instead of gzip\n  -p, --port int                        database server port number (default 5432)\n      --quote-all-identifiers           quote all identifiers, even if not key words\n  -n, --schema strings                  dump the specified schema(s) only\n  -s, --schema-only                     dump only the schema, no data\n      --section string                  dump named section (pre-data, data, or post-data)\n      --serializable-deferrable         wait until the dump can run without anomalies\n      --snapshot string                 use given snapshot for the dump\n      --strict-names                    require table and/or schema include patterns to match at least one entity each\n  -t, --table strings                   dump the specified table(s) only\n      --test string                     connect as specified database user (default \"postgres\")\n      --use-set-session-authorization   use SET SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership\n  -U, --username string                 connect as specified database user (default \"postgres\")\n  -v, --verbose string                  verbose mode\n
    "},{"location":"commands/dump/#pgzip-compression","title":"Pgzip compression","text":"

    By default, Greenmask uses gzip compression to dump data. In most cases it is quite slow and does not utilize all available resources and is a bottleneck for IO operations. To speed up the dump process, you can use the --pgzip flag to use pgzip compression instead of gzip. This method splits the data into blocks, which are compressed in parallel, making it ideal for handling large volumes of data. The output remains a standard gzip file.

    "},{"location":"commands/list-dumps/","title":"list-dumps","text":""},{"location":"commands/list-dumps/#list-dumps-command","title":"list-dumps command","text":"

    The list-dumps command provides a list of all dumps stored in the storage. The list includes the following attributes:

    Example of list-dumps output:

    Info

    Greenmask uses a heartbeat mechanism to determine the status of a dump. A dump is considered failed if it lacks a \"done\" heartbeat or if the last heartbeat timestamp exceeds 30 minutes. Heartbeats are recorded every 15 minutes by the dump command while it is in progress. If greenmask fails unexpectedly, the heartbeat stops being updated, and after 30 minutes (twice the interval), the dump is classified as failed. The in progress status indicates that a dump is still ongoing.

    "},{"location":"commands/list-transformers/","title":"list-transformers","text":""},{"location":"commands/list-transformers/#list-transformers-command","title":"list-transformers command","text":"

    The list-transformers command provides a list of all the allowed transformers, including both standard and advanced transformers. This list can be helpful for searching for an appropriate transformer for your data transformation needs.

    To show a list of available transformers, use the following command:

    greenmask --config=config.yml list-transformers\n

    Supported flags:

    Example of list-transformers output:

    When using the list-transformers command, you receive a list of available transformers with essential information about each of them. Below are the key parameters for each transformer:

    The JSON call greenmask --config=config.yml list-transformers --format=json has the same attributes:

    JSON format output
    [\n  {\n    \"name\": \"Cmd\",\n    \"description\": \"Transform data via external program using stdin and stdout interaction\",\n    \"parameters\": [\n      {\n        \"name\": \"columns\",\n        \"supported_types\": [\n          \"any\"\n        ]\n      }\n    ]\n  },\n  {\n    \"name\": \"Dict\",\n    \"description\": \"Replace values matched by dictionary keys\",\n    \"parameters\": [\n      {\n        \"name\": \"column\",\n        \"supported_types\": [\n          \"any\"\n        ]\n      }\n    ]\n  }\n]\n
    "},{"location":"commands/restore/","title":"restore","text":""},{"location":"commands/restore/#restore-command","title":"restore command","text":"

    The restore command is used to restore a database from a previously created dump. You can specify the dump to restore by providing the dump ID or use the latest keyword to restore the latest completed dump.

    greenmask --config=config.yml restore DUMP_ID\n

    Alternatively, to restore the latest completed dump, use the following command:

    greenmask --config=config.yml restore latest\n

    Note that the restore command shares the same parameters and environment variables as pg_restore, allowing you to configure the restoration process as needed.

    Mostly it supports the same flags as the pg_restore utility, with some extra flags for Greenmask-specific features.

    Supported flags
          --batch-size int                         the number of rows to insert in a single batch during the COPY command (0 - all rows will be inserted in a single batch)\n  -c, --clean                                  clean (drop) database objects before recreating\n  -C, --create                                 create the target database\n  -a, --data-only                              restore only the data, no schema\n  -d, --dbname string                          connect to database name (default \"postgres\")\n      --disable-triggers                       disable triggers during data section restore\n      --enable-row-security                    enable row security\n  -N, --exclude-schema strings                 do not restore objects in this schema\n  -e, --exit-on-error                          exit on error, default is to continue\n  -f, --file string                            output file name (- for stdout)\n  -P, --function strings                       restore named function\n  -h, --host string                            database server host or socket directory (default \"/var/run/postgres\")\n      --if-exists                              use IF EXISTS when dropping objects\n  -i, --index strings                          restore named index\n      --inserts                                restore data as INSERT commands, rather than COPY\n  -j, --jobs int                               use this many parallel jobs to restore (default 1)\n      --list-format string                     use table of contents in format of text, json or yaml (default \"text\")\n      --no-comments                            do not restore comments\n      --no-data-for-failed-tables              do not restore data of tables that could not be created\n  -O, --no-owner                               skip restoration of object ownership\n  -X, --no-privileges                          skip restoration of access privileges (grant/revoke)\n      --no-publications                   
     do not restore publications\n      --no-security-labels                     do not restore security labels\n      --no-subscriptions                       ddo not restore subscriptions\n      --no-table-access-method                 do not restore table access methods\n      --no-tablespaces                         do not restore tablespace assignments\n      --on-conflict-do-nothing                 add ON CONFLICT DO NOTHING to INSERT commands\n      --overriding-system-value                use OVERRIDING SYSTEM VALUE clause for INSERTs\n      --pgzip                                  use pgzip decompression instead of gzip\n  -p, --port int                               database server port number (default 5432)\n      --restore-in-order                       restore tables in topological order, ensuring that dependent tables are not restored until the tables they depend on have been restored\n  -n, --schema strings                         restore only objects in this schema\n  -s, --schema-only                            restore only the schema, no data\n      --section string                         restore named section (pre-data, data, or post-data)\n  -1, --single-transaction                     restore as a single transaction\n      --strict-names                           restore named section (pre-data, data, or post-data) match at least one entity each\n  -S, --superuser string                       superuser user name to use for disabling triggers\n  -t, --table strings                          restore named relation (table, view, etc.)\n  -T, --trigger strings                        restore named trigger\n  -L, --use-list string                        use table of contents from this file for selecting/ordering output\n      --use-session-replication-role-replica   use SET session_replication_role = 'replica' to disable triggers during data section restore (alternative for --disable-triggers)\n      --use-set-session-authorization          use SET 
SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership\n  -U, --username string                        connect as specified database user (default \"postgres\")\n  -v, --verbose string                         verbose mode\n
    "},{"location":"commands/restore/#extra-features","title":"Extra features","text":""},{"location":"commands/restore/#inserts-and-error-handling","title":"Inserts and error handling","text":"

    Warning

    Insert commands are a lot slower than COPY commands. Use this feature only when necessary.

    By default, Greenmask restores data using the COPY command. If you prefer to restore data using INSERT commands, you can use the --inserts flag. This flag allows you to manage errors that occur during the execution of INSERT commands. By configuring an error and constraint exclusion list in the config, you can skip certain errors and continue inserting subsequent rows from the dump.

    This can be useful when adding new records to an existing dump, but you don't want the process to stop if some records already exist in the database or violate certain constraints.

    By adding the --on-conflict-do-nothing flag, it generates INSERT statements with the ON CONFLICT DO NOTHING clause, similar to the original pg_dump option. However, this approach only works for unique or exclusion constraints. If a foreign key is missing in the referenced table or any other constraint is violated, the insertion will still fail. To handle these issues, you can define an exclusion list in the config.

    example with inserts and error handling
    greenmask --config=config.yml restore DUMP_ID --inserts --on-conflict-do-nothing\n

    By adding the --overriding-system-value flag, it generates INSERT statements with the OVERRIDING SYSTEM VALUE clause, which allows you to insert data into identity columns.

    example of GENERATED ALWAYS AS IDENTITY column
    CREATE TABLE people (\n    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,\n    generated text GENERATED ALWAYS AS (id || first_name) STORED,\n    first_name text\n);\n
    example with inserts
    greenmask --config=config.yml restore DUMP_ID --inserts --overriding-system-value\n
    "},{"location":"commands/restore/#restoration-in-topological-order","title":"Restoration in topological order","text":"

    By default, Greenmask restores tables in the order they are listed in the dump file. To restore tables in topological order, use the --restore-in-order flag. This flag ensures that dependent tables are not restored until the tables they depend on have been restored.

    This is useful when you have the schema already created with foreign keys and other constraints, and you want to insert data into the tables in the correct order or catch up the target database with the new data.

    Warning

    Greenmask cannot guarantee restoration in topological order when the schema contains cycles. The only way to restore tables with cyclic dependencies is to temporarily remove the foreign key constraint (to break the cycle), restore the data, and then re-add the foreign key constraint once the data restoration is complete.

    If your database has cyclic dependencies you will be notified about it but the restoration will continue.

    2024-08-16T21:39:50+03:00 WRN cycle between tables is detected: cannot guarantee the order of restoration within cycle cycle=[\"public.employees\",\"public.departments\",\"public.projects\",\"public.employees\"]\n
    "},{"location":"commands/restore/#pgzip-decompression","title":"Pgzip decompression","text":"

    By default, Greenmask uses gzip decompression to restore data. In most cases it is quite slow and does not utilize all available resources and is a bottleneck for IO operations. To speed up the restoration process, you can use the --pgzip flag to use pgzip decompression instead of gzip. This method splits the data into blocks, which are decompressed in parallel, making it ideal for handling large volumes of data.

    example with pgzip decompression
    greenmask --config=config.yml restore latest --pgzip\n
    "},{"location":"commands/restore/#restore-data-batching","title":"Restore data batching","text":"

    The COPY command returns the error only on transaction commit. This means that if you have a large dump and an error occurs, you will have to wait until the end of the transaction to see the error message. To avoid this, you can use the --batch-size flag to specify the number of rows to insert in a single batch during the COPY command. If an error occurs during the batch insertion, the error message will be displayed immediately. The data will be committed only if all batches are inserted successfully.

    This is useful when you want to be notified of errors as immediately as possible without waiting for the entire table to be restored.

    Warning

    The batch size should be chosen carefully. If the batch size is too small, the restoration process will be slow. If the batch size is too large, you may not be able to identify the error row.

    In the example below, the batch size is set to 1000 rows. This means that 1000 rows will be inserted in a single batch, so you will be notified of any errors immediately after each batch is inserted.

    example with batch size
    greenmask --config=config.yml restore latest --batch-size 1000\n
    "},{"location":"commands/show-dump/","title":"show-dump","text":""},{"location":"commands/show-dump/#show-dump-command","title":"show-dump command","text":"

    This command provides details about all objects and data that can be restored, similar to the pg_restore -l command in PostgreSQL. It helps you inspect the contents of the dump before performing the actual restoration.

    Parameters:

    To display metadata information about a dump, use the following command:

    greenmask --config=config.yml show-dump dumpID\n
    Text output example
    ;\n; Archive created at 2023-10-30 12:52:38 UTC\n; dbname: demo\n; TOC Entries: 17\n; Compression: -1\n; Dump Version: 15.4\n; Format: DIRECTORY\n; Integer: 4 bytes\n; Offset: 8 bytes\n; Dumped from database version: 15.4\n; Dumped by pg_dump version: 15.4\n;\n;\n; Selected TOC Entries:\n;\n3444; 0 0 ENCODING - ENCODING\n3445; 0 0 STDSTRINGS - STDSTRINGS\n3446; 0 0 SEARCHPATH - SEARCHPATH\n3447; 1262 24970 DATABASE - demo postgres\n3448; 0 0 DATABASE PROPERTIES - demo postgres\n222; 1259 24999 TABLE bookings flights postgres\n223; 1259 25005 SEQUENCE bookings flights_flight_id_seq postgres\n3460; 0 0 SEQUENCE OWNED BY bookings flights_flight_id_seq postgres\n3281; 2604 25030 DEFAULT bookings flights flight_id postgres\n3462; 0 24999 TABLE DATA bookings flights postgres\n3289; 2606 25044 CONSTRAINT bookings flights flights_flight_no_scheduled_departure_key postgres\n3291; 2606 25046 CONSTRAINT bookings flights flights_pkey postgres\n3287; 1259 42848 INDEX bookings flights_aircraft_code_status_idx postgres\n3292; 1259 42847 INDEX bookings flights_status_aircraft_code_idx postgres\n3293; 2606 25058 FK CONSTRAINT bookings flights flights_aircraft_code_fkey postgres\n3294; 2606 25063 FK CONSTRAINT bookings flights flights_arrival_airport_fkey postgres\n3295; 2606 25068 FK CONSTRAINT bookings flights flights_departure_airport_fkey postgres\n
    JSON output example

    {\n  \"startedAt\": \"2023-10-29T20:50:19.948017+02:00\", // (1)\n  \"completedAt\": \"2023-10-29T20:50:22.19333+02:00\", // (2)\n  \"originalSize\": 4053842, // (3)\n  \"compressedSize\": 686557, // (4)\n  \"transformers\": [ // (5)\n    {\n      \"Schema\": \"bookings\", // (6)\n      \"Name\": \"flights\", // (7)\n      \"Query\": \"\", // (8)\n      \"Transformers\": [ // (9)\n        {\n          \"Name\": \"RandomDate\", // (10)\n          \"Params\": { // (11)\n            \"column\": \"c2NoZWR1bGVkX2RlcGFydHVyZQ==\",\n            \"max\": \"MjAyMy0wMS0wMiAwMDowMDowMC4wKzAz\",\n            \"min\": \"MjAyMy0wMS0wMSAwMDowMDowMC4wKzAz\"\n          }\n        }\n      ],\n      \"ColumnsTypeOverride\": null // (12)\n    }\n  ],\n  \"header\": { // (13)\n    \"creationDate\": \"2023-10-29T20:50:20+02:00\",\n    \"dbName\": \"demo\",\n    \"tocEntriesCount\": 15,\n    \"dumpVersion\": \"16.0 (Homebrew)\",\n    \"format\": \"TAR\",\n    \"integer\": 4,\n    \"offset\": 8,\n    \"dumpedFrom\": \"16.0 (Debian 16.0-1.pgdg120+1)\",\n    \"dumpedBy\": \"16.0 (Homebrew)\",\n    \"tocFileSize\": 8090,\n    \"compression\": 0\n  },\n  \"entries\": [ // (14)\n    {\n      \"dumpId\": 3416,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"ENCODING\",\n      \"schema\": \"\",\n      \"name\": \"ENCODING\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3417,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"STDSTRINGS\",\n      \"schema\": \"\",\n      \"name\": \"STDSTRINGS\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3418,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": 
\"SEARCHPATH\",\n      \"schema\": \"\",\n      \"name\": \"SEARCHPATH\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3419,\n      \"databaseOid\": 16384,\n      \"objectOid\": 1262,\n      \"objectType\": \"DATABASE\",\n      \"schema\": \"\",\n      \"name\": \"demo\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3420,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"DATABASE PROPERTIES\",\n      \"schema\": \"\",\n      \"name\": \"demo\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 222,\n      \"databaseOid\": 16414,\n      \"objectOid\": 1259,\n      \"objectType\": \"TABLE\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 223,\n      \"databaseOid\": 16420,\n      \"objectOid\": 1259,\n      \"objectType\": \"SEQUENCE\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights_flight_id_seq\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3432,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"SEQUENCE OWNED BY\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights_flight_id_seq\",\n      
\"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        223\n      ]\n    },\n    {\n      \"dumpId\": 3254,\n      \"databaseOid\": 16445,\n      \"objectOid\": 2604,\n      \"objectType\": \"DEFAULT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flight_id\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        223,\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3434,\n      \"databaseOid\": 16414,\n      \"objectOid\": 0,\n      \"objectType\": \"TABLE DATA\",\n      \"schema\": \"\\\"bookings\\\"\",\n      \"name\": \"\\\"flights\\\"\",\n      \"owner\": \"\\\"postgres\\\"\",\n      \"section\": \"Data\",\n      \"originalSize\": 4045752,\n      \"compressedSize\": 678467,\n      \"fileName\": \"3434.dat.gz\",\n      \"dependencies\": []\n    },\n    {\n      \"dumpId\": 3261,\n      \"databaseOid\": 16461,\n      \"objectOid\": 2606,\n      \"objectType\": \"CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_flight_no_scheduled_departure_key\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222,\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3263,\n      \"databaseOid\": 16463,\n      \"objectOid\": 2606,\n      \"objectType\": \"CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_pkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3264,\n      \"databaseOid\": 16477,\n      \"objectOid\": 
2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_aircraft_code_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3265,\n      \"databaseOid\": 16482,\n      \"objectOid\": 2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_arrival_airport_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3266,\n      \"databaseOid\": 16487,\n      \"objectOid\": 2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_departure_airport_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    }\n  ]\n}\n

    1. The date when the backup has been initiated, also indicating the snapshot date.
    2. The date when the backup process was successfully completed.
    3. The original size of the backup in bytes.
    4. The size of the backup after compression in bytes.
    5. A list of tables that underwent transformation during the backup.
    6. The schema name of the table.
    7. The name of the table.
    8. Custom query override, if applicable.
    9. A list of transformers that were applied during the backup.
    10. The name of the transformer.
    11. The parameters provided for the transformer.
    12. A mapping of overridden column types.
    13. The header information in the table of contents file. This provides the same details as the --format=text output in the previous snippet.
    14. The list of restoration entries. This offers the same information as the --format=text output in the previous snippet.

    Note

    The json format provides more detailed information compared to the text format. The text format is primarily used for backward compatibility and for generating a restoration list that can be used with pg_restore -L listfile. On the other hand, the json format provides comprehensive metadata about the dump, including information about the applied transformers and their parameters. The json format is especially useful for detailed dump introspection.

    "},{"location":"commands/show-transformer/","title":"show-transformer","text":""},{"location":"commands/show-transformer/#show-transformer-command","title":"show-transformer command","text":"

    This command prints out detailed information about a transformer by a provided name, including specific attributes to help you understand and configure the transformer effectively.

    To show detailed information about a transformer, use the following command:

    greenmask --config=config.yml show-transformer TRANSFORMER_NAME\n

    Supported flags:

    Example of show-transformer output:

    When using the show-transformer command, you receive detailed information about the transformer and its parameters and their possible attributes. Below are the key parameters for each transformer:

    Warning

    The default value in JSON format is base64 encoded. This might be changed in a later version of Greenmask.

    JSON output example
    [\n  {\n    \"properties\": {\n      \"name\": \"NoiseFloat\",\n      \"description\": \"Make noise float for int\",\n      \"is_custom\": false\n    },\n    \"parameters\": [\n      {\n        \"name\": \"column\",\n        \"description\": \"column name\",\n        \"required\": true,\n        \"is_column\": true,\n        \"is_column_container\": false,\n        \"column_properties\": {\n          \"max_length\": -1,\n          \"affected\": true,\n          \"allowed_types\": [\n            \"float4\",\n            \"float8\",\n            \"numeric\"\n          ],\n          \"skip_on_null\": true\n        }\n      },\n      {\n        \"name\": \"ratio\",\n        \"description\": \"max random percentage for noise\",\n        \"required\": false,\n        \"is_column\": false,\n        \"is_column_container\": false,\n        \"default_value\": \"MC4x\"\n      },\n      {\n        \"name\": \"decimal\",\n        \"description\": \"decimal of noised float value (number of digits after coma)\",\n        \"required\": false,\n        \"is_column\": false,\n        \"is_column_container\": false,\n        \"default_value\": \"NA==\"\n      }\n    ]\n  }\n]\n
    "},{"location":"commands/validate/","title":"validate command","text":"

    The validate command allows you to perform a validation procedure and compare transformed data.

    Below is a list of all supported flags for the validate command:

    Supported flags
    Usage:\n  greenmask validate [flags]\n\nFlags:\n      --data                  Perform test dump for --rows-limit rows and print it pretty\n      --diff                  Find difference between original and transformed data\n      --format string         Format of output. possible values [text|json] (default \"text\")\n      --rows-limit uint       Check tables dump only for specific tables (default 10)\n      --schema                Make a schema diff between previous dump and the current state\n      --table strings         Check tables dump only for specific tables\n      --table-format string   Format of table output (only for --format=text). Possible values [vertical|horizontal] (default \"vertical\")\n      --transformed-only      Print only transformed column and primary key\n      --warnings              Print warnings\n

    Validate command can exit with non-zero code when:

    All of those cases may be used for CI/CD pipelines to stop the process when something went wrong. This is especially useful when the --schema flag is used — it helps avoid data leakage when the schema has changed.

    You can use the --table flag multiple times to specify the tables you want to check. Tables can be written with or without schema names (e.g., public.table_name or table_name). If you specify multiple tables from different schemas, an error will be thrown.

    To start validation, use the following command:

    greenmask --config=config.yml validate \\\n  --warnings \\\n  --data \\\n  --diff \\\n  --schema \\\n  --format=text \\\n  --table-format=vertical \\\n  --transformed-only \\\n  --rows-limit=1\n
    Validation output example
    2024-03-15T19:46:12+02:00 WRN ValidationWarning={\"hash\":\"aa808fb574a1359c6606e464833feceb\",\"meta\":{\"ColumnName\":\"birthdate\",\"ConstraintDef\":\"CHECK (birthdate \\u003e= '1930-01-01'::date AND birthdate \\u003c= (now() - '18 years'::interval))\",\"ConstraintName\":\"humanresources\",\"ConstraintSchema\":\"humanresources\",\"ConstraintType\":\"Check\",\"ParameterName\":\"column\",\"SchemaName\":\"humanresources\",\"TableName\":\"employee\",\"TransformerName\":\"NoiseDate\"},\"msg\":\"possible constraint violation: column has Check constraint\",\"severity\":\"warning\"}\n

    The validation output will provide detailed information about potential constraint violations and schema issues. Each line contains nested JSON data under the ValidationWarning key, offering insights into the affected part of the configuration and potential constraint violations.

    Pretty formatted validation warning

    { \n  \"hash\": \"aa808fb574a1359c6606e464833feceb\", // (13)\n  \"meta\": { // (1)\n    \"ColumnName\": \"birthdate\", // (2)\n    \"ConstraintDef\": \"CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\", // (3)\n    \"ConstraintName\": \"humanresources\", // (4)\n    \"ConstraintSchema\": \"humanresources\", // (5)\n    \"ConstraintType\": \"Check\", // (6)\n    \"ParameterName\": \"column\", // (7)\n    \"SchemaName\": \"humanresources\", // (8)\n    \"TableName\": \"employee\", // (9)\n    \"TransformerName\": \"NoiseDate\" // (10)\n  },\n  \"msg\": \"possible constraint violation: column has Check constraint\", // (11)\n  \"severity\": \"warning\" // (12)\n}\n

    1. Detailed metadata. The validation output provides comprehensive metadata to pinpoint the source of problems.
    2. Column name indicates the name of the affected column.
    3. Constraint definition specifies the definition of the constraint that may be violated.
    4. Constraint name identifies the name of the constraint that is potentially violated.
    5. Constraint schema name indicates the schema in which the constraint is defined.
    6. Type of constraint represents the type of constraint and can be one of the following:
      * ForeignKey\n* Check\n* NotNull\n* PrimaryKey\n* PrimaryKeyReferences\n* Unique\n* Length\n* Exclusion\n* TriggerConstraint\n
    7. Name of affected parameter — typically, this is the name of the column parameter that is relevant to the validation warning.
    8. Table schema name specifies the schema name of the affected table.
    9. Table name identifies the name of the table where the problem occurs.
    10. Transformer name indicates the name of the transformer responsible for the transformation.
    11. Validation warning description provides a detailed description of the validation warning and the reason behind it.
    12. Severity of validation warning indicates the severity level of the validation warning and can be one of the following:
      * error\n* warning\n* info\n* debug\n
    13. Hash is a unique identifier of the validation warning. It is used to resolve the warning in the config file.

    Note

    A validation warning with a severity level of \"error\" is considered critical and must be addressed before the dump operation can proceed. Failure to resolve such warnings will prevent the dump operation from being executed.

    Schema diff changed output example
    2024-03-15T19:46:12+02:00 WRN Database schema has been changed Hint=\"Check schema changes before making new dump\" PreviousDumpId=1710520855501\n2024-03-15T19:46:12+02:00 WRN Column renamed Event=ColumnRenamed Signature={\"CurrentColumnName\":\"id1\",\"PreviousColumnName\":\"id\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Column type changed Event=ColumnTypeChanged Signature={\"ColumnName\":\"id\",\"CurrentColumnType\":\"bigint\",\"CurrentColumnTypeOid\":\"20\",\"PreviousColumnType\":\"integer\",\"PreviousColumnTypeOid\":\"23\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Column created Event=ColumnCreated Signature={\"ColumnName\":\"name\",\"ColumnType\":\"text\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Table created Event=TableCreated Signature={\"SchemaName\":\"public\",\"TableName\":\"test1\",\"TableOid\":\"20563\"}\n

    Example of validation diff:

    The validation diff is presented in a neatly formatted table. In this table:

    The whole validate command may be run in JSON format, including logging, making it easy to parse the structure.

    greenmask --config=config.yml validate \\\n  --warnings \\\n  --data \\\n  --diff \\\n  --schema \\\n  --format=json \\\n  --table-format=vertical \\\n  --transformed-only \\\n  --rows-limit=1 \\\n  --log-format=json\n

    The json object result

    The validation warningSchema diff eventsTransformation diff line
    {\n  \"level\": \"warn\",\n  \"ValidationWarning\": {\n    \"msg\": \"possible constraint violation: column has Check constraint\",\n    \"severity\": \"warning\",\n    \"meta\": {\n      \"ColumnName\": \"birthdate\",\n      \"ConstraintDef\": \"CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\",\n      \"ConstraintName\": \"humanresources\",\n      \"ConstraintSchema\": \"humanresources\",\n      \"ConstraintType\": \"Check\",\n      \"ParameterName\": \"column\",\n      \"SchemaName\": \"humanresources\",\n      \"TableName\": \"employee\",\n      \"TransformerName\": \"NoiseDate\"\n    },\n    \"hash\": \"aa808fb574a1359c6606e464833feceb\"\n  },\n  \"time\": \"2024-03-15T20:01:51+02:00\"\n}\n
    {\n  \"level\": \"warn\",\n  \"PreviousDumpId\": \"1710520855501\",\n  \"Diff\": [\n    {\n      \"event\": \"ColumnRenamed\",\n      \"signature\": {\n        \"CurrentColumnName\": \"id1\",\n        \"PreviousColumnName\": \"id\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"ColumnTypeChanged\",\n      \"signature\": {\n        \"ColumnName\": \"id\",\n        \"CurrentColumnType\": \"bigint\",\n        \"CurrentColumnTypeOid\": \"20\",\n        \"PreviousColumnType\": \"integer\",\n        \"PreviousColumnTypeOid\": \"23\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"ColumnCreated\",\n      \"signature\": {\n        \"ColumnName\": \"name\",\n        \"ColumnType\": \"text\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"TableCreated\",\n      \"signature\": {\n        \"SchemaName\": \"public\",\n        \"TableName\": \"test1\",\n        \"TableOid\": \"20563\"\n      }\n    }\n  ],\n  \"Hint\": \"Check schema changes before making new dump\",\n  \"time\": \"2024-03-15T20:01:51+02:00\",\n  \"message\": \"Database schema has been changed\"\n}\n
    {\n  \"schema\": \"humanresources\",\n  \"name\": \"employee\",\n  \"primary_key_columns\": [\n    \"businessentityid\"\n  ],\n  \"with_diff\": true,\n  \"transformed_only\": true,\n  \"records\": [\n    {\n      \"birthdate\": {\n        \"original\": \"1969-01-29\",\n        \"transformed\": \"1964-10-20\",\n        \"equal\": false,\n        \"implicit\": true\n      },\n      \"businessentityid\": {\n        \"original\": \"1\",\n        \"transformed\": \"1\",\n        \"equal\": true,\n        \"implicit\": true\n      }\n    }\n  ]\n}\n
    "},{"location":"release_notes/greenmask_0_1_0/","title":"Greenmask 0.1.0","text":"

    We are excited to announce the release of Greenmask v0.1.0, marking the first production-ready version. This release addresses various bug fixes, introduces improvements, and includes documentation refactoring for enhanced clarity.

    "},{"location":"release_notes/greenmask_0_1_0/#new-features","title":"New features","text":""},{"location":"release_notes/greenmask_0_1_0/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_0/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_0/#ecosystem-changes","title":"Ecosystem changes","text":""},{"location":"release_notes/greenmask_0_1_0/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_0_beta/","title":"Greenmask 0.0.1 Beta","text":"

    We are excited to announce the beta release of Greenmask, a versatile and open-source utility for PostgreSQL logical backup dumping, anonymization, and restoration. Greenmask is perfect for routine backup and restoration tasks. It facilitates anonymization and data masking for staging environments and analytics.

    This release introduces a range of features aimed at enhancing database management and security.

    "},{"location":"release_notes/greenmask_0_1_0_beta/#key-features","title":"Key features","text":""},{"location":"release_notes/greenmask_0_1_0_beta/#download","title":"Download","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_1/","title":"Greenmask 0.1.1","text":"

    This release introduces a suite of new transformers, significantly enhancing Greenmask's capabilities for obfuscating PostgreSQL databases.

    "},{"location":"release_notes/greenmask_0_1_1/#new-features","title":"New features","text":"

    Added the following new transformers:

    Transformer Description RandomLatitude Generates a random latitude value RandomLongitude Generates a random longitude value RandomUnixTime Generates a random Unix timestamp RandomMonthName Generates the name of a random month RandomYearString Generates a random year as a string RandomDayOfWeek Generates a random day of the week RandomDayOfMonth Generates a random day of the month RandomCentury Generates a random century RandomTimezone Generates a random timezone RandomEmail Generates a random email address RandomMacAddress Generates a random MAC address RandomDomainName Generates a random domain name RandomURL Generates a random URL RandomUsername Generates a random username RandomIPv4 Generates a random IPv4 address RandomIPv6 Generates a random IPv6 address RandomPassword Generates a random password RandomWord Generates a random word RandomSentence Generates a random sentence RandomParagraph Generates a random paragraph RandomCCType Generates a random credit card type RandomCCNumber Generates a random credit card number RandomCurrency Generates a random currency code RandomAmountWithCurrency Generates a random monetary amount with currency RandomTitleMale Generates a random title for males RandomTitleFemale Generates a random title for females RandomFirstName Generates a random first name RandomFirstNameMale Generates a random male first name RandomFirstNameFemale Generates a random female first name RandomLastName Generates a random last name RandomName Generates a full random name RandomPhoneNumber Generates a random phone number RandomTollFreePhoneNumber Generates a random toll-free phone number RandomE164PhoneNumber Generates a random phone number in E.164 format RealAddress Generates a real address"},{"location":"release_notes/greenmask_0_1_1/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_10/","title":"Greenmask 0.1.10","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_10/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_10/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_11/","title":"Greenmask 0.1.11","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_11/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_11/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_12/","title":"Greenmask 0.1.12","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_12/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_12/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_13/","title":"Greenmask 0.1.13","text":"

    This release introduces only improvements in documentation deployment. The core greenmask utility does not contain any changes.

    "},{"location":"release_notes/greenmask_0_1_13/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_13/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_14/","title":"Greenmask 0.1.14","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_14/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_14/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_2/","title":"Greenmask 0.1.2","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_2/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_2/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_3/","title":"Greenmask 0.1.3","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_3/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_3/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_4/","title":"Greenmask 0.1.4","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_4/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_4/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_5/","title":"Greenmask 0.1.5","text":"

    This release introduces a new Greenmask command, improvements, bug fixes, and numerous documentation updates.

    "},{"location":"release_notes/greenmask_0_1_5/#new-features","title":"New features","text":"

    Added a new Greenmask CLI command, show-transformer, that shows detailed information about a specified transformer.

    "},{"location":"release_notes/greenmask_0_1_5/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_5/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_5/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_6/","title":"Greenmask 0.1.6","text":"

    This is a minor release that introduces a bug hotfix.

    "},{"location":"release_notes/greenmask_0_1_6/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_6/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_7/","title":"Greenmask 0.1.7","text":"

    This release introduces a new Greenmask command, improvements, bug fixes, and documentation update.

    "},{"location":"release_notes/greenmask_0_1_7/#new-features","title":"New features","text":""},{"location":"release_notes/greenmask_0_1_7/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_7/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_7/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_8/","title":"Greenmask 0.1.8","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_8/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_8/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_8/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_9/","title":"Greenmask 0.1.9","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_9/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_9/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_9/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_2_0/","title":"Greenmask 0.2.0","text":"

    This is one of the biggest releases since Greenmask was founded. We've been in close contact with our users, gathering feedback, and working hard to make Greenmask more flexible, reliable, and user-friendly.

    This major release introduces exciting new features such as database subsetting, pgzip support, restoration in topological order, and refactored transformers, significantly enhancing Greenmask's flexibility to better meet business needs. It also includes several fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_0/#preface","title":"Preface","text":"

    This release is a major milestone that significantly expands Greenmask's functionality, transforming it into a simple, extensible, and reliable solution for database security, data anonymization, and everyday operations. Our goal is to create a core system that can serve as a foundation for comprehensive dynamic staging environments and robust data security.

    "},{"location":"release_notes/greenmask_0_2_0/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0/#core","title":"Core","text":""},{"location":"release_notes/greenmask_0_2_0/#transformers","title":"Transformers","text":""},{"location":"release_notes/greenmask_0_2_0/#fixes-and-improvements","title":"Fixes and improvements","text":""},{"location":"release_notes/greenmask_0_2_0/#full-changelog-v0114v020","title":"Full Changelog: v0.1.14...v0.2.0","text":""},{"location":"release_notes/greenmask_0_2_0/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_0_b1/","title":"Greenmask 0.2.0b1 (pre-release)","text":"

    This major beta release introduces new features and refactored transformers, significantly enhancing Greenmask's flexibility to better meet business needs.

    "},{"location":"release_notes/greenmask_0_2_0_b1/#changes-overview","title":"Changes overview","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#core","title":"Core","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#documentation","title":"Documentation","text":"

    Documentation has been significantly refactored. New information about features and updates to transformer descriptions have been added.

    "},{"location":"release_notes/greenmask_0_2_0_b1/#transformers","title":"Transformers","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#full-changelog-v0114v020b1","title":"Full Changelog: v0.1.14...v0.2.0b1","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#playground-usage-for-beta-version","title":"Playground usage for beta version","text":"

    If you want to run a Greenmask playground for the beta version v0.2.0b1 execute:

    git checkout tags/v0.2.0b1 -b v0.2.0b1\ndocker-compose run greenmask-from-source\n
    "},{"location":"release_notes/greenmask_0_2_0_b1/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_0_b2/","title":"Greenmask 0.2.0b2 (pre-release)","text":"

    This major beta release introduces new features such as the database subset, pgzip support, restoration in topological order, and many more. It also includes fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_0_b2/#preface","title":"Preface","text":"

    This release is a major milestone that significantly expands Greenmask's functionality, transforming it into a simple, extensible, and reliable solution for database security, data anonymization, and everyday operations. Our goal is to create a core system that can serve as a foundation for comprehensive dynamic staging environments and robust data security.

    "},{"location":"release_notes/greenmask_0_2_0_b2/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#fixes-and-improvements","title":"Fixes and improvements","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#full-changelog-v020b1v020b2","title":"Full Changelog: v0.2.0b1...v0.2.0b2","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#playground-usage-for-beta-version","title":"Playground usage for beta version","text":"

    If you want to run a Greenmask playground for the beta version v0.2.0b2 execute:

    git checkout tags/v0.2.0b2 -b v0.2.0b2\ndocker-compose run greenmask-from-source\n
    "},{"location":"release_notes/greenmask_0_2_0_b2/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_1/","title":"Greenmask 0.2.1","text":"

    This release introduces two new features: transformation conditions and transformation inheritance for primary and foreign keys. It also includes several bug fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_1/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_1/#full-changelog-v020v021","title":"Full Changelog: v0.2.0...v0.2.1","text":""},{"location":"release_notes/greenmask_0_2_1/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_2/","title":"Greenmask 0.2.2","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_2/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_2/#full-changelog-v021v022","title":"Full Changelog: v0.2.1...v0.2.2","text":""},{"location":"release_notes/greenmask_0_2_2/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_3/","title":"Greenmask 0.2.3","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_3/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_3/#full-changelog-v022v023","title":"Full Changelog: v0.2.2...v0.2.3","text":""},{"location":"release_notes/greenmask_0_2_3/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_4/","title":"Greenmask 0.2.4","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_4/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_4/#full-changelog-v023v024","title":"Full Changelog: v0.2.3...v0.2.4","text":""},{"location":"release_notes/greenmask_0_2_4/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_5/","title":"Greenmask 0.2.5","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_5/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_5/#full-changelog-v024v025","title":"Full Changelog: v0.2.4...v0.2.5","text":""},{"location":"release_notes/greenmask_0_2_5/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"About Greenmask","text":""},{"location":"#dump-anonymization-and-synthetic-data-generation-tool","title":"Dump anonymization and synthetic data generation tool","text":"

    Greenmask is a powerful open-source utility that is designed for logical database backup dumping, anonymization, synthetic data generation and restoration. It has ported PostgreSQL libraries, making it reliable. It is stateless and does not require any changes to your database schema. It is designed to be highly customizable and backward-compatible with existing PostgreSQL utilities, fast and reliable.

    "},{"location":"#key-features","title":"Key features","text":""},{"location":"#use-cases","title":"Use cases","text":"

    Greenmask is ideal for various scenarios, including:

    "},{"location":"#links","title":"Links","text":""},{"location":"architecture/","title":"Architecture","text":""},{"location":"architecture/#introduction","title":"Introduction","text":"

    It is evident that the most appropriate approach for executing logical backup dumping and restoration is by leveraging the core PostgreSQL utilities, specifically pg_dump and pg_restore. Greenmask has been purposefully designed to align with PostgreSQL's native utilities, ensuring compatibility. Greenmask primarily handles data dumping operations independently and delegates the responsibilities of schema dumping and restoration to pg_dump and pg_restore respectively, maintaining seamless integration with PostgreSQL's standard tools.

    "},{"location":"architecture/#backup-process","title":"Backup process","text":"

    The process of backing up PostgreSQL databases is divided into three distinct sections:

    Greenmask focuses exclusively on the data section during runtime. It delegates the handling of the pre-data and post-data sections to the core PostgreSQL utilities, pg_dump and pg_restore.

    Greenmask employs the directory format of pg_dump and pg_restore. This format is particularly suitable for parallel execution and partial restoration, and it includes clear metadata files that aid in determining the backup and restoration steps. Greenmask has been optimized to work seamlessly with remote storage systems and anonymization procedures.

    When performing data dumping, Greenmask utilizes the COPY command in TEXT format, maintaining reliability and compatibility with the vanilla PostgreSQL utilities.

    Additionally, Greenmask supports parallel execution, significantly reducing the time required for the dumping process.

    "},{"location":"architecture/#storage-options","title":"Storage options","text":"

    The core PostgreSQL utilities, pg_dump and pg_restore, traditionally operate with files in a directory format, offering no alternative methods. To meet modern backup requirements and provide flexible approaches, Greenmask introduces the concept of storages.

    "},{"location":"architecture/#restoration-process","title":"Restoration process","text":"

    In the restoration process, Greenmask combines the capabilities of different tools:

    Greenmask also supports parallel restoration, which can significantly reduce the time required to complete the restoration process. This parallel execution enhances the efficiency of restoring large datasets.

    "},{"location":"architecture/#data-anonymization-and-validation","title":"Data anonymization and validation","text":"

    Greenmask works with COPY lines, collects schema metadata using the Golang driver, and employs this driver in the encoding and decoding process. The validate command offers a way to assess the impact on both schema (validation warnings) and data (transformation and displaying differences). This command allows you to validate the schema and data transformations, ensuring the desired outcomes during the anonymization process.

    "},{"location":"architecture/#customization","title":"Customization","text":"

    If your table schema relies on functional dependencies between columns, you can address this challenge using the TemplateRecord transformer. This transformer enables you to define transformation logic for entire tables, offering type-safe operations when assigning new values.

    Greenmask provides a framework for creating your custom transformers, which can be reused efficiently. These transformers can be seamlessly integrated without requiring recompilation, thanks to the PIPE (stdin/stdout) interaction.

    Note

    Furthermore, Greenmask's architecture is designed to be highly extensible, making it possible to introduce other interaction protocols, such as HTTP or Socket, for conducting anonymization procedures.

    "},{"location":"architecture/#postgresql-version-compatibility","title":"PostgreSQL version compatibility","text":"

    Greenmask is compatible with PostgreSQL versions 11 and higher.

    "},{"location":"configuration/","title":"Configuration","text":"
    # Configuration\n

    The configuration is organized into six sections:

    "},{"location":"configuration/#common-section","title":"common section","text":"

    In the common section of the configuration, you can specify the following settings:

    Note

    Greenmask exclusively manages data dumping and data restoration processes, delegating schema dumping to the pg_dumputility and schema restoration to the pg_restore utility. Both pg_dump and pg_restore rely on a toc.dat file located in a specific directory, which contains metadata and object definitions. Therefore, the tmp_dir parameter is essential for storing the toc.dat file during the dumping or restoration procedure. It is important to note that all artifacts in this directory will be automatically deleted once the Greenmask command is completed.

    "},{"location":"configuration/#log-section","title":"log section","text":"

    In the log section of the configuration, you can specify the following settings:

    "},{"location":"configuration/#storage-section","title":"storage section","text":"

    In the storage section, you can configure the storage driver for storing the dumped data. Currently, two storage type options are supported: directory and s3.

    directory option / s3 option

    The directory storage option refers to a filesystem directory where the dump data will be stored.

    Parameters include path which specifies the path to the directory in the filesystem where the dumps will be stored.

    directory storage config example
    storage:\n  type: \"directory\"\n  directory:\n    path: \"/home/user_name/storage_dir\" # (1)\n

    By choosing the s3 storage option, you can store dump data in an S3-like remote storage service, such as Amazon S3 or Azure Blob Storage. Here are the parameters you can configure for S3 storage:

    s3 storage config example for Minio running in Docker
    storage:  \n  type: \"s3\"\n  s3:\n    endpoint: \"http://localhost:9000\"\n    bucket: \"testbucket\"\n    region: \"us-east-1\"\n    access_key_id: \"Q3AM3UQ867SPQQA43P2F\"\n    secret_access_key: \"zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG\"\n
    "},{"location":"configuration/#dump-section","title":"dump section","text":"

    In the dump section of the configuration, you configure the greenmask dump command. It includes the following parameters:

    Here is an example configuration for the dump section:

    dump section config example

    dump:\n  pg_dump_options:\n    dbname: \"host=/run/postgresql user=postgres dbname=demo\"\n    jobs: 10\n    exclude-schema: \"(\\\"teSt\\\"*|test*)\"\n    table: \"bookings.flights\"\n    load-via-partition-root: true\n\n  transformation:\n    - schema: \"bookings\"\n      name: \"flights\"\n      query: \"select * from bookings.flights3 limit 1000000\"\n      columns_type_override:\n        post_code: \"int4\" # (1)\n      transformers:\n        - name: \"RandomDate\"\n          params:\n            min: \"2023-01-01 00:00:00.0+03\"\n            max: \"2023-01-02 00:00:00.0+03\"\n            column: \"scheduled_departure\"\n\n        - name: \"NoiseDate\"\n          params:\n            ratio: \"01:00:00\"\n            column: \"scheduled_arrival\"\n\n        - name: \"RegexpReplace\"\n          params:\n            column: \"status\"\n            regexp: \"On Time\"\n            replace: \"Delayed\"\n\n        - name: \"RandomInt\" # (2)\n          params:\n            column: \"post_code\"\n            min: \"11\"\n            max: \"99\"\n\n    - schema: \"bookings\"\n      name: \"aircrafts_data\"\n      subset_conds: # (3)\n        - \"bookings.aircrafts_data.model = 'Boeing 777-300-2023'\"\n      transformers:\n        - name: \"Json\"\n          params:\n            column: \"model\"\n            operations:\n              - operation: \"set\"\n                path: \"en\"\n                value: \"Boeing 777-300-2023\"\n              - operation: \"set\"\n                path: \"crewSize\"\n                value: 10\n\n        - name: \"NoiseInt\"\n          params:\n            ratio: 0.9\n            column: \"range\"\n

    1. Override the post_code column type to int4 (INTEGER). This is necessary because the post_code column originally has a TEXT type, but it contains values that resemble integers. By explicitly overriding the type to int4, we ensure compatibility with transformers that work with integer types, such as RandomInt.
    2. After the type is overridden, we can apply a compatible transformer.
    3. Database subset condition applied to the aircrafts_data table. The subset condition filters the data based on the model column.
    "},{"location":"configuration/#validate-section","title":"validate section","text":"

    In the validate section of the configuration, you can specify parameters for the greenmask validate command. Here is an example of the validate section configuration:

    validate section config example

    validate:\n  tables: # (1)\n    - \"orders\"\n    - \"public.cart\"\n  data: true # (2)\n  diff: true # (3)\n  rows_limit: 10 # (4)\n  resolved_warnings: # (5)\n    - \"8d436fae67b2b82b36bd3afeb0c93f30\"\n  table_format: \"horizontal\" # (7)\n  format: \"text\" # (6)\n  schema: true # (8)\n  transformed_only: true # (9)\n  warnings: true # (10)\n

    1. A list of tables to validate. If this list is not empty, the validation operation will only be performed for the specified tables. Tables can be written with or without the schema name (e. g., \"public.cart\" or \"orders\").
    2. Specifies whether to perform data transformation for a limited set of rows. If set to true, data transformation will be performed, and the number of rows transformed will be limited to the value specified in the rows_limit parameter (default is 10).
    3. Specifies whether to perform diff operations for the transformed data. If set to true, the validation process will find the differences between the original and transformed data. See more details in the validate command documentation.
    4. Limits the number of rows to be transformed during validation. The default limit is 10 rows, but you can change it by modifying this parameter.
    5. A hash list of resolved warnings. These warnings have been addressed and resolved in a previous validation run.
    6. Specifies the format of the transformation output. Possible values are [horizontal|vertical]. The default format is horizontal. You can choose the format that suits your needs. See more details in the validate command documentation.
    7. The output format (json or text)
    8. Specifies whether to validate the current schema against the previous one and print the differences, if any.
    9. If set to true, transformation output will be only with the transformed columns and primary keys
    10. If set to true, all the warnings will be printed
    "},{"location":"configuration/#restore-section","title":"restore section","text":"

    In the restore section of the configuration, you can specify parameters for the greenmask restore command. It contains pg_restore settings and custom script execution settings. Below you can find the available parameters:

    As mentioned in the architecture, a backup contains three sections: pre-data, data, and post-data. The custom script execution allows you to customize and control the restoration process by executing scripts or commands at specific stages. The available restoration stages and their corresponding execution conditions are as follows:

    Each stage can have a \"when\" condition with one of the following possible values:

    Below you can find one of the possible versions for the scripts part of the restore section:

    scripts definition example

    scripts:\n  pre-data: # (1)\n    - name: \"pre-data before script [1] with query\"\n      when: \"before\"\n      query: \"create table script_test(stage text)\"\n    - name: \"pre-data before script [2]\"\n      when: \"before\"\n      query: \"insert into script_test values('pre-data before')\"\n    - name: \"pre-data after test script [1]\"\n      when: \"after\"\n      query: \"insert into script_test values('pre-data after')\"\n    - name: \"pre-data after script with query_file [1]\"\n      when: \"after\"\n      query_file: \"pre-data-after.sql\"\n  data: # (2)\n    - name: \"data before script with command [1]\"\n      when: \"before\"\n      command: # (4)\n        - \"data-after.sh\"\n        - \"param1\"\n        - \"param2\"\n    - name: \"data after script [1]\"\n      when: \"after\"\n      query_file: \"data-after.sql\"\n  post-data: # (3)\n    - name: \"post-data before script [1]\"\n      when: \"before\"\n      query: \"insert into script_test values('post-data before')\"\n    - name: \"post-data after script with query_file [1]\"\n      when: \"after\"\n      query_file: \"post-data-after.sql\"\n

    1. List of pre-data stage scripts. This section contains scripts that are executed before or after the restoration of the pre-data section. The scripts include SQL queries and query files.
    2. List of data stage scripts. This section contains scripts that are executed before or after the restoration of the data section. The scripts include shell commands with parameters and SQL query files.
    3. List of post-data stage scripts. This section contains scripts that are executed before or after the restoration of the post-data section. The scripts include SQL queries and query files.
    4. Command in the first argument and the parameters in the rest of the list. When specifying a command to be executed in the scripts section, you provide the command name as the first item in a list, followed by any parameters or arguments for that command. The command and its parameters are provided as a list within the script configuration.
    "},{"location":"configuration/#restoration-error-exclusion","title":"restoration error exclusion","text":"

    You can configure which errors to ignore during the restoration process by setting the insert_error_exclusions parameter. This parameter can be applied globally or per table. If both global and table-specific settings are defined, the table-specific settings will take precedence. Below is an example of how to configure the insert_error_exclusions parameter. You can specify constraint names from your database schema or the error codes returned by PostgreSQL. You can find the list of error codes in the PostgreSQL documentation.

    parameter definition
    insert_error_exclusions:\n\n  global:\n    error_codes: [\"23505\"] # (1)\n    constraints: [\"PK_ProductReview_ProductReviewID\"] # (2)\n  tables: # (3)\n    - schema: \"production\"\n      name: \"productreview\"\n      constraints: [\"PK_ProductReview_ProductReviewID\"]\n      error_codes: [\"23505\"]\n
    1. List of strings that contains PostgreSQL error codes
    2. List of strings that contains constraint names (globally)
    3. List of tables with their schema, name, constraints, and error codes

    Here is an example configuration for the restore section:

    restore:\n  scripts:\n      pre-data: # (1)\n        - name: \"pre-data before script [1] with query\"\n          when: \"before\"\n          query: \"create table script_test(stage text)\"\n\n  insert_error_exclusions:\n    tables:\n      - schema: \"production\"\n        name: \"productreview\"\n        constraints:\n          - \"PK_ProductReview_ProductReviewID\"\n        error_codes:\n          - \"23505\"\n    global:\n      error_codes:\n        - \"23505\"\n\n  pg_restore_options:\n    jobs: 10\n    exit-on-error: false\n    dbname: \"postgresql://postgres:example@localhost:54316/transformed\"\n    table: \n      - \"productreview\"\n    pgzip: true\n    inserts: true\n    on-conflict-do-nothing: true\n    restore-in-order: true\n
    "},{"location":"configuration/#environment-variable-configuration","title":"Environment variable configuration","text":"

    It's also possible to configure Greenmask through environment variables.

    Greenmask will automatically parse any environment variable that matches the configuration in the config file by substituting the dot (.) separator for an underscore (_) and uppercasing it. As an example, the config file below would apply the same configuration as defining the LOG_LEVEL=debug environment variable

    config.yaml
    log:\n  level: debug\n
    "},{"location":"configuration/#global-configuration-variables","title":"Global configuration variables","text":""},{"location":"configuration/#postgres-connection-variables","title":"Postgres connection variables","text":"

    Additionally, there are some environment variables exposed by the dump and restore commands to facilitate the connection configuration with a Postgres database

    "},{"location":"database_subset/","title":"Database subset","text":"

    Greenmask allows you to define a subset condition for filtering data during the dump process. This feature is useful when you need to dump only a part of the database, such as a specific table or a set of tables. It automatically ensures data consistency by including all related data from other tables that are required to maintain the integrity of the subset. The subset condition can be defined using subset_conds attribute that can be defined on the table in the transformation section (see examples).

    Info

    Greenmask generates queries for subset conditions based on the introspected schema using joins and recursive queries. It cannot be responsible for query optimization. The subset queries might be slow due to the complexity of the queries and/or lack of indexes. Circular references are resolved using recursive queries.

    "},{"location":"database_subset/#detail","title":"Detail","text":"

    The subset is a list of SQL conditions that are applied to the table. The conditions are combined with the AND operator. You need to specify the schema, table and column name when pointing out the column to filter by to avoid ambiguity. The subset condition must be a valid SQL condition.

    Subset condition example
    subset_conds:\n  - 'person.businessentity.businessentityid IN (274, 290, 721, 852)'\n
    "},{"location":"database_subset/#use-cases","title":"Use cases","text":""},{"location":"database_subset/#references-with-null-values","title":"References with NULL values","text":"

    For references that do not have NOT NULL constraints, Greenmask will automatically generate LEFT JOIN queries with the appropriate conditions to ensure integrity checks. You can rely on Greenmask to handle such cases correctly\u2014no special configuration is needed, as it performs this automatically based on the introspected schema.

    "},{"location":"database_subset/#circular-reference","title":"Circular reference","text":"

    Greenmask supports circular references between tables. You can define a subset condition for any table, and Greenmask will automatically generate the appropriate queries for the table subset using recursive queries. The subset system ensures data consistency by validating all records found through the recursive queries. If a record does not meet the subset condition, it will be excluded along with its parent records, preventing constraint violations.

    Warning

    Currently (v0.2b2), Greenmask can resolve multiple cycles in one strongly connected component, but only for one group of vertexes. If you have an SCC that contains 2 groups of vertexes, Greenmask will not be able to resolve it. For instance, if we have 2 cycles with tables A, B, C (first group) and B, C, E (second group), Greenmask will not be able to resolve it. But if you have only one group of vertexes with one or more cycles in the same group of tables (for instance A, B, C), Greenmask works with it. This will be fixed in the future. See the second example below. In practice this is quite a rare situation and 99% of people will not face this issue.

    You can read the Wikipedia article about Circular reference here.

    "},{"location":"database_subset/#virtual-references","title":"Virtual references","text":"

    During the development process, there are situations where foreign keys need to be removed. The reasons can vary\u2014from improving performance to simplifying the database structure. Additionally, some foreign keys may exist within loosely structured data, such as JSON, where PostgreSQL cannot create foreign keys at all. These limitations could significantly hinder the capabilities of a subset system. Greenmask offers a flexible solution to this problem by allowing the declaration of virtual references in the configuration, enabling the preservation and management of logical relationships between tables, even in the absence of explicit foreign keys. Virtual reference can be called virtual foreign key as well.

    The virtual_references can be defined in the dump section. It contains the list of virtual references. First you set the table where you want to define a virtual reference. In the attribute references define the list of tables that are referenced by the table. In the columns attribute define the list of columns that are used in the foreign key reference. The not_null attribute is optional and defines whether the FK has a NOT NULL constraint. If true, Greenmask will generate an INNER JOIN instead of a LEFT JOIN; by default it is false. The expression needs to be used when you want to use some expression to get the value of the column in the referencing table. For instance, if you have a JSONB column in the audit_logs table that contains an order_id field, you can use this field as an FK reference.

    Info

    You do not need to define the primary key of the referenced table. Greenmask will automatically resolve it and use it in the join condition.

    Virtual references example
    dump:\n  virtual_references:\n    - schema: \"public\" # (1)\n      name: \"orders\" # (2)\n      references: # (3)\n        - schema: \"public\" # (4) \n          name: \"customers\" # (5)\n          columns: # (6)\n            - name: \"customer_id\"\n          not_null: false # (7)\n\n    - schema: \"public\"\n      name: \"audit_logs\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\" # (8)\n
    1. The schema name of table that has foreign key reference (table that own FK reference)
    2. The table name that has foreign key reference (table that own FK reference)
    3. List of virtual references
    4. The schema name of the table that has foreign key reference (referencing table)
    5. The table name that has foreign key reference (referencing table)
    6. List of columns that are used in the foreign key reference. Each column has exactly one of the following properties defined at a time:

      • name - column name in the referencing table
      • expression - expression that is used to get the value of the column in the referencing table
    7. not_null - whether the FK has a NOT NULL constraint. The default is false

    8. expression - expression that is used to get the value of the column in the referencing table
    "},{"location":"database_subset/#polymorphic-references","title":"Polymorphic references","text":"

    Greenmask supports polymorphic references. You can define a virtual reference for a table with polymorphic references using polymorphic_exprs attribute. The polymorphic_exprs attribute is a list of expressions that are used to make a polymorphic reference. For instance we might have a table comments that has polymorphic reference to posts and videos. The table comments might have commentable_id and commentable_type columns. The commentable_type column contains the type of the table that is referenced by the commentable_id column. The example of the config:

    Polymorphic references example
    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"comments\"\n      references:\n        - schema: \"public\"\n          name: \"videos\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'video'\"\n          columns:\n            - name: \"commentable_id\"\n        - schema: \"public\"\n          name: \"posts\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'post'\"\n          columns:\n            - name: \"commentable_id\"\n

    Warning

    The polymorphic references cannot be not_null because the commentable_id column can be NULL if the commentable_type is not set or differs from the values defined in the polymorphic_exprs attribute.

    "},{"location":"database_subset/#troubleshooting","title":"Troubleshooting","text":""},{"location":"database_subset/#exclude-the-records-that-has-null-values-in-the-referenced-column","title":"Exclude the records that has NULL values in the referenced column","text":"

    If you want to exclude records that have NULL values in the referenced column, you can manually add this condition to the subset condition for the table. Greenmask does not automatically exclude records with NULL values because it applies a LEFT OUTER JOIN on nullable foreign keys.

    "},{"location":"database_subset/#some-table-is-not-filtered-by-the-subset-condition","title":"Some table is not filtered by the subset condition","text":"

    Greenmask builds a table dependency graph based on the introspected schema and existing foreign keys. If a table is not filtered by the subset condition, it means that the table either does not reference another table that is filtered by the subset condition or the table itself does not have a subset condition applied.

    If you have a table with a removed foreign key and want to filter it by the subset condition, you need to define a virtual reference. For more information on virtual references, refer to the Virtual References section.

    Info

    If you find any issues related to the code or greenmask is not working as expected, do not hesitate to contact us directly or by creating an issue in the repository.

    "},{"location":"database_subset/#error-column-reference-id-is-ambiguous","title":"ERROR: column reference \"id\" is ambiguous","text":"

    If you see the error message ERROR: column reference \"{column name}\" is ambiguous, you have specified the column name without the table and/or schema name. To avoid ambiguity, always specify the schema and table name when pointing out the column to filter by. For instance if you want to filter employees by employee_id column, you should use public.employees.employee_id instead of employee_id.

    Valid subset condition
    public.employees.employee_id IN (1, 2, 3)\n
    "},{"location":"database_subset/#the-subset-condition-is-not-working-correctly-how-can-i-verify-it","title":"The subset condition is not working correctly. How can I verify it?","text":"

    Run greenmask with --log-level=debug to see the generated SQL queries. You will find the generated SQL queries in the log output. Validate this query in your database client to ensure that the subset condition is working as expected.

    For example:

    $ greenmask dump --config config.yaml --log-level=debug\n\n2024-08-29T19:06:18+03:00 DBG internal/db/postgres/context/context.go:202 > Debug query Schema=person Table=businessentitycontact pid=1638339\n2024-08-29T19:06:18+03:00 DBG internal/db/postgres/context/context.go:203 > SELECT \"person\".\"businessentitycontact\".* FROM \"person\".\"businessentitycontact\"  INNER JOIN \"person\".\"businessentity\" ON \"person\".\"businessentitycontact\".\"businessentityid\" = \"person\".\"businessentity\".\"businessentityid\" AND ( person.businessentity.businessentityid between 400 and 800 OR person.businessentity.businessentityid between 800 and 900 ) INNER JOIN \"person\".\"person\" ON \"person\".\"businessentitycontact\".\"personid\" = \"person\".\"person\".\"businessentityid\" WHERE TRUE AND ((\"person\".\"person\".\"businessentityid\") IN (SELECT \"person\".\"businessentity\".\"businessentityid\" FROM \"person\".\"businessentity\"   WHERE ( ( person.businessentity.businessentityid between 400 and 800 OR person.businessentity.businessentityid between 800 and 900 ) )))\n pid=1638339\n
    "},{"location":"database_subset/#dump-is-too-slow","title":"Dump is too slow","text":"

    If the dump process is too slow, the generated query might be too complex. In this case, you can:

    "},{"location":"database_subset/#example-dump-a-subset-of-the-database","title":"Example: Dump a subset of the database","text":"

    Info

    All examples based on playground database. Read more about the playground database in the Playground section.

    The following example demonstrates how to dump a subset of the person schema. The subset condition is applied to the businessentity and password tables. The subset condition filters the data based on the businessentityid and passwordsalt columns, respectively.

    Subset configuration example
    transformation:\n  - schema: \"person\"\n    name: \"businessentity\"\n    subset_conds:\n      - 'person.businessentity.businessentityid IN (274, 290, 721, 852)'\n    transformers:\n      - name: \"RandomDate\"\n        params:\n          column: \"modifieddate\"\n          min: \"2020-01-01 00:00:00\"\n          max: \"2024-06-26 00:00:00\"\n          truncate: \"day\"\n          keep_null: false\n\n  - schema: \"person\"\n    name: \"password\"\n    subset_conds:\n      - >\n        person.password.passwordsalt = '329eacbe-c883-4f48-b8b6-17aa4627efff'\n
    "},{"location":"database_subset/#example-dump-a-subset-with-circular-reference","title":"Example: Dump a subset with circular reference","text":"Create tables with multi cyles
    -- Step 1: Create tables without foreign keys\nDROP TABLE IF EXISTS employees CASCADE;\nCREATE TABLE employees\n(\n    employee_id   SERIAL PRIMARY KEY,\n    name          VARCHAR(100) NOT NULL,\n    department_id INT -- Will reference departments(department_id)\n);\n\nDROP TABLE IF EXISTS departments CASCADE;\nCREATE TABLE departments\n(\n    department_id SERIAL PRIMARY KEY,\n    name          VARCHAR(100) NOT NULL,\n    project_id    INT -- Will reference projects(project_id)\n);\n\nDROP TABLE IF EXISTS projects CASCADE;\nCREATE TABLE projects\n(\n    project_id       SERIAL PRIMARY KEY,\n    name             VARCHAR(100) NOT NULL,\n    lead_employee_id INT, -- Will reference employees(employee_id)\n    head_employee_id INT  -- Will reference employees(employee_id)\n);\n\n-- Step 2: Alter tables to add foreign key constraints\nALTER TABLE employees\n    ADD CONSTRAINT fk_department\n        FOREIGN KEY (department_id) REFERENCES departments (department_id);\n\nALTER TABLE departments\n    ADD CONSTRAINT fk_project\n        FOREIGN KEY (project_id) REFERENCES projects (project_id);\n\nALTER TABLE projects\n    ADD CONSTRAINT fk_lead_employee\n        FOREIGN KEY (lead_employee_id) REFERENCES employees (employee_id);\n\nALTER TABLE projects\n    ADD CONSTRAINT fk_lead_employee2\n        FOREIGN KEY (head_employee_id) REFERENCES employees (employee_id);\n\n-- Insert projects\nINSERT INTO projects (name, lead_employee_id)\nSELECT 'Project ' || i, NULL\nFROM generate_series(1, 10) AS s(i);\n\n-- Insert departments\nINSERT INTO departments (name, project_id)\nSELECT 'Department ' || i, i\nFROM generate_series(1, 10) AS s(i);\n\n-- Insert employees and assign 10 of them as project leads\nINSERT INTO employees (name, department_id)\nSELECT 'Employee ' || i, (i / 10) + 1\nFROM generate_series(1, 99) AS s(i);\n\n-- Assign 10 employees as project leads\nUPDATE projects\nSET lead_employee_id = (SELECT employee_id\n                        FROM employees\n                 
       WHERE employees.department_id = projects.project_id\n                        LIMIT 1),\n    head_employee_id = 3\nWHERE project_id <= 10;\n

    This schema has two cycles:

    Greenmask can simply resolve it by generating a recursive query with integrity checks for subset and join conditions.

    The example below will fetch the data for the 3 employees and their related departments and projects.

    Subset configuration example
    transformation:\n  - schema: \"public\"\n    name: \"employees\"\n    subset_conds:\n      - \"public.employees.employee_id in (1, 2, 3)\"\n

    But this will return an empty result, because the subset condition is not met for all related tables: the project with project_id=1 has a reference to the employee with employee_id=3, which is invalid for the subset condition.

    Subset configuration example
    transformation:\n  - schema: \"public\"\n    name: \"employees\"\n    subset_conds:\n      - \"public.employees.employee_id in (1, 2)\"\n
    "},{"location":"database_subset/#example-dump-a-subset-with-virtual-references","title":"Example: Dump a subset with virtual references","text":"

    In this example, we will create a subset of the tables with virtual references. The subset will include the orders table and its related tables customers and audit_logs. The orders table has a virtual reference to the customers table, and the audit_logs table has a virtual reference to the orders table.

    Create tables with virtual references
    -- Create customers table\nCREATE TABLE customers\n(\n    customer_id   SERIAL PRIMARY KEY,\n    customer_name VARCHAR(100)\n);\n\n-- Create orders table\nCREATE TABLE orders\n(\n    order_id    SERIAL PRIMARY KEY,\n    customer_id INT, -- This should reference customers.customer_id, but no FK constraint is defined\n    order_date  DATE\n);\n\n-- Create payments table\nCREATE TABLE payments\n(\n    payment_id     SERIAL PRIMARY KEY,\n    order_id       INT, -- This should reference orders.order_id, but no FK constraint is defined\n    payment_amount DECIMAL(10, 2),\n    payment_date   DATE\n);\n\n-- Insert test data into customers table\nINSERT INTO customers (customer_name)\nVALUES ('John Doe'),\n       ('Jane Smith'),\n       ('Alice Johnson');\n\n-- Insert test data into orders table\nINSERT INTO orders (customer_id, order_date)\nVALUES (1, '2023-08-01'), -- Related to customer John Doe\n       (2, '2023-08-05'), -- Related to customer Jane Smith\n       (3, '2023-08-07');\n-- Related to customer Alice Johnson\n\n-- Insert test data into payments table\nINSERT INTO payments (order_id, payment_amount, payment_date)\nVALUES (1, 100.00, '2023-08-02'), -- Related to order 1 (John Doe's order)\n       (2, 200.50, '2023-08-06'), -- Related to order 2 (Jane Smith's order)\n       (3, 300.75, '2023-08-08');\n-- Related to order 3 (Alice Johnson's order)\n\n\n-- Create a table with a multi-key reference (composite key reference)\nCREATE TABLE order_items\n(\n    order_id     INT,               -- Should logically reference orders.order_id\n    item_id      INT,               -- Composite part of the key\n    product_name VARCHAR(100),\n    quantity     INT,\n    PRIMARY KEY (order_id, item_id) -- Composite primary key\n);\n\n-- Create a table with a JSONB column that contains a reference value\nCREATE TABLE audit_logs\n(\n    log_id   SERIAL PRIMARY KEY,\n    log_data JSONB -- This JSONB field will contain references to other tables\n);\n\n-- Insert data into 
order_items table with multi-key reference\nINSERT INTO order_items (order_id, item_id, product_name, quantity)\nVALUES (1, 1, 'Product A', 3), -- Related to order_id = 1 from orders table\n       (1, 2, 'Product B', 5), -- Related to order_id = 1 from orders table\n       (2, 1, 'Product C', 2), -- Related to order_id = 2 from orders table\n       (3, 1, 'Product D', 1);\n-- Related to order_id = 3 from orders table\n\n-- Insert data into audit_logs table with JSONB reference value\nINSERT INTO audit_logs (log_data)\nVALUES ('{\n  \"event\": \"order_created\",\n  \"order_id\": 1,\n  \"details\": {\n    \"customer_name\": \"John Doe\",\n    \"total\": 100.00\n  }\n}'),\n       ('{\n         \"event\": \"payment_received\",\n         \"order_id\": 2,\n         \"details\": {\n           \"payment_amount\": 200.50,\n           \"payment_date\": \"2023-08-06\"\n         }\n       }'),\n       ('{\n         \"event\": \"item_added\",\n         \"order_id\": 1,\n         \"item\": {\n           \"item_id\": 2,\n           \"product_name\": \"Product B\",\n           \"quantity\": 5\n         }\n       }');\n

    The following example demonstrates how to make a subset for keys that do not have FK constraints but where a data relationship exists.

    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"orders\"\n      references:\n        - schema: \"public\"\n          name: \"customers\"\n          columns:\n            - name: \"customer_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"payments\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - name: \"order_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"order_items\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - name: \"order_id\"\n          not_null: true\n        - schema: \"public\"\n          name: \"products\"\n          columns:\n            - name: \"product_id\"\n          not_null: true\n\n    - schema: \"public\"\n      name: \"audit_logs\"\n      references:\n        - schema: \"public\"\n          name: \"orders\"\n          columns:\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\"\n          not_null: false\n        - schema: \"public\"\n          name: \"order_items\"\n          columns:\n            - expression: \"(public.audit_logs.log_data -> 'item' ->> 'item_id')::INT\"\n            - expression: \"(public.audit_logs.log_data ->> 'order_id')::INT\"\n          not_null: false\n\n  transformation:\n\n    - schema: \"public\"\n      name: \"customers\"\n      subset_conds:\n        - \"public.customers.customer_id in (1)\"\n

    As a result, the customers table will be dumped with the orders table and its related tables payments, order_items, and audit_logs. The subset condition will be applied to the customers table, and the data will be filtered based on the customer_id column.

    "},{"location":"database_subset/#example-dump-a-subset-with-polymorphic-references","title":"Example: Dump a subset with polymorphic references","text":"

    In this example, we will create a subset of the tables with polymorphic references. This example includes the comments table and its related tables posts and videos.

    Create tables with polymorphic references and insert data
    -- Create the Posts table\nCREATE TABLE posts\n(\n    id      SERIAL PRIMARY KEY,\n    title   VARCHAR(255) NOT NULL,\n    content TEXT         NOT NULL\n);\n\n-- Create the Videos table\nCREATE TABLE videos\n(\n    id    SERIAL PRIMARY KEY,\n    title VARCHAR(255) NOT NULL,\n    url   VARCHAR(255) NOT NULL\n);\n\n-- Create the Comments table with a polymorphic reference\nCREATE TABLE comments\n(\n    id               SERIAL PRIMARY KEY,\n    commentable_id   INT         NOT NULL, -- Will refer to either posts.id or videos.id\n    commentable_type VARCHAR(50) NOT NULL, -- Will store the type of the associated record\n    body             TEXT        NOT NULL,\n    created_at       TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n);\n\n\n-- Insert data into the Posts table\nINSERT INTO posts (title, content)\nVALUES ('First Post', 'This is the content of the first post.'),\n       ('Second Post', 'This is the content of the second post.');\n\n-- Insert data into the Videos table\nINSERT INTO videos (title, url)\nVALUES ('First Video', 'https://example.com/video1'),\n       ('Second Video', 'https://example.com/video2');\n\n-- Insert data into the Comments table, associating some comments with posts and others with videos\n-- For posts:\nINSERT INTO comments (commentable_id, commentable_type, body)\nVALUES (1, 'post', 'This is a comment on the first post.'),\n       (2, 'post', 'This is a comment on the second post.');\n\n-- For videos:\nINSERT INTO comments (commentable_id, commentable_type, body)\nVALUES (1, 'video', 'This is a comment on the first video.'),\n       (2, 'video', 'This is a comment on the second video.');\n

    The comments table has a polymorphic reference to the posts and videos tables. Depending on the value of the commentable_type column, the commentable_id column will reference either the posts.id or videos.id column.

    The following example demonstrates how to make a subset for tables with polymorphic references.

    Subset configuration example
    dump:\n  virtual_references:\n    - schema: \"public\"\n      name: \"comments\"\n      references:\n        - schema: \"public\"\n          name: \"posts\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'post'\"\n          columns:\n            - name: \"commentable_id\"\n        - schema: \"public\"\n          name: \"videos\"\n          polymorphic_exprs:\n            - \"public.comments.commentable_type = 'video'\"\n          columns:\n            - name: \"commentable_id\"\n\n  transformation:\n    - schema: \"public\"\n      name: \"posts\"\n      subset_conds:\n        - \"public.posts.id in (1)\"\n

    This example selects only the first post from the posts table and its related comments from the comments table. The comments associated with videos are included without filtering because the subset condition is applied only to the posts table and its related comments.

    The resulting records will be:

    transformed=# select * from comments;\n id | commentable_id | commentable_type |                 body                  |         created_at         \n----+----------------+------------------+---------------------------------------+----------------------------\n  1 |              1 | post             | This is a comment on the first post.  | 2024-09-18 05:27:54.217405\n  2 |              2 | post             | This is a comment on the second post. | 2024-09-18 05:27:54.217405\n  3 |              1 | video            | This is a comment on the first video. | 2024-09-18 05:27:54.229794\n(3 rows)\n
    "},{"location":"installation/","title":"Installation","text":""},{"location":"installation/#prerequisites","title":"Prerequisites","text":""},{"location":"installation/#via-docker","title":"Via docker","text":"

    You can find the docker images in the:

    1. Docker-hub page

    To run the greenmask container from DockerHub, use the following command:

    docker run -it greenmask/greenmask:latest\n

    1. GitHub container registry

    To run the greenmask container from Github registry, use the following command:

    docker run -it ghcr.io/greenmaskio/greenmask:latest\n

    Info

    For pre-releases (rc, beta, etc.), use explicit tags like v0.2.0b2.

    "},{"location":"installation/#via-brew","title":"Via brew","text":"

    The greenmask build is available in brew, but only a production build is available. To install the greenmask via brew, use the following command:

    brew install greenmask\n
    "},{"location":"installation/#from-source","title":"From source","text":"
    1. Clone the Greenmask repository by using the following command:

      git clone git@github.com:GreenmaskIO/greenmask.git\n
    2. Once the repository is cloned, execute the following command to build Greenmask:

      make build\n

    After completing the build process, you will find the binary named greenmask in the root directory of the repository. Execute the binary to start using Greenmask.

    "},{"location":"installation/#playground","title":"Playground","text":"

    Greenmask Playground is a sandbox environment for your experiments in Docker with sample databases included to help you try Greenmask without any additional actions. Read the Playground guide to learn more.

    "},{"location":"playground/","title":"Greenmask Playground","text":"

    Greenmask Playground is a sandbox environment in Docker with sample databases included to help you try Greenmask without any additional actions. It includes the following components:

    Warning

    To complete this guide, you must have Docker and docker-compose installed.

    "},{"location":"playground/#setting-up-greenmask-playground","title":"Setting up Greenmask Playground","text":"
    1. Clone the greenmask repository and navigate to its directory by running the following commands:

      git clone git@github.com:GreenmaskIO/greenmask.git && cd greenmask\n
    2. Once you have cloned the repository, start the environment by running Docker Compose:

      docker-compose run greenmask\n

    Tip

    If you're experiencing problems with pulling images from Docker Hub, you can build the Greenmask image from source by running the following command:

    docker-compose run greenmask-from-source\n

    Now you have Greenmask Playground up and running with a shell prompt inside the container. All further operations will be carried out within this container's shell.

    "},{"location":"playground/#commands","title":"Commands","text":"

    Below you can see Greenmask commands:

    To learn more about them, see Commands.

    "},{"location":"playground/#transformers","title":"Transformers","text":"

    A configuration file is mandatory for Greenmask functioning. The pre-defined configuration file is stored at the repository root directory (./playground/config.yml). It also serves to define transformers which you can update to your liking in order to use Greenmask Playground more effectively and to get better understanding of the tool itself. To learn how to customize a configuration file, see Configuration

    The pre-defined configuration file uses the NoiseDate transformer as an example. To learn more about other transformers and how to use them, see Transformers.

    "},{"location":"built_in_transformers/","title":"About transformers","text":"

    Transformers in Greenmask are methods which are applied to anonymize sensitive data. All Greenmask transformers are split into the following groups:

    "},{"location":"built_in_transformers/dynamic_parameters/","title":"Dynamic parameters","text":""},{"location":"built_in_transformers/dynamic_parameters/#description","title":"Description","text":"

    Most transformers in Greenmask have dynamic parameters. This functionality is possible because Greenmask utilizes a database driver that can encode and decode raw values into their actual type representations.

    This allows you to retrieve parameter values directly from the records. This capability is particularly beneficial when you need to resolve functional dependencies between fields or satisfy constraints. Greenmask processes transformations sequentially. Therefore, when you reference a field that was transformed in a previous step, you will access the transformed value.

    "},{"location":"built_in_transformers/dynamic_parameters/#definition","title":"Definition","text":"
    dynamic_params:\n  - column: \"column_name\" # (1)\n    cast_to: \"cast_function\" # (2)\n    template: \"template_function\" # (3)\n    default_value: any # (4)\n
    1. Name of the column from which the value is retrieved.
    2. Function used to cast the column value to the desired type.
    3. Template used for casting the column value to the desired type.
    4. Default value used if the column's value is NULL.
    "},{"location":"built_in_transformers/dynamic_parameters/#dynamic-parameter-options","title":"Dynamic parameter options","text":""},{"location":"built_in_transformers/dynamic_parameters/#cast-functions","title":"Cast functions","text":"name description input type output type UnixNanoToDate Cast int value as Unix Timestamp in Nano Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixMicroToDate Cast int value as Unix Timestamp in Micro Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixMilliToDate Cast int value as Unix Timestamp in Milli Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixSecToDate Cast int value as Unix Timestamp in Seconds to date type int2, int4, int8, numeric, float4, float8 date UnixNanoToTimestamp Cast int value as Unix Timestamp in Nano Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixMicroToTimestamp Cast int value as Unix Timestamp in Micro Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixMilliToTimestamp Cast int value as Unix Timestamp in Milli Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixSecToTimestamp Cast int value as Unix Timestamp in Seconds to timestamp type int2, int4, int8, numeric, float4, float8 timestamp UnixNanoToTimestampTz Cast int value as Unix Timestamp in Nano Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixMicroToTimestampTz Cast int value as Unix Timestamp in Micro Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixMilliToTimestampTz Cast int value as Unix Timestamp in Milli Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz UnixSecToTimestampTz Cast int value as Unix Timestamp in Seconds to timestamptz type int2, int4, int8, numeric, float4, float8 timestamptz DateToUnixNano Cast date value to int value as a Unix Timestamp in Nano Seconds date int2, 
int4, int8, numeric, float4, float8 DateToUnixMicro Cast date value to int value as a Unix Timestamp in Micro Seconds date int2, int4, int8, numeric, float4, float8 DateToUnixMilli Cast date value to int value as a Unix Timestamp in Milli Seconds date int2, int4, int8, numeric, float4, float8 DateToUnixSec Cast date value to int value as a Unix Timestamp in Seconds date int2, int4, int8, numeric, float4, float8 TimestampToUnixNano Cast timestamp value to int value as a Unix Timestamp in Nano Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixMicro Cast timestamp value to int value as a Unix Timestamp in Micro Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixMilli Cast timestamp value to int value as a Unix Timestamp in Milli Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampToUnixSec Cast timestamp value to int value as a Unix Timestamp in Seconds timestamp int2, int4, int8, numeric, float4, float8 TimestampTzToUnixNano Cast timestamptz value to int value as a Unix Timestamp in Nano Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixMicro Cast timestamptz value to int value as a Unix Timestamp in Micro Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixMilli Cast timestamptz value to int value as a Unix Timestamp in Milli Seconds timestamptz int2, int4, int8, numeric, float4, float8 TimestampTzToUnixSec Cast timestamptz value to int value as a Unix Timestamp in Seconds timestamptz int2, int4, int8, numeric, float4, float8 FloatToInt Cast float value to one of integer type. The fractional part will be discarded numeric, float4, float8 int2, int4, int8, numeric IntToFloat Cast int value to one of integer type int2, int4, int8, numeric numeric, float4, float8 IntToBool Cast int value to boolean. The value with 0 is false, 1 is true int2, int4, int8, numeric, float4, float8 bool BoolToInt Cast boolean value to int. 
The value false is 0, true is 1 bool int2, int4, int8, numeric, float4, float8"},{"location":"built_in_transformers/dynamic_parameters/#example-functional-dependency-resolution-between-columns","title":"Example: Functional dependency resolution between columns","text":"

    Here is a simplified schema of the table humanresources.employee from the playground:

           Column      |            Type                      \n------------------+-----------------------------\n businessentityid | integer                      \n jobtitle         | character varying(50)        \n birthdate        | date                        \n hiredate         | date                         \nCheck constraints:\n    CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\n

    As you can see, there is a functional dependency between the birthdate and hiredate columns. Logically, the hiredate should be later than the birthdate. Additionally, the birthdate should range from 1930-01-01 to 18 years prior to the current date.

    Imagine that you need to generate random birthdate and hiredate columns. To ensure these dates satisfy the constraints, you can use dynamic parameters in the RandomDate transformer:

    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n\n    - name: \"RandomDate\" # (1)\n      params:\n        column: \"birthdate\"\n        min: '{{ now | tsModify \"-30 years\" | .EncodeValue }}' # (2)\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # (3)\n\n    - name: \"RandomDate\" # (4)\n      params:\n        column: \"hiredate\"\n        max: \"{{ now | .EncodeValue }}\" # (5)\n      dynamic_params:\n        min:\n          column: \"birthdate\" # (6)\n          template: '{{ .GetValue | tsModify \"18 years\" | .EncodeValue }}' # (7)\n
    1. First we generate the RandomDate for the birthdate column. The result of the transformation will be used as the minimum value for the next transformation for the hiredate column.
    2. Apply the template for the static parameter. It calculates the now date and subtracts 30 years from it. The result is 1994. The tsModify function returns not raw data, but a time.Time object. To get the raw value suitable for the birthdate type, we need to pass this value to the .EncodeValue function. This value is used as the minimum value for the birthdate column.
    3. The same as the previous step, but we subtract 18 years from the now date. The result is 2002.
    4. Generate the RandomDate for the hiredate column based on the value from the birthdate.
    5. Set the maximum value for the hiredate column. The value is the current date.
    6. The min parameter is set to the value of the birthdate column from the previous step.
    7. The template gets the value of the randomly generated birthdate value and adds 18 years to it.

    Below is the result of the transformation:

    From the result, you can see that all functional dependencies and constraints are satisfied.

    "},{"location":"built_in_transformers/parameters_templating/","title":"Parameters templating","text":""},{"location":"built_in_transformers/parameters_templating/#description","title":"Description","text":"

    It is allowed to generate parameter values from templates. It is useful when you don't want to write values manually, but instead want to generate and initialize them dynamically.

    Here you can find the list of template functions that can be used in the template Custom functions.

    You can encode and decode objects using the driver functions below.

    "},{"location":"built_in_transformers/parameters_templating/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error) .DecodeValue Decodes a value from its raw string representation to a Golang type using the data type assigned to the table column specified in the column parameter. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(value any) (res any, err error) .EncodeValue Encodes a value of any type into its string representation using the type assigned to the table column specified in the column parameter. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValue(value any) (res any, err error)

    Warning

    If column parameter is not linked to column parameter, then functions .DecodeValue and .EncodeValue will return an error. You can use .DecodeValueByType and .EncodeValueByType or .DecodeValueByColumn and .EncodeValueByColumn instead.

    "},{"location":"built_in_transformers/parameters_templating/#example","title":"Example","text":"

    In the example below, the min and max values for the birth_date column are generated dynamically using the now template function. It returns the current date and time. The tsModify function is then used to subtract 30 (and 18) years. And because the parameter type is mapped to the column parameter type, the EncodeValue function is used to encode the value into the column type.

    For example, if we have the now date as 2021-01-01, the dynamically calculated min value will be 1994-01-01 and the max value will be 2006-01-01.

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birth_date\"\n        min: '{{ now | tsModify \"-30 years\" | .EncodeValue }}' # 1994\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # 2006\n

    Result

    ColumnOriginalValueTransformedValue birth_date1980-01-011995-09-06"},{"location":"built_in_transformers/transformation_condition/","title":"Transformation Condition","text":""},{"location":"built_in_transformers/transformation_condition/#description","title":"Description","text":"

    The transformation condition feature allows you to execute a defined transformation only if a specified condition is met. The condition must be defined as a boolean expression that evaluates to true or false. Greenmask uses expr-lang/expr under the hood. You can use all functions and syntax provided by the expr library.

    You can use the same functions that are described in the built-in transformers

    The transformers are executed one by one - this helps you create complex transformation pipelines. For instance, depending on the value chosen in the previous transformer, you can decide whether to execute the next transformer.

    "},{"location":"built_in_transformers/transformation_condition/#record-descriptors","title":"Record descriptors","text":"

    To improve the user experience, Greenmask offers special namespaces for accessing values in different formats: either the driver-encoded value in its real type or as a raw string.

    You can access a specific column\u2019s value using record.column_name for the real type or raw_record.column_name for the raw string value.

    Warning

    A record may always be modified by previous transformers before the condition is evaluated. This means Greenmask does not retain the original record value and instead provides the current modified value for condition evaluation.

    "},{"location":"built_in_transformers/transformation_condition/#null-values-condition","title":"Null values condition","text":"

    To check if the value is null, you can use the null value for the comparison. This operation works compatibly with the SQL operators IS NULL and IS NOT NULL.

    Is null cond example
    record.accountnumber == null && record.date > now()\n
    Is not null cond example
    record.accountnumber != null && record.date <= now()\n
    "},{"location":"built_in_transformers/transformation_condition/#expression-scope","title":"Expression scope","text":"

    Expression scope can be on table or specific transformer. If you define the condition on the table scope, then the condition will be evaluated before any transformer is executed. If you define the condition on the transformer scope, then the condition will be evaluated before the specified transformer is executed.

    Table scope
    - schema: \"purchasing\"\n  name: \"vendor\"\n  when: 'record.accountnumber == null || record.accountnumber == \"ALLENSON0001\"'\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    Transformer scope
    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomString\"\n      when: 'record.accountnumber != null || record.accountnumber == \"ALLENSON0001\"'\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_condition/#int-and-float-value-definition","title":"Int and float value definition","text":"

    It is important to create the integer or float value in the correct format. If you want to define the integer value you must write a number without dot (1, 2, etc.). If you want to define the float value you must write a number with dot (1.0, 2.0, etc.).

    Warning

    You may see a wrong comparison result if you compare int and float, for example 1 == 1.0 will return false.

    "},{"location":"built_in_transformers/transformation_condition/#architecture","title":"Architecture","text":"

    Greenmask decodes values only when evaluating the condition - this helps optimize the performance of the transformation if you have a lot of conditions that use the or (||) or and (&&) operators.

    "},{"location":"built_in_transformers/transformation_condition/#example-chose-random-value-and-execute-one-of","title":"Example: Chose random value and execute one of","text":"

    In the following example, the RandomChoice transformer is used to choose a random value from the list of values. Depending on the chosen value, the Replace transformer is executed to set the activeflag column to true or false.

    In this case the condition scope is on the transformer level.

    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomChoice\"\n      params:\n        column: \"name\"\n        values:\n          - \"test1\"\n          - \"test2\"\n\n    - name: \"Replace\"\n      when: 'record.name == \"test1\"'\n      params:\n        column: \"activeflag\"\n        value: \"false\"\n\n    - name: \"Replace\"\n      when: 'record.name == \"test2\"'\n      params:\n        column: \"activeflag\"\n        value: \"true\"\n
    "},{"location":"built_in_transformers/transformation_condition/#example-do-not-transform-specific-columns","title":"Example: Do not transform specific columns","text":"

    In the following example, the RandomString transformer is executed only if the businessentityid column value is not equal to 1492 or 1.

      - schema: \"purchasing\"\n    name: \"vendor\"\n    when: '!(record.businessentityid | has([1492, 1]))'\n    transformers:\n      - name: \"RandomString\"\n        params:\n          column: \"accountnumber\"\n          min_length: 9\n          max_length: 12\n          symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_condition/#example-check-the-json-attribute-value","title":"Example: Check the json attribute value","text":"

    In the following example, the RandomString transformer is executed only if the a attribute in the json_data column is equal to 1.

    - schema: \"public\"\n  name: \"jsondata\"\n  when: 'raw_record.json_data | jsonGet(\"a\") == 1'\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n
    "},{"location":"built_in_transformers/transformation_engines/","title":"Transformation engine","text":"

    Greenmask provides two engines: random and hash. Most transformers have an engine parameter that is set to random by default. Use the hash engine when you need to generate deterministic data - the same input will always produce the same output.

    Info

    Greenmask employs the SHA-3 algorithm to hash input values. While this function is cryptographically secure, it does exhibit lower performance. We plan to introduce additional hash functions in the future to offer a balance between security and performance. For example, SipHash, which provides a good trade-off between security and performance, is currently in development and is expected to be included in the stable v0.2 release of Greenmask.

    Warning

    The hash engine does not guarantee the uniqueness of generated values, although transformers such as Hash, RandomEmail, and RandomUuid typically have a low probability of producing duplicate values. The feature to ensure uniqueness is currently under development at Greenmask and is expected to be released in future updates. For the latest status, please visit the Greenmask roadmap.

    "},{"location":"built_in_transformers/transformation_engines/#details","title":"Details","text":""},{"location":"built_in_transformers/transformation_engines/#example-schema","title":"Example schema","text":"

    The next examples will be run on the following schema and sample data:

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n\nCREATE TABLE orders\n(\n    id          SERIAL PRIMARY KEY,\n    account_id  INTEGER REFERENCES account (id),\n    total_price NUMERIC(10, 2),\n    created_at  TIMESTAMP NOT NULL DEFAULT NOW(),\n    paid_at     TIMESTAMP\n);\n\nINSERT INTO orders (account_id, total_price, created_at, paid_at)\nVALUES (1, 100.50, '2024-05-01', '2024-05-02'),\n       (1, 200.75, '2024-05-03', NULL);\n
    "},{"location":"built_in_transformers/transformation_engines/#random-engine","title":"Random engine","text":"

    The random engine serves as the default engine for the greenmask. It operates using a pseudo-random number generator, which is initialized with a random seed sourced from a cryptographically secure random number generator. Employ the random engine when you need to generate random data and do not require reproducibility of the same transformation results with the same input.

    The following example demonstrates how to configure the RandomDate transformer to generate random dates.

    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birth_date\"\n        engine: \"random\" # (1)\n        min: '1970-01-01'\n        max: '2000-01-01'\n
    1. random engine is explicitly specified, although it is the default value.

    Results:

    ColumnOriginalValueTransformedValue birth_date1980-01-011970-02-23

    Keep in mind that the random engine always generates different values for the same input. For instance, if we run the previous example multiple times, we will get different results.

    "},{"location":"built_in_transformers/transformation_engines/#hash-engine","title":"Hash engine","text":"

    The hash engine is designed to generate deterministic data. It uses the SHA-3 algorithm to hash the input value. The hash engine is particularly useful when you need to generate the same output for the same input. For example, when you want to transform values that are used as primary or foreign keys in a database.

    For security reasons, it is suggested to set a global Greenmask salt via the GREENMASK_GLOBAL_SALT environment variable. The salt is added to the hash input to prevent the possibility of reverse engineering the original value from the hashed output. The value is hex encoded with variadic length. For example, GREENMASK_GLOBAL_SALT=a5eddc84e762e810. Generate a strong random salt and keep it secret.

    The following example demonstrates how to configure the RandomInt transformer to generate deterministic data using the hash engine. The public.account.id and public.orders.account_id columns will have the same values.

    - schema: \"public\"\n  name: \"account\"\n  transformers:\n\n    - name: \"RandomInt\"\n      params:\n        column: \"id\"\n        engine: hash\n        min: 1\n        max: 2147483647\n\n- schema: \"public\"\n  name: \"orders\"\n  transformers:\n\n    - name: \"RandomInt\"\n      params:\n        column: \"account_id\"\n        engine: hash\n        min: 1\n        max: 2147483647\n

    Result:

    ColumnOriginalValueTransformedValue id1130162079 ColumnOriginalValueTransformedValue account_id1130162079"},{"location":"built_in_transformers/transformation_inheritance/","title":"Transformation Inheritance","text":""},{"location":"built_in_transformers/transformation_inheritance/#description","title":"Description","text":"

    If you have partitioned tables or want to apply a transformation to a primary key and propagate it to all tables referencing that column, you can do so with Greenmask.

    "},{"location":"built_in_transformers/transformation_inheritance/#apply-for-inherited","title":"Apply for inherited","text":"

    Using apply_for_inherited, you can apply transformations to all partitions of a partitioned table, including any subpartitions.

    "},{"location":"built_in_transformers/transformation_inheritance/#configuration-conflicts","title":"Configuration conflicts","text":"

    When a partition has a transformation defined manually via config, and apply_for_inherited is set on the parent table, Greenmask will merge both the inherited and manually defined configurations. The manually defined transformation will execute last, giving it higher priority.

    If this situation occurs, you will see the following information in the log:

    {\n  \"level\": \"info\",\n  \"ParentTableSchema\": \"public\",\n  \"ParentTableName\": \"sales\",\n  \"ChildTableSchema\": \"public\",\n  \"ChildTableName\": \"sales_2022_feb\",\n  \"ChildTableConfig\": [\n    {\n      \"name\": \"RandomDate\",\n      \"params\": {\n        \"column\": \"sale_date\",\n        \"engine\": \"random\",\n        \"max\": \"2005-01-01\",\n        \"min\": \"2001-01-01\"\n      }\n    }\n  ],\n  \"time\": \"2024-11-03T22:14:01+02:00\",\n  \"message\": \"config will be merged: found manually defined transformers on the partitioned table\"\n}\n
    "},{"location":"built_in_transformers/transformation_inheritance/#apply-for-references","title":"Apply for references","text":"

    Using apply_for_references, you can apply transformations to columns involved in a primary key or in tables with a foreign key that references that column. This simplifies the transformation process by requiring you to define the transformation only on the primary key column, which will then be applied to all tables referencing that column.

    The transformer must be deterministic or support the hash engine, and the hash engine must be set in the configuration file.

    List of transformers that support apply_for_references:

    "},{"location":"built_in_transformers/transformation_inheritance/#end-to-end-identifiers","title":"End-to-End Identifiers","text":"

    End-to-end identifiers in databases are unique identifiers that are consistently used across multiple tables in a relational database schema, allowing for a seamless chain of references from one table to another. These identifiers typically serve as primary keys in one table and are propagated as foreign keys in other tables, creating a direct, traceable link from one end of a data relationship to the other.

    Greenmask can detect end-to-end identifiers and apply transformations across the entire sequence of tables. These identifiers are detected when the following condition is met: the foreign key serves as both a primary key and a foreign key in the referenced table.

    "},{"location":"built_in_transformers/transformation_inheritance/#configuration-conflicts_1","title":"Configuration conflicts","text":"

    When a transformation is manually defined on the referenced column via config and apply_for_references is set on the parent table, the manually defined transformation will be chosen and the inherited transformation will be ignored. You will receive an INFO message in the logs.

    {\n  \"level\": \"info\",\n  \"TransformerName\": \"RandomInt\",\n  \"ParentTableSchema\": \"public\",\n  \"ParentTableName\": \"tablea\",\n  \"ChildTableSchema\": \"public\",\n  \"ChildTableName\": \"tablec\",\n  \"ChildColumnName\": \"id2\",\n  \"TransformerConfig\": {\n    \"name\": \"RandomInt\",\n    \"apply_for_references\": true\n  },\n  \"time\": \"2024-11-03T21:28:10+02:00\",\n  \"message\": \"skipping apply transformer for reference: found manually configured transformer\"\n}\n
    "},{"location":"built_in_transformers/transformation_inheritance/#limitations","title":"Limitations","text":"

    Warning

    We do not recommend using apply_for_references with transformation conditions, as these conditions are not inherited by transformers on the referenced columns. This may lead to inconsistencies in the data.

    "},{"location":"built_in_transformers/transformation_inheritance/#example-1-partitioned-tables","title":"Example 1. Partitioned tables","text":"

    In this example, we have a partitioned table sales that is partitioned by year and then by month. Each partition contains a subset of data based on the year and month of the sale. The sales table has a primary key sale_id and is partitioned by sale_date. The sale_date column is transformed using the RandomDate transformer.

    CREATE TABLE sales\n(\n    sale_id   SERIAL         NOT NULL,\n    sale_date DATE           NOT NULL,\n    amount    NUMERIC(10, 2) NOT NULL\n) PARTITION BY RANGE (EXTRACT(YEAR FROM sale_date));\n\n-- Step 2: Create first-level partitions by year\nCREATE TABLE sales_2022 PARTITION OF sales\n    FOR VALUES FROM (2022) TO (2023)\n    PARTITION BY LIST (EXTRACT(MONTH FROM sale_date));\n\nCREATE TABLE sales_2023 PARTITION OF sales\n    FOR VALUES FROM (2023) TO (2024)\n    PARTITION BY LIST (EXTRACT(MONTH FROM sale_date));\n\n-- Step 3: Create second-level partitions by month for each year, adding PRIMARY KEY on each partition\n\n-- Monthly partitions for 2022\nCREATE TABLE sales_2022_jan PARTITION OF sales_2022 FOR VALUES IN (1)\n    WITH (fillfactor = 70);\nCREATE TABLE sales_2022_feb PARTITION OF sales_2022 FOR VALUES IN (2);\nCREATE TABLE sales_2022_mar PARTITION OF sales_2022 FOR VALUES IN (3);\n-- Continue adding monthly partitions for 2022...\n\n-- Monthly partitions for 2023\nCREATE TABLE sales_2023_jan PARTITION OF sales_2023 FOR VALUES IN (1);\nCREATE TABLE sales_2023_feb PARTITION OF sales_2023 FOR VALUES IN (2);\nCREATE TABLE sales_2023_mar PARTITION OF sales_2023 FOR VALUES IN (3);\n-- Continue adding monthly partitions for 2023...\n\n-- Step 4: Insert sample data\nINSERT INTO sales (sale_date, amount)\nVALUES ('2022-01-15', 100.00);\nINSERT INTO sales (sale_date, amount)\nVALUES ('2022-02-20', 150.00);\nINSERT INTO sales (sale_date, amount)\nVALUES ('2023-03-10', 200.00);\n

    To transform the sale_date column in the sales table and all its partitions, you can use the following configuration:

    - schema: public\n  name: sales\n  apply_for_inherited: true\n  transformers:\n    - name: RandomDate\n      params:\n        min: \"2022-01-01\"\n        max: \"2022-03-01\"\n        column: \"sale_date\"\n        engine: \"random\"\n
    "},{"location":"built_in_transformers/transformation_inheritance/#example-2-simple-table-references","title":"Example 2. Simple table references","text":"

    This is an ordinary table reference where the primary key of the users table is referenced in the orders table.

    -- Enable the extension for UUID generation (if not enabled)\nCREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\n\nCREATE TABLE users\n(\n    user_id  UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n    username VARCHAR(50) NOT NULL\n);\n\nCREATE TABLE orders\n(\n    order_id   UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n    user_id    UUID REFERENCES users (user_id),\n    order_date DATE NOT NULL\n);\n\nINSERT INTO users (username)\nVALUES ('john_doe');\nINSERT INTO users (username)\nVALUES ('jane_smith');\n\nINSERT INTO orders (user_id, order_date)\nVALUES ((SELECT user_id FROM users WHERE username = 'john_doe'), '2024-10-31'),\n       ((SELECT user_id FROM users WHERE username = 'jane_smith'), '2024-10-30');\n

    To transform the user_id column in the users table, you can use the following configuration:

    - schema: public\n  name: users\n  apply_for_inherited: true\n  transformers:\n    - name: RandomUuid\n      apply_for_references: true\n      params:\n        column: \"user_id\"\n        engine: \"hash\"\n

    This will apply the RandomUuid transformation to the user_id column in the orders table automatically.

    "},{"location":"built_in_transformers/transformation_inheritance/#example-3-references-on-tables-with-end-to-end-identifiers","title":"Example 3. References on tables with end-to-end identifiers","text":"

    In this example, we have three tables: tablea, tableb, and tablec. All tables have a composite primary key. In the tables tableb and tablec, the primary key is also a foreign key that references the primary key of tablea. This means that all PKs are end-to-end identifiers.

    CREATE TABLE tablea\n(\n    id1  INT,\n    id2  INT,\n    data VARCHAR(50),\n    PRIMARY KEY (id1, id2)\n);\n\nCREATE TABLE tableb\n(\n    id1    INT,\n    id2    INT,\n    detail VARCHAR(50),\n    PRIMARY KEY (id1, id2),\n    FOREIGN KEY (id1, id2) REFERENCES tablea (id1, id2) ON DELETE CASCADE\n);\n\nCREATE TABLE tablec\n(\n    id1         INT,\n    id2         INT,\n    description VARCHAR(50),\n    PRIMARY KEY (id1, id2),\n    FOREIGN KEY (id1, id2) REFERENCES tableb (id1, id2) ON DELETE CASCADE\n);\n\nINSERT INTO tablea (id1, id2, data)\nVALUES (1, 1, 'Data A1'),\n       (2, 1, 'Data A2'),\n       (3, 1, 'Data A3');\n\nINSERT INTO tableb (id1, id2, detail)\nVALUES (1, 1, 'Detail B1'),\n       (2, 1, 'Detail B2'),\n       (3, 1, 'Detail B3');\n\nINSERT INTO tablec (id1, id2, description)\nVALUES (1, 1, 'Description C1'),\n       (2, 1, 'Description C2'),\n       (3, 1, 'Description C3');\n

    To transform the id1 and id2 columns in tablea, you can use the following configuration:

    - schema: public\n  name: \"tablea\"\n  apply_for_inherited: true\n  transformers:\n    - name: RandomInt\n      apply_for_references: true\n      params:\n        min: 0\n        max: 100\n        column: \"id1\"\n        engine: \"hash\"\n    - name: RandomInt\n      apply_for_references: true\n      params:\n        min: 0\n        max: 100\n        column: \"id2\"\n        engine: \"hash\"\n

    This will apply the RandomInt transformation to the id1 and id2 columns in tableb and tablec automatically.

    "},{"location":"built_in_transformers/advanced_transformers/","title":"Advanced transformers","text":"

    Advanced transformers are modifiable anonymization methods that users can adjust based on their needs by using custom functions.

    Below you can find an index of all advanced transformers currently available in Greenmask.

    1. Json \u2014 changes a JSON content by using delete and set operations.
    2. Template \u2014 executes a Go template of your choice and applies the result to a specified column.
    3. TemplateRecord \u2014 modifies records by using a Go template of your choice and applies the changes via the PostgreSQL driver.
    "},{"location":"built_in_transformers/advanced_transformers/json/","title":"Json","text":"

    Change a JSON document using delete and set operations. NULL values are kept.

    "},{"location":"built_in_transformers/advanced_transformers/json/#parameters","title":"Parameters","text":"Name Properties Description Default Required Supported DB types column The name of the column to be affected Yes json, jsonb operations A list of operations that contains editing delete and set Yes - \u221f operation Specifies the operation type: set or delete Yes - \u221f path The path to an object to be modified. See path syntax below. Yes - \u221f value A value to be assigned to the provided path No - \u221f value_template A Golang template to be assigned to the provided path. See the list of template functions below. No - \u221f error_not_exist Throws an error if the key does not exist by the provided path. Disabled by default. false No -"},{"location":"built_in_transformers/advanced_transformers/json/#description","title":"Description","text":"

    The Json transformer applies a sequence of changing operations (set and/or delete) to a JSON document. The value can be static or dynamic. For the set operation type, a static value is provided in the value parameter, while a dynamic value is provided in the value_template parameter, taking the data received after template execution as a result. Either the value or the value_template parameter is mandatory for the set operation.

    "},{"location":"built_in_transformers/advanced_transformers/json/#path-syntax","title":"Path syntax","text":"

    The Json transformer is based on tidwall/sjson and supports the same path syntax. See their documentation for syntax rules.

    "},{"location":"built_in_transformers/advanced_transformers/json/#template-functions","title":"Template functions","text":"Function Description Signature .GetPath Returns the current path to which the operation is being applied .GetPath() (path string) .GetOriginalValue Returns the original value to which the current operation path is pointing. If the value at the specified path does not exist, it returns nil. .GetOriginalValue() (value any) .OriginalValueExists Returns a boolean value indicating whether the specified path exists or not. .OriginalValueExists() (exists bool) .GetColumnValue Returns an encoded into Golang type value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. 
Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/json/#example-changing-json-document","title":"Example: Changing JSON document","text":"Json transformer example
    - schema: \"bookings\"\n  name: \"aircrafts_data\"\n  transformers:\n    - name: \"Json\"\n      params:\n        column: \"model\"\n        operations:\n          - operation: \"set\"\n            path: \"en\"\n            value: \"Boeing 777-300-2023\"\n          - operation: \"set\"\n            path: \"seats\"\n            error_not_exist: True\n            value_template: \"{{ randomInt 100 400 }}\"\n          - operation: \"set\"\n            path: \"details.preperties.1\"\n            value: {\"name\": \"somename\", \"description\": null}\n          - operation: \"delete\"\n            path: \"values.:2\"\n
    "},{"location":"built_in_transformers/advanced_transformers/template/","title":"Template","text":"

    Execute a Go template and automatically apply the result to a specified column.

    "},{"location":"built_in_transformers/advanced_transformers/template/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any template A Go template string Yes - validate Validates the template result using the PostgreSQL driver decoding procedure. Throws an error if a custom type does not have an encode-decoder implementation. false No -"},{"location":"built_in_transformers/advanced_transformers/template/#description","title":"Description","text":"

    The Template transformer executes Go templates and automatically applies the template result to a specified column. Go template system is designed to be extensible, enabling developers to access data objects and incorporate custom functions programmatically. For more information, you can refer to the official Go Template documentation.

    With the Template transformer, you can implement complicated transformation logic using basic or custom template functions. Below you can get familiar with the basic template functions for the Template transformer. For more information about available custom template functions, see Custom functions.

    Warning

    Pay attention to the whitespaces in templates. Use dash-wrapped - brackets {{- -}} for trimming the spaces. For example, the value \"2023-12-19\" is not the same as \" 2023-12-19 \" and it may throw an error when restoring.

    "},{"location":"built_in_transformers/advanced_transformers/template/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .GetValue Returns the column value for column assigned in the column parameter, encoded by the PostgreSQL driver into any type along with any associated error. Supported types include int, float, time, string, bool, as well as slice or map of any type. .GetValue() (value any, err error) .GetRawValue Returns a raw value as a string for column assigned in the column parameter. .GetRawColumnValue(name string) (value string, err error) .GetColumnValue Returns an encoded value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .EncodeValue Encodes a value of any type into its string representation using the type assigned to the table column specified in the column parameter. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValue(value any) (res any, err error) .DecodeValue Decodes a value from its raw string representation to a Golang type using the data type assigned to the table column specified in the column parameter. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(value any) (res any, err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. 
.EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/template/#example-update-the-firstname-column","title":"Example: Update the firstname column","text":"

    Below you can see the table structure:

    "},{"location":"built_in_transformers/advanced_transformers/template/#change-rule","title":"Change rule","text":"

    The goal is to modify the firstname column based on the following conditions:

    "},{"location":"built_in_transformers/advanced_transformers/template/#using-a-template-function","title":"Using a template function","text":"

    To generate random names, you can use the fakerFirstName template function, which is designed to create synthetic names.

    Template transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformation:\n    - name: \"Template\"\n      params:\n        column: \"firstname\"\n        template: >\n          {{- if eq .GetValue \"Terri\" -}}\n            Mary\n          {{- else -}}\n            {{- fakerFirstName -}} Jr\n          {{- end -}}\n\n        validate: true\n

    Expected result:

    Value = TerryValue != Terri column name original value transformed firstname Terri Mary column name original value transformed firstname Ken Jr Mike"},{"location":"built_in_transformers/advanced_transformers/template_record/","title":"TemplateRecord","text":"

    Modify records using a Go template and apply changes by using the PostgreSQL driver functions. This transformer provides a way to implement custom transformation logic.

    "},{"location":"built_in_transformers/advanced_transformers/template_record/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns A list of columns to be affected by the template. The list of columns will be checked for constraint violations. No any template A Go template string Yes - validate Validate the template result via PostgreSQL driver decoding procedure. Throws an error if a custom type does not have an encode-decoder implementation. false No -"},{"location":"built_in_transformers/advanced_transformers/template_record/#description","title":"Description","text":"

    TemplateRecord uses Go templates to change data. However, while the Template transformer operates with a single column and automatically applies results, the TemplateRecord transformer can make changes to a set of columns in the record, and using the driver functions .SetColumnValue or .SetRawColumnValue is mandatory to do that.

    With the TemplateRecord transformer, you can implement complicated transformation logic using basic or custom template functions. Below you can get familiar with the basic template functions for the TemplateRecord transformer. For more information about available custom template functions, see Custom functions.

    "},{"location":"built_in_transformers/advanced_transformers/template_record/#template-functions","title":"Template functions","text":"Function Description Signature .GetColumnType Returns a string with the column type. .GetColumnType(name string) (typeName string, err error) .GetColumnValue Returns an encoded value for a specified column or throws an error. A value can be any of int, float, time, string, bool, or slice or map. .GetColumnValue(name string) (value any, err error) .GetRawColumnValue Returns a raw value for a specified column as a string or throws an error .GetRawColumnValue(name string) (value string, err error) .SetColumnValue Sets a new value of a specific data type to the column. The value assigned must be compatible with the PostgreSQL data type of the column. For example, it is allowed to assign an int value to an INTEGER column, but you cannot assign a float value to a timestamptz column. SetColumnValue(name string, v any) (bool, error) .SetRawColumnValue Sets a new raw value for a column, inheriting the column's existing data type, without performing data type validation. This can lead to errors when restoring the dump if the assigned value is not compatible with the column type. To ensure compatibility, consider using the .DecodeValueByColumn function followed by .SetColumnValue, for example, {{ \"13\" \\| .DecodeValueByColumn \"items_amount\" \\| .SetColumnValue \"items_amount\" }}. .SetRawColumnValue(name string, value any) (err error) .EncodeValueByColumn Encodes a value of any type into its raw string representation using the specified column name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByColumn(name string, value any) (res any, err error) .DecodeValueByColumn Decodes a value from its raw string representation to a Golang type using the specified column name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. 
.DecodeValueByColumn(name string, value any) (res any, err error) .EncodeValueByType Encodes a value of any type into its string representation using the specified type name. Encoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .EncodeValueByType(name string, value any) (res any, err error) .DecodeValueByType Decodes a value from its raw string representation to a Golang type using the specified type name. Decoding is performed through the PostgreSQL driver. Throws an error if types are incompatible. .DecodeValueByType(name string, value any) (res any, err error)"},{"location":"built_in_transformers/advanced_transformers/template_record/#example-generate-a-random-created_at-and-updated_at-dates","title":"Example: Generate a random created_at and updated_at dates","text":"

    Below you can see the table structure:

    The goal is to modify the \"created_at\" and \"updated_at\" columns based on the following rules:

    Template transformer example
    - name: \"TemplateRecord\"\n  params:\n    columns:\n      - \"created_at\"\n      - \"updated_at\"\n    template: >\n      {{ $val := .GetColumnValue \"created_at\" }}\n      {{ if isNotNull $val }}\n          {{ $createdAtValue := now }}\n          {{ $maxUpdatedDate := date_modify \"24h\" $createdAtValue }}\n          {{ $updatedAtValue := randomDate $createdAtValue $maxUpdatedDate }}\n          {{ .SetColumnValue \"created_at\" $createdAtValue }}\n          {{ .SetColumnValue \"updated_at\" $updatedAtValue }}\n      {{ end }}\n    validate: true\n

    Expected result:

    column name original value transformed created_at 2021-01-20 07:01:00.513325+00 2023-12-17 19:37:29.910054Z updated_at 2021-08-09 21:27:00.513325+00 2023-12-18 10:05:25.828498Z"},{"location":"built_in_transformers/advanced_transformers/custom_functions/","title":"Template custom functions","text":"

    Within Greenmask, custom functions play a crucial role, providing a wide array of options for implementing diverse logic. Under the hood, the custom functions are based on the sprig Go's template functions. Greenmask enhances this capability by introducing additional functions and transformation functions. These extensions mirror the logic found in the standard transformers but offer you the flexibility to implement intricate and comprehensive logic tailored to your specific needs.

    Currently, you can use template custom functions for the advanced transformers:

    and for the Transformation condition feature as well.

    Custom functions are arbitrarily divided into 2 groups:

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/","title":"Core functions","text":"

    Below you can find custom core functions which are divided into categories based on the transformation purpose.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#postgresql-driver-functions","title":"PostgreSQL driver functions","text":"Function Description null Returns the NULL value that can be used for the driver encoding-decoding operations isNull Returns true if the checked value is NULL isNotNull Returns true if the checked value is not NULL sqlCoalesce Works as a standard SQL coalesce function. It allows you to choose the first non-NULL argument from the list."},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#json-output-function","title":"JSON output function","text":"Function Description jsonExists Checks if the path value exists in JSON. Returns true if the path exists. mustJsonGet Gets the JSON attribute value by path and throws an error if the path does not exist mustJsonGetRaw Gets the JSON attribute raw value by path and throws an error if the path does not exist jsonGet Gets the JSON attribute value by path and returns nil if the path does not exist jsonGetRaw Gets the JSON attribute raw value by path and returns nil if the path does not exist jsonSet Sets the value for the JSON document by path jsonSetRaw Sets the raw value for the JSON document by path jsonDelete Deletes an attribute from the JSON document by path jsonValidate Validates the JSON document syntax and throws an error if there are any issues jsonIsValid Checks the JSON document for validity and returns true if it is valid toJsonRawValue Casts any type of value to the raw JSON value"},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#testing-functions","title":"Testing functions","text":"Function Description isInt Checks if the value of an integer type isFloat Checks if the value of a float type isNil Checks if the value is nil isString Checks if the value of a string type isMap Checks if the value of a map type isSlice Checks if the value of a slice type isBool Checks 
if the value of a boolean type"},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#transformation-and-generators","title":"Transformation and generators","text":""},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#masking","title":"masking","text":"

    Replaces characters with asterisk * symbols depending on the provided masking rule. If the value is NULL, it is kept unchanged. This function is based on ggwhite/go-masker.

    Masking rulesSignatureParametersReturn values Rule Description Example input Example output default Returns the sequence of * symbols of the same length test1234 ******** name Masks the second and the third letters ABCD A**D password Always returns a sequence of * address Keeps first 6 letters, masks the rest Larnaca, makarios st Larnac************* email Keeps a domain and the first 3 letters, masks the rest ggw.chang@gmail.com ggw****@gmail.com mobile Masks 3 digits starting from the 4th digit 0987654321 0987***321 telephone Removes (, ), , - symbols, masks last 4 digits of a telephone number, and formats it to (??)????-???? 0227993078 (02)2799-**** id Masks last 4 digits of an ID A123456789 A12345**** credit_card Masks 6 digits starting from the 7th digit 1234567890123456 123456******3456 url Masks the password part of the URL (if applicable) http://admin:mysecretpassword@localhost:1234/uri http://admin:xxxxx@localhost:1234/uri

    masking(dataType string, value string) (res string, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#truncatedate","title":"truncateDate","text":"

    Truncates datetime up to the provided part.

    SignatureParametersReturn values

    truncateDate(part string, original time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noisedatepginterval","title":"noiseDatePgInterval","text":"

    Adds or subtracts a random duration in the provided interval to or from the original date value.

    SignatureParametersReturn values

    noiseDate(interval string, original time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noisefloat","title":"noiseFloat","text":"

    Adds or subtracts a random fraction to or from the original float value. Multiplies the original float value by a provided random value that is not higher than the ratio parameter and adds it to the original value with the option to specify the decimal via the decimal parameter.

    SignatureParametersReturn values

    noiseFloat(ratio float, decimal int, value float) (res float64, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#noiseint","title":"noiseInt","text":"

    Adds or subtracts a random fraction to or from the original integer value. Multiplies the original integer value by a provided random value that is not higher than the ratio parameter and adds it to the original value.

    SignatureParametersReturn values

    noiseInt(ratio float, value float) (res int, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randombool","title":"randomBool","text":"

    Generates a random boolean value.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomdate","title":"randomDate","text":"

    Generates a random date within the provided interval.

    SignatureParametersReturn values

    randomDate(min time.Time, max time.Time) (res time.Time, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomfloat","title":"randomFloat","text":"

    Generates a random float value within the provided interval.

    SignatureParametersReturn values

    randomFloat(min any, max any, decimal int) (res float, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomint","title":"randomInt","text":"

    Generates a random integer value within the provided interval.

    SignatureParametersReturn values

    randomInt(min int, max int) (res int, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#randomstring","title":"randomString","text":"

    Generates a random string using the provided characters within the specified length range.

    SignatureParametersReturn values

    randomString(minLength int, maxLength int, symbols string) (res string, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#roundfloat","title":"roundFloat","text":"

    Rounds a float value up to provided decimal.

    SignatureParametersReturn values

    roundFloat(decimal int, original float) (res float, err error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/core_functions/#tsmodify","title":"tsModify","text":"

    Modifies the original time value by adding or subtracting the provided interval. The interval is a string in the PostgreSQL interval format.

    SignatureParametersReturn values

    tsModify(interval string, val time.Time) (time.Time, error)

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/","title":"Faker functions","text":"

    Greenmask uses go-faker/faker under the hood for generating synthetic data.

    "},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-address","title":"Faker functions: Address","text":"Function Description Signature fakerRealAddress Generates a random real-world address that includes: city, state, postal code, latitude, and longitude fakerRealAddress() (res ReadAddress) fakerLatitude Generates random fake latitude fakerLatitude() (res float64) fakerLongitude Generates random fake longitude fakerLongitude() (res float64)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-datetime","title":"Faker functions: Datetime","text":"Function Description Signature fakerUnixTime Generates random Unix time in seconds fakerLongitude() (res int64) fakerDate Generates random date with the pattern of YYYY-MM-DD fakerDate() (res string) fakerTimeString Generates random time fakerTimeString() (res string) fakerMonthName Generates a random month fakerMonthName() (res string) fakerYearString Generates a random year fakerYearString() (res string) fakerDayOfWeek Generates a random day of a week fakerDayOfWeek() (res string) fakerDayOfMonth Generates a random day of a month fakerDayOfMonth() (res string) fakerTimestamp Generates a random timestamp with the pattern of YYYY-MM-DD HH:MM:SS fakerTimestamp() (res string) fakerCentury Generates a random century fakerCentury() (res string) fakerTimezone Generates a random timezone name fakerTimezone() (res string) fakerTimeperiod Generates a random time period with the patter of either AM or PM fakerTimeperiod() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-internet","title":"Faker functions: Internet","text":"Function Description Signature fakerEmail Generates a random email fakerEmail() (res string) fakerMacAddress Generates a random MAC address fakerMacAddress() (res string) fakerDomainName Generates a random domain name 
fakerDomainName() (res string) fakerURL Generates a random URL with the pattern of https://www.domainname.some/somepath fakerURL() (res string) fakerUsername Generates a random username fakerUsername() (res string) fakerIPv4 Generates a random IPv4 address fakerIPv4() (res string) fakerIPv6 Generates a random IPv6 address fakerIPv6() (res string) fakerPassword Generates a random password fakerPassword() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-words-and-sentences","title":"Faker functions: words and sentences","text":"Function Description Signature fakerWord Generates a random word fakerWord() (res string) fakerSentence Generates a random sentence fakerSentence() (res string) fakerParagraph Generates a random sequence of sentences as a paragraph fakerParagraph() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-payment","title":"Faker functions: Payment","text":"Function Description Signature fakerCCType Generates a random credit card type, e.g. VISA, MasterCard, etc. 
fakerCCType() (res string) fakerCCNumber Generates a random credit card number fakerCCNumber() (res string) fakerCurrency Generates a random currency name fakerCurrency() (res string) fakerAmountWithCurrency Generates random amount preceded with random currency fakerAmountWithCurrency() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-person","title":"Faker functions: Person","text":"Function Description Signature fakerTitleMale Generates a random male title from the predefined list fakerTitleMale() (res string) fakerTitleFemale Generates a random female title from the predefined list fakerTitleFemale() (res string) fakerFirstName Generates a random first name fakerFirstName() (res string) fakerFirstNameMale Generates a random male first name fakerFirstNameMale() (res string) fakerFirstNameFemale Generates a random female first name fakerFirstNameFemale() (res string) fakerFirstLastName Generates a random last name fakerFirstLastName() (res string) fakerName Generates a random full name preceded with a title fakerName() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-phone","title":"Faker functions: Phone","text":"Function Description Signature fakerPhoneNumber Generates a random phone number fakerPhoneNumber() (res string) fakerTollFreePhoneNumber Generates a random phone number with the pattern of (123) 456-7890 fakerTollFreePhoneNumber() (res string) fakerE164PhoneNumber Generates a random phone number with the pattern of +12345678900 fakerE164PhoneNumber() (res string)"},{"location":"built_in_transformers/advanced_transformers/custom_functions/faker_function/#faker-functions-uuid","title":"Faker functions: UUID","text":"Function Description Signature fakerUUIDHyphenated Generates a random unique user ID separated by hyphens fakerUUID() (res string) fakerUUIDDigit Generates a random unique user ID in the HEX format 
fakerUUIDDigit() (res string)"},{"location":"built_in_transformers/standard_transformers/","title":"Standard transformers","text":"

    Standard transformers are ready-to-use methods that require no customization and work with only a minimal set of input parameters. Below you can find an index of all standard transformers currently available in Greenmask.

    1. Cmd \u2014 transforms data via external program using stdin and stdout interaction.
    2. Dict \u2014 replaces values matched by dictionary keys.
    3. Hash \u2014 generates a hash of the text value.
    4. Masking \u2014 masks a value using one of the masking behaviors depending on your domain.
    5. NoiseDate \u2014 randomly adds or subtracts a duration within the provided ratio interval to the original date value.
    6. NoiseFloat \u2014 adds or subtracts a random fraction to the original float value.
    7. NoiseNumeric \u2014 adds or subtracts a random fraction to the original numeric value.
    8. NoiseInt \u2014 adds or subtracts a random fraction to the original integer value.
    9. RandomBool \u2014 generates random boolean values.
    10. RandomChoice \u2014 replaces values randomly chosen from a provided list.
    11. RandomDate \u2014 generates a random date in a specified interval.
    12. RandomFloat \u2014 generates a random float within the provided interval.
    13. RandomInt \u2014 generates a random integer within the provided interval.
    14. RandomString \u2014 generates a random string using the provided characters within the specified length range.
    15. RandomUuid \u2014 generates a random unique user ID.
    16. RandomLatitude \u2014 generates a random latitude value.
    17. RandomLongitude \u2014 generates a random longitude value.
    18. RandomUnixTimestamp \u2014 generates a random Unix timestamp.
    19. RandomDayOfWeek \u2014 generates a random day of the week.
    20. RandomDayOfMonth \u2014 generates a random day of the month.
    21. RandomMonthName \u2014 generates the name of a random month.
    22. RandomYearString \u2014 generates a random year as a string.
    23. RandomCentury \u2014 generates a random century.
    24. RandomTimezone \u2014 generates a random timezone.
    25. RandomEmail \u2014 generates a random email address.
    26. RandomUsername \u2014 generates a random username.
    27. RandomPassword \u2014 generates a random password.
    28. RandomDomainName \u2014 generates a random domain name.
    29. RandomURL \u2014 generates a random URL.
    30. RandomMac \u2014 generates a random MAC address.
    31. RandomIP \u2014 generates a random IPv4 or IPv6 address.
    32. RandomWord \u2014 generates a random word.
    33. RandomSentence \u2014 generates a random sentence.
    34. RandomParagraph \u2014 generates a random paragraph.
    35. RandomCCType \u2014 generates a random credit card type.
    36. RandomCCNumber \u2014 generates a random credit card number.
    37. RandomCurrency \u2014 generates a random currency code.
    38. RandomAmountWithCurrency \u2014 generates a random monetary amount with currency.
    39. RandomPerson \u2014 generates a random person data (first name, last name, etc.)
    40. RandomPhoneNumber \u2014 generates a random phone number.
    41. RandomTollFreePhoneNumber \u2014 generates a random toll-free phone number.
    42. RandomE164PhoneNumber \u2014 generates a random phone number in E.164 format.
    43. RealAddress \u2014 generates a real address.
    44. RegexpReplace \u2014 replaces a string using a regular expression.
    45. Replace \u2014 replaces an original value by the provided one.
    46. SetNull \u2014 sets NULL value to the column.
    "},{"location":"built_in_transformers/standard_transformers/cmd/","title":"Cmd","text":"

    Transform data via external program using stdin and stdout interaction.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns A list of column names to be affected. If empty, the entire tuple is used. Read about the structure further. Yes Any executable The path to the executable parameter file Yes - args A list of parameters for the executable No - driver The row driver with parameters that is used for interacting with cmd. See details below. {\"name\": \"csv\"} No - validate Performs a decoding operation using the PostgreSQL driver for data received from the command to ensure the data format is correct false No - timeout Timeout for sending and receiving data from the external command 2s No - expected_exit_code The expected exit code on SIGTERM signal. If the exit code is unexpected, the transformation exits with an error. 0 No - skip_on_behaviour Skips transformation call if one of the provided columns has a null value (any) or each of the provided columns has null values (all). This option works together with the skip_on_null_input parameter on columns. Possible values: all, any. all No -

    Warning

    The parameter validate_output=true may cause an error if the type does not have a PostgreSQL driver decoder implementation. Most of the types, such as int, float, text, varchar, date, timestamp, etc., have encoders and decoders, as well as inherited types like domain types based on them.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#description","title":"Description","text":"

    The Cmd transformer allows you to send original data to an external program via stdin and receive transformed data from stdout. It supports various interaction formats such as json, csv, or plain text for one-column transformations. The interaction is performed line by line, so at the end of each sent data, a new line symbol \\n must be included.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#types-of-interaction-modes","title":"Types of interaction modes","text":""},{"location":"built_in_transformers/standard_transformers/cmd/#text","title":"text","text":"

    Textual driver that is used only for single-column transformation; thus, you cannot provide more than one column here. The value is encoded into a string literally. For example, 2023-01-03 01:00:00.0+03.

    "},{"location":"built_in_transformers/standard_transformers/cmd/#json","title":"json","text":"

    JSON line driver. It has two formats that can be passed through driver.json_data_format: [text|bytes]. Use the bytes format for binary datatypes. Use the text format for non-binary datatypes and for those that can be represented as string literals. The default json_data_format is text.

    Text format with indexesBytes format with indexes
    {\n  \"column1\": {\n    \"d\": \"some_value1\",\n    \"n\": false,\n  },\n  \"column2\": {\n    \"d\": \"some_value2\",\n    \"n\": false,\n  }\n}\n
    {\n  \"column1\": {\n    \"d\": \"aGVsbG8gd29ybHNeODcxMjE5MCUlJSUlJQ==\",\n    \"n\": false,\n  },\n  \"column2\": {\n    \"d\": \"aGVsbG8gd29ybHNeODcxMjE5MCUlJSUlJQ==\",\n    \"n\": false,\n  }\n}\n

    where:

    "},{"location":"built_in_transformers/standard_transformers/cmd/#csv","title":"csv","text":"

    CSV driver (comma-separated). The number of attributes is the same as the number of table columns, but the columns that were not mentioned in the columns list are empty. The NULL value is represented as \\N. Each attribute is escaped by a quote (\"). For example, if the transformed table has attributes id, title, and created_at, and only id and created_at require transformation, then the CSV line will look as follows:

    csv line example
    \"123\",\"\",\"2023-01-03 01:00:00.0+03\"\n
    "},{"location":"built_in_transformers/standard_transformers/cmd/#column-object-attributes","title":"Column object attributes","text":""},{"location":"built_in_transformers/standard_transformers/cmd/#example-apply-transformation-performed-by-external-command-in-text-format","title":"Example: Apply transformation performed by external command in TEXT format","text":"

    In the following example, jobtitle columns is transformed via external command transformer.

    External transformer in python example
    #!/usr/bin/env python3\nimport signal\nimport sys\n\nsignal.signal(signal.SIGTERM, lambda sig, frame: exit(0))\n\n\n# If we want to implement a simple generator, we need read the line from stdin and write any result to stdout\nfor _ in sys.stdin:\n    # Writing the result to stdout with new line and flushing the buffer\n    sys.stdout.write(\"New Job Title\")\n    sys.stdout.write(\"\\n\")\n    sys.stdout.flush()\n
    Cmd transformer config example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Cmd\"\n      params:\n        driver:\n          name: \"text\"\n        expected_exit_code: -1\n        skip_on_null_input: true\n        validate: true\n        skip_on_behaviour: \"any\"\n        timeout: 60s\n        executable: \"/var/lib/playground/test.py\"\n        columns:\n          - name: \"jobtitle\"\n            skip_original_data: true\n            skip_on_null_input: true \n
    "},{"location":"built_in_transformers/standard_transformers/cmd/#example-apply-transformation-performed-by-external-command-in-json-format","title":"Example: Apply transformation performed by external command in JSON format","text":"

    In the following example, jobtitle and loginid columns are transformed via external command transformer.

    External transformer in python example
    #!/usr/bin/env python3\nimport json\nimport signal\nimport sys\n\nsignal.signal(signal.SIGTERM, lambda sig, frame: exit(0))\n\nfor line in sys.stdin:\n    res = json.loads(line)\n    # Setting dummy values\n    res[\"jobtitle\"] = {\"d\": \"New Job Title\", \"n\": False}\n    res[\"loginid\"][\"d\"] = \"123\"\n\n    # Writing the result to stdout with new line and flushing the buffer\n    sys.stdout.write(json.dumps(res))\n    sys.stdout.write(\"\\n\")\n    sys.stdout.flush()\n
    Cmd transformer config example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Cmd\"\n      params:\n        driver:\n          name: \"json\" # (1)\n          json_data_format: \"text\" # (4)\n        expected_exit_code: -1\n        skip_on_null_input: true\n        validate: true\n        skip_on_behaviour: \"any\" # (2)\n        timeout: 60s\n        executable: \"/var/lib/playground/test.py\"\n        columns:\n          - name: \"jobtitle\"\n            skip_original_data: true\n            skip_on_null_input: true # (3)\n          - name: \"loginid\"\n            skip_original_data: false # (5)\n            skip_on_null_input: true # (3)\n

    { .annotate }

    1. Validate the received data via decode procedure using the PostgreSQL driver. Note that this may cause an error if the type is not supported in the PostgreSQL driver.
    2. Skip transformation (keep the values) if one of the affected columns (not_affected=false) has a null value.
    3. If a column has a null value, then skip it. This works in conjunction with skip_on_behaviour. Since it has the value any, if one of the columns (jobtitle or loginid) has a null value, then skip the transformation call.
    4. The format of JSON can be either text or bytes. The default value is text.
    5. If the skip_original_data attribute is set to true, the data will not be transferred to the command. This column will contain empty original data
    "},{"location":"built_in_transformers/standard_transformers/dict/","title":"Dict","text":"

    Replace values matched by dictionary keys.

    "},{"location":"built_in_transformers/standard_transformers/dict/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any values Value replace mapping as in: {\"string\": \"string\"}. The string with value \"\\N\" is considered NULL. No - default Shown if no value has been matched with dict. The string with value \"\\N\" is considered NULL. By default is empty. No - fail_not_matched When no value is matched with the dict, fails the replacement process if set to true, or keeps the current value, if set to false. true No - validate Performs the encode-decode procedure using column type to ensure that values have correct type true No -"},{"location":"built_in_transformers/standard_transformers/dict/#description","title":"Description","text":"

    The Dict transformer uses a user-provided key-value dictionary to replace values based on matches specified in the values parameter mapping. These provided values must align with the PostgreSQL type format. To validate the values format before application, you can utilize the validate parameter, triggering a decoding procedure via the PostgreSQL driver.

    If there are no matches by key, an error will be raised according to a default fail_not_matched: true parameter. You can change this behaviour by providing the default parameter, value from which will be shown in case of a missing match.

    In certain cases where the driver type does not support the validation operation, an error may occur. For setting or matching a NULL value, use a string with the \\N sequence.

    "},{"location":"built_in_transformers/standard_transformers/dict/#example-replace-marital-status","title":"Example: Replace marital status","text":"

    The following example replaces marital status from S to M or from M to S and raises an error if there is no match:

    Dict transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Dict\"\n      params:\n        column: \"maritalstatus\"\n        values:\n          \"S\": \"M\"\n          \"M\": \"S\"\n        validate: true\n        fail_not_matched: true\n

    Result

    ColumnOriginalValueTransformedValue maritalstatusSM"},{"location":"built_in_transformers/standard_transformers/hash/","title":"Hash","text":"

    Generate a hash of the text value using one of the supported hash functions (see the function parameter below). NULL values are kept.

    "},{"location":"built_in_transformers/standard_transformers/hash/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar salt Hex encoded salt string. This value may be provided via environment variable GREENMASK_GLOBAL_SALT Yes text, varchar function Hash algorithm to anonymize data. Can be any of md5, sha1, sha256, sha512, sha3-224, sha3-254, sha3-384, sha3-512. sha1 No - max_length Indicates whether to truncate the hash tail and specifies at what length. Can be any integer number, where 0 means \"no truncation\". 0 No -"},{"location":"built_in_transformers/standard_transformers/hash/#example-generate-hash-from-job-title","title":"Example: Generate hash from job title","text":"

    The following example generates a hash from the jobtitle into sha1 and truncates the results after the 10th character.

    We can set the salt via the environment variable GREENMASK_GLOBAL_SALT:

    export GREENMASK_GLOBAL_SALT=\"12343567baaa\"\n
    Hash transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Hash\"\n      params:\n        column: \"jobtitle\"\n        function: \"sha1\"\n        max_length: 10\n
    Expected result
    | column name | original value                   | transformed |\n|-------------|----------------------------------|-------------|\n| jobtitle    | Research and Development Manager | 3a456da5c5  |\n
    "},{"location":"built_in_transformers/standard_transformers/masking/","title":"Masking","text":"

    Mask a value using one of the masking rules depending on your domain. NULL values are kept.

    "},{"location":"built_in_transformers/standard_transformers/masking/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar type Data type of attribute (default, password, name, addr, email, mobile, tel, id, credit, url) default No -"},{"location":"built_in_transformers/standard_transformers/masking/#description","title":"Description","text":"

    The Masking transformer replaces characters with asterisk * symbols depending on the provided data type. If the value is NULL, it is kept unchanged. It is based on ggwhite/go-masker and supports the following masking rules:

    Type Description default Returns * symbols with the same length, e.g. input: test1234 output: ******** name Masks the second letter the third letter in a word, e. g. input: ABCD output: A**D password Always returns ************ address Keeps first 6 letters, masks the rest, e. g. input: Larnaca, makarios st output: Larnac************* email Keeps a domain and the first 3 letters, masks the rest, e. g. input: ggw.chang@gmail.com output: ggw****@gmail.com mobile Masks 3 digits starting from the 4th digit, e. g. input: 0987654321 output: 0987***321 telephone Removes (, ), , - chart, and masks last 4 digits of telephone number, then formats it to (??)????-????, e. g. input: 0227993078 output: (02)2799-**** id Masks last 4 digits of ID number, e. g. input: A123456789 output: A12345**** credit_cart Masks 6 digits starting from the 7th digit, e. g. input 1234567890123456 output 123456******3456 url Masks the password part of the URL, if applicable, e. g. http://admin:mysecretpassword@localhost:1234/uri output: http://admin:xxxxx@localhost:1234/uri"},{"location":"built_in_transformers/standard_transformers/masking/#example-masking-employee-national-id-number","title":"Example: Masking employee national ID number","text":"

    In the following example, the national ID number of an employee is masked.

    Masking transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"Masking\"\n      params:\n        column: \"nationalidnumber\"\n        type: \"id\"\n
    Expected result
    | column name      | original value | transformed |\n|------------------|----------------|-------------|\n| nationalidnumber | 295847284      | 295847****  |\n
    "},{"location":"built_in_transformers/standard_transformers/noise_date/","title":"NoiseDate","text":"

    Randomly add or subtract a duration within the provided ratio interval to the original date value.

    "},{"location":"built_in_transformers/standard_transformers/noise_date/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes date, timestamp, timestamptz min_ratio The minimum random value for noise. The value must be in PostgreSQL interval format, e. g. 1 year 2 mons 3 day 04:05:06.07 5% from max_ration parameter No - max_ratio The maximum random value for noise. The value must be in PostgreSQL interval format, e. g. 1 year 2 mons 3 day 04:05:06.07 Yes - min Min threshold date (and/or time) of value. The value has the same format as column parameter No - max Max threshold date (and/or time) of value. The value has the same format as column parameter No - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_date/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min date, timestamp, timestamptz max date, timestamp, timestamptz"},{"location":"built_in_transformers/standard_transformers/noise_date/#description","title":"Description","text":"

    The NoiseDate transformer randomly generates duration between min_ratio and max_ratio parameter and adds it to or subtracts it from the original date value. The min_ratio or max_ratio parameters must be written in the PostgreSQL interval format. You can also truncate the resulted date up to a specified part by setting the truncate parameter.

    In case you have constraints on the date range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode.

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_date/#example-adding-noise-to-the-modified-date","title":"Example: Adding noise to the modified date","text":"

    In the following example, the original timestamp value of hiredate will be noised up to 1 year 2 months 3 days 4 hours 5 minutes 6 seconds and 7 milliseconds with truncation up to the month part.

    NoiseDate transformer example
    - schema: \"humanresources\"\n  name: \"jobcandidate\"\n  transformers:\n    - name: \"NoiseDate\"\n      params:\n        column: \"hiredate\"\n        max_ratio: \"1 year 2 mons 3 day 04:05:06.07\"\n        truncate: \"month\"\n        max: \"2020-01-01 00:00:00\"\n
    "},{"location":"built_in_transformers/standard_transformers/noise_date/#example-adding-noise-to-the-modified-date-with-dynamic-min-parameter-with-hash-engine","title":"Example: Adding noise to the modified date with dynamic min parameter with hash engine","text":"

    In the following example, the original timestamp value of hiredate will be noised up to 1 year 2 months 3 days 4 hours 5 minutes 6 seconds and 7 milliseconds with truncation up to the month part. The max threshold is set to 2020-01-01 00:00:00, and the min threshold is set to the birthdate column. If the birthdate column is NULL, the default value 1990-01-01 will be used. The hash engine is used for deterministic generation - the same input will always produce the same output.

    NoiseDate transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"NoiseDate\"\n      params:\n        column: \"hiredate\"\n        max_ratio: \"1 year 2 mons 3 day 04:05:06.07\"\n        truncate: \"month\"\n        max: \"2020-01-01 00:00:00\"\n        engine: \"hash\"\n      dynamic_params:\n        min:\n          column: \"birthdate\"\n          default: \"1990-01-01\"\n

    Result

    ColumnOriginalValueTransformedValue hiredate2009-01-142010-08-01"},{"location":"built_in_transformers/standard_transformers/noise_float/","title":"NoiseFloat","text":"

    Add or subtract a random fraction to the original float value.

    "},{"location":"built_in_transformers/standard_transformers/noise_float/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8 decimal The decimal of the noised float value (number of digits after the decimal point) 4 No - min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Max threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_float/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min float4, float8, int2, int4, int8 max float4, float8, int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_float/#description","title":"Description","text":"

    The NoiseFloat transformer multiplies the original float value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value. Additionally, you can specify the number of decimal digits by using the decimal parameter.

    In case you have constraints on the float range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode. Engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_float/#example-adding-noise-to-the-purchase-price","title":"Example: Adding noise to the purchase price","text":"

    In this example, the original value of lastreceiptcost will be noised up to 15% and rounded up to 2 decimals.

    NoiseFloat transformer example
    - schema: \"purchasing\"\n  name: \"productvendor\"\n  columns_type_override: # (1)\n    lastreceiptcost: \"float8\"\n    standardprice: \"float8\"\n  transformers:\n    - name: \"NoiseFloat\"\n      params:\n        column: \"lastreceiptcost\"\n        max_ratio: 0.15\n        decimal: 2\n      dynamic_params:\n        min:\n          column: \"standardprice\"\n
    1. The type overrides applied for example because the playground database does not contain any tables with float columns.

    Result

    ColumnOriginalValueTransformedValue lastreceiptcost50.263547.87"},{"location":"built_in_transformers/standard_transformers/noise_int/","title":"NoiseInt","text":"

    Add or subtract a random fraction to the original integer value.

    "},{"location":"built_in_transformers/standard_transformers/noise_int/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Min threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_int/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8 max int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_int/#description","title":"Description","text":"

    The NoiseInt transformer multiplies the original integer value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value.

    In case you have constraints on the integer range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode.

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/noise_int/#example-noise-vacation-hours-of-an-employee","title":"Example: Noise vacation hours of an employee","text":"

    In the following example, the original value of vacationhours will be noised up to 40%. The transformer will set the value to 10 if the noised value is lower than 10 and to 1000 if the noised value exceeds 1000.

    NoiseInt transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"NoiseInt\"\n      params:\n        column: \"vacationhours\"\n        max_ratio: 0.4\n        min: 10\n        max: 1000\n

    Result

    ColumnOriginalValueTransformedValue vacationhours9969"},{"location":"built_in_transformers/standard_transformers/noise_numeric/","title":"NoiseNumeric","text":"

    Add or subtract a random fraction to the original numeric value.

    "},{"location":"built_in_transformers/standard_transformers/noise_numeric/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes numeric, decimal decimal The decimal of the noised float value (number of digits after the decimal point) 4 No - min_ratio The minimum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" 0.05 No - max_ratio The maximum random percentage for noise, from 0 to 1, e. g. 0.1 means \"add noise up to 10%\" Yes - min Min threshold of noised value No - max Max threshold of noised value No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/noise_numeric/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min numeric, decimal, float4, float8, int2, int4, int8 max numeric, decimal, float4, float8, int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/noise_numeric/#description","title":"Description","text":"

    The NoiseNumeric transformer multiplies the original numeric (or decimal) value by a randomly generated value that is not higher than the max_ratio parameter and not less than the min_ratio parameter and adds it to or subtracts it from the original value. Additionally, you can specify the number of decimal digits by using the decimal parameter.

    In case you have constraints on the numeric range, you can set the min and max parameters to specify the threshold values. The values for min and max must have the same format as the column parameter. Parameters min and max support dynamic mode. Engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines

    Info

    If the noised value exceeds the max threshold, the transformer will set the value to max. If the noised value is lower than the min threshold, the transformer will set the value to min.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    Warning

    Greenmask cannot parse the numeric type setting. For instance NUMERIC(10, 2). You should set min and max thresholds manually as well as allowed decimal. This behaviour will be changed in the later versions. Greenmask will be able to determine the decimal and scale of the column and set the min and max thresholds automatically if they were not set.

    "},{"location":"built_in_transformers/standard_transformers/noise_numeric/#example-adding-noise-to-the-purchase-price","title":"Example: Adding noise to the purchase price","text":"

    In this example, the original value of lastreceiptcost will be noised up to 15% and rounded up to 2 decimals.

    NoiseNumeric transformer example
    - schema: \"purchasing\"\n  name: \"productvendor\"\n  transformers:\n    - name: \"NoiseNumeric\"\n      params:\n        column: \"lastreceiptcost\"\n        max_ratio: 0.15\n        decimal: 2\n        max: 10000\n      dynamic_params:\n        min:\n          column: \"standardprice\"\n

    Result

    ColumnOriginalValueTransformedValue lastreceiptcost50.263557.33"},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/","title":"RandomAmountWithCurrency","text":"

    The RandomAmountWithCurrency transformer is specifically designed to populate specified database columns with random financial amounts accompanied by currency codes. Ideal for applications requiring the simulation of financial transactions, this utility enhances the realism of financial datasets by introducing variability in amounts and currencies.

    "},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#description","title":"Description","text":"

    This transformer automatically generates random financial amounts along with corresponding global currency codes (e. g., 250.00 USD, 300.00 EUR), injecting them into the designated database column. It provides a straightforward solution for populating financial records with varied and realistic data, suitable for testing payment systems, data anonymization, and simulation of economic models.

    "},{"location":"built_in_transformers/standard_transformers/random_amount_with_currency/#example-populate-the-payments-table-with-random-amounts-and-currencies","title":"Example: Populate the payments table with random amounts and currencies","text":"

    This example shows how to configure the RandomAmountWithCurrency transformer to populate the payment_details column in the payments table with random amounts and currencies. It is an effective approach to simulating a diverse range of payment transactions.

    RandomAmountWithCurrency transformer example
    - schema: \"public\"\n  name: \"payments\"\n  transformers:\n    - name: \"RandomAmountWithCurrency\"\n      params:\n        column: \"payment_details\"\n        keep_null: false\n

    In this setup, the payment_details column will be updated with random financial amounts and currency codes for each entry, replacing any existing non-NULL values. The keep_null parameter, when set to true, ensures that existing NULL values in the column remain unchanged, preserving the integrity of records without specified payment details.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/","title":"RandomBool","text":"

    Generate random boolean values.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes bool keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_bool/#description","title":"Description","text":"

    The RandomBool transformer generates a random boolean value. The behaviour for NULL values can be configured using the keep_null parameter. The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_bool/#example-generate-a-random-boolean-for-a-column","title":"Example: Generate a random boolean for a column","text":"

    In the following example, the RandomBool transformer generates a random boolean value for the salariedflag column.

    RandomBool transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomBool\"\n      params:\n        column: \"salariedflag\"\n

    Result

    ColumnOriginalValueTransformedValue salariedflagtf"},{"location":"built_in_transformers/standard_transformers/random_cc_number/","title":"RandomCCNumber","text":"

    The RandomCCNumber transformer is specifically designed to populate specified database columns with random credit card numbers. This utility is crucial for applications that involve simulating financial data, testing payment systems, or anonymizing real credit card numbers in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_cc_number/#description","title":"Description","text":"

    By leveraging algorithms capable of generating plausible credit card numbers that adhere to standard credit card validation rules (such as the Luhn algorithm), the RandomCCNumber transformer injects random credit card numbers into the designated database column. This approach ensures the generation of credit card numbers that are realistic for testing and development purposes, without compromising real-world applicability and security.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_number/#example-populate-random-credit-card-numbers-for-the-payment_information-table","title":"Example: Populate random credit card numbers for the payment_information table","text":"

    This example demonstrates configuring the RandomCCNumber transformer to populate the cc_number column in the payment_information table with random credit card numbers. It is an effective strategy for creating a realistic set of payment data for application testing or data anonymization.

    RandomCCNumber transformer example
    - schema: \"public\"\n  name: \"payment_information\"\n  transformers:\n    - name: \"RandomCCNumber\"\n      params:\n        column: \"cc_number\"\n        keep_null: false\n

    With this setup, the cc_number column will be updated with random credit card numbers for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, it will ensure that existing NULL values in the column are preserved, maintaining the integrity of records where credit card information is not applicable or available.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/","title":"RandomCCType","text":"

    The RandomCCType transformer is designed to populate specified database columns with random credit card types. This tool is essential for applications that require the simulation of financial transaction data, testing payment processing systems, or anonymizing credit card type information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_cc_type/#description","title":"Description","text":"

    Utilizing a predefined list of credit card types (e.g., VISA, MasterCard, American Express, Discover), the RandomCCType transformer injects random credit card type names into the designated database column. This feature allows for the creation of realistic and varied financial transaction datasets by simulating a range of credit card types without using real card data.

    "},{"location":"built_in_transformers/standard_transformers/random_cc_type/#example-populate-random-credit-card-types-for-the-transactions-table","title":"Example: Populate random credit card types for the transactions table","text":"

    This example shows how to configure the RandomCCType transformer to populate the card_type column in the transactions table with random credit card types. It is a straightforward method for simulating diverse payment methods across transactions.

    RandomCCType transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomCCType\"\n      params:\n        column: \"card_type\"\n        keep_null: false\n

    In this configuration, the card_type column will be updated with random credit card types for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, maintaining the integrity of records where card type information is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_century/","title":"RandomCentury","text":"

    The RandomCentury transformer is crafted to populate specified database columns with random century values. It is ideal for applications that require historical data simulation, such as generating random years within specific centuries for historical databases, testing datasets with temporal dimensions, or anonymizing dates in historical research data.

    "},{"location":"built_in_transformers/standard_transformers/random_century/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_century/#description","title":"Description","text":"

    The RandomCentury transformer utilizes an algorithm or a library function (hypothetical in this context) to generate random century values. Each value represents a century (e.g., 19th, 20th, 21st), providing a broad temporal range that can be used to enhance datasets requiring a distribution across different historical periods without the need for precise date information.

    "},{"location":"built_in_transformers/standard_transformers/random_century/#example-populate-random-centuries-for-the-historical_artifacts-table","title":"Example: Populate random centuries for the historical_artifacts table","text":"

    This example shows how to configure the RandomCentury transformer to populate the century column in a historical_artifacts table with random century values, adding an element of variability and historical context to the dataset.

    RandomCentury transformer example
    - schema: \"public\"\n  name: \"historical_artifacts\"\n  transformers:\n    - name: \"RandomCentury\"\n      params:\n        column: \"century\"\n        keep_null: false\n

    In this setup, the century column will be filled with random century values, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values in the column will remain untouched, preserving the original dataset's integrity where no temporal data is available.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/","title":"RandomChoice","text":"

    Replace values randomly chosen from a provided list.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any values A list of values in any format. The string with value \\N is considered NULL. Yes - validate Performs a decoding procedure via the PostgreSQL driver using the column type to ensure that values have correct type true No keep_null Indicates whether NULL values should be replaced with transformed values or not true No engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_choice/#description","title":"Description","text":"

    The RandomChoice transformer replaces one randomly chosen value from the list provided in the values parameter. You can use the validate parameter to ensure that values are correct before applying the transformation. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_choice/#example-choosing-randomly-from-provided-dates","title":"Example: Choosing randomly from provided dates","text":"

    In this example, the provided values undergo validation through PostgreSQL driver decoding, and one value is randomly chosen from the list.

    RandomChoice transformer example
    - schema: \"humanresources\"\n  name: \"jobcandidate\"\n  transformers:\n    - name: \"RandomChoice\"\n      params:\n        column: \"modifieddate\"\n        validate: true\n        engine: hash\n        values:\n          - \"2023-12-21 07:41:06.891\"\n          - \"2023-12-21 07:41:06.896\"\n

    Result

    ColumnOriginalValueTransformedValue modifieddate2007-06-23 00:00:002023-12-21 07:41:06.891"},{"location":"built_in_transformers/standard_transformers/random_currency/","title":"RandomCurrency","text":"

    The RandomCurrency transformer is tailored to populate specified database columns with random currency codes. This tool is highly beneficial for applications involving the simulation of international financial data, testing currency conversion features, or anonymizing currency information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_currency/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_currency/#description","title":"Description","text":"

    Utilizing a comprehensive list of global currency codes (e.g., USD, EUR, JPY), the RandomCurrency transformer injects random currency codes into the designated database column. This feature allows for the creation of diverse and realistic financial transaction datasets by simulating a variety of currencies without relying on actual financial data.

    "},{"location":"built_in_transformers/standard_transformers/random_currency/#example-populate-random-currency-codes-for-the-transactions-table","title":"Example: Populate random currency codes for the transactions table","text":"

    This example outlines configuring the RandomCurrency transformer to populate the currency_code column in a transactions table with random currency codes. It is an effective way to simulate international transactions across multiple currencies.

    RandomCurrency transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomCurrency\"\n      params:\n        column: \"currency_code\"\n        keep_null: false\n

    In this configuration, the currency_code column will be updated with random currency codes for each entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where currency data may not be applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_date/","title":"RandomDate","text":"

    Generate a random date in a specified interval.

    "},{"location":"built_in_transformers/standard_transformers/random_date/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column Name of the column to be affected Yes date, timestamp, timestamptz min The minimum threshold date for the random value. The format depends on the column type. Yes - max The maximum threshold date for the random value. The format depends on the column type. Yes - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_date/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min date, timestamp, timestamptz max date, timestamp, timestamptz"},{"location":"built_in_transformers/standard_transformers/random_date/#description","title":"Description","text":"

    The RandomDate transformer generates a random date within the provided interval, starting from min to max. It can also perform date truncation up to the specified part of the date. The format of dates in the min and max parameters must adhere to PostgreSQL types, including DATE, TIMESTAMP WITHOUT TIMEZONE, or TIMESTAMP WITH TIMEZONE.

    Note

    The value of min and max parameters depends on the column type. For example, for the date column, the value should be in the format YYYY-MM-DD, while for the timestamp column, the value should be in the format YYYY-MM-DD HH:MM:SS or YYYY-MM-DD HH:MM:SS.SSSSSS. The timestamptz column requires the value to be in the format YYYY-MM-DD HH:MM:SS.SSSSSS+HH:MM. Read more about date/time formats in the PostgreSQL documentation.

    The behaviour for NULL values can be configured using the keep_null parameter. The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_date/#example-generate-modifieddate","title":"Example: Generate modifieddate","text":"

    In the following example, a random timestamp without timezone is generated for the modifieddate column within the range from 2011-05-31 00:00:00 to 2013-05-31 00:00:00, and the part of the random value after day is truncated.

    RandomDate transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"modifieddate\"\n        keep_null: false\n        min: \"2011-05-31 00:00:00\"\n        max: \"2013-05-31 00:00:00\"\n        truncate: \"day\"\n

    Result

    ColumnOriginalValueTransformedValue modifieddate2014-06-30 00:00:002012-07-27 00:00:00"},{"location":"built_in_transformers/standard_transformers/random_date/#example-generate-hiredate-based-on-birthdate-using-two-transformations","title":"Example: Generate hiredate based on birthdate using two transformations","text":"

    In this example, the RandomDate transformer generates a random date for the birthdate column within the range now - 50 years to now - 18 years. The hire date is generated based on the birthdate, ensuring that the employee is at least 18 years old when hired.

    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomDate\"\n      params:\n        column: \"birthdate\"\n        min: '{{ now | tsModify \"-50 years\" | .EncodeValue }}' # 1994\n        max: '{{ now | tsModify \"-18 years\" | .EncodeValue }}' # 2006\n\n    - name: \"RandomDate\"\n      params:\n        column: \"hiredate\"\n        truncate: \"month\"\n        max: \"{{ now | .EncodeValue }}\"\n      dynamic_params:\n        min:\n          column: \"birthdate\"\n          template: '{{ .GetValue | tsModify \"18 years\" | .EncodeValue }}' # min age 18 years\n

    Result:

    ColumnOriginalValueTransformedValue birthdate1969-01-291985-10-29 hiredate2009-01-142023-01-01"},{"location":"built_in_transformers/standard_transformers/random_day_of_month/","title":"RandomDayOfMonth","text":"

    The RandomDayOfMonth transformer is designed to populate specified database columns with random day-of-the-month values. It is particularly useful for scenarios requiring the simulation of dates, such as generating random event dates, user sign-up dates, or any situation where the specific day of the month is needed without reference to the actual month or year.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, int2, int4, int8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#description","title":"Description","text":"

    Utilizing the faker library, the RandomDayOfMonth transformer generates random numerical values representing days of the month, ranging from 1 to 31. This allows for the easy insertion of random but plausible day-of-the-month data into a database, enhancing realism or anonymizing actual dates.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_month/#example-populate-random-days-of-the-month-for-the-events-table","title":"Example: Populate random days of the month for the events table","text":"

    This example illustrates how to configure the RandomDayOfMonth transformer to fill the event_day column in the events table with random day-of-the-month values, facilitating the simulation of varied event scheduling.

    RandomDayOfMonth transformer example
    - schema: \"public\"\n  name: \"events\"\n  transformers:\n    - name: \"RandomDayOfMonth\"\n      params:\n        column: \"event_day\"\n        keep_null: false\n

    With this setup, the event_day column will be updated with random day-of-the-month values, replacing any existing non-NULL values. Setting keep_null to true ensures that NULL values in the column are left unchanged, maintaining any existing gaps in the data.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/","title":"RandomDayOfWeek","text":"

    The RandomDayOfWeek transformer is specifically designed to fill specified database columns with random day-of-the-week names. It is particularly useful for applications that require simulated weekly schedules, random event planning, or any scenario where the day of the week is relevant but the specific date is not.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#description","title":"Description","text":"

    Utilizing the faker library, the RandomDayOfWeek transformer generates names of days (e.g., Monday, Tuesday) at random. This transformer can be applied to any text or varchar column in a database, introducing variability and realism into data sets that need to represent days of the week in a non-specific manner.

    "},{"location":"built_in_transformers/standard_transformers/random_day_of_week/#example-populate-random-days-of-the-week-for-the-work_schedule-table","title":"Example: Populate random days of the week for the work_schedule table","text":"

    This example demonstrates configuring the RandomDayOfWeek transformer to populate the work_day column in the work_schedule table with random days of the week. This setup can help simulate a diverse range of work schedules without tying them to specific dates.

    RandomDayOfWeek transformer example
    - schema: \"public\"\n  name: \"work_schedule\"\n  transformers:\n    - name: \"RandomDayOfWeek\"\n      params:\n        column: \"work_day\"\n        keep_null: false\n

    In this configuration, every entry in the work_day column will be updated with a random day of the week, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values within the column will remain unchanged.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/","title":"RandomDomainName","text":"

    The RandomDomainName transformer is designed to populate specified database columns with random domain names. This tool is invaluable for simulating web data, testing applications that interact with domain names, or anonymizing real domain information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_domain_name/#description","title":"Description","text":"

    By leveraging an algorithm or library capable of generating believable domain names, the RandomDomainName transformer introduces random domain names into the specified database column. Each generated domain name includes a second-level domain (SLD) and a top-level domain (TLD), such as \"example.com\" or \"website.org,\" providing a wide range of plausible web addresses for database enrichment.

    "},{"location":"built_in_transformers/standard_transformers/random_domain_name/#example-populate-random-domain-names-for-the-websites-table","title":"Example: Populate random domain names for the websites table","text":"

    This example demonstrates configuring the RandomDomainName transformer to populate the domain column in the websites table with random domain names. This approach facilitates the creation of a diverse and realistic set of web addresses for testing, simulation, or data anonymization purposes.

    RandomDomainName transformer example
    - schema: \"public\"\n  name: \"websites\"\n  transformers:\n    - name: \"RandomDomainName\"\n      params:\n        column: \"domain\"\n        keep_null: false\n

    In this setup, the domain column will be updated with random domain names for each entry, replacing any existing non-NULL values. If keep_null is set to true, the transformer will preserve existing NULL values in the column, maintaining the integrity of data where domain information is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/","title":"RandomE164PhoneNumber","text":"

    The RandomE164PhoneNumber transformer is developed to populate specified database columns with random E.164 phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets while focusing on E.164 numbers.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#description","title":"Description","text":"

    The RandomE164PhoneNumber transformer utilizes algorithms capable of generating random E.164 phone numbers with the standard international format and injects them into the designated database column. This feature allows for the creation of diverse and realistic contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_e164_phone_number/#example-populate-random-e164-phone-numbers-for-the-contact_information-table","title":"Example: Populate random E.164 phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomE164PhoneNumber transformer to populate the phone_number column in the contact_information table with random E.164 phone numbers. It is an effective method for simulating a variety of contact information entries with E.164 numbers.

    RandomE164PhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomE164PhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random E.164 phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where E.164 phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_email/","title":"RandomEmail","text":"

    Generate email addresses for a specified column.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_original_domain Keep the original domain of the email address false No - local_part_template The template for local part of email No - domain_part_template The template for domain part of email No - domains List of domains for new email [\"gmail.com\", \"yahoo.com\", \"outlook.com\", \"hotmail.com\", \"aol.com\", \"icloud.com\", \"mail.com\", \"zoho.com\", \"yandex.com\", \"protonmail.com\", \"gmx.com\", \"fastmail.com\"] No - validate Validate generated email if using template false No - max_random_length Max length of randomly generated part of the email 32 No - keep_null Indicates whether NULL values should be preserved false No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_email/#description","title":"Description","text":"

    The RandomEmail transformer generates random email addresses for the specified database column. By default, the transformer generates random email addresses with a maximum length of 32 characters. The keep_original_domain parameter allows you to preserve the original domain part of the email address. The local_part_template and domain_part_template parameters enable you to specify templates for the local and domain parts of the email address, respectively. If the validate parameter is set to true, the transformer will validate the generated email addresses against the specified templates. The keep_null parameter allows you to preserve existing NULL values in the column.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#templates-parameters","title":"Templates parameters","text":"

    In each template you have access to the columns of the table by using the {{ .column_name }} syntax. Note that all values are strings. For example, you can assemble the email address by accessing the first_name and last_name columns: {{ .first_name | lower }}.{{ .last_name | lower }}.

    The transformer always generates a random sequence for the email, and you can use it by accessing the {{ .random_string }} variable. For example, we can add a random string at the end of the local part: {{ .first_name | lower }}.{{ .last_name | lower }}.{{ .random_string }}.

    Read more about template functions in the Template functions section.

    "},{"location":"built_in_transformers/standard_transformers/random_email/#random-email-generation-using-first-name-and-last-name","title":"Random email generation using first name and last name","text":"

    In this example, the RandomEmail transformer generates random email addresses for the email column in the account table. The transformer generates email addresses using the first_name and last_name columns as the local part of the email address and adds a random string to the end of the local part with length 10 characters. The original domain part of the email address is preserved.

    CREATE TABLE account\n(\n    id         SERIAL PRIMARY KEY,\n    gender     VARCHAR(1) NOT NULL,\n    email      TEXT       NOT NULL UNIQUE,\n    first_name TEXT       NOT NULL,\n    last_name  TEXT       NOT NULL,\n    birth_date DATE,\n    created_at TIMESTAMP  NOT NULL DEFAULT NOW()\n);\n\nINSERT INTO account (first_name, gender, last_name, birth_date, email)\nVALUES ('John', 'M', 'Smith', '1980-01-01', 'john.smith@gmail.com');\n
    RandomEmail transformer example
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomEmail\"\n      params:\n        column: \"email\"\n        engine: \"hash\"\n        keep_original_domain: true\n        local_part_template: \"{{ .first_name | lower }}.{{ .last_name | lower }}.{{ .random_string | trunc 10 }}\"\n

    Result:

    ColumnOriginalValueTransformedValue emailjohn.smith@gmail.comjohn.smith.a075d99e2d@gmail.com"},{"location":"built_in_transformers/standard_transformers/random_email/#simple-random-email-generation","title":"Simple random email generation","text":"

    In this example, the RandomEmail transformer generates random email addresses for the email column in the account table. The transformer generates random email addresses with a maximum length of 10 characters.

    RandomEmail transformer example
    - schema: \"public\"\n  name: \"account\"\n  transformers:\n    - name: \"RandomEmail\"\n      params:\n        column: \"email\"\n        max_random_length: 10\n

    Result:

    ColumnOriginalValueTransformedValue emailjohn.smith@gmail.comjohn.smith.a075d99e2d@gmail.com"},{"location":"built_in_transformers/standard_transformers/random_float/","title":"RandomFloat","text":"

    Generate a random float within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_float/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8 min The minimum threshold for the random value. The value range depends on the column type. Yes - max The maximum threshold for the random value. The value range depends on the column type. Yes - decimal The decimal of the random float value (number of digits after the decimal point) 4 No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_float/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min float4, float8 max float4, float8"},{"location":"built_in_transformers/standard_transformers/random_float/#description","title":"Description","text":"

    The RandomFloat transformer generates a random float value within the provided interval, starting from min to max, with the option to specify the number of decimal digits by using the decimal parameter. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_float/#example-generate-random-price","title":"Example: Generate random price","text":"

    In this example, the RandomFloat transformer generates random prices in the range from 0.1 to 7000 while maintaining a decimal of up to 2 digits.

    RandomFloat transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  columns_type_override:  # (1)\n    \"unitprice\": \"float8\"\n  transformers:\n    - name: \"RandomFloat\"\n      params:\n        column: \"unitprice\"\n        min: 0.1\n        max: 7000\n        decimal: 2\n
    1. The type overrides applied for example because the playground database does not contain any tables with float columns.

    Result:

    ColumnOriginalValueTransformedValue unitprice2024.9944449.7"},{"location":"built_in_transformers/standard_transformers/random_int/","title":"RandomInt","text":"

    Generate a random integer within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_int/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min The minimum threshold for the random value Yes - max The maximum threshold for the random value Yes - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_int/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8 max int2, int4, int8"},{"location":"built_in_transformers/standard_transformers/random_int/#description","title":"Description","text":"

    The RandomInt transformer generates a random integer within the specified min and max thresholds. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_int/#example-generate-random-item-quantity","title":"Example: Generate random item quantity","text":"

    In the following example, the RandomInt transformer generates a random value in the range from 1 to 30 and assigns it to the orderqty column.

    generate random orderqty in the range from 1 to 30
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomInt\"\n      params:\n        column: \"orderqty\"\n        min: 1\n        max: 30\n

    Result

    ColumnOriginalValueTransformedValue orderqty129"},{"location":"built_in_transformers/standard_transformers/random_int/#example-generate-random-sick-leave-hours-based-on-vacation-hours","title":"Example: Generate random sick leave hours based on vacation hours","text":"

    In the following example, the RandomInt transformer generates a random value in the range from the value of the vacationhours column to 100 and assigns it to the sickleavehours column. This configuration allows for the simulation of sick leave hours based on the number of vacation hours.

    RandomInt transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RandomInt\"\n      params:\n        column: \"sickleavehours\"\n        max: 100\n      dynamic_params:\n        min:\n          column: \"vacationhours\"\n

    Result

    ColumnOriginalValueTransformedValue sickleavehours6999"},{"location":"built_in_transformers/standard_transformers/random_ip/","title":"RandomIP","text":"

    The RandomIp transformer is designed to populate specified database columns with random IPv4 or IPv6 addresses. This utility is essential for applications requiring the simulation of network data, testing systems that utilize IP addresses, or anonymizing real IP addresses in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_ip/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, inet subnet Subnet for generating a random IP address in IPv4 or IPv6 format Yes - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_ip/#dynamic-parameters","title":"Dynamic parameters","text":"Name Supported types subnet cidr, text, varchar"},{"location":"built_in_transformers/standard_transformers/random_ip/#description","title":"Description","text":"

    Utilizing a robust algorithm or library for generating IP addresses, the RandomIp transformer injects random IPv4 or IPv6 addresses into the designated database column, depending on the provided subnet. The transformer automatically detects whether to generate an IPv4 or IPv6 address based on the subnet version specified.

    "},{"location":"built_in_transformers/standard_transformers/random_ip/#example-generate-a-random-ipv4-address-for-a-1921681024-subnet","title":"Example: Generate a Random IPv4 Address for a 192.168.1.0/24 Subnet","text":"

    This example demonstrates how to configure the RandomIp transformer to inject a random IPv4 address into the ip_address column for entries in the 192.168.1.0/24 subnet:

    Create table ip_networks and insert data
    CREATE TABLE ip_networks\n(\n    id         SERIAL PRIMARY KEY,\n    ip_address INET,\n    network    CIDR\n);\n\nINSERT INTO ip_networks (ip_address, network)\nVALUES ('192.168.1.10', '192.168.1.0/24'),\n       ('10.0.0.5', '10.0.0.0/16'),\n       ('172.16.254.3', '172.16.0.0/12'),\n       ('192.168.100.14', '192.168.100.0/24'),\n       ('2001:0db8:85a3:0000:0000:8a2e:0370:7334', '2001:0db8:85a3::/64'); -- An IPv6 address and network\n
    RandomIp transformer example
    - schema: public\n  name: ip_networks\n  transformers:\n    - name: \"RandomIp\"\n      params:\n        subnet: \"192.168.1.0/24\"\n        column: \"ip_address\"\n        engine: \"random\"\n

    Result:

    ColumnOriginalValueTransformedValue ip_address192.168.1.10192.168.1.28"},{"location":"built_in_transformers/standard_transformers/random_ip/#example-generate-a-random-ip-based-on-the-dynamic-subnet-parameter","title":"Example: Generate a Random IP Based on the Dynamic Subnet Parameter","text":"

    This configuration illustrates how to use the RandomIp transformer dynamically, where it reads the subnet information from the network column of the database and generates a corresponding random IP address:

    RandomIp transformer example with dynamic mode
    - schema: public\n  name: ip_networks\n  transformers:\n    - name: \"RandomIp\"\n      params:\n        column: \"ip_address\"\n        engine: \"random\"\n      dynamic_params:\n        subnet:\n          column: \"network\"\n

    Result:

    ColumnOriginalValueTransformedValue ip_address192.168.1.10192.168.1.111"},{"location":"built_in_transformers/standard_transformers/random_latitude/","title":"RandomLatitude","text":"

    The RandomLatitude transformer generates random latitude values for specified database columns. It is designed to support geographical data enhancements, particularly useful for applications requiring randomized but plausible geographical coordinates.

    "},{"location":"built_in_transformers/standard_transformers/random_latitude/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_latitude/#description","title":"Description","text":"

    The RandomLatitude transformer utilizes the faker library to produce random latitude values within the range of -90 to +90 degrees. This transformer can be applied to columns designated to store geographical latitude information, enhancing data sets with randomized latitude coordinates.

    "},{"location":"built_in_transformers/standard_transformers/random_latitude/#example-populate-random-latitude-for-the-locations-table","title":"Example: Populate random latitude for the locations table","text":"

    This example demonstrates configuring the RandomLatitude transformer to populate the latitude column in the locations table with random latitude values.

    RandomLatitude transformer example
    - schema: \"public\"\n  name: \"locations\"\n  transformers:\n    - name: \"RandomLatitude\"\n      params:\n        column: \"latitude\"\n        keep_null: false\n

    With this configuration, the latitude column will be filled with random latitude values, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values will be preserved.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/","title":"RandomLongitude","text":"

    The RandomLongitude transformer is designed to generate random longitude values for specified database columns, enhancing datasets with realistic geographic coordinates suitable for a wide range of applications, from testing location-based services to anonymizing real geographic data.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes float4, float8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_longitude/#description","title":"Description","text":"

    The RandomLongitude transformer leverages the faker library to produce random longitude values within the globally accepted range of -180 to +180 degrees. This flexibility allows the transformer to be applied to any column intended for storing longitude data, providing a simple yet powerful tool for introducing randomized longitude coordinates into a database.

    "},{"location":"built_in_transformers/standard_transformers/random_longitude/#example-populate-random-longitude-for-the-locations-table","title":"Example: Populate random longitude for the locations table","text":"

    This example shows how to use the RandomLongitude transformer to fill the longitude column in the locations table with random longitude values.

    RandomLongitude transformer example
    - schema: \"public\"\n  name: \"locations\"\n  transformers:\n    - name: \"RandomLongitude\"\n      params:\n        column: \"longitude\"\n        keep_null: false\n

    This setup ensures that all entries in the longitude column receive a random longitude value, replacing any existing non-NULL values. If keep_null is set to true, then existing NULL values in the column will remain unchanged.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/","title":"RandomMac","text":"

    The RandomMac transformer is designed to populate specified database columns with random MAC addresses.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, macaddr keep_original_vendor Should the Individual/Group (I/G) and Universal/Local (U/L) bits be preserved from the original MAC address. false No - cast_type Parameter that allows setting the Individual/Group (I/G) bit in the MAC address. Allowed values [any, individual, group]. If this value is individual, the address is meant for a single device (unicast). If it is group, the address is for a group of devices, which can include multicast and broadcast addresses. any No management_type Parameter that allows setting the Universal/Local (U/L) bit in the MAC address. Allowed values [any, universal, local]. If this bit is universal, the address is universally administered (globally unique). If it is local, the address is locally administered (such as when set manually or programmatically on a network device). any No engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_mac/#description","title":"Description","text":"

    The RandomMac transformer generates a random MAC address and injects it into the specified database column. The transformer can be configured to preserve the Individual/Group (I/G) and Universal/Local (U/L) bits from the original MAC address. You can also keep the original vendor bits in the generated MAC address by setting the keep_original_vendor parameter to true.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_mac/#example-generate-a-random-mac-address","title":"Example: Generate a Random MAC Address","text":"

    This example demonstrates how to configure the RandomMac transformer to inject a random MAC address into the mac_address column:

    Create table mac_addresses and insert data
    CREATE TABLE mac_addresses\n(\n    id          SERIAL PRIMARY KEY,\n    device_name VARCHAR(50),\n    mac_address MACADDR,\n    description TEXT\n);\n\nINSERT INTO mac_addresses (device_name, mac_address, description)\nVALUES ('Device A', '00:1A:2B:3C:4D:5E', 'Description for Device A'),\n       ('Device B', '01:2B:3C:4D:5E:6F', 'Description for Device B'),\n       ('Device C', '02:3C:4D:5E:6F:70', 'Description for Device C'),\n       ('Device D', '03:4D:5E:6F:70:71', 'Description for Device D'),\n       ('Device E', '04:5E:6F:70:71:72', 'Description for Device E');\n
    RandomMac transformer example
    - schema: public\n  name: mac_addresses\n  transformers:\n    - name: \"RandomMac\"\n      params:\n        column: \"mac_address\"\n        engine: \"random\"\n        cast_type: \"any\"\n        management_type: \"any\"\n

    Result:

    ColumnOriginalValueTransformedValue mac_address00:1a:2b:3c:4d:5eac:7f:a8:11:4e:0d"},{"location":"built_in_transformers/standard_transformers/random_month_name/","title":"RandomMonthName","text":"

    The RandomMonthName transformer is crafted to populate specified database columns with random month names. This transformer is especially useful for scenarios requiring the simulation of time-related data, such as user birth months or event months, without relying on specific date values.

    "},{"location":"built_in_transformers/standard_transformers/random_month_name/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_month_name/#description","title":"Description","text":"

    The RandomMonthName transformer utilizes the faker library to generate the names of months at random. It can be applied to any textual column in a database to introduce variety and realism into data sets that require representations of months without the need for specific calendar dates.

    "},{"location":"built_in_transformers/standard_transformers/random_month_name/#example-populate-random-month-names-for-the-user_profiles-table","title":"Example: Populate random month names for the user_profiles table","text":"

    This example demonstrates how to configure the RandomMonthName transformer to fill the birth_month column in the user_profiles table with random month names, adding a layer of diversity to user data without using actual birthdates.

    RandomMonthName transformer example
    - schema: \"public\"\n  name: \"user_profiles\"\n  transformers:\n    - name: \"RandomMonthName\"\n      params:\n        column: \"birth_month\"\n        keep_null: false\n

    With this setup, the birth_month column will be updated with random month names, replacing any existing non-NULL values. If the keep_null parameter is set to true, then existing NULL values within the column will remain untouched.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/","title":"RandomNumeric","text":"

    Generate a random numeric within the provided interval.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes numeric, decimal min The minimum threshold for the random value. The value range depends on the column type. Yes - max The maximum threshold for the random value. The value range depends on the column type. Yes - decimal The decimal of the random numeric value (number of digits after the decimal point) 4 No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_numeric/#dynamic-parameters","title":"Dynamic parameters","text":"Parameter Supported types min int2, int4, int8, float4, float8, numeric, decimal max int2, int4, int8, float4, float8, numeric, decimal"},{"location":"built_in_transformers/standard_transformers/random_numeric/#description","title":"Description","text":"

    The RandomNumeric transformer generates a random numeric value within the provided interval, starting from min to max, with the option to specify the number of decimal digits by using the decimal parameter. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_numeric/#example-generate-random-price","title":"Example: Generate random price","text":"

    In this example, the RandomNumeric transformer generates random prices in the range from 0.1 to 7000 while maintaining a decimal of up to 2 digits.

    RandomNumeric transformer example
    - schema: \"sales\"\n  name: \"salesorderdetail\"\n  transformers:\n    - name: \"RandomNumeric\"\n      params:\n        column: \"unitprice\"\n        min: 0.1\n        max: 7000\n        decimal: 2\n
    1. The type overrides are applied in this example because the playground database does not contain any tables with numeric columns.

    Result:

    ColumnOriginalValueTransformedValue unitprice2024.9944449.7"},{"location":"built_in_transformers/standard_transformers/random_paragraph/","title":"RandomParagraph","text":"

    The RandomParagraph transformer is crafted to populate specified database columns with random paragraphs. This utility is indispensable for applications that require the generation of extensive textual content, such as simulating articles, enhancing textual datasets for NLP systems, or anonymizing textual content in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_paragraph/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_paragraph/#description","title":"Description","text":"

    Employing sophisticated text generation algorithms or libraries, the RandomParagraph transformer generates random paragraphs, injecting them into the designated database column. This transformer is designed to create varied and plausible paragraphs that simulate real-world textual content, providing a valuable tool for database enrichment, testing, and anonymization.

    "},{"location":"built_in_transformers/standard_transformers/random_paragraph/#example-populate-random-paragraphs-for-the-articles-table","title":"Example: Populate random paragraphs for the articles table","text":"

    This example illustrates configuring the RandomParagraph transformer to populate the body column in an articles table with random paragraphs. It is an effective way to simulate diverse article content for development, testing, or demonstration purposes.

    RandomParagraph transformer example
    - schema: \"public\"\n  name: \"articles\"\n  transformers:\n    - name: \"RandomParagraph\"\n      params:\n        column: \"body\"\n        keep_null: false\n

    With this setup, the body column will receive random paragraphs for each entry, replacing any existing non-NULL values. Setting the keep_null parameter to true allows for the preservation of existing NULL values within the column, maintaining the integrity of records where article content is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_password/","title":"RandomPassword","text":"

    The RandomPassword transformer is designed to populate specified database columns with random passwords. This utility is vital for applications that require the simulation of secure user data, testing systems with authentication mechanisms, or anonymizing real passwords in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_password/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_password/#description","title":"Description","text":"

    Employing sophisticated password generation algorithms or libraries, the RandomPassword transformer injects random passwords into the designated database column. This feature is particularly useful for creating realistic and secure user password datasets for development, testing, or demonstration purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_password/#example-populate-random-passwords-for-the-user_accounts-table","title":"Example: Populate random passwords for the user_accounts table","text":"

    This example demonstrates how to configure the RandomPassword transformer to populate the password column in the user_accounts table with random passwords.

    RandomPassword transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomPassword\"\n      params:\n        column: \"password\"\n        keep_null: false\n

    In this configuration, every entry in the password column will be updated with a random password. Setting the keep_null parameter to true will preserve existing NULL values in the column, accommodating scenarios where password data may not be applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_person/","title":"RandomPerson","text":"

    The RandomPerson transformer is designed to populate specified database columns with personal attributes such as first name, last name, title and gender.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types columns The list of columns to be affected Yes text, varchar gender Set a specific gender (possible values: Male, Female, Any) Any No - gender_mapping Specify gender name to possible values when using dynamic mode in \"gender\" parameter Any No - fallback_gender Specify fallback gender if not mapped when using dynamic mode in \"gender\" parameter Any No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_person/#description","title":"Description","text":"

    The RandomPerson transformer utilizes a comprehensive list of first names to inject random first names into the designated database column. This feature allows for the creation of diverse and realistic user profiles by simulating a variety of first names without using real user data.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#column-object-attributes","title":"column object attributes","text":""},{"location":"built_in_transformers/standard_transformers/random_person/#gender_mapping-object-attributes","title":"gender_mapping object attributes","text":"

    gender_mapping - a dictionary that maps the gender value when the gender parameter works in dynamic mode. The default value is:

    {\n  \"Male\": [\n    \"male\",\n    \"M\",\n    \"m\",\n    \"man\",\n    \"Man\"\n  ],\n  \"Female\": [\n    \"female\",\n    \"F\",\n    \"f\",\n    \"w\",\n    \"woman\",\n    \"Woman\"\n  ]\n}\n
    "},{"location":"built_in_transformers/standard_transformers/random_person/#fallback_gender","title":"fallback_gender","text":"

    Gender that will be used if no gender_mapping entry was found. This parameter is optional and applies only when the gender parameter is in dynamic mode. The default value is Any.

    "},{"location":"built_in_transformers/standard_transformers/random_person/#example-populate-random-first-name-and-last-name-for-table-user_profiles-in-static-mode","title":"Example: Populate random first name and last name for table user_profiles in static mode","text":"

    This example demonstrates how to use the RandomPerson transformer to populate the name and surname columns in the user_profiles table with random first names and last names, respectively.

    Create table personal_data and insert data
    CREATE TABLE personal_data\n(\n    id      SERIAL PRIMARY KEY,\n    name    VARCHAR(100),\n    surname VARCHAR(100),\n    sex     CHAR(1) CHECK (sex IN ('M', 'F'))\n);\n\n-- Insert sample data into the table\nINSERT INTO personal_data (name, surname, sex)\nVALUES ('John', 'Doe', 'M'),\n       ('Jane', 'Smith', 'F'),\n       ('Alice', 'Johnson', 'F'),\n       ('Bob', 'Lee', 'M');\n
    RandomPerson transformer example
    - schema: public\n  name: personal_data\n  transformers:\n    - name: \"RandomPerson\"\n      params:\n        gender: \"Any\"\n        columns:\n          - name: \"name\"\n            template: \"{{ .FirstName }}\"\n          - name: \"surname\"\n            template: \"{{ .LastName }}\"\n        engine: \"hash\"\n

    Result

    ColumnOriginalValueTransformedValue nameJohnZane surnameDoeMcCullough"},{"location":"built_in_transformers/standard_transformers/random_person/#example-populate-random-first-name-and-last-name-for-table-user_profiles-in-dynamic-mode","title":"Example: Populate random first name and last name for table user_profiles in dynamic mode","text":"

    This example demonstrates how to use the RandomPerson transformer to populate the name and surname columns using a dynamically resolved gender.

    RandomPerson transformer example with dynamic mode
    - schema: public\n  name: personal_data\n  transformers:\n    - name: \"RandomPerson\"\n      params:\n        columns:\n          - name: \"name\"\n            template: \"{{ .FirstName }}\"\n          - name: \"surname\"\n            template: \"{{ .LastName }}\"\n        engine: \"random\"\n      dynamic_params:\n        gender:\n          column: sex\n

    Result:

    ColumnOriginalValueTransformedValue nameJohnMartin surnameDoeMueller"},{"location":"built_in_transformers/standard_transformers/random_phone_number/","title":"RandomPhoneNumber","text":"

    The RandomPhoneNumber transformer is developed to populate specified database columns with random phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_phone_number/#description","title":"Description","text":"

    The RandomPhoneNumber transformer utilizes algorithms capable of generating random phone numbers with various formats and injects them into the designated database column. This feature allows for the creation of diverse and realistic contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_phone_number/#example-populate-random-phone-numbers-for-the-contact_information-table","title":"Example: Populate random phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomPhoneNumber transformer to populate the phone_number column in the contact_information table with random phone numbers. It is an effective method for simulating a variety of contact information entries.

    RandomPhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomPhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/","title":"RandomSentence","text":"

    The RandomSentence transformer is designed to populate specified database columns with random sentences. Ideal for simulating natural language text for user comments, testing NLP systems, or anonymizing textual data in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_sentence/#description","title":"Description","text":"

    The RandomSentence transformer employs complex text generation algorithms or libraries to generate random sentences, injecting them into a designated database column without the need for specifying sentence length. This flexibility ensures the creation of varied and plausible text for a wide range of applications.

    "},{"location":"built_in_transformers/standard_transformers/random_sentence/#example-populate-random-sentences-for-the-comments-table","title":"Example: Populate random sentences for the comments table","text":"

    This example shows how to configure the RandomSentence transformer to populate the comment column in the comments table with random sentences. It is a straightforward method for simulating diverse user-generated content.

    RandomSentence transformer example
    - schema: \"public\"\n  name: \"comments\"\n  transformers:\n    - name: \"RandomSentence\"\n      params:\n        column: \"comment\"\n        keep_null: false\n

    In this configuration, the comment column will be updated with random sentences for each entry, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values in the column will be preserved, maintaining the integrity of records where comments are not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_string/","title":"RandomString","text":"

    Generate a random string using the provided characters within the specified length range.

    "},{"location":"built_in_transformers/standard_transformers/random_string/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar min_length The minimum length of the generated string Yes - max_length The maximum length of the generated string Yes - symbols The range of characters that can be used in the random string abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ No - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_string/#description","title":"Description","text":"

    The RandomString transformer generates a random string with a length between min_length and max_length using the characters specified in the symbols string as the possible set of characters. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_string/#example-generate-a-random-string-for-accountnumber","title":"Example: Generate a random string for accountnumber","text":"

    In the following example, a random string is generated for the accountnumber column with a length range from 9 to 12. The character set used for generation includes 1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ.

    RandomString transformer example
    - schema: \"purchasing\"\n  name: \"vendor\"\n  transformers:\n    - name: \"RandomString\"\n      params:\n        column: \"accountnumber\"\n        min_length: 9\n        max_length: 12\n        symbols: \"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n

    Result

    ColumnOriginalValueTransformedValue accountnumberAUSTRALI00014VUI6P2OZ"},{"location":"built_in_transformers/standard_transformers/random_timezone/","title":"RandomTimezone","text":"

    The RandomTimezone transformer is designed to populate specified database columns with random timezone strings. This transformer is particularly useful for applications that require the simulation of global user data, testing of timezone-related functionalities, or anonymizing real user timezone information in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_timezone/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_timezone/#description","title":"Description","text":"

    Utilizing a comprehensive library or algorithm for generating timezone data, the RandomTimezone transformer provides random timezone strings (e.g., \"America/New_York\", \"Europe/London\") for database columns. This feature enables the creation of diverse and realistic datasets by simulating timezone information for user profiles, event timings, or any other data requiring timezone context.

    "},{"location":"built_in_transformers/standard_transformers/random_timezone/#example-populate-random-timezone-strings-for-the-user_accounts-table","title":"Example: Populate random timezone strings for the user_accounts table","text":"

    This example demonstrates how to configure the RandomTimezone transformer to populate the timezone column in the user_accounts table with random timezone strings, enhancing the dataset with varied global user representations.

    RandomTimezone transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomTimezone\"\n      params:\n        column: \"timezone\"\n        keep_null: false\n

    With this configuration, every entry in the timezone column will be updated with a random timezone string, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values within the column will remain unchanged, preserving the integrity of rows without specified timezone data.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/","title":"RandomTollFreePhoneNumber","text":"

    The RandomTollFreePhoneNumber transformer is designed to populate specified database columns with random toll-free phone numbers. This tool is essential for applications requiring the simulation of contact information, testing phone number validation systems, or anonymizing phone number data in datasets while focusing on toll-free numbers.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#description","title":"Description","text":"

    The RandomTollFreePhoneNumber transformer utilizes algorithms capable of generating random toll-free phone numbers with various formats and injects them into the designated database column. This feature allows for the creation of diverse and realistic toll-free contact information in datasets for development, testing, or data anonymization purposes.

    "},{"location":"built_in_transformers/standard_transformers/random_toll_free_phone_number/#example-populate-random-toll-free-phone-numbers-for-the-contact_information-table","title":"Example: Populate random toll-free phone numbers for the contact_information table","text":"

    This example demonstrates configuring the RandomTollFreePhoneNumber transformer to populate the phone_number column in the contact_information table with random toll-free phone numbers. It is an effective method for simulating a variety of contact information entries with toll-free numbers.

    RandomTollFreePhoneNumber transformer example
    - schema: \"public\"\n  name: \"contact_information\"\n  transformers:\n    - name: \"RandomTollFreePhoneNumber\"\n      params:\n        column: \"phone_number\"\n        keep_null: false\n

    In this configuration, the phone_number column will be updated with random toll-free phone numbers for each contact information entry, replacing any existing non-NULL values. If the keep_null parameter is set to true, existing NULL values in the column will be preserved, ensuring the integrity of records where toll-free phone number information is not applicable or provided.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/","title":"RandomUnixTimestamp","text":"

    The RandomUnixTimestamp transformer generates random Unix time values (timestamps) for specified database columns. It is particularly useful for populating columns with timestamp data, simulating time-related data, or anonymizing actual timestamps in a dataset.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes int2, int4, int8 min The minimum threshold date for the random value in unix timestamp format (integer) with sec unit by default Yes - max The maximum threshold date for the random value in unix timestamp format (integer) with sec unit by default Yes - unit Generated unix timestamp value unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - min_unit Min unix timestamp threshold date unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - max_unit Max unix timestamp threshold date unit. Possible values [second, millisecond, microsecond, nanosecond] second Yes - keep_null Indicates whether NULL values should be preserved false No - truncate Truncate the date to the specified part (nanosecond, microsecond, millisecond, second, minute, hour, day, month, year). The truncate operation is not applied by default. No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#description","title":"Description","text":"

    The RandomUnixTimestamp transformer generates random Unix timestamps within the provided interval, starting from min to max. The min and max parameters are expected to be in Unix timestamp format. The min_unit and max_unit parameters specify the unit of the Unix timestamp threshold date. The truncate parameter allows you to truncate the date to the specified part of the date. The keep_null parameter allows you to specify whether NULL values should be preserved or replaced with transformed values.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#example-generate-random-unix-timestamps-with-dynamic-parameters","title":"Example: Generate random Unix timestamps with dynamic parameters","text":"

    In this example, the RandomUnixTimestamp transformer generates random Unix timestamps using dynamic parameters. The min parameter is set to the created_at column, which is converted to Unix seconds using the TimestampToUnixSec. The max parameter is set to a fixed value. The paid_at column is populated with random Unix timestamps in the range from created_at to 1715934239 (Unix timestamp for 2024-05-17 12:03:59). The unit parameter is set to millisecond because the paid_at column stores timestamps in milliseconds.

    CREATE TABLE transactions\n(\n    id         SERIAL PRIMARY KEY,\n    kind       VARCHAR(255),\n    total      DECIMAL(10, 2),\n    created_at TIMESTAMP,\n    paid_at    BIGINT -- stores milliseconds since the epoch\n);\n\n-- Inserting data with milliseconds timestamp\nINSERT INTO transactions (kind, total, created_at, paid_at)\nVALUES ('Sale', 199.99, '2023-05-17 12:00:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-17 12:05:00') * 1000)),\n       ('Refund', 50.00, '2023-05-18 15:00:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-18 15:10:00') * 1000)),\n       ('Sale', 129.99, '2023-05-19 10:30:00', (EXTRACT(EPOCH FROM TIMESTAMP '2023-05-19 10:35:00') * 1000));\n
    RandomUnixTimestamp transformer example
    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomUnixTimestamp\"\n      params:\n        column: \"paid_at\"\n        max: 1715934239\n        unit: \"millisecond\"\n        min_unit: \"second\"\n        max_unit: \"second\"\n      dynamic_params:\n        min:\n          column: \"created_at\"\n          cast_to: \"TimestampToUnixSec\"\n

    Result:

    ColumnOriginalValueTransformedValue paid_at16843251000001708919030732"},{"location":"built_in_transformers/standard_transformers/random_unix_timestamp/#example-generate-simple-random-unix-timestamps","title":"Example: Generate simple random Unix timestamps","text":"

    In this example, the RandomUnixTimestamp transformer generates random Unix timestamps for the paid_at column in the range from 1615934239 (Unix timestamp for 2021-03-16 12:03:59) to 1715934239 (Unix timestamp for 2024-05-17 12:03:59). The unit parameter is set to millisecond because the paid_at column stores timestamps in milliseconds.

    - schema: \"public\"\n  name: \"transactions\"\n  transformers:\n    - name: \"RandomUnixTimestamp\"\n      params:\n        column: \"paid_at\"\n        min: 1615934239\n        max: 1715934239\n        unit: \"millisecond\"\n

    Result:

    ColumnOriginalValueTransformedValue paid_at16843251000001655768292548"},{"location":"built_in_transformers/standard_transformers/random_url/","title":"RandomURL","text":"

    The RandomURL transformer is designed to populate specified database columns with random URL (Uniform Resource Locator) addresses. This tool is highly beneficial for simulating web content, testing applications that require URL input, or anonymizing real web addresses in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_url/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_url/#description","title":"Description","text":"

    Utilizing advanced algorithms or libraries for generating URL strings, the RandomURL transformer injects random, plausible URLs into the designated database column. Each generated URL is structured to include the protocol (e.g., \"http://\", \"https://\"), domain name, and path, offering a realistic range of web addresses for various applications.

    "},{"location":"built_in_transformers/standard_transformers/random_url/#example-populate-random-urls-for-the-webpages-table","title":"Example: Populate random URLs for the webpages table","text":"

    This example illustrates how to configure the RandomURL transformer to populate the page_url column in a webpages table with random URLs, providing a broad spectrum of web addresses for testing or data simulation purposes.

    RandomURL transformer example
    - schema: \"public\"\n  name: \"webpages\"\n  transformers:\n    - name: \"RandomURL\"\n      params:\n        column: \"page_url\"\n        keep_null: false\n

    With this configuration, the page_url column will be filled with random URLs for each entry, replacing any existing non-NULL values. Setting the keep_null parameter to true allows for the preservation of existing NULL values within the column, accommodating scenarios where URL data may be intentionally omitted.

    "},{"location":"built_in_transformers/standard_transformers/random_username/","title":"RandomUsername","text":"

    The RandomUsername transformer is crafted to populate specified database columns with random usernames. This utility is crucial for applications that require the simulation of user data, testing systems with user login functionality, or anonymizing real usernames in datasets.

    "},{"location":"built_in_transformers/standard_transformers/random_username/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_username/#description","title":"Description","text":"

    By employing sophisticated algorithms or libraries capable of generating believable usernames, the RandomUsername transformer introduces random usernames into the specified database column. Each generated username is designed to be unique and plausible, incorporating a mix of letters, numbers, and possibly special characters, depending on the generation logic used.

    "},{"location":"built_in_transformers/standard_transformers/random_username/#example-populate-random-usernames-for-the-user_accounts-table","title":"Example: Populate random usernames for the user_accounts table","text":"

    This example demonstrates configuring the RandomUsername transformer to populate the username column in a user_accounts table with random usernames. This setup is ideal for creating a diverse and realistic user base for development, testing, or demonstration purposes.

    RandomUsername transformer example
    - schema: \"public\"\n  name: \"user_accounts\"\n  transformers:\n    - name: \"RandomUsername\"\n      params:\n        column: \"username\"\n        keep_null: false\n

    In this configuration, every entry in the username column will be updated with a random username, replacing any existing non-NULL values. If the keep_null parameter is set to true, then the transformer will preserve existing NULL values within the column, maintaining data integrity where usernames are not applicable or available.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/","title":"RandomUuid","text":"

    Generate a random universally unique identifier (UUID) using version 4.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, uuid keep_null Indicates whether NULL values should be replaced with transformed values or not true No - engine The engine used for generating the values [random, hash]. Use hash for deterministic generation random No -"},{"location":"built_in_transformers/standard_transformers/random_uuid/#description","title":"Description","text":"

    The RandomUuid transformer generates a random UUID. The behaviour for NULL values can be configured using the keep_null parameter.

    The engine parameter allows you to choose between random and hash engines for generating values. Read more about the engines in the Transformation engines section.

    "},{"location":"built_in_transformers/standard_transformers/random_uuid/#example-updating-the-rowguid-column","title":"Example: Updating the rowguid column","text":"

    The following example replaces original UUID values of the rowguid column with randomly generated ones.

    RandomUuid transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"RandomUuid\"\n    params:\n      column: \"rowguid\"\n      keep_null: false\n

    Result

    ColumnOriginalValueTransformedValue rowguidf01251e5-96a3-448d-981e-0f99d789110d8ed8c4b2-7e7a-1e8d-f0f0-768e0e8ed0d0"},{"location":"built_in_transformers/standard_transformers/random_word/","title":"RandomWord","text":"

    The RandomWord transformer populates specified database columns with random words. Ideal for simulating textual content, enhancing linguistic datasets, or anonymizing text in databases.

    "},{"location":"built_in_transformers/standard_transformers/random_word/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_word/#description","title":"Description","text":"

    The RandomWord transformer employs a mechanism to inject random words into a designated database column, supporting the generation of linguistically plausible and contextually diverse text. This transformer is particularly beneficial for creating rich text datasets for development, testing, or educational purposes without specifying the language, focusing on versatility and ease of use.

    "},{"location":"built_in_transformers/standard_transformers/random_word/#example-populate-random-words-for-the-content-table","title":"Example: Populate random words for the content table","text":"

    This example demonstrates configuring the RandomWord transformer to populate the tag column in the content table with random words. It is a straightforward approach to adding varied textual data for tagging or content categorization.

    RandomWord transformer example
    - schema: \"public\"\n  name: \"content\"\n  transformers:\n    - name: \"RandomWord\"\n      params:\n        column: \"tag\"\n        keep_null: false\n

    In this setup, the tag column will be updated with random words for each entry, replacing any existing non-NULL values. If keep_null is set to true, existing NULL values in the column will remain unchanged, maintaining data integrity for records where textual data is not applicable.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/","title":"RandomYearString","text":"

    The RandomYearString transformer is designed to populate specified database columns with random year strings. It is ideal for scenarios that require the representation of years without specific dates, such as manufacturing years of products, birth years of users, or any other context where only the year is relevant.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar, int2, int4, int8, numeric keep_null Indicates whether NULL values should be preserved false No -"},{"location":"built_in_transformers/standard_transformers/random_year_string/#description","title":"Description","text":"

    The RandomYearString transformer leverages the faker library to generate strings representing random years. This allows for the easy generation of year data in a string format, adding versatility and realism to datasets that need to simulate or anonymize year-related information.

    "},{"location":"built_in_transformers/standard_transformers/random_year_string/#example-populate-random-year-strings-for-the-products-table","title":"Example: Populate random year strings for the products table","text":"

    This example shows how to use the RandomYearString transformer to fill the manufacturing_year column in the products table with random year strings, simulating the diversity of manufacturing dates.

    RandomYearString transformer example
    - schema: \"public\"\n  name: \"products\"\n  transformers:\n    - name: \"RandomYearString\"\n      params:\n        column: \"manufacturing_year\"\n        keep_null: false\n

    In this configuration, the manufacturing_year column will be populated with random year strings, replacing any existing non-NULL values. If keep_null is set to true, then existing NULL values in the column will be preserved.

    "},{"location":"built_in_transformers/standard_transformers/real_address/","title":"RealAddress","text":"

    Generates real addresses for specified database columns using the faker library. It supports customization of the generated address format through Go templates.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#parameters","title":"Parameters","text":"Name Properties Description Default Required Supported DB types columns Specifies the affected column names along with additional properties for each column Yes Various \u221f name The name of the column to be affected Yes string \u221f template A Go template string for formatting real address attributes Yes string \u221f keep_null Indicates whether NULL values should be preserved No bool"},{"location":"built_in_transformers/standard_transformers/real_address/#template-value-descriptions","title":"Template value descriptions","text":"

    The template parameter allows for the injection of real address attributes into a customizable template. The following values can be included in your template:

    These placeholders can be combined and formatted as desired within the template string to generate custom address formats.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#description","title":"Description","text":"

    The RealAddress transformer uses the faker library to generate realistic addresses, which can then be formatted according to a specified template and applied to selected columns in a database. It allows for the generated addresses to replace existing values or to preserve NULL values, based on the transformer's configuration.

    "},{"location":"built_in_transformers/standard_transformers/real_address/#example-generate-real-addresses-for-the-employee-table","title":"Example: Generate Real addresses for the employee table","text":"

    This example shows how to configure the RealAddress transformer to generate real addresses for the address column in the employee table, using a custom format.

    RealAddress transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"RealAddress\"\n      params:\n        columns:\n          - name: \"address\"\n            template: \"{{.Address}}, {{.City}}, {{.State}} {{.PostalCode}}\"\n            keep_null: false\n

    This configuration will generate real addresses with the format \"Street address, city, state postal code\" and apply them to the address column, replacing any existing non-NULL values.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/","title":"RegexpReplace","text":"

    Replace a string using a regular expression.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes text, varchar regexp The regular expression pattern to search for in the column's value Yes - replace The replacement value. This value may be replaced with a captured group from the regexp parameter. Yes -"},{"location":"built_in_transformers/standard_transformers/regexp_replace/#description","title":"Description","text":"

    The RegexpReplace transformer replaces a string according to the applied regular expression. The valid regular expressions syntax is the same as the general syntax used by Perl, Python, and other languages. To be precise, it is the syntax accepted by RE2 and described in the Golang documentation, except for \\C.

    "},{"location":"built_in_transformers/standard_transformers/regexp_replace/#example-removing-leading-prefix-from-loginid-column-value","title":"Example: Removing leading prefix from loginid column value","text":"

    In the following example, the original values from loginid matching the adventure-works\\{{ id_name }} format are replaced with {{ id_name }}.

    RegexpReplace transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"RegexpReplace\"\n    params:\n      column: \"loginid\"\n      regexp: \"adventure-works\\\\\\\\(.*)\"\n      replace: \"$1\"\n
    Expected result
    | column name | original value       | transformed |\n|-------------|----------------------|-------------|\n| loginid     | adventure-works\\ken0 | ken0        |\n

    Note

    YAML has control symbols, and using them without escaping may result in an error. In the example above, the prefix of id is separated by the \\ symbol. Since this symbol is a control symbol, we must escape it using \\\\. However, the '\\' symbol is also a control symbol for regular expressions, which is why we need to double-escape it as \\\\\\\\.

    "},{"location":"built_in_transformers/standard_transformers/replace/","title":"Replace","text":"

    Replace an original value with the provided one.

    "},{"location":"built_in_transformers/standard_transformers/replace/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any replace The value to replace Yes - keep_null Indicates whether NULL values should be replaced with transformed values or not true No - validate Performs a decoding procedure via the PostgreSQL driver using the column type to ensure that values have correct type true No -"},{"location":"built_in_transformers/standard_transformers/replace/#description","title":"Description","text":"

    The Replace transformer replaces an original value from the specified column with the provided one. It can optionally run a validation check with the validate parameter to ensure that the values are of a correct type before starting transformation. The behaviour for NULL values can be configured using the keep_null parameter.

    "},{"location":"built_in_transformers/standard_transformers/replace/#example-updating-the-jobtitle-column","title":"Example: Updating the jobtitle column","text":"

    In the following example, the provided value: \"programmer\" is first validated through driver decoding. If the current value of the jobtitle column is not NULL, it will be replaced with programmer. If the current value is NULL, it will remain NULL.

    Replace transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n  - name: \"Replace\"\n    params:\n      column: \"jobtitle\"\n      replace: \"programmer\"\n      keep_null: false\n      validate: true\n
    Expected result
    | column name | original value          | transformed |\n|-------------|-------------------------|-------------|\n| jobtitle    | Chief Executive Officer | programmer  |\n
    "},{"location":"built_in_transformers/standard_transformers/set_null/","title":"SetNull","text":"

    Set NULL value to a column.

    "},{"location":"built_in_transformers/standard_transformers/set_null/#parameters","title":"Parameters","text":"Name Description Default Required Supported DB types column The name of the column to be affected Yes any"},{"location":"built_in_transformers/standard_transformers/set_null/#description","title":"Description","text":"

    The SetNull transformer assigns a NULL value to a column. This transformer generates a warning if the affected column has a NOT NULL constraint.

    NULL constraint violation warning
    {\n  \"hash\": \"5a229ee964a4ba674a41a4d63dab5a8c\",\n  \"meta\": {\n    \"ColumnName\": \"jobtitle\",\n    \"ConstraintType\": \"NotNull\",\n    \"ParameterName\": \"column\",\n    \"SchemaName\": \"humanresources\",\n    \"TableName\": \"employee\",\n    \"TransformerName\": \"SetNull\"\n  },\n  \"msg\": \"transformer may produce NULL values but column has NOT NULL constraint\",\n  \"severity\": \"warning\"\n}\n
    "},{"location":"built_in_transformers/standard_transformers/set_null/#example-set-null-value-to-updated_at-column","title":"Example: Set NULL value to updated_at column","text":"SetNull transformer example
    - schema: \"humanresources\"\n  name: \"employee\"\n  transformers:\n    - name: \"SetNull\"\n      params:\n        column: \"jobtitle\"\n
    Expected result
    | column name | original value          | transformed |\n|-------------|-------------------------|-------------|\n| jobtitle    | Chief Executive Officer | NULL        |\n
    "},{"location":"commands/","title":"Commands","text":""},{"location":"commands/#introduction","title":"Introduction","text":"Greenmask available commands
    greenmask \\\n--log-format=[json|text] \\\n--log-level=[debug|info|error] \\\n--config=config.yml \\\n[dump|list-dumps|delete|list-transformers|show-transformer|restore|show-dump]`\n

    You can use the following commands within Greenmask:

    For any of the commands mentioned above, you can include the following common flags:

    "},{"location":"commands/delete/","title":"delete command","text":"

    Delete dump from the storage with a specific ID

    Supported flags
    Usage:\n  greenmask delete [flags] [dumpId]\n\nFlags:\n      --before-date string   delete dumps older than the specified date in RFC3339Nano format: 2021-01-01T00:00:00Z\n      --dry-run              do not delete anything, just show what would be deleted\n      --prune-failed         prune failed dumps\n      --prune-unsafe         prune dumps with \"unknown-or-failed\" statuses. Works only with --prune-failed\n      --retain-for string    retain dumps for the specified duration in format: 1w2d3h4m5s6ms7us8ns\n      --retain-recent int    retain the most recent N completed dumps (default -1)\n
    delete dump by id
    greenmask --config config.yml delete 1723643249862\n
    delete dumps older than the specified date
    greenmask --config config.yml delete --before-date 2021-01-01T00:00:00Z --dry-run \n
    prune failed dumps
    greenmask --config config.yml delete --prune-failed --dry-run \n
    prune dumps with 'unknown-or-failed' statuses
    greenmask --config config.yml delete --prune-failed --prune-unsafe --dry-run\n
    retain dumps for the specified duration
    greenmask --config config.yml delete --retain-for 1w5d --dry-run\n
    retain the most recent N completed dumps
    greenmask --config config.yml delete --retain-recent 5 --dry-run\n
    "},{"location":"commands/dump/","title":"dump","text":""},{"location":"commands/dump/#dump-command","title":"dump command","text":"

    The dump command operates in the following way:

    1. Dumps the data from the source database.
    2. Validates the data for potential issues.
    3. Applies the defined transformations.
    4. Stores the transformed data in the specified storage location.

    Note that the dump command shares the same parameters and environment variables as pg_dump, allowing you to configure the dump process as needed.

    Mostly it supports the same flags as the pg_dump utility, with some extra flags for Greenmask-specific features.

    Supported flags
      -b, --blobs                           include large objects in dump\n  -c, --clean                           clean (drop) database objects before recreating\n  -Z, --compress int                    compression level for compressed formats (default -1)\n  -C, --create                          include commands to create database in dump\n  -a, --data-only                       dump only the data, not the schema\n  -d, --dbname string                   database to dump (default \"postgres\")\n      --disable-dollar-quoting          disable dollar quoting, use SQL standard quoting\n      --enable-row-security             enable row security (dump only content user has access to)\n  -E, --encoding string                 dump the data in encoding ENCODING\n  -N, --exclude-schema strings          dump the specified schema(s) only\n  -T, --exclude-table strings           do NOT dump the specified table(s)\n      --exclude-table-data strings      do NOT dump data for the specified table(s)\n  -e, --extension strings               dump the specified extension(s) only\n      --extra-float-digits string       override default setting for extra_float_digits\n  -f, --file string                     output file or directory name\n  -h, --host string                     database server host or socket directory (default \"/var/run/postgres\")\n      --if-exists                       use IF EXISTS when dropping objects\n      --include-foreign-data strings    use IF EXISTS when dropping objects\n  -j, --jobs int                        use this many parallel jobs to dump (default 1)\n      --load-via-partition-root         load partitions via the root table\n      --lock-wait-timeout int           fail after waiting TIMEOUT for a table lock (default -1)\n  -B, --no-blobs                        exclude large objects in dump\n      --no-comments                     do not dump comments\n  -O, --no-owner                        skip restoration of object ownership in plain-text 
format\n  -X, --no-privileges                   do not dump privileges (grant/revoke)\n      --no-publications                 do not dump publications\n      --no-security-labels              do not dump security label assignments\n      --no-subscriptions                do not dump subscriptions\n      --no-sync                         do not wait for changes to be written safely to dis\n      --no-synchronized-snapshots       do not use synchronized snapshots in parallel jobs\n      --no-tablespaces                  do not dump tablespace assignments\n      --no-toast-compression            do not dump TOAST compression methods\n      --no-unlogged-table-data          do not dump unlogged table data\n      --pgzip                           use pgzip compression instead of gzip\n  -p, --port int                        database server port number (default 5432)\n      --quote-all-identifiers           quote all identifiers, even if not key words\n  -n, --schema strings                  dump the specified schema(s) only\n  -s, --schema-only                     dump only the schema, no data\n      --section string                  dump named section (pre-data, data, or post-data)\n      --serializable-deferrable         wait until the dump can run without anomalies\n      --snapshot string                 use given snapshot for the dump\n      --strict-names                    require table and/or schema include patterns to match at least one entity each\n  -t, --table strings                   dump the specified table(s) only\n      --test string                     connect as specified database user (default \"postgres\")\n      --use-set-session-authorization   use SET SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership\n  -U, --username string                 connect as specified database user (default \"postgres\")\n  -v, --verbose string                  verbose mode\n
    "},{"location":"commands/dump/#pgzip-compression","title":"Pgzip compression","text":"

    By default, Greenmask uses gzip compression when writing dump data. In most cases it is quite slow, does not utilize all available resources, and is a bottleneck for IO operations. To speed up the dump process, you can use the --pgzip flag to use pgzip compression instead of gzip. This method splits the data into blocks, which are compressed in parallel, making it ideal for handling large volumes of data. The output remains a standard gzip file.

    "},{"location":"commands/list-dumps/","title":"list-dumps","text":""},{"location":"commands/list-dumps/#list-dumps-command","title":"list-dumps command","text":"

    The list-dumps command provides a list of all dumps stored in the storage. The list includes the following attributes:

    Example of list-dumps output:

    Info

    Greenmask uses a heartbeat mechanism to determine the status of a dump. A dump is considered failed if it lacks a \"done\" heartbeat or if the last heartbeat timestamp exceeds 30 minutes. Heartbeats are recorded every 15 minutes by the dump command while it is in progress. If greenmask fails unexpectedly, the heartbeat stops being updated, and after 30 minutes (twice the interval), the dump is classified as failed. The in progress status indicates that a dump is still ongoing.

    "},{"location":"commands/list-transformers/","title":"list-transformers","text":""},{"location":"commands/list-transformers/#list-transformers-command","title":"list-transformers command","text":"

    The list-transformers command provides a list of all the allowed transformers, including both standard and advanced transformers. This list can be helpful for searching for an appropriate transformer for your data transformation needs.

    To show a list of available transformers, use the following command:

    greenmask --config=config.yml list-transformers\n

    Supported flags:

    Example of list-transformers output:

    When using the list-transformers command, you receive a list of available transformers with essential information about each of them. Below are the key parameters for each transformer:

    The JSON call greenmask --config=config.yml list-transformers --format=json has the same attributes:

    JSON format output
    [\n  {\n    \"name\": \"Cmd\",\n    \"description\": \"Transform data via external program using stdin and stdout interaction\",\n    \"parameters\": [\n      {\n        \"name\": \"columns\",\n        \"supported_types\": [\n          \"any\"\n        ]\n      }\n    ]\n  },\n  {\n    \"name\": \"Dict\",\n    \"description\": \"Replace values matched by dictionary keys\",\n    \"parameters\": [\n      {\n        \"name\": \"column\",\n        \"supported_types\": [\n          \"any\"\n        ]\n      }\n    ]\n  }\n]\n
    "},{"location":"commands/restore/","title":"restore","text":""},{"location":"commands/restore/#restore-command","title":"restore command","text":"

    The restore command is used to restore a database from a previously created dump. You can specify the dump to restore by providing the dump ID or use the latest keyword to restore the latest completed dump.

    greenmask --config=config.yml restore DUMP_ID\n

    Alternatively, to restore the latest completed dump, use the following command:

    greenmask --config=config.yml restore latest\n

    Note that the restore command shares the same parameters and environment variables as pg_restore, allowing you to configure the restoration process as needed.

    Mostly it supports the same flags as the pg_restore utility, with some extra flags for Greenmask-specific features.

    Supported flags
          --batch-size int                         the number of rows to insert in a single batch during the COPY command (0 - all rows will be inserted in a single batch)\n  -c, --clean                                  clean (drop) database objects before recreating\n  -C, --create                                 create the target database\n  -a, --data-only                              restore only the data, no schema\n  -d, --dbname string                          connect to database name (default \"postgres\")\n      --disable-triggers                       disable triggers during data section restore\n      --enable-row-security                    enable row security\n  -N, --exclude-schema strings                 do not restore objects in this schema\n  -e, --exit-on-error                          exit on error, default is to continue\n  -f, --file string                            output file name (- for stdout)\n  -P, --function strings                       restore named function\n  -h, --host string                            database server host or socket directory (default \"/var/run/postgres\")\n      --if-exists                              use IF EXISTS when dropping objects\n  -i, --index strings                          restore named index\n      --inserts                                restore data as INSERT commands, rather than COPY\n  -j, --jobs int                               use this many parallel jobs to restore (default 1)\n      --list-format string                     use table of contents in format of text, json or yaml (default \"text\")\n      --no-comments                            do not restore comments\n      --no-data-for-failed-tables              do not restore data of tables that could not be created\n  -O, --no-owner                               skip restoration of object ownership\n  -X, --no-privileges                          skip restoration of access privileges (grant/revoke)\n      --no-publications                   
     do not restore publications\n      --no-security-labels                     do not restore security labels\n      --no-subscriptions                       ddo not restore subscriptions\n      --no-table-access-method                 do not restore table access methods\n      --no-tablespaces                         do not restore tablespace assignments\n      --on-conflict-do-nothing                 add ON CONFLICT DO NOTHING to INSERT commands\n      --overriding-system-value                use OVERRIDING SYSTEM VALUE clause for INSERTs\n      --pgzip                                  use pgzip decompression instead of gzip\n  -p, --port int                               database server port number (default 5432)\n      --restore-in-order                       restore tables in topological order, ensuring that dependent tables are not restored until the tables they depend on have been restored\n  -n, --schema strings                         restore only objects in this schema\n  -s, --schema-only                            restore only the schema, no data\n      --section string                         restore named section (pre-data, data, or post-data)\n  -1, --single-transaction                     restore as a single transaction\n      --strict-names                           restore named section (pre-data, data, or post-data) match at least one entity each\n  -S, --superuser string                       superuser user name to use for disabling triggers\n  -t, --table strings                          restore named relation (table, view, etc.)\n  -T, --trigger strings                        restore named trigger\n  -L, --use-list string                        use table of contents from this file for selecting/ordering output\n      --use-session-replication-role-replica   use SET session_replication_role = 'replica' to disable triggers during data section restore (alternative for --disable-triggers)\n      --use-set-session-authorization          use SET 
SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership\n  -U, --username string                        connect as specified database user (default \"postgres\")\n  -v, --verbose string                         verbose mode\n
    "},{"location":"commands/restore/#extra-features","title":"Extra features","text":""},{"location":"commands/restore/#inserts-and-error-handling","title":"Inserts and error handling","text":"

    Warning

    Insert commands are a lot slower than COPY commands. Use this feature only when necessary.

    By default, Greenmask restores data using the COPY command. If you prefer to restore data using INSERT commands, you can use the --inserts flag. This flag allows you to manage errors that occur during the execution of INSERT commands. By configuring an error and constraint exclusion list in the config, you can skip certain errors and continue inserting subsequent rows from the dump.

    This can be useful when adding new records to an existing dump, but you don't want the process to stop if some records already exist in the database or violate certain constraints.

    By adding the --on-conflict-do-nothing flag, it generates INSERT statements with the ON CONFLICT DO NOTHING clause, similar to the original pg_dump option. However, this approach only works for unique or exclusion constraints. If a foreign key is missing in the referenced table or any other constraint is violated, the insertion will still fail. To handle these issues, you can define an exclusion list in the config.

    example with inserts and error handling
    greenmask --config=config.yml restore DUMP_ID --inserts --on-conflict-do-nothing\n

    By adding the --overriding-system-value flag, it generates INSERT statements with the OVERRIDING SYSTEM VALUE clause, which allows you to insert data into identity columns.

    example of GENERATED ALWAYS AS IDENTITY column
    CREATE TABLE people (\n    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,\n    generated text GENERATED ALWAYS AS (id || first_name) STORED,\n    first_name text\n);\n
    example with inserts
    greenmask --config=config.yml restore DUMP_ID --inserts --overriding-system-value\n
    "},{"location":"commands/restore/#restoration-in-topological-order","title":"Restoration in topological order","text":"

    By default, Greenmask restores tables in the order they are listed in the dump file. To restore tables in topological order, use the --restore-in-order flag. This flag ensures that dependent tables are not restored until the tables they depend on have been restored.

    This is useful when you have the schema already created with foreign keys and other constraints, and you want to insert data into the tables in the correct order or catch-up the target database with the new data.

    Warning

    Greenmask cannot guarantee restoration in topological order when the schema contains cycles. The only way to restore tables with cyclic dependencies is to temporarily remove the foreign key constraint (to break the cycle), restore the data, and then re-add the foreign key constraint once the data restoration is complete.

    If your database has cyclic dependencies, you will be notified about it, but the restoration will continue.

    2024-08-16T21:39:50+03:00 WRN cycle between tables is detected: cannot guarantee the order of restoration within cycle cycle=[\"public.employees\",\"public.departments\",\"public.projects\",\"public.employees\"]\n
    "},{"location":"commands/restore/#pgzip-decompression","title":"Pgzip decompression","text":"

    By default, Greenmask uses gzip decompression to restore data. In most cases it is quite slow, does not utilize all available resources, and is a bottleneck for IO operations. To speed up the restoration process, you can use the --pgzip flag to use pgzip decompression instead of gzip. This method splits the data into blocks, which are decompressed in parallel, making it ideal for handling large volumes of data.

    example with pgzip decompression
    greenmask --config=config.yml restore latest --pgzip\n
    "},{"location":"commands/restore/#restore-data-batching","title":"Restore data batching","text":"

    The COPY command returns the error only on transaction commit. This means that if you have a large dump and an error occurs, you will have to wait until the end of the transaction to see the error message. To avoid this, you can use the --batch-size flag to specify the number of rows to insert in a single batch during the COPY command. If an error occurs during the batch insertion, the error message will be displayed immediately. The data will be committed only if all batches are inserted successfully.

    This is useful when you want to be notified of errors as early as possible without waiting for the entire table to be restored.

    Warning

    The batch size should be chosen carefully. If the batch size is too small, the restoration process will be slow. If the batch size is too large, you may not be able to identify the error row.

    In the example below, the batch size is set to 1000 rows. This means that 1000 rows will be inserted in a single batch, so you will be notified of any errors immediately after each batch is inserted.

    example with batch size
    greenmask --config=config.yml restore latest --batch-size 1000\n
    "},{"location":"commands/show-dump/","title":"show-dump","text":""},{"location":"commands/show-dump/#show-dump-command","title":"show-dump command","text":"

    This command provides details about all objects and data that can be restored, similar to the pg_restore -l command in PostgreSQL. It helps you inspect the contents of the dump before performing the actual restoration.

    Parameters:

    To display metadata information about a dump, use the following command:

    greenmask --config=config.yml show-dump dumpID\n
    Text output example
    ;\n; Archive created at 2023-10-30 12:52:38 UTC\n; dbname: demo\n; TOC Entries: 17\n; Compression: -1\n; Dump Version: 15.4\n; Format: DIRECTORY\n; Integer: 4 bytes\n; Offset: 8 bytes\n; Dumped from database version: 15.4\n; Dumped by pg_dump version: 15.4\n;\n;\n; Selected TOC Entries:\n;\n3444; 0 0 ENCODING - ENCODING\n3445; 0 0 STDSTRINGS - STDSTRINGS\n3446; 0 0 SEARCHPATH - SEARCHPATH\n3447; 1262 24970 DATABASE - demo postgres\n3448; 0 0 DATABASE PROPERTIES - demo postgres\n222; 1259 24999 TABLE bookings flights postgres\n223; 1259 25005 SEQUENCE bookings flights_flight_id_seq postgres\n3460; 0 0 SEQUENCE OWNED BY bookings flights_flight_id_seq postgres\n3281; 2604 25030 DEFAULT bookings flights flight_id postgres\n3462; 0 24999 TABLE DATA bookings flights postgres\n3289; 2606 25044 CONSTRAINT bookings flights flights_flight_no_scheduled_departure_key postgres\n3291; 2606 25046 CONSTRAINT bookings flights flights_pkey postgres\n3287; 1259 42848 INDEX bookings flights_aircraft_code_status_idx postgres\n3292; 1259 42847 INDEX bookings flights_status_aircraft_code_idx postgres\n3293; 2606 25058 FK CONSTRAINT bookings flights flights_aircraft_code_fkey postgres\n3294; 2606 25063 FK CONSTRAINT bookings flights flights_arrival_airport_fkey postgres\n3295; 2606 25068 FK CONSTRAINT bookings flights flights_departure_airport_fkey postgres\n
    JSON output example

    {\n  \"startedAt\": \"2023-10-29T20:50:19.948017+02:00\", // (1)\n  \"completedAt\": \"2023-10-29T20:50:22.19333+02:00\", // (2)\n  \"originalSize\": 4053842, // (3)\n  \"compressedSize\": 686557, // (4)\n  \"transformers\": [ // (5)\n    {\n      \"Schema\": \"bookings\", // (6)\n      \"Name\": \"flights\", // (7)\n      \"Query\": \"\", // (8)\n      \"Transformers\": [ // (9)\n        {\n          \"Name\": \"RandomDate\", // (10)\n          \"Params\": { // (11)\n            \"column\": \"c2NoZWR1bGVkX2RlcGFydHVyZQ==\",\n            \"max\": \"MjAyMy0wMS0wMiAwMDowMDowMC4wKzAz\",\n            \"min\": \"MjAyMy0wMS0wMSAwMDowMDowMC4wKzAz\"\n          }\n        }\n      ],\n      \"ColumnsTypeOverride\": null // (12)\n    }\n  ],\n  \"header\": { // (13)\n    \"creationDate\": \"2023-10-29T20:50:20+02:00\",\n    \"dbName\": \"demo\",\n    \"tocEntriesCount\": 15,\n    \"dumpVersion\": \"16.0 (Homebrew)\",\n    \"format\": \"TAR\",\n    \"integer\": 4,\n    \"offset\": 8,\n    \"dumpedFrom\": \"16.0 (Debian 16.0-1.pgdg120+1)\",\n    \"dumpedBy\": \"16.0 (Homebrew)\",\n    \"tocFileSize\": 8090,\n    \"compression\": 0\n  },\n  \"entries\": [ // (14)\n    {\n      \"dumpId\": 3416,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"ENCODING\",\n      \"schema\": \"\",\n      \"name\": \"ENCODING\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3417,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"STDSTRINGS\",\n      \"schema\": \"\",\n      \"name\": \"STDSTRINGS\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3418,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": 
\"SEARCHPATH\",\n      \"schema\": \"\",\n      \"name\": \"SEARCHPATH\",\n      \"owner\": \"\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3419,\n      \"databaseOid\": 16384,\n      \"objectOid\": 1262,\n      \"objectType\": \"DATABASE\",\n      \"schema\": \"\",\n      \"name\": \"demo\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 3420,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"DATABASE PROPERTIES\",\n      \"schema\": \"\",\n      \"name\": \"demo\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 222,\n      \"databaseOid\": 16414,\n      \"objectOid\": 1259,\n      \"objectType\": \"TABLE\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": null\n    },\n    {\n      \"dumpId\": 223,\n      \"databaseOid\": 16420,\n      \"objectOid\": 1259,\n      \"objectType\": \"SEQUENCE\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights_flight_id_seq\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3432,\n      \"databaseOid\": 0,\n      \"objectOid\": 0,\n      \"objectType\": \"SEQUENCE OWNED BY\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights_flight_id_seq\",\n      
\"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        223\n      ]\n    },\n    {\n      \"dumpId\": 3254,\n      \"databaseOid\": 16445,\n      \"objectOid\": 2604,\n      \"objectType\": \"DEFAULT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flight_id\",\n      \"owner\": \"postgres\",\n      \"section\": \"PreData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        223,\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3434,\n      \"databaseOid\": 16414,\n      \"objectOid\": 0,\n      \"objectType\": \"TABLE DATA\",\n      \"schema\": \"\\\"bookings\\\"\",\n      \"name\": \"\\\"flights\\\"\",\n      \"owner\": \"\\\"postgres\\\"\",\n      \"section\": \"Data\",\n      \"originalSize\": 4045752,\n      \"compressedSize\": 678467,\n      \"fileName\": \"3434.dat.gz\",\n      \"dependencies\": []\n    },\n    {\n      \"dumpId\": 3261,\n      \"databaseOid\": 16461,\n      \"objectOid\": 2606,\n      \"objectType\": \"CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_flight_no_scheduled_departure_key\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222,\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3263,\n      \"databaseOid\": 16463,\n      \"objectOid\": 2606,\n      \"objectType\": \"CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_pkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3264,\n      \"databaseOid\": 16477,\n      \"objectOid\": 
2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_aircraft_code_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3265,\n      \"databaseOid\": 16482,\n      \"objectOid\": 2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_arrival_airport_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    },\n    {\n      \"dumpId\": 3266,\n      \"databaseOid\": 16487,\n      \"objectOid\": 2606,\n      \"objectType\": \"FK CONSTRAINT\",\n      \"schema\": \"bookings\",\n      \"name\": \"flights flights_departure_airport_fkey\",\n      \"owner\": \"postgres\",\n      \"section\": \"PostData\",\n      \"originalSize\": 0,\n      \"compressedSize\": 0,\n      \"fileName\": \"\",\n      \"dependencies\": [\n        222\n      ]\n    }\n  ]\n}\n

    1. The date when the backup has been initiated, also indicating the snapshot date.
    2. The date when the backup process was successfully completed.
    3. The original size of the backup in bytes.
    4. The size of the backup after compression in bytes.
    5. A list of tables that underwent transformation during the backup.
    6. The schema name of the table.
    7. The name of the table.
    8. Custom query override, if applicable.
    9. A list of transformers that were applied during the backup.
    10. The name of the transformer.
    11. The parameters provided for the transformer.
    12. A mapping of overridden column types.
    13. The header information in the table of contents file. This provides the same details as the --format=text output in the previous snippet.
    14. The list of restoration entries. This offers the same information as the --format=text output in the previous snippet.

    Note

    The json format provides more detailed information compared to the text format. The text format is primarily used for backward compatibility and for generating a restoration list that can be used with pg_restore -L listfile. On the other hand, the json format provides comprehensive metadata about the dump, including information about the applied transformers and their parameters. The json format is especially useful for detailed dump introspection.

    "},{"location":"commands/show-transformer/","title":"show-transformer","text":""},{"location":"commands/show-transformer/#show-transformer-command","title":"show-transformer command","text":"

    This command prints out detailed information about a transformer by a provided name, including specific attributes to help you understand and configure the transformer effectively.

    To show detailed information about a transformer, use the following command:

    greenmask --config=config.yml show-transformer TRANSFORMER_NAME\n

    Supported flags:

    Example of show-transformer output:

    When using the show-transformer command, you receive detailed information about the transformer and its parameters and their possible attributes. Below are the key parameters for each transformer:

    Warning

    The default value in JSON format is base64 encoded. This might be changed in a later version of Greenmask.

    JSON output example
    [\n  {\n    \"properties\": {\n      \"name\": \"NoiseFloat\",\n      \"description\": \"Make noise float for int\",\n      \"is_custom\": false\n    },\n    \"parameters\": [\n      {\n        \"name\": \"column\",\n        \"description\": \"column name\",\n        \"required\": true,\n        \"is_column\": true,\n        \"is_column_container\": false,\n        \"column_properties\": {\n          \"max_length\": -1,\n          \"affected\": true,\n          \"allowed_types\": [\n            \"float4\",\n            \"float8\",\n            \"numeric\"\n          ],\n          \"skip_on_null\": true\n        }\n      },\n      {\n        \"name\": \"ratio\",\n        \"description\": \"max random percentage for noise\",\n        \"required\": false,\n        \"is_column\": false,\n        \"is_column_container\": false,\n        \"default_value\": \"MC4x\"\n      },\n      {\n        \"name\": \"decimal\",\n        \"description\": \"decimal of noised float value (number of digits after coma)\",\n        \"required\": false,\n        \"is_column\": false,\n        \"is_column_container\": false,\n        \"default_value\": \"NA==\"\n      }\n    ]\n  }\n]\n
    "},{"location":"commands/validate/","title":"validate command","text":"

    The validate command allows you to perform a validation procedure and compare transformed data.

    Below is a list of all supported flags for the validate command:

    Supported flags
    Usage:\n  greenmask validate [flags]\n\nFlags:\n      --data                  Perform test dump for --rows-limit rows and print it pretty\n      --diff                  Find difference between original and transformed data\n      --format string         Format of output. possible values [text|json] (default \"text\")\n      --rows-limit uint       Check tables dump only for specific tables (default 10)\n      --schema                Make a schema diff between previous dump and the current state\n      --table strings         Check tables dump only for specific tables\n      --table-format string   Format of table output (only for --format=text). Possible values [vertical|horizontal] (default \"vertical\")\n      --transformed-only      Print only transformed column and primary key\n      --warnings              Print warnings\n

    Validate command can exit with non-zero code when:

    All of those cases may be used for CI/CD pipelines to stop the process when something went wrong. This is especially useful when --schema flag is used - this allows to avoid data leakage when schema changed.

    You can use the --table flag multiple times to specify the tables you want to check. Tables can be written with or without schema names (e.g., public.table_name or table_name). If you specify multiple tables from different schemas, an error will be thrown.

    To start validation, use the following command:

    greenmask --config=config.yml validate \\\n  --warnings \\\n  --data \\\n  --diff \\\n  --schema \\\n  --format=text \\\n  --table-format=vertical \\\n  --transformed-only \\\n  --rows-limit=1\n
    Validation output example
    2024-03-15T19:46:12+02:00 WRN ValidationWarning={\"hash\":\"aa808fb574a1359c6606e464833feceb\",\"meta\":{\"ColumnName\":\"birthdate\",\"ConstraintDef\":\"CHECK (birthdate \\u003e= '1930-01-01'::date AND birthdate \\u003c= (now() - '18 years'::interval))\",\"ConstraintName\":\"humanresources\",\"ConstraintSchema\":\"humanresources\",\"ConstraintType\":\"Check\",\"ParameterName\":\"column\",\"SchemaName\":\"humanresources\",\"TableName\":\"employee\",\"TransformerName\":\"NoiseDate\"},\"msg\":\"possible constraint violation: column has Check constraint\",\"severity\":\"warning\"}\n

    The validation output will provide detailed information about potential constraint violations and schema issues. Each line contains nested JSON data under the ValidationWarning key, offering insights into the affected part of the configuration and potential constraint violations.

    Pretty formatted validation warning

    { \n  \"hash\": \"aa808fb574a1359c6606e464833feceb\", // (13)\n  \"meta\": { // (1)\n    \"ColumnName\": \"birthdate\", // (2)\n    \"ConstraintDef\": \"CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\", // (3)\n    \"ConstraintName\": \"humanresources\", // (4)\n    \"ConstraintSchema\": \"humanresources\", // (5)\n    \"ConstraintType\": \"Check\", // (6)\n    \"ParameterName\": \"column\", // (7)\n    \"SchemaName\": \"humanresources\", // (8)\n    \"TableName\": \"employee\", // (9)\n    \"TransformerName\": \"NoiseDate\" // (10)\n  },\n  \"msg\": \"possible constraint violation: column has Check constraint\", // (11)\n  \"severity\": \"warning\" // (12)\n}\n

    1. Detailed metadata. The validation output provides comprehensive metadata to pinpoint the source of problems.
    2. Column name indicates the name of the affected column.
    3. Constraint definition specifies the definition of the constraint that may be violated.
    4. Constraint name identifies the name of the constraint that is potentially violated.
    5. Constraint schema name indicates the schema in which the constraint is defined.
    6. Type of constraint represents the type of constraint and can be one of the following:
      * ForeignKey\n* Check\n* NotNull\n* PrimaryKey\n* PrimaryKeyReferences\n* Unique\n* Length\n* Exclusion\n* TriggerConstraint\n
    7. Table schema name specifies the schema name of the affected table.
    8. Table name identifies the name of the table where the problem occurs.
    9. Transformer name indicates the name of the transformer responsible for the transformation.
    10. Name of affected parameter typically, this is the name of the column parameter that is relevant to the validation warning.
    11. Validation warning description provides a detailed description of the validation warning and the reason behind it.
    12. Severity of validation warning indicates the severity level of the validation warning and can be one of the following:
      * error\n* warning\n* info\n* debug\n
    13. Hash is a unique identifier of the validation warning. It is used to resolve the warning in the config file

    Note

    A validation warning with a severity level of \"error\" is considered critical and must be addressed before the dump operation can proceed. Failure to resolve such warnings will prevent the dump operation from being executed.

    Schema diff changed output example
    2024-03-15T19:46:12+02:00 WRN Database schema has been changed Hint=\"Check schema changes before making new dump\" PreviousDumpId=1710520855501\n2024-03-15T19:46:12+02:00 WRN Column renamed Event=ColumnRenamed Signature={\"CurrentColumnName\":\"id1\",\"PreviousColumnName\":\"id\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Column type changed Event=ColumnTypeChanged Signature={\"ColumnName\":\"id\",\"CurrentColumnType\":\"bigint\",\"CurrentColumnTypeOid\":\"20\",\"PreviousColumnType\":\"integer\",\"PreviousColumnTypeOid\":\"23\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Column created Event=ColumnCreated Signature={\"ColumnName\":\"name\",\"ColumnType\":\"text\",\"TableName\":\"test\",\"TableSchema\":\"public\"}\n2024-03-15T19:46:12+02:00 WRN Table created Event=TableCreated Signature={\"SchemaName\":\"public\",\"TableName\":\"test1\",\"TableOid\":\"20563\"}\n

    Example of validation diff:

    The validation diff is presented in a neatly formatted table. In this table:

    The whole validate command may be run in JSON format, including logging, making it easy to parse the structure.

    greenmask --config=config.yml validate \\\n  --warnings \\\n  --data \\\n  --diff \\\n  --schema \\\n  --format=json \\\n  --table-format=vertical \\\n  --transformed-only \\\n  --rows-limit=1 \\\n  --log-format=json\n

    The json object result

    The validation warningSchema diff eventsTransformation diff line
    {\n  \"level\": \"warn\",\n  \"ValidationWarning\": {\n    \"msg\": \"possible constraint violation: column has Check constraint\",\n    \"severity\": \"warning\",\n    \"meta\": {\n      \"ColumnName\": \"birthdate\",\n      \"ConstraintDef\": \"CHECK (birthdate >= '1930-01-01'::date AND birthdate <= (now() - '18 years'::interval))\",\n      \"ConstraintName\": \"humanresources\",\n      \"ConstraintSchema\": \"humanresources\",\n      \"ConstraintType\": \"Check\",\n      \"ParameterName\": \"column\",\n      \"SchemaName\": \"humanresources\",\n      \"TableName\": \"employee\",\n      \"TransformerName\": \"NoiseDate\"\n    },\n    \"hash\": \"aa808fb574a1359c6606e464833feceb\"\n  },\n  \"time\": \"2024-03-15T20:01:51+02:00\"\n}\n
    {\n  \"level\": \"warn\",\n  \"PreviousDumpId\": \"1710520855501\",\n  \"Diff\": [\n    {\n      \"event\": \"ColumnRenamed\",\n      \"signature\": {\n        \"CurrentColumnName\": \"id1\",\n        \"PreviousColumnName\": \"id\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"ColumnTypeChanged\",\n      \"signature\": {\n        \"ColumnName\": \"id\",\n        \"CurrentColumnType\": \"bigint\",\n        \"CurrentColumnTypeOid\": \"20\",\n        \"PreviousColumnType\": \"integer\",\n        \"PreviousColumnTypeOid\": \"23\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"ColumnCreated\",\n      \"signature\": {\n        \"ColumnName\": \"name\",\n        \"ColumnType\": \"text\",\n        \"TableName\": \"test\",\n        \"TableSchema\": \"public\"\n      }\n    },\n    {\n      \"event\": \"TableCreated\",\n      \"signature\": {\n        \"SchemaName\": \"public\",\n        \"TableName\": \"test1\",\n        \"TableOid\": \"20563\"\n      }\n    }\n  ],\n  \"Hint\": \"Check schema changes before making new dump\",\n  \"time\": \"2024-03-15T20:01:51+02:00\",\n  \"message\": \"Database schema has been changed\"\n}\n
    {\n  \"schema\": \"humanresources\",\n  \"name\": \"employee\",\n  \"primary_key_columns\": [\n    \"businessentityid\"\n  ],\n  \"with_diff\": true,\n  \"transformed_only\": true,\n  \"records\": [\n    {\n      \"birthdate\": {\n        \"original\": \"1969-01-29\",\n        \"transformed\": \"1964-10-20\",\n        \"equal\": false,\n        \"implicit\": true\n      },\n      \"businessentityid\": {\n        \"original\": \"1\",\n        \"transformed\": \"1\",\n        \"equal\": true,\n        \"implicit\": true\n      }\n    }\n  ]\n}\n
    "},{"location":"release_notes/greenmask_0_1_0/","title":"Greenmask 0.1.0","text":"

    We are excited to announce the release of Greenmask v0.1.0, marking the first production-ready version. This release addresses various bug fixes, introduces improvements, and includes documentation refactoring for enhanced clarity.

    "},{"location":"release_notes/greenmask_0_1_0/#new-features","title":"New features","text":""},{"location":"release_notes/greenmask_0_1_0/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_0/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_0/#ecosystem-changes","title":"Ecosystem changes","text":""},{"location":"release_notes/greenmask_0_1_0/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_0_beta/","title":"Greenmask 0.0.1 Beta","text":"

    We are excited to announce the beta release of Greenmask, a versatile and open-source utility for PostgreSQL logical backup dumping, anonymization, and restoration. Greenmask is perfect for routine backup and restoration tasks. It facilitates anonymization and data masking for staging environments and analytics.

    This release introduces a range of features aimed at enhancing database management and security.

    "},{"location":"release_notes/greenmask_0_1_0_beta/#key-features","title":"Key features","text":""},{"location":"release_notes/greenmask_0_1_0_beta/#download","title":"Download","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_1/","title":"Greenmask 0.1.1","text":"

    This release introduces a suite of new transformers, significantly enhancing Greenmask's capabilities for obfuscating PostgreSQL databases.

    "},{"location":"release_notes/greenmask_0_1_1/#new-features","title":"New features","text":"

    Added the following new transformers:

    Transformer Description RandomLatitude Generates a random latitude value RandomLongitude Generates a random longitude value RandomUnixTime Generates a random Unix timestamp RandomMonthName Generates the name of a random month RandomYearString Generates a random year as a string RandomDayOfWeek Generates a random day of the week RandomDayOfMonth Generates a random day of the month RandomCentury Generates a random century RandomTimezone Generates a random timezone RandomEmail Generates a random email address RandomMacAddress Generates a random MAC address RandomDomainName Generates a random domain name RandomURL Generates a random URL RandomUsername Generates a random username RandomIPv4 Generates a random IPv4 address RandomIPv6 Generates a random IPv6 address RandomPassword Generates a random password RandomWord Generates a random word RandomSentence Generates a random sentence RandomParagraph Generates a random paragraph RandomCCType Generates a random credit card type RandomCCNumber Generates a random credit card number RandomCurrency Generates a random currency code RandomAmountWithCurrency Generates a random monetary amount with currency RandomTitleMale Generates a random title for males RandomTitleFemale Generates a random title for females RandomFirstName Generates a random first name RandomFirstNameMale Generates a random male first name RandomFirstNameFemale Generates a random female first name RandomLastName Generates a random last name RandomName Generates a full random name RandomPhoneNumber Generates a random phone number RandomTollFreePhoneNumber Generates a random toll-free phone number RandomE164PhoneNumber Generates a random phone number in E.164 format RealAddress Generates a real address"},{"location":"release_notes/greenmask_0_1_1/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_10/","title":"Greenmask 0.1.10","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_10/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_10/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_11/","title":"Greenmask 0.1.11","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_11/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_11/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_12/","title":"Greenmask 0.1.12","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_12/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_12/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_13/","title":"Greenmask 0.1.13","text":"

    This release introduces only improvements in documentation deployment. The core greenmask utility does not contain any changes.

    "},{"location":"release_notes/greenmask_0_1_13/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_13/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_14/","title":"Greenmask 0.1.14","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_14/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_1_14/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_2/","title":"Greenmask 0.1.2","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_2/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_2/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_3/","title":"Greenmask 0.1.3","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_3/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_3/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_4/","title":"Greenmask 0.1.4","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_1_4/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_4/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_5/","title":"Greenmask 0.1.5","text":"

    This release introduces a new Greenmask command, improvements, bug fixes, and numerous documentation updates.

    "},{"location":"release_notes/greenmask_0_1_5/#new-features","title":"New features","text":"

    Added a new Greenmask CLI command\u2014show-transformer that shows detailed information about a specified transformer.

    "},{"location":"release_notes/greenmask_0_1_5/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_5/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_5/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_6/","title":"Greenmask 0.1.6","text":"

    This is a minor release that introduces a bug hotfix

    "},{"location":"release_notes/greenmask_0_1_6/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_6/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_7/","title":"Greenmask 0.1.7","text":"

    This release introduces a new Greenmask command, improvements, bug fixes, and documentation update.

    "},{"location":"release_notes/greenmask_0_1_7/#new-features","title":"New features","text":""},{"location":"release_notes/greenmask_0_1_7/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_7/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_7/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_8/","title":"Greenmask 0.1.8","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_8/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_8/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_8/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_1_9/","title":"Greenmask 0.1.9","text":"

    This release introduces improvements and bug fixes

    "},{"location":"release_notes/greenmask_0_1_9/#improvements","title":"Improvements","text":""},{"location":"release_notes/greenmask_0_1_9/#fixes","title":"Fixes","text":""},{"location":"release_notes/greenmask_0_1_9/#assets","title":"Assets","text":"

    To download the Greenmask binary compatible with your system, see the release's assets list.

    "},{"location":"release_notes/greenmask_0_2_0/","title":"Greenmask 0.2.0","text":"

    This is one of the biggest releases since Greenmask was founded. We've been in close contact with our users, gathering feedback, and working hard to make Greenmask more flexible, reliable, and user-friendly.

    This major release introduces exciting new features such as database subsetting, pgzip support, restoration in topological order, and refactored transformers, significantly enhancing Greenmask's flexibility to better meet business needs. It also includes several fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_0/#preface","title":"Preface","text":"

    This release is a major milestone that significantly expands Greenmask's functionality, transforming it into a simple, extensible, and reliable solution for database security, data anonymization, and everyday operations. Our goal is to create a core system that can serve as a foundation for comprehensive dynamic staging environments and robust data security.

    "},{"location":"release_notes/greenmask_0_2_0/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0/#core","title":"Core","text":""},{"location":"release_notes/greenmask_0_2_0/#transformers","title":"Transformers","text":""},{"location":"release_notes/greenmask_0_2_0/#fixes-and-improvements","title":"Fixes and improvements","text":""},{"location":"release_notes/greenmask_0_2_0/#full-changelog-v0114v020","title":"Full Changelog: v0.1.14...v0.2.0","text":""},{"location":"release_notes/greenmask_0_2_0/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_0_b1/","title":"Greenmask 0.2.0b1 (pre-release)","text":"

    This major beta release introduces new features and refactored transformers, significantly enhancing Greenmask's flexibility to better meet business needs.

    "},{"location":"release_notes/greenmask_0_2_0_b1/#changes-overview","title":"Changes overview","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#core","title":"Core","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#documentation","title":"Documentation","text":"

    Documentation has been significantly refactored. New information about features and updates to transformer descriptions have been added.

    "},{"location":"release_notes/greenmask_0_2_0_b1/#transformers","title":"Transformers","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#full-changelog-v0114v020b1","title":"Full Changelog: v0.1.14...v0.2.0b1","text":""},{"location":"release_notes/greenmask_0_2_0_b1/#playground-usage-for-beta-version","title":"Playground usage for beta version","text":"

    If you want to run a Greenmask playground for the beta version v0.2.0b1 execute:

    git checkout tags/v0.2.0b1 -b v0.2.0b1\ndocker-compose run greenmask-from-source\n
    "},{"location":"release_notes/greenmask_0_2_0_b1/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_0_b2/","title":"Greenmask 0.2.0b2 (pre-release)","text":"

    This major beta release introduces new features such as the database subset, pgzip support, restoration in topological and many more. It also includes fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_0_b2/#preface","title":"Preface","text":"

    This release is a major milestone that significantly expands Greenmask's functionality, transforming it into a simple, extensible, and reliable solution for database security, data anonymization, and everyday operations. Our goal is to create a core system that can serve as a foundation for comprehensive dynamic staging environments and robust data security.

    "},{"location":"release_notes/greenmask_0_2_0_b2/#notable-changes","title":"Notable changes","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#fixes-and-improvements","title":"Fixes and improvements","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#full-changelog-v020b1v020b2","title":"Full Changelog: v0.2.0b1...v0.2.0b2","text":""},{"location":"release_notes/greenmask_0_2_0_b2/#playground-usage-for-beta-version","title":"Playground usage for beta version","text":"

    If you want to run a Greenmask playground for the beta version v0.2.0b2 execute:

    git checkout tags/v0.2.0b2 -b v0.2.0b2\ndocker-compose run greenmask-from-source\n
    "},{"location":"release_notes/greenmask_0_2_0_b2/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_1/","title":"Greenmask 0.2.1","text":"

    This release introduces two new features transformation conditions and transformation inheritance for primary and foreign keys. It also includes several bug fixes and improvements.

    "},{"location":"release_notes/greenmask_0_2_1/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_1/#full-changelog-v020v021","title":"Full Changelog: v0.2.0...v0.2.1","text":""},{"location":"release_notes/greenmask_0_2_1/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_2/","title":"Greenmask 0.2.2","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_2/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_2/#full-changelog-v021v022","title":"Full Changelog: v0.2.1...v0.2.2","text":""},{"location":"release_notes/greenmask_0_2_2/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_3/","title":"Greenmask 0.2.3","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_3/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_3/#full-changelog-v022v023","title":"Full Changelog: v0.2.2...v0.2.3","text":""},{"location":"release_notes/greenmask_0_2_3/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_4/","title":"Greenmask 0.2.4","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_4/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_4/#full-changelog-v023v024","title":"Full Changelog: v0.2.3...v0.2.4","text":""},{"location":"release_notes/greenmask_0_2_4/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_5/","title":"Greenmask 0.2.5","text":"

    This release introduces bug fixes.

    "},{"location":"release_notes/greenmask_0_2_5/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_5/#full-changelog-v024v025","title":"Full Changelog: v0.2.4...v0.2.5","text":""},{"location":"release_notes/greenmask_0_2_5/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "},{"location":"release_notes/greenmask_0_2_6/","title":"Greenmask 0.2.6","text":"

    This release introduces new features and bug fixes.

    "},{"location":"release_notes/greenmask_0_2_6/#changes","title":"Changes","text":""},{"location":"release_notes/greenmask_0_2_6/#full-changelog-v025v026","title":"Full Changelog: v0.2.5...v0.2.6","text":""},{"location":"release_notes/greenmask_0_2_6/#links","title":"Links","text":"

    Feel free to reach out to us if you have any questions or need assistance:

    "}]} \ No newline at end of file diff --git a/dev/sitemap.xml b/dev/sitemap.xml index 01ac9daf..ade61783 100644 --- a/dev/sitemap.xml +++ b/dev/sitemap.xml @@ -400,4 +400,8 @@ https://docs.greenmask.io/dev/release_notes/greenmask_0_2_5/ 2024-12-07 + + https://docs.greenmask.io/dev/release_notes/greenmask_0_2_6/ + 2024-12-07 + \ No newline at end of file diff --git a/dev/sitemap.xml.gz b/dev/sitemap.xml.gz index 5d3f7a66e82b4a8839b42a79302cc50c606bbb0a..758fe5de616b064caa171431a130e1ef14a62ea7 100644 GIT binary patch delta 832 zcmV-G1Hb&p2FeDI7k}DIbJ{Qv0N{In1;ZT-guFV0-1-aJvqngZtws-zb`7DwzA_;+ z2PV@_XYei;Y_Po_daY!kJUfxi3?fDhs=6RrSYwJVhggWY>$?VmX;FCbRo;$mz4U7}Yt*zlYP=G(~Nw z%W&=3CdI9*yR4~H!nK22O?smd6n2|Z#n%q+B9iugke}XqeW-KlqP=$HP3Jr*7Zqh4 zahq}|J&*^&9)F?^&Zt&r9e~)l{&-XwT-v_H#F_`~ymM^Ds&r@ThG@@!rGr?z2*}dr zVguQcjs|%6wB7X>%dnqbru{@N&F-@A)OA6EQDrbGTh3I|A*Gysw_ zOcw#%JP)d-ch4Wmor3j5WwD~6^j^vJ5L7T96=xNL`lD zOih!V5mBUax<2=8dS)I}+NzKuHFCInfENkc6m&Tu>L8&h!2E8h+9pMilMSF0u!IcD ze%5+&GJl`8zAIY+$o|eebyh;p_bJv=tU*gyuN?EKl!0H)GO+*itIoP@L|75`Z0?(X zPA%KKZ{NN{Wz9~otNSiOKuxKkOMo z@!()_EqEXpE_`?W%z6&)9nz#1;IM=tAJI5V%5X<157!c~N&Cs5&EY8cP#1$icG|lk z{y52HEa;#8j)?%P^Ve#o=4!?Yqc%XAlW5Kg!>Y38yvk<5N^`?Xv*e^al+hl!aQ)oJo3RTIL&Mp{mf?0%(LL+VO2P+O6GdbJ{QvfbaPg40kXPnn#C_TYo`&)(B~_)#%~T$`JbNyC#I@ zz+~F#4Bq8}WhCv#KC~;@tLN9?M?1*qQhXZEr?c@0qDj`p`f2>*`?tx%_<4P^>as^D zBkCOSY21|3eyytgem_N5Am|pSDc2R6h8kF%ZdSAD<9K~DqJK|6!Q0jGv+~KT`&p2t zvB}`H&H$0dzD!-JEbOXP)%W>0L}Llbt{1b#axz~`X7|+Pbni_@br$mP;&e6*QJd0b zxVCJQ;?~t&)`g64ZQ)iIz0n8?yA7H6+TvZrqP-vFueV+w>YTc0uPu4gIbW2Es$?B; zn{uECw7ti~ng`msar9zE;cVRy{`^;T5Nj6!S+cp< zKz2n(13dV&-Sse*VVqt@`-xoK?lRvgbU^~6%3xBqoKa1OB?YzZqUi$&MX{1{K`8oG zzd$~>d7x7r6|K%3*Q(2CCRomR)|tYYuhH0W=Ih6?DSy!}BgW)t<73(6qt7U}KIy_A z(?tQ0v7?4FoI&Z@Qp(YRwx>nyU1^l*a)xLQ0$Ql^scahEQ7TiT97~rv2%zw+)cgj`(WPdVWIx8XaeTuad)}STqSC08q%EB*a8`wYmsmB0{0W5!y*j%ipH6Qmb;SjaGroo+D`^`ha