From 171a75c09458054b135e86d4ce8c15dda58902ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:36:50 +0100 Subject: [PATCH 001/346] Bump webpack from 5.75.0 to 5.76.1 in /frontend (#371) Bumps [webpack](https://github.com/webpack/webpack) from 5.75.0 to 5.76.1. --- frontend/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 8dc48c22b..911acfa48 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -12219,9 +12219,9 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.75.0" - resolved "https://registry.npmjs.org/webpack/-/webpack-5.75.0.tgz" - integrity sha512-piaIaoVJlqMsPtX/+3KTTO6jfvrSYgauFVdt8cr9LTHKmcq/AMd4mhzsiP7ZF/PGRNPGA8336jldh9l2Kt2ogQ== + version "5.76.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.76.1.tgz#7773de017e988bccb0f13c7d75ec245f377d295c" + integrity sha512-4+YIK4Abzv8172/SGqObnUjaIHjLEuUasz9EwQj/9xmPPkYJy2Mh03Q/lJfSD3YLzbxy5FeTq5Uw0323Oh6SJQ== dependencies: "@types/eslint-scope" "^3.7.3" "@types/estree" "^0.0.51" From 40029630fb8b33f472390e598b46d691052e7cb4 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Mon, 20 Mar 2023 16:58:01 +0100 Subject: [PATCH 002/346] Upgrade sqlalchemy 13.16 -> 1.3.24 and starlette 0.19.1 -> 0.25.0, ariadne 0.13 -> 0.17, fastapi 0.78 -> 0.92 (#379) ### Feature or Bugfix - Bugfix ### Detail - Upgrade starlette version: vulnerability found in starlette <0.25 (https://security.snyk.io/vuln/SNYK-PYTHON-STARLETTE-3319937). It does not affect data.all as we do not use `python-multipart` but nevertheless it is better to be in a non-vulnerable version. 
- Upgrade sqlalchemy version: the vulnerability is not stopping the CICD pipeline, but by upgrading we are able to use the latest version of alembic and we can revert the pinning of the version which happened in https://github.com/awslabs/aws-dataall/pull/354 - Upgrade ariadne to version 0.17.0: needed to support starlette 0.25.0 Higher version of ariadne==0.18.0 removes `PLAYGROUND_HTML` constant that we use in testing (Check [docs](https://ariadnegraphql.org/docs/0.17/constants-reference)) - Upgrade fastapi version to 0.92.0: needed to support starlette 0.25.0 (Version that supports this particular version of starlette, [docs](https://fastapi.tiangolo.com/release-notes/#0920)) ### Relates - #378 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- Makefile | 4 ++-- backend/dataall/cdkproxy/requirements.txt | 3 ++- backend/requirements.txt | 6 ++++-- deploy/stacks/dbmigration.py | 2 +- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index 9e3492333..4c695ed4f 100644 --- a/Makefile +++ b/Makefile @@ -76,12 +76,12 @@ assume-role: rm .assume_role_json drop-tables: upgrade-pip install-backend - pip install 'alembic==1.9.4' + pip install 'alembic' export PYTHONPATH=./backend && \ python backend/migrations/drop_tables.py upgrade-db: upgrade-pip install-backend - pip install 'alembic==1.9.4' + pip install 'alembic' export PYTHONPATH=./backend && \ alembic -c backend/alembic.ini upgrade head diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/cdkproxy/requirements.txt index 9fd74e6ab..f2da84ebe 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/cdkproxy/requirements.txt @@ -5,7 +5,8 @@ boto3-stubs==1.24.85 botocore==1.27.85 cdk-nag==2.7.2 constructs==10.0.73 -fastapi==0.78.0 +starlette==0.25.0 +fastapi == 0.92.0 Flask==2.0.3 PyYAML==6.0 requests==2.27.1 diff --git a/backend/requirements.txt b/backend/requirements.txt index 
47f416aff..1b6fbe97d 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,7 +1,8 @@ -ariadne==0.13.0 +ariadne==0.17.0 aws-xray-sdk==2.4.3 boto3==1.24.85 botocore==1.27.85 +fastapi == 0.92.0 Flask==2.0.3 flask-cors==3.0.10 nanoid==2.0.0 @@ -12,4 +13,5 @@ pyjwt==2.4.0 PyYAML==6.0 requests==2.27.1 requests_aws4auth==1.1.1 -sqlalchemy==1.3.16 \ No newline at end of file +sqlalchemy==1.3.24 +starlette==0.25.0 \ No newline at end of file diff --git a/deploy/stacks/dbmigration.py b/deploy/stacks/dbmigration.py index 1930dcff0..f48712795 100644 --- a/deploy/stacks/dbmigration.py +++ b/deploy/stacks/dbmigration.py @@ -108,7 +108,7 @@ def __init__( 'python -m venv env', '. env/bin/activate', 'pip install -r backend/requirements.txt', - 'pip install "alembic==1.9.4"', + 'pip install alembic', 'export PYTHONPATH=backend', f'export envname={envname}', f'alembic -c backend/alembic.ini upgrade head', From 79f0e4ca12840563338dbbe206e04a199233b1b4 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Mon, 27 Mar 2023 10:15:52 +0200 Subject: [PATCH 003/346] Add dependency in dataset stack (#385) ### Feature or Bugfix - Bugfix ### Detail - Added dependency to dataset S3 Bucket for the dataset crawler ### Relates - #384 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- backend/dataall/cdkproxy/stacks/dataset.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py index f7da39072..4ee53beb1 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/cdkproxy/stacks/dataset.py @@ -459,7 +459,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): }, ) - glue.CfnCrawler( + crawler = glue.CfnCrawler( self, dataset.GlueCrawlerName, description=f'datall Glue Crawler for bucket {dataset.S3BucketName}', @@ -475,6 +475,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): ] ), ) + crawler.node.add_dependency(dataset_bucket) job_args = { '--additional-python-modules': 'pydeequ,great_expectations,requests', From 95c8619798bf3651e66f2a4522f5686efcd5635d Mon Sep 17 00:00:00 2001 From: wolanlu <101870655+wolanlu@users.noreply.github.com> Date: Tue, 28 Mar 2023 16:18:37 +0200 Subject: [PATCH 004/346] feat: generate url with dynamically domain name for quicksight embeded dashboards (#380) feat: generate url with dynamically domain name for quicksight embeded dashboards ### Feature or Bugfix - Feature ### Detail Previously one had to configure statically data.all domain name inside QuickSight to allow dashboards to be embeddable inside data.all. With new api it is possible to dynamically set in inside request. User no longer has to configure QuickSight before using it. ### Relates By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../dataall/api/Objects/Dashboard/resolvers.py | 7 +++++++ backend/dataall/aws/handlers/quicksight.py | 16 +++++++++------- backend/requirements.txt | 4 ++-- deploy/pivot_role/pivotRole.yaml | 2 +- documentation/userguide/docs/environments.md | 9 --------- .../docs/pictures/environments/boot_qs_3.png | Bin 112581 -> 0 bytes 6 files changed, 19 insertions(+), 19 deletions(-) delete mode 100644 documentation/userguide/docs/pictures/environments/boot_qs_3.png diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/api/Objects/Dashboard/resolvers.py index 799354207..a44800502 100644 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ b/backend/dataall/api/Objects/Dashboard/resolvers.py @@ -7,6 +7,12 @@ from ....db import permissions, models from ....db.api import ResourcePolicy, Glossary, Vote from ....searchproxy import indexers +from ....utils import Parameter + +param_store = Parameter() +ENVNAME = os.getenv("envname", "local") +DOMAIN_NAME = param_store.get_parameter(env=ENVNAME, path="frontend/custom_domain_name") if ENVNAME not in ["local", "dkrcompose"] else None +DOMAIN_URL = f"https://{DOMAIN_NAME}" if DOMAIN_NAME else "http://localhost:8080" def get_quicksight_reader_url(context, source, dashboardUri: str = None): @@ -33,6 +39,7 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): region=env.region, UserName=context.username, DashboardId=dash.DashboardId, + domain_name=DOMAIN_URL, ) else: shared_groups = db.api.Dashboard.query_all_user_groups_shareddashboard( diff --git a/backend/dataall/aws/handlers/quicksight.py b/backend/dataall/aws/handlers/quicksight.py index c468296de..54ca9ad5e 100644 --- a/backend/dataall/aws/handlers/quicksight.py +++ b/backend/dataall/aws/handlers/quicksight.py @@ -234,9 +234,7 @@ def register_user_in_group(AwsAccountId, UserName, GroupName, UserRole='READER') return Quicksight.describe_user(AwsAccountId, UserName) @staticmethod - def get_reader_session( - AwsAccountId, 
region, UserName, UserRole='READER', DashboardId=None - ): + def get_reader_session(AwsAccountId, region, UserName, UserRole="READER", DashboardId=None, domain_name: str = None): client = Quicksight.get_quicksight_client(AwsAccountId, region) user = Quicksight.describe_user(AwsAccountId, UserName) @@ -245,12 +243,16 @@ def get_reader_session( AwsAccountId=AwsAccountId, UserName=UserName, GroupName=DEFAULT_GROUP_NAME, UserRole=UserRole ) - response = client.get_dashboard_embed_url( + response = client.generate_embed_url_for_registered_user( AwsAccountId=AwsAccountId, - DashboardId=DashboardId, - IdentityType='QUICKSIGHT', SessionLifetimeInMinutes=120, - UserArn=user.get('Arn'), + UserArn=user.get("Arn"), + ExperienceConfiguration={ + "Dashboard": { + "InitialDashboardId": DashboardId, + }, + }, + AllowedDomains=[domain_name], ) return response.get('EmbedUrl') diff --git a/backend/requirements.txt b/backend/requirements.txt index 1b6fbe97d..7429e61c9 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,7 +1,7 @@ ariadne==0.17.0 aws-xray-sdk==2.4.3 -boto3==1.24.85 -botocore==1.27.85 +boto3==1.26.95 +botocore==1.29.95 fastapi == 0.92.0 Flask==2.0.3 flask-cors==3.0.10 diff --git a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 601d30f70..3dc29385e 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -593,7 +593,7 @@ Resources: - "quicksight:DescribeDashboard" - "quicksight:DescribeUser" - "quicksight:SearchDashboards" - - "quicksight:GetDashboardEmbedUrl" + - "quicksight:GenerateEmbedUrlForRegisteredUser" - "quicksight:GenerateEmbedUrlForAnonymousUser" - "quicksight:UpdateUser" - "quicksight:ListUserGroups" diff --git a/documentation/userguide/docs/environments.md b/documentation/userguide/docs/environments.md index 9aabe13b6..9c18cbe44 100644 --- a/documentation/userguide/docs/environments.md +++ b/documentation/userguide/docs/environments.md @@ -77,15 +77,6 @@ Enterprise option as show 
below: ![quicksight](pictures/environments/boot_qs_2.png#zoom#shadow) -After you've successfully subscribed to QuickSight, we need to trust *data.all* domain on QuickSight -to enable Dashboard Embedding on *data.all* UI. To do that go to: - -1. Manage QuickSight -2. Domains and Embedding -3. Put *data.all* domain and check include subdomains -4. Save - -![quicksight_domain](pictures/environments/boot_qs_3.png#zoom#shadow) ## :material-new-box: **Link an environment** ### Necessary permissions diff --git a/documentation/userguide/docs/pictures/environments/boot_qs_3.png b/documentation/userguide/docs/pictures/environments/boot_qs_3.png deleted file mode 100644 index b0c39e6fd67447658609c97a961d6999810c8e25..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112581 zcmagG2UJtb+dYgZqEQ49KDm3R#mLCeu%&~W z3*zXe`R2_#O+&w?FAZ!RU+PoZQa;7#>HYkR=I3cd16jI8eVO&ecWi`0o|bIC((w4& zHw9foRlj!zInap=COE}YdR*9V-|rKsX)-w&=py;}_%LL{tuq`HY_60M_cYHq(?v*A zepL<17kx4Ggl6}0eq02b1>3>dh!g5Kv3)H(^VeUlekO)AV{d8Te&@Ca^v(vJK1C7yDA<9L&)0-YRZ!$* zenpueU%u+W$%&KL>C;uWPd833ot&IZ9G{#_nq9ouUiQMX?i3k!x3Qb9sNn?YSS>T{ zC+09185F!eM|LX6lI#q4bqf42fgdum(;ouKD8XM^@bfT*{J%e?=E1h+ljF zmw)mE{Dqr1nwi--J+pUq*0c@=ry8_GXgh1eR3uF7ZF!7L?TyWN+-)6*hmc9ROMsWQ zX3j=z?zT2|P7?0YH~#*H1b9t+oA(CW-(PXImcF44Q)iR6cQj)Y;o;-qyCDN%V`GzY zG&Psde5mlB!@>WgZ#;8$c97uZb#rs$aTDaRceLQ;7Z(@j9f zVnY1?_t5|I(f{{Qq?4JWyuB@G(^=+!AI*Ob{=Xmo=Rhf5;;sLmuK2Hh{`+0f(J~Mz z-v4=MGLYw@kTNo|`(#fZK0vsiS{`?cp+k-YZbaV_SE9KExqa(A^ft{SI*rRt4fHOL zoIAur((t+Ok#zJlDoYKVZ_gQ@(Rleb{-x{WQtnh+HknvvFzg?sM zy9+eN<_R$u{`K|+$X~~NfA;(?$xZ^-XsQQVp8l#ws$c%@wGWrszN#pMm|P~=g*5ls zra{Y$oU{MdFk8gC``zpHYqbZt!j?U^s<~{JWDnP0CAtjgST2Q!k$zG%_YL>xT4;@- zU-U|>B4J5V;yG$~-*^$*xB;1ki~rW$u4?{w*)c2{r_7^y^m_=!M+bXlS~*WMO4KCe z|D_-N$Kfx;`Q5tKA~WSke>LR`WEl~xW0-u4+OArs$lUeE1Ih~SnxoxO4~K6F=ro^$ zU9D6(3f*e=g3#;Y0&|>>xBjKe_Ep%X+0A1G%(7iqMxIE!&a_6Zj5&3c9PLd`;*fZA zJSwj=*_ZGg1B*8Ye=RkAzDfM)lw2!z>J)EQ2xqYso%D9fmN&MOJlgKPDZes%>uaA= 
zvGrgllc=4I%hEtezy*epPh9Y^u0AZ>wlR=uRU$LV6F2VEC0+VETsFHcBYLDYlH0^U zw!3L5x2lsOd(!vd9=rLCJG<;8r_66J|962g5I(fmiZC9YsT#)KDYd2Ib-2--^Z5M@ zolfzE&mYGu0-Qz1oj)EOAFS!@a&Aq1Q06J1A#zq8QW zj6+~Q%&1Ez$L&fJ=F{ztmW#SvjdI9GZ-z_tD7^b?s?=f=_5B0KN|@xTL$v$uu!W#; z#P@?tdlCVl<%ArnJz8oT-;pFOIW3B}Q3x2LBrL% z+7UA`(nsn51J z{!9gp{z#P{|AEy}RsGe-`9OFsPS|?6YOS_S+kLRk@8t0dtTlEK-Zh24dvdtd)xvE3 z2X$0`co|`=8pFq4DE*OJ*m^)Hpn|}KuJyjn`)HCx_{ony|Lkp5w1MY$7Wbe2uhlS@ z`O!W*{jZxJt|8c?Ha~c+)rfkc8oDZ@b?s|hGEEFTX6UQHLmk`oJKjp(?SreWIIC2O z&NC)?t%^(T|NhuJ?xH|Yp7CmJ34JXA!_vY}&DNb9*9ic0t;>`*i3)jn`hwv8k3_^;DqgB{jm(ZCG5n zS6}d?-$@O&M__C_Xr)ob*6(O%a9r(Aj7lCV&p^62I>nE~qZ@)zNVqJTawfT_c}axI z#AX%pdv13uKNez6#{8vM{7l0&NMY9+@ng0lzUs2v&1>@9Q(kxU#AcZ4)Wd^O+(?`$ zvxKt){NimHfsOEfc-D(nt+mZCYn^fhHDM~`IG zd%vG<3N0!$Ya4mQpl{kGds00*fq`8+IG8+H?d+$)PRY_`Y*R8jM__SVpl@88@7T)z zj*tgKRc4gy2xXQQm}~nZ#ebz_&|r0@ho-brtJ-y^A2rI~e0|#0I=9{U+uKK%`YXEc zT{pB!4lL=vp0YLD5pt+ZW{VmcPFJLsT+B+>yHn}DVSKQN&$Ah?bg`>dM|vrAU49B~ zx$5W33u6h%(4$B!^!}vB9JANnxEpM0#w=QYw!u(e9(F=5x8<@Zd$!;>paHG8ag9{a z{G)uILa+56PAkW1F3~MXdaGUVa$K*fWQP!8HeB{N#!TojtRnNBX)gwrEBAbVb7pro z-hM>KBGGJTX|OE#3g?L5$pLyWe3K?I53>bAtr-}9WneV7>+j{1O*k^Xmt^~sk?;ug z#{&%Xv5%IFkX2vS-HK4jXU%z0_jD_l7JY?&XDf6d6f#tl#6Jn}PL|pX8-KhFE4&#d z)?g;c#Dbc$b|0zrIUqFt;vt3$>&e<0uI6qhg4}}{yG5trLNlH=jiwCWrGETeVf;Xj zu5PMafL@cJSu5RO-6m@+{?2-OxQu}8TWo_)KpRu2)4}eFi1#0dXzqp2x%yRc0^C$( zc#H0cs*T#r`Jf-pZz?ghALno1^x-VneZ(5SJoj0yU4ZVq?jJ)%eHU7lE(y}Pp&g)8>xOc2-E-8x0; zd9Ra0ikU=go>9yENlEl2EV!D}$PsAZ0cdEEr3<*IY9WsAvv1iDs>}0Y#2#=>UbXe!r3<$x}>v;7M6b`~xeGbUP zRb38zbI}s$ z#*Qd%RHqrr*K46CW4tAYDNXdsz2=?5*o%BShMt!F+nbM2&`M@W-=pi6MFGrj#h zwSRtrFkY-{lvW!7;!y)qzNs>-0YPPr$B=C`T;MHc6f`fFRiL_w$Nbdjk6oi;G_F*r zmZfuZn2$a_=ludphTu)j-P@l_4hnl)1ELxN<$^m=e|PaXz~7!g{e63(H^*ylb$2R= zmC!LzWT93D8;!oJLp$6FA%z}j?xo~wu>U?-tFxC6x>VxxDgs7fW(B#9%y3_?r;LYu zOXJ=aQppxzM_bZ;fk+F0KuAyEI7X1n`&2%^p zO%b>>n723nRS#!{*xUXU!dv+Q!JZ>dZVPuv%j{BK;q>2~5` zZM$$BnXc5XLK)+ECzL81Q!J?gw=A@eQxw#Pol1|(%_}qwzy`<3eH*o}8)v4y#CIgF 
z)FzyoLv3-5lB>#hwR%ex#dI`e6232Q!gHjY@3~_oDwQq!I5OI#IV|){f^QXCHA!lC zyzV5s^~dxp20d76o>Gc1W#+N=cHa5Ez%!?~`=i5NqDd^$5uqH8aEW@0j^o&qp;YTMi`Bvm)?z|l= z8O`l66h5J?q?Yuxyguy?n-&R*H&~yyojS_CNeXL<)Bm6;K;jV3w={7|>d4?$B}78T zdy&YXKZb&tKC`o8p)asB8ui%Ryb8g7%?e5wIt!DM=Qz{4yV=HDHCH_wXFcY6q>0S% z?L!D-m$=uf_a?k<%CdT|`;V^qEz}~A*eTt};b*T^0wOC1SzmeZAfIKj(hu*8KRBxB zR%R`S=D)LJ8bC%y`w+IDBa^!ij^+?`oUEqM$NlPcOr;_pSNt}t-{F5FUtpE3RnYzi z>pV>><*+!}9?|61gDxyjq?mPriEt=uf(rasZnU65?|L3VBY4fWY}}kMc-{&+*dM~o z{igpY%Tyc2`_L|4snU|f5PNaoj-nG12t}%6m81fUtQhnh{I8iK8Q%nY_OEBTP5P9L zPF8z3iB6i@37Jpid$8CkQyj^Pb)hnk{3Ne<+Io|I2PTSCZv{Ok1Oqa z9IgK{)d6Wk47jkAhNo2Boe*vtfg$9_ zN}qK%%sc_Bwj+C%<4uY@CSkbzhE>e?St=$|-5AU-Q7VpuHNTV5+bS_M6{7lO00Hs* z)n9W*2-c^0l=Mk=AA9x6PI8#fb8h~k3w$omL&x#w6?(f5qqz;ykZX7}Qj{>;wUG?h zUkq=j^zn>!>hTsf9n96&Z%7^&hv*46W8~C?13qmSu#JaP^ZG0%>z^zulFH%)?)?xs z*DMsCPR-{c4b#mugwww-8*>c78}uCp59+(=n|@Ye&SZbqXlp_vV)u6BDoa5x>ss{j z*!o=2D;juzFjFBC_RS>jl6rt?s~L#qpE&+PM1G!)Moh)p?=1Egu}FJrCytb?1al@i z#8VGx*kTu-z*w_bJ7kCRjpV&RY#u+1kGrxB(!37ZHw2l+HuH|S3iZ82w~6O_>H8A& z_jgxD2er<`Mvza6PPjK7Z-o1m8yPfqW%@SUw1e98OLrv8PBNX@o3C*W3oJth%w6EE zj^G&k6wJ!sH;GCED_s#jnxC8BiPmtjsh8dmmv{N5!q5}W#LvjRm)zpvc5yLCSu29$ zwj%Ag6SEJG8 z4you4flJR#r^Dzuelitlci~g*d<^kk7pSqw;JG^uxnVkEsXt0aF%#NxA1^C7b|) zeXk+mw?D)1(H|$!e|YXQi1{w`$&!_h6ppozRY&QVmK0&V42u)UDQxn|;hZdgvgF~> zycAXJwKcDvCZ!jKjqj%c7+bD0FN=I_c6|1ppqVnxZ??r>cZg=6ga{Uhf)AhItpc_L<%Q2 zKps&!jf})DKIS-z)^iMwgd$rd+*X&VLn;u)N%H}SJ2*a6KbD{>WMeB4-=s4E;ORBa zO66!C3~NcztAcD{10gC>P`9Ovyfx^MuMj(y30VZGMpP@dYWaFV4p>$z;!TPk2GjC% zZ9aNgwTuyPo=SxB3pyuWci}b-jn0m43ZJ^>lZLyzb7h>)*D6UVbYZFaESS;7KF9lW zt3PCq|J)>ex|_U{;Wq!HUWm)uUC(g5`On6*zQF2%c7gGu-+H0`*#!apbOu(>bT}5E z!#i8%3YRWv4P@&~H^FP?lDrz?O>&88yPbfxM~nGTzL9GqmFUB!ce`EKk3O2K(NmX9 zY~ek5!B!TXarEMd)Wu4dr5njQ4@ku{SJWwb7p^(3XR!WEX|G)&D%+~6voQo!2u0%? 
zyFGMgwtC7;S%woM%Yi88k=2`CVwy)cyYF#rN6u&SJ7+G!_1D*6ohHtK{2V|)Iq7#L zN{X64u~W^(n>gE2%}AIuy-P?c?0t?)$!1m|$DcO_T!vb_sJ%fhVQVjA4%K@hE-ckg z*e183O`D^ARqpU#cr-R8HmwKfKKA$F!_3!eWYIw}$JoU_tO;?}t+d<>-;N(ER19Ys z00he2QP)iBLrE2@b#5J6c(`ahFJ zYkhh40TB9DTEp6d)hgFXpS_jb@J=Vvgxm5IxlZ86@y2_bGt<_0#-Em0>3n-jZ`+;r z$WFB*k{dPK<~4&M&9LrX_n&7sydv#E>u92Po( z7Spg;{x>@H)S1nvfPc>WR>4BDvp~GMMF;rjmPXZoyNVWTz`0jno%1AxivA|&)j`Yb z)RO;7Xy;L!6p9cCVA}T^)*MR(QS5f^V%z(BsGfL0Jr?PbIS0HYsm|0Xi-&o z+M|yl93h=D8*$47Q(2M^{(dVt|7`9%8;DlJl6BPRZW*iczEZ-Q`x0l;UVUG;s7v~o z5!S7?P&aO&g)yEV*zTc}@31l$lsmRJY-W8~N|BR)(7{V`$oN-Z9(vzjzAaFlB#N4Y zo?DhX#fn(G+OQ-!Ny|RldE16$H0`-X^MvbUk$00{`+T1DS{S5QNI%Aorp1E%IAF5C z`9j@2f>s$km=V@sLFy{l*3NjRT;Xm1UL`rD1T+~o+CGbuT85`(cKW-DW`|)FBu7fV z*ZDuUDImgD6?r@}sdLM#Y5$UK8_ z2t8!B{$d=OAXD6tXN|23NE>-fdcL-JouUwtDc)f31WeS{u|oL`a`savJOtQOSHZxM z$UT!BJZqi)Da?RR#6sTX8dKHwXP0G2rfZg0*H48-ZTKR5W_9N}6UP^cJYkeS*(Hwy zmW$WL_}aYYWEFWSshEC;g_KnvZnlm49=+Y0I9NIU%a6SAyXW~RQnUO%69rx8pV>hl z!}JU}<+|;qkgN9!2|G&%z5mrK2gF{vZar?_D=!KwE9v**uL2c47zW<_`x1=lYAVYQn#X{)1d-={A@{>Kk6NU=gldyw~$thWDM!SHQ zR61p^_X@*EKFWyV`_6}HUEQ)ck&t7ZLet>>ERCBBOV7#47wF#X921Tk4-P%u8LfS$ z8UnN`9L-Sv3*7ui0tR&mV^2oXopJVzch;3viE7x=aWktt2PWE3CxYS^(wJZYIbnWi zfAj11jk}~`p}!Q}Im+lMm+=O`P9+aWG6I4JmLSHjoII%-W4H$Rk|c{g&t_~-if4Qm62)_ zkztEa^`BQ#*UockU6g4!`H`gn06_v`l2?WdjbqFqSqfjso& zC7C~l+?t=?*be2_H-%oCn_ZP!EuRL$j&W`XtM8xs-AO;cUcDf|sFc*~O^g!RY9vmp z#J+C73Btf*lk>F1vDtc}3#7Ocvz>`cGG&&%O-j)`D}>{jXfzl_<{~Cd7kdc>%`DcB zLavr8R zkF%B9t1k)=Wwe9eAJz2!5;tw3shUppZ#DfGbpvu(XDdLIXDDT!bpO+lb$Sx z(kF+(AtIcf&`QnB&6H6HyKXsNGOZSc^%rq{-Rw$l?I03(i0{~S&6POo+yWr-#LupH zpB(M!vjW7FCk^oDYOQnoe0TbliIA|=8QD{;hJkB$uNwrGDdt@l z??K=8L>r)tOYJ8oCOnt!!PSQW4Zrjw^%argqzEFC0#z`iLMOOW=3Yk!R!>Al=OESQ*T=?@vHF_s9WTBFRYJ&iJ!fZh`S@>3VoLn0i+f`F~^yWP1 z+OZB@5KF#4p2-NqGqrof+5i1d&5`yOK4qHYB zK++=$-tofLQ6+ZcBOONA{X0k(Ko@QyZ;v8sbaI{w|1C_XS{C|zUNZPl!_z#cdOtFz z;A!=qjQ8Eh7$d3ylJV){x!<>n&!a`gBV+WdTuXqUI6mQfV7r$N5IeFhxBM5xZI}V= z-5qsT=hpzT#AD+dLzTEd&_zaL-s<(22i<%rAa`gLc%3cD{s&p^a9CME%zZlEdkmXJ 
zS4^2sQ*l1M^MLi9ZfSACL`WTHRFwgnUU2Nks`Dp_bPySvuJ2E-zargVg8mMNX*=_* zNTEdwkV?GI7(iufPt8WQ_a(&Owem_iLKt-WQv?dVD;#xL%z*-53Yf_j?2hHxp=vTJ75(}kl+VP4-?nU+tiS6lPtw-1*0_E)CN+C&gcE3s7Zlsv2dCt_$&<@If!D3MSfYb~F*_zf*K=F08P}Ci)N>(iwwk?C$ z+*x`;Ot{M?eXZ+`c6CdenT z@0DJ(Ko#@W{%PmgHmJT8GQ0)rGndPOtx>9;>t)b`5%yXS?l&^h#_GhBC3QeC#{`vW8T#FfxQ%uA= z(F#j90v}EeSkE!3=7Tx9763Apuhg9!uNDLBKODu2PRaiGR6g+H^Y9&@gT~3_I(UIt4tp>UWc?hF&*l~eM3gVfT6 zYS2r=8PNt-b!UvG0QvoMprj+Y&URE}a0O83;Bgq)O|+f9B*x~5<$`0%mx{p(ZwsIQ zLmZPkU_~2waq9Gq=BB3_JCmQEd>lJ^R+lRXYynF>86g+W0&When{W6a3huH7hJRHR zIv{v&%qe_fc*1>}O7dX2$mzQA?qYu%MK$1GoP3FXqHJ`XFHe5WO6lmB(*on~Vj!mu z%Tr7LF8%&w>a%yv3UJX_SE4B5Adg($#dWd*LfR|$60^h*Fq!-gTtPbEeF(nJYn4gl z);jYnXD?lUMqFl(xV?RU`cp0sqWxTwXFdjF)^7B(eVj5C+z2(zQme@#oSvl50j7k3 zOts{ejC5H=SJR@+VTR zL>>%(aob58N(O(S1NclY!a}CLde)1(m0apo(_xa!z}ZqX9WFaT_ZYhEwOTQ!r;eey zSGCUtg#WGb1x&o&9vH43QEh)xP1U4#5M@W;iiD9$6CQJkfX?${;4>Fz?+gMqqqSA{ z0GCQ$3NI_*HYvZrNv{sIK@$Bh+0Un&a-V7-L*lyNr#|BtyMV9EO-0q0>#y8ve-`)Y zN_UrsdlND59u2pENVvB8r15uWlJqF@9l=-lBr|DaXAtcc8CQ5@_v>Mq?rqONN>=Y5 ziFpkbP?2dx!{uqi^7E~{h1N$#6Zu8CWHy1*y=JFm0%h|v0D-pO%GydMaj zDt{S6w6NP-U9!pX2W>@8nXvfTR)ekOmmc2ik*BOR`xA(iW0JIxAy zyQ4+Ai&94+!3nQpp!fklb!1o)b^thgmRJs(mzsZ~T}V9+Uh;*cD{%QY$3PQ#zt4qa z#Lm9rF}rI85yfleEDiu=el2PW4S7Jni+%codS^uFF9GCKCXQNI&nE3+ip_AL-5ofE zBY5y``qp<4fBR{GHLt{HZ+vhEn{Bu|x+hwP3A=!@C2Ser6Ml$mPy@yVi*9xN zR0Ve=>dw3V{GvfUr_iNeAZu`akoWSTaKI}o0Qd6UCOkMdr%!8?32rOPw^)hkm|4d; zA-eFGRM(mG3>=G(Fs_hwl1bs6{0!In&HKKI_ya*+n&qUz2Yn&#W@DQB{eI#{Cw` ztk_cSae5Y1k(nH*4CL5M9f~hq(HCPIKERW-|GE#>*e`m|wT4PCNlaUZu}; zjId4i*?r!I@LM=U`|f`u=85F@JFe^lJ3+-ox&HUHT-$v#*^txp?Td;qClkUKR5JZz z!j#?4aM|jEjNkAF8R<}g9jA8w=9rfn0hy4SjXqgxRu5AC!gE!v!%h{ev{G)pZ?eW| zA(Ju>P+RPl)zdZBIm}z;O_DriqfUTua#K}-h2f%iznrFaMlZ(jn_PGPV@~gArDqIF z!wKjnw=YqsJ7yOjzKOb*Qy=!(;7VlJb@3jKI;H9zasmS)(xL6uLouo6Sq2D>6{3j^ zKhb*&AH?h$Llgmab@@Cj0hyv#p1~ix^NR~R7)=1fu!^_P%wff$H~J7vHeCR4$3wPE zA_Ug1d5f>SI{49s!4&xI7%#T49$U5(Bxi-0`xI^S*P%Ua@>*LU3c(Z8DSOH<{0IsuG)$Mn2Cn>?X=ZU1t zy}l%}uI%ElO$&2*`H 
zwPw$NA#mSoaOg{%I>M~Xv)&v2B)f@jAo{ZhT>$jiQ9<>|@d3tEo4r@pzSeSUuB(gI z7za@cJ^|Qo@mPE3*ZjBswhW969m^v1>U%|&jcDh+6b2rNuYDVFpS1k`C{??#Rwj47 z47PD$h3p?H2&%W-yp-@e(5itzZTvtic8Wl%Gkv& z#OBod;+hm0aA^>Nxe!eP9 zL-N;~n<^i_<0`u+Lkyl#1XQY~9qUnY9rqlhuRk69Bwx<4IpE$0xzWG9Il*X+3ICftxA$3Sw?-0j z7Tc|eP5{*-nSLQD)F9K|y&`7!ThFoh{xK4rODm+K^2ep+xXWV*TfH?1{y1Mc+`iJo zd-PH~=K>%13%wZ;_1vDn8_k2neLBd#Q4XMv;;zfu#JJgGPhYc3kkc;Nj-2#wxX_vr znXEGq=sQn=tTe>cbZbjrrBH9Ix+tf74m6$-zzO{Jm_>7mv*1W>ZKOEYiP+31_}Grn z9_H-pJ0 zXH;d>(87)0lRwH>g`D2`b|)0Ip2@4~KWqO$IJwp)^s*js8uc}DJ?y!VX0qL5eNJ6= z!VZY_V@^x%J{n4K!a79dSf9~WRh^D$F+ejTy=fMSUX(xe;bjj`{b3>@t>~Jn*2nxs zQ1#;poU)a-#$`vD>o0hytNY+uRVec9Yj&crUkSh;5jvK3+?Xos>=O&IqyNEGt@$mB z`~k<7rv~By_NS^UVR}#*=QZ#f58>Yl`Yiia8|HT%vac$+BGkLHt%#SsI%!&kR`0+W zsk`{>|CN0?pDEsQPFSw5%fcgi9fuY zK}||k{?ok-TPuOExudUF)Wp1eC<$SQ4uz1AY3V($wc4eZW+(bK6V<~M6TuV8ZZ{=; z7ZTOBeHQZF*HU)B9aObEsgp@h#~R zj&kRu18;dr!Hi`ij+!at%mb6`tGuU!U9wB57zMtr^khX%x$a5t4b{|#LyETH8A_3N z_une~>^qlw`s~FP0W>D?9rAG|rd!k}GEGidIN&W|19nr?>bL`#3Wm+}B@CR5p+)Yq zu@;9${LumAr|GA-?r(Rc&J7m3LP5pc2W34P{C!{(*+gA_nwH;GPg{8s6&~A9qH3rvp*GZS@%R#)qZJ5i~C)#F@q$$oWlrJ{OuOda3J^8v2SChOP}n?liU#esrJ}8%uFq8Cq|!>guO3amT_h zj#G9%9DIG+*aF3(dWGrE;ko8R2e)}4+s(Wt!us-a&>=)GYrHSrFX+!tIdbijuV@4f z=Cjtd9^j4xUJ?e}O8LZO`j5wg>v>^kvZJNOwv+zI0-I+w-)7!9=VYU<1oxl=*!CEc zjMjc#4uqE@+_gbRG^5OL{fiB}LbgsxOb@m>MWEr0u zOWoYQzaQD6-lNO4?-I0qv{+JECxsxHP`yZDORGl!>lOArj)T7J-F9ouBvRmNZNE}N zA)<{UtuBawh0*x>bD37Pa)?vK-hFk2F%Iw&3wH^{IyIdzD2GoZt`4;lZ6@(_gqvUZ zVrCrf*3Ps&VvB3hUuf_12z$`=Us1==d6n;cE4-g&n2w3o9Fn8lW2`QiU`B#g>qRME zGc6*!GNbkS-v&R;UJ5Gnpzb)Bf2F(={qeiS&dY<%N$tIqFPc|6q5UW%j@4My`)Gb^ zf(lBHY|6&WX{9WLt7XVe)+NCj#{=V)@H@QM`?eLC@El7yGPL6s078#_Z6q+#GiNnG-dNG&VMeV71R+&|BhVts1P^k zrNlX`0NOrm6qaU$YV=SmKTVP1F^(`|e4yRKtrO%Ra=fFQiNm(}`E)--$f-qYfi4)GyiRd3-x zW*5F~5B%#lN0D!{uU^_L&W%tv!q`KU>9Ygij&;1eQA)-14~dmltBS!cV$aofS*nAM z>E+UJZQ4hR9KCxfvf3StOyhJs+~tS}EJPXk9a?@tlvU=y78ivO&TI{h{9|pb)3Je# z`grUu+pQdVH|Rw8#bln5bL52e#EX`~ao>xv8_P)Q&Wuoz=Oj0-?IgZk)J%)d^ 
zK*$1%nN~*7M6)Kz=0wJvh(3$(!gMA|R!wOhowiajjQn*8vTGfq^MF`)Whd4%^5AjB z?K{o=-jQ+gj-Xt}&c?9g;Eyg}*e0=t12`jG8?$A0`rw%Fibw0XoL973!c@^X!>ve^ z!SiYQa3sf# zUV*zW1xSGLp&r7`o?E%yFk~*#NVa8Ybqwp!$3D^aNm+;%=;h1uoOOWr<9D&`I!m@A z#m`dZQG}yratbJW;kx==H%_X*nraiA~IR)h4fNM!vtosp|Y7?bzi7R1o=O zLDzLc=5XL^J=AZ)@8sxa7wb~RPT1nXpjc<^UAIxi17|Rh6}y@p zEm4I6+>}sKA^E+4*vz%RIRCs8>~t7JKE~uDj6SV4mL+&I5;mO^UP@AAcjq|pJ+w(1 zz`{BAE_z-?a^q+P$9|2=yFb?Os}4g(T7o*hF^6i$#qRW=3;y(Ec*|84*}h!aE?$r{ zSc$R}&Q>AFGS>wYF!&L58Q|P;^dZpnpzgU|lmZHb1N_uT;Y=G0Sw!e$p~i{Q(;m=h z47TUinS_Zq9U0v*)e1RY7Ew6r@k-lM*h;mkMNq-7vt&w0+6TPF+4Y^N^0e4i<*4i; zy3VH@fmQfHuh`&bC9o3H)<#dQ@l>sGPwcI+N={5Zay=q!>HM>B`IBO|SV`q%?CzBd zcknGdd5_2BCssRP9OkKP+pYcW331*C4t%%CGSnL$k|c#l9d9sf83B^ZkkD-h=Y!|1 zba?uU;fvf0X?qrXI^jhUS&)06RLnJ~X*>@qD;-W=>?}0?5vnQNm60?mie!L3yb$!! zHyMp1Xhfzm35aO*I^@S43P)vx4bpruQl@B)i`PJgPLXx(C1%Mc;{7P5PQ=n=4`hI> zy1z7#b8Rbb*u(k_nj&|~&;R|rjK$ehi{YOKa2I>$kUfLI@U6XTM?L}J-|eIaCLc)< zI#w(H*=^gxN=`U=?bGYI9S&XGPPg1W)Lep+_ikHwJ>|fZ8OO?=X;{l!y>lo&P&tV) zbJxFF4Q)Y)WG#0F<+o-P=Vo-_4?PUn2H7!dZ%7J|FTS9Gve85-Ic4U`emm^kvJz2O zI5_7QRz}zZz#of| z*D}GXd|aNoKjARHE+#T}f!Odh$bUDSsMPMgQQ~-oBq6$bMu_t^mH$hg`yA#EPR>@4 z;;e^q&Ug#Qv9zKx1ELYet6JziyFI?Cy}`Q1WA^`;2oiz8p_!z?;m#Lwgz?p%yy3dx z=hTB4Qw=Fd2`)oB#G7;@DqK*0wUa7s|JfUMFw@c=rotP+%mOoFIoC8#A5!E%3TT*@?X?}l8WXj*`Pv_qhrcTkW z0|g7~KQ`bmtxOa!CW}~NGRN;x0GT1-Q!@TpwqTX(iWQ_QZ4{)~Hh}sHnXn;B7DP5Z z04Q@gg+R|7B$mIa$sQF;x`MSwtIE|DkHkkENCEx)ntZtwoc~g{BUbgF>NIOtgTv6vR z$2~A_r-36s0UEi8Sl?Sw1af)v{p(Befa6~VBu}w<5DU{V(C6)~yVC-Qg_d@sfE>5` z$f;Ip5-u&`sB#iu!GVBPx;N_UNbTeAo_&4+<=!G>rs ztwPRJc<(z(F6TEce^is}OIB3e|XHXfEC0%n0awhC33bX|T1 zap-UGA<9vh>5(#fdnQ*@eSVUntLHILAjXwG-I?lyt3%^`c88l0HJ(2oQ0lBmxabY( z0)fWuor+nCOuUfguj{*jx++tvUkBCJCNpi(qT^~CzrKyT`u^ie3!H=Xv?;VjhCb*q zTN{6n05#RNpOj9xj@p)j>Q08=zyXXp87(j&mWmb~y=;9z5ewPxo9#-rADKvba9Z?T z_5i4Iu(n8ai8=x_oT~+&9`2<|z-!3GAgXKT#3B!7hcBlvvIwfVr=K5R9<6l2FR8KP z8k#{V)iBVUR>@n4suL2WKeIcx9Zl$7FRfJgLrC@^V5I?kA(~EyOKnH>+$t?fi1n7p z54FIGwQAtfbyG2{7%=^vKnGJ|SjVX$9dZq7Mx;LmcJ}Xkp52*ChWr0k=B@cHv$r(Z 
zwUe3ZZRW7uttfdj;3LGLbSH2%UT zgG$OV7O!RgGG+F$YxdPfV9rb~`~bvr1(^mik+>HC5kTf|1~3@BxsF#~dwoSUA9%4l z^-)HNxa<8Rl=LwWNn{OU-Zw86Kktqdg65VQ(Q|`*!J}WJ;nD_h^<{w1k|;z!Fawx=1(@Dl zFTH23f7@dv4wwz;RRTm>(q-{CxiLjdKOQCOqXv0I@8}DvL#BFlNEER6lix`Bg;x;@ zeY+33+APhQ@X(O+l$kEJw45Cymk-=clACZK+_+JJ>Im1V0ES9-3*k?e?%LXAg5S|h za$4+DlgGQF47HZ?NUDNLpc&kiwyax~mv4#!Hhkpm#TRFHmPWy&vUj=CKFql0aY!K~ zn;BTy?@$i8*DvI3osF5YMwVdNt%M&gfAK#JYpk(~$0_+pg8JdVcP;=n!ULFAx&oCO zvvhKj;tDWXzX2}AahE~;0iXu7I8^#(28JX57M3zHlAbjCfg)zRss<#|kI2PSad4wZ z8M^7D{8bRfc4Ot;6W&{}YoG!HY z(w5cBl>r!Z1Zim{%?ayIwu+k+;LHJ}Waq<^=Nb;UFPl-icEPV}>W&HcczEAlMujj5 zwSi52{fq!V=WJ#@!V?;y`94a)sqYQY4DhjZD$`|FGfHrcNM6GOV8gQoinrZsRWtck z3!%7~*XOT{VdDl=A3e8rK;y>BB9ZJHUgQH1A4U2_vo$lByKM`f10T z8dgjXX2hosH0w4x`(@~(7pf!ceU_?$bZ+@~b*vI)mk#?KvTLBO0hy~eOdr8jod`tT z_%O8qi`oSPpmc$fB;g`Z-1Mj8t`k>8rjxN%i)MfwlLT^03X_h7YBb{Ph6};M#XVcu z$NJ(pqVNJ;@AIRC?NlQ@pDo_J<2y+DSgUxk=V_AtsEXOotVq{IMJG$EzjTkT`yFnq z_7OLA==dJ{(O2y3KQ8RRnvi$@Hk4nu@-wEm_uL0yQRk-wV`8_JR#^zTUo7d>mu!UQo$T1((q74BVZ;iYIn8&)9^SH<( zxh>DMawdEC?4;)-bRu$rIPX^e!Wr0WR^9cBfVu8(_kY(hs@e&d0Bp*}UE{sZ?hvhc zy?0_Ql6^{RK(Ms)e7KY9)$i;Ju?K?v+^YOw-={ZFF)u(I>Sy7}kb5pfdd%|Va7PEj z6Y*Y2CEzV6SMGE~-Fp?G(*FW7NtcAZp%EFYG&NlUMdHd4$DlyvxqRN@Oo*K)qfq8F zn9SD1T?~TD4St=bmf|D)h)G6LVP$DM>OFvr^i}Hi#MW0SF{|{X2VhTzW5N<<2M7=` z&_|$p_}A=L8?Rr%rML2b)bW)s;_zvmodk_bgqs6(5`T%<#EfDsI-m49S>4e+;wINe z!=@IADom0efI*lkAA0je2}rV zkQZ1LZX!S7Y!ELzfc&e|9L9Xp{Rr*TAF6~+(O~>Wr1l9t0#z8OVn7CNwtBlq8CIcC zW;Z_e$p@5DIn`t3`n2y2R6zIv?Xe`MlGt10HVIkf%A4^Gm{pfCtxbf z(Aw5K!`;#YPO5gv0dc5g5-?HMFjWshZl`mL?WEn+&i^8G9cOS+IFf79p^?f?y2?r= zl79((9@HXp+Pwj>5klFYG+Yu^9|9sd;s6*n$5^$de|iYL!_#_3Dj$5X?C8fvWvL;f z0Ztsu{eWz?;!%x!+H5^#8ScgGTo_*!N;gkRYQRb(ax)qkpHBQoar9 z)Kj!1LuU@v)ePJQK)PHV-j9V?M0#QKL!%^#xCZx&PXj)mg=+?;hmOUwmOf(7dOhr` zHx#0Jk*)X$4YO~aMtxw6$u$qqK>S68;?7%8i~puUQD9dVc`May zQE>5{m9oiIIo+fB{VAuCJOC1&npzfar|hpP>O)7XKLI*35diDbGgbO{ju>Ns0i$NhUbKk2CH2Q8)!&6?BgZvs+_y#lCD0YDQ#v4e;fP+bZn| zPdJNxXM~Wc?kd{sW*yoO@r>o!|tH=UlnKR$bmlga- 
zcTB|%;jh_HVLz(3Lx)*$KCj5L!B!}hU*HthL?1!Ay(%aNq!Puqwl`jqY~RV?b3Q)Y z=Xrbx@QmKZQ`K!GI~F5@)QIif)m49l#Y~wXZ7_x+!Ou-1EJ;r(o?>4yo*(jNJdem~S^=Krlah!)Fb*m2>d48ub2E zY1kgk#)3e__P!d+-0$=I?8K05R{#d7jvW#{VJY(+k@R2=BOsI$7>PU=w|FkblT3Q< zCB=oYu@t(`W3qrKQyuhZmqO-r?;fz7qW@Gv^oB6$m^0Jl|2p)7zuYeii3IrL?pKFvqPAx>~2cJyE=4q#`O`$>}qDql@%-C#&p{e{jx*K)#x8V^qSuOkMXm?Sa|Y0aAA(? z{c*~6se`p?FCmO)tMX#5k*rhvK(v^82zIvk0s3OLZtb-siLd)hYXh%o@R$EkGxfZR zr`)&K50)m4{6?pIe5I?2{#MK&=`vk9LvV8!)}xzq6wf%qgLTAuE^w0`Ui)%?n>YXX zNC8W{h+|*YJ2WSO95`zVlBB&xh6+r^bLYFdn>h)GVAHT6p#8M>NVjyTbSSN~ba!_zx?$1M zASDJ$cc*lBcXxMl=GuF|@AsWI^qlMB5B4Q{#q&IK%rVEf@82MK0p~0m#OZMjN!s5j z=XCL!C^nh`d!e1Zxg!Ei$u(QWZgUXUO!`1tF*kqB9Z38OWI0iEfmRf|(y$`JK5x$U zsK;?Bj6I{c3vLD{HsQhL*3#-PZ(|PZ9TQK2gZOJfdN5k;wAYNVa#Tw$5?WxC>P+>; z$$9WC^#l@i@`nMlCtrJkNJgcEg!lPb@GpWo;x-aoRc}MT}|1g=FmO04XV(<1~j_p<5*lbEqXufdwl%Ddwr0IBX8H%tY_H4lK6hMcISA zW)~0WMD+n%>nq4?8CLDCJZcm9*n!vJbjpgqzAHmI%@VwvU1}ha%MXPd_WIibPa zA6H8!_?(=?IX^I8jF1$|>>F70A@nsc{L))k9rkw&Yoxh(E_5WGOV|D=Pxp zyxw0qNCWYIG2qyBR@rZ0R*KGj%Mu41FPx)FhqRByayOYwCz-eM9V*}6J%uHqlHQqC zpbb<8^7hf0xd2prp!l5QJ)7189$=d{S-o?BEwl}Co07$r5a1A+F$Wv|u4{iGt5P*k z!|cE)PoxJh>wUJbGn+e1BB|jpqJVUD(za^2vDb2j@lU@Q5Y^_oT^uS_SnHgTOXr$p z62J0Ht*44j(DeCb1^a_E{GdiqrkA8nvVe$!Y*z4odLU6S&&~rJb!|}Javbq{q>x{| z%Z0N@)9nftkVF?Ku}y%=86KzTy(vJAu>k%5B84~orxCy{1J1ks{s}>sA@(1Zg9St% zB;aU5(j9MO}P_)6(rWFI+tm2tccB@%)&K-7JZ;gtm zq<0bm-5TdDHYUFeLvgRU0;3f6zkkRDOO~-=_(XmK$1;mi9>Qs}D13BkMh@qPTq;414wf8M|WspysGIWZWGb^YZBYGwVzNwycQT zA%8)4WSJGnh|BXxRDNcmu{dage##6V>Oyvgj?0KVZ#Hdt>J9ISc}8_uOE z*)((+_s;WHz31ddu+$vvbS@zApzv8Io90kKF3jfzOeP7v1Mel9;#$puc46AGYGlW@ z2X@w%kE9je=c$%I^?Xu+{sNhbZ|Sqp`Nljf96~Z z%o>^@cD%p5QNC~+InBe?-%Vc_^@h5(`94IoRS1K1>Y93iD}IR0Wbi9}Ti_bKy!MY^ z0sik9kB9rS>Q=t;dzv2~0`e5wjSxGkQMG;tCOi8#TFfs=0a^;y8*8})K(KqVGeq5` zvtZO8E_9Z;FVpvRK@$P!ra`5x7-XH}sOi~RG77mw4#To4muQ~-GSytWFagUpT;IO4 zhlWDNBl@R?pc*k}JC(mj%@hafjG@H5Yb_~usK5F~e|V?B1NnMh2D8+WP!?ISQD3U1 zlt>R6o+XGI4zV$?QV{ZKhD0LftOY0&JFS_mD=WN%1+bklAR7_|lrIbLP=q8$|ERA$ zo$Mm~P9jgLb-X47J1(?bK>t 
z0oCX9^+#~;z@oAhTro9P zWO?cbS{P>ofvwlmpmtxbF_OlmWoH72Y!Pu&9(tPeg7^wSiKcXzF(Z!BC;Gj=sL{f5 zN6ptd$~nXeGc?Qq;3<2ULdCHJB|*o>f&cJm5MT|s*~dPwcFGqxfkH*aruYMWP*%22 z*9TQMKnQ_VwYN$kGDtrvalth+y8_`u!cK^GKbgxRE9?ia-vL4@C5T$2$B5(2RC40OxD|ukGey}aPK)vB|IW|@j&I)b; zb=4_g+1yH%YuYfaPZfMv6`li>TObSCpk;b$NIRS>_dQ$#RN{BJE_dx`OH78gw+@yp zS`Jqj)d^U1{jwjPoPF7o8D2Mn6f4G z3Fa2iqjloj7tu+vmk_w%GZN_?ennUkhmuKN7-ERh+))UQ_hJ^1%nh;2-& z5x27Y8N@y08}X`220B1iMW14)8<6(;=-sBKn}H4_EwA2sK>(I_Ab)JUD#<&NcLw>4 zyPW6bYBs@*BSZ04U!TTCU(oG#xhT`qI@;tMP}MqSJaY!=z^+Fy4ik}`=kYXnkmTmV zvV_5=W>9#kRst!EMO)21j?FA&{9!7skYei51vw+w;;ENBE9a;P7Z~5aPN>`&J0mvB z+~b=5+eQ^I_DVfJAZEn%o2lkZy~h*%%WrAO!)a^z;?NDsUt4-jFcA@0`Nol$iji4{ z?D6e=U%!J}I8!3@`-dki^&PY$If2IAA=&R&DV@F)3Aj9lyNnxn-pz28iCA{u-$_Vr z{;GThdxkYY%ASb;P6@TgP3-~kbV_;lcBB~|2E9TgXRyyv>;FHWgVJzdz3m4$6&_Rl z+W(G7K|1eCkh18Tvd=neUF6}>Akh<)*i3Hy9Ig^uhB9NdS>oFch$8Q(lYeb3+~@a} z7wiVXKpEWr+c69?#BDfOgSqRUj=9u#lvH-QyFN^*;{RQM06Kj^@CjHiA|GEp3VyA+ z)=d8#t;hYBe}AC>^UdA5fzCFb6^9BqByO=+8O92yT_89WH>C2g*(J!7qnx{|ZFr3h}U?;^N`)kTu9!YV9qaa9I^$`8)H9j?f zmbQv^9j)a{Z(tNevL5&U)nl_<3Ogn`{N2IV^dwhj!h>DmqyKFX#B={O!uv22ywk;T z$U<2Z==i;$VSM~cl>sZ=eD;9-)4z_Ui7K8heJQWcZxp2e$y<;MO03@VE&k=X$c+?S z&${zx6+!BcmNPJmtr_!sHhz6lG{MH@vj4F)5SU1R7a~gXL22(NXx|h=uKEVLfQqnk zX^|lijd=I;`(d_>%wQ#NEEeZ4oBFRw%KkbQpfz>0I_jC-q61k#)t7(XEIB->GUMA% z^Nhcab`&xh5_lx&Zwy$=By1`h_f?9vI9i2q%_ z53uboyesY2R<~b=3y=uH6=qR4OS2gW(Q`_-?mj5om} zw`zU&#}UiS@T}zX03SV8O-p<8U3(?}&ICmzw`xTT+5NEXR>42>sMEwK3e;mLesU1} z>I=Zc39NJghp(dL@{11RpD*D}0RTDC@JrUeT5YfzQhUxH02$-HLHN6Oj)Jj%FhiSu z^<2YVgQOU+<`V&6>zD8B&4W2!Vx!loq*}J_G*x3F*LYNJK5M>r>@k|*_JV4`X0p;` zDIg_$qRJxVrqKFUYD&*SY*n?emuY<}82cDu{`S|O0~7iBtkQGBieI|)6vweJqm3wS zW&XrWNiDzHssz|DCwuy8k`1)poPV0PUC(A3pWfh~3UDxkITaPlbSb%)g3WSHZxPnk zF%+4!Ol$Y*Te3!T^ps}rjHm6&9*{2gVbvq$1S@|b51+WG*qPe7QPL_g!D^i<18zs_ z1v16D!CA-A5%x*G(#^?+wbTNw?~;X#PJH@TyJOmGM}lfawTjj`KD~+jQYAN!O$=Ma zV-Qm`_LV@p{Ctj78upa^*CRc}6XjZ6e~X!-7CyY=3>EYe|0yW@<-{rp;&h45TIbJw zn+qHFt29s5JJG#jEh+`1~mg+vN+u~MW5qCrvW 
zJ}^DOcK*$C%4cFInk%1#KmrJ$&f_Y6%@sj~OJd6@w;@I?{W9rfUz-Vc@cmN}a}~pl zyLS&FTtjSEoNA)&N{(}qqhgi0v(tEY<{ZLQXLTRHJNN8pj;fAigo$2k_TJX1S^KaS zl~e)c(b{;c!3W8c=E9HEZE-HHdrMw6i( zb1ED2Ts`5uKZAElmEw+W0{al|+U)a1Q`|nLSf2`e6cf&*>&a-iM7#IjQ7UPGG*qkZ zZq#(xOglD9-mYz!iL~y<^zU+*kje1lA6Vy2V+~NlljUT>0FpSA1EePtkTlj25oOY;CkT-3QC_ zijMl(s8uru4J>vxW7+HuwZ@WoPY|5YP99r6xkznX!)bO9==M3!wdwosj|Wh$M8UZ} zXbOvat}rQr=H8}Swx#XwyP(Gp*)d?hHa+1wF&~)b%xmK>wt~f$w z)xqba-J&XlSGH?@o+eQBlYxvO=Y^g(l==!&`8s+@uN;bU)vLyL@CU!gzvAB&;$B(o znpv~rzw$ioh1U!zyjdz|Qxg+=rz$M(s8aNV!7=~Um1>lG$0^yw*m6&uzmO0HZF&z8 z6Xm0)v2256evNnhbK+ypYu(PHLmSC*H+g-|z1=VD`rPM^Qe0P^Q!jcPGz4?4lGw*t z<)o)(~t8UCr3X^9qDb zIyCwa*P-pzTW~t2|XLk<+$;av=bYc_wSm#bp3dPTua(at5J_U=9_-L1(%#Z?TuokLy zn#5l6Y!sO_x!{}QYC`rckrBjOkUg(|!gs5lJoCe1s%5(-w3s`JDsBw4LXo1;?Y;7F zRCb+l$W41dsKzE|7jj9Z+ndVw%Dhatc6l>&udSAQ5MJWarrpFSFwAW9QHbYFWk1_q z4Oauagx^?nwIgz5C+;cV@p^lhKx8#P4kifxps>b@Vfx4PwvnDl4k+2)~Z{Mn{G-mPo9He8Uu% zvT)BiDO{_gEu}dQH&$C^TzsT2X8CHbB+`xz-XxxfI{S(^d@XwPlx}j7KfW|2OQOm^ zOE_JPMPOJpVfqIu5A_UH*xk#}XU9Iy;fvm#9@-FH&FQ5#$X05F?^l=^F}?QcD&!JQ zN8r_n@8ZM!<6GpO+`}@%^`CCRW>VE3Qn*OtVNqK7lI@Ol4+H(zKi=@(zS6r-T;^;g z&u^h2J{YC8#gTSJe z=PlmT3(@5yUWZ_Y{AbQ48AWM0z2z;-UC9?cUwz2@swEAenIWE14C^->W&svOvOS)* z4jGetGpvV_x}}@!5-R!~d-YR?DXOIm=*+bwsX5TJnptA5=Z7)dIKW<$?B8|5Dp6br zjX$~2hp1}R(MnP3<^_u?Tv5Q0k^JTdXn(qQ9QNxQYxdUs+;0^);wh49xX&rEn5T$6 zO~*~<$`&H#JdvGJJw-lF1~U?2tOQYRVwpS@3F*-jPEgmE8ClD7( zsj^{r9A;0s1|?)qVT|1d|L)1oIfZYZN$AbWsf^1dOWuxoZoY}oq$#QNa^5=Y(p)%RJ4kQrhQqztM`yQc|Szu!e&nA;UJ_N>O(SvrjbY^*I{6>y<7h&YDF@>c%52y{ zi!^hi&`bX?XFF(6&nvb$GIM#o;ur&^GXKt?$|6FR6Q$q9*w^riB;JJNdtE1AcoM*P zfG3wHRpi%2^mmJMED;?NJ$nxQ7;*QHg4FU+_fXv_bBbs0Y~L#8$j~I;?%@l}w5lbQ zukD3+(jiW$7QPogn&+t*s+67Wu4hc0?2*iQsp?G}5 zm)sS7dOB~HZ7QuOw0B2$(WkM0S)XsOtv>IPH&0)@Z-i#FJgh|Scb~L)Ev4_L3hCr=eMBE@+23FYRgqAKS=Mn5=P6{%bTtK{p~2u0<8DViO+Y53Y@9Ot+i(kg z?M#gxuazKOhrR(>U|5%1#i3V5_sBYpaMDoOTLL#1P2sa!;+FhP8e*bZT6;F9(`k2s z$qqyCa;U_px9IA0;+%h8txF)SS>8q8CUhdg&d~px_$^}6h{cC#ij}K5&#PAwxJMq4 
zr3=2eK&*TrKk-9Fmsx3BjoZT^pC4Kdf_WE|Yd6f?B+4#?H2Bi=$?Qgs%2mf_GR0Dx z>Zw7C53J;ho+~GL1i0rD6bj-pAccT{u&IaQ+B4A2@7WAzIGS-M0a-I}pAIazZN$^@ zMP-B5nA43Tv&1`RFm6*#(I@hzD>+YEMba^(IPZHSG@`Ok%NTEY+*}G<{`3scA}d!< z{QbTJeZ7J8qs+!dy2ZRUQ>O6!xcyNTO$aJvJ++@PI3`%%>2nc4V(cdMI!k)2}2!kfTqYxaX(~$>5pis{dO3cqkuU^4C47v;i zpqZHt8e3(zK=_6{@UDT9@oVwOI#f&KCd|1r^|m#K(b} z7I{+>h;bk*u}Y1A)+Yc&kOECVnC5^&)ELard6NihW`qK&oBSfcvb@aMkcN8_gSZg! z{4M>Cj3^+4nWJ8Trt8-NpkeX{ZIg%1Q-Hiq-VLTky|kL;*)AE9dTR#pvTAG&Qpx@aDx^3uygXVzc3ji5w1wCw2%`RV5ezy_c!+7oq4~yM(P#!8zt?O*( zg^{c|W&-zKO3G2$8cwu5Na{!+`x+OpBPh>MY77J$$`0Nz^jvn*CvO9k`xDdA9K6-; zM7<%%j7JS<}&^5>xUAE46|paM~GqG`#k0tn0n&?J1NXzq*NVcR_; z$ppIT1wb_unDpgCDKJGmUk(pmcLHKz)eE3^_7j%nG1@dzb5h$G`W}(%2@2j#+cT|@0|iYau{a6;^Si^!T#o{V_;xF0geE- z_F<8;-I=E4j%dkkq31KkU&17sDVG`DTV}9v>?TRz^Cf;4KR=lxRVcn;0!zF9B~NIM zbe9^(#8b)rzWTU_whEmB&#mYem(J3bmPmD-&!p*9(8#ys)Ud)%V)>Hkxr;_s%@V1} z$mS|+mKuRPp(p78+)ZR48qI*>X$GQ_?X?G>K1(nbO_iv@G7?FRK5+X8G?$3Lx_|_m zcmP?B6qUFZYUTm+vLs(m+Yd?Z`1FHHlGhmdDd-s|cm#Yv0p-!F2+7gD1ePZF zZtxIvAKKa8dvojtWd0%0gqS{k7>`clYaVE)7MzT#si`3*R+jwK&XoNkG7j8*g2jeC zjgss$`im=|t!pBIu5-{I6ZE^i7~0VJ`Ze6@U3ir8b%cMP!hT}W$5dberD`SR0%Gcy z<`oM8>A*+fUsVkDO|=RYKT&}~vQ?d8a*QF=H^YM600=Q#me5~_I~xRK^ESA=F7_nH z^cM4#L9)cK{*f|NxrX1Q4{)e%@_VApj{$V4sG!&{s%48+i`6=X2Ag-7-k)dN%+`1L(EC0@z`gqqm08egHx1e)gGg;Niky#DCvW14ApqVk`J@)O+68PTD*#-x5e!=}aTeICTq8neaRmVOCz#qt zJ87LEzrEt%4_-Ty$gGl4p-b_e3^P0Js$(r8-hC`-BXaGAm*~5I;*gVR=ZUN<)ld^d z=I0opMxDWHLl1T7H4`8)je*H2kYG$MrQcr3f?N(R%;_i3O=vZ>GT2@ z_uTp7X$&BestNpYwxqh44^M*W^2DaDfb4jji-#1c_7bF3-x-ojA0XLa+|&VLDD!F) zS`)Cl$jQK*NGLD|hhRO|Kb`i`2P<9TQD)m z47|X+E}wEuXvW{UwoEFL8vxUI_r2i5LE6&P4C}LO?2M0#%)hE!OGO5f8fOyfGr*NH zsw@%DzKOC7bX#1$zjj6B+@tiBB^6f*Xl9JZ0sCl(trS>px|pQ+4zlyAXe}z;6pyzj zUchjo3!u&XILW-|Enm{4^9S+zqpX*-ZonF&vg>g86NtLmcUIlX-GP!ppy@fnO`=h1 zVwt+Lav@o`hVwYn$(#QaxIR7i23Ht+M{$7?!f2}p#b-$f^ypD2F#|dtTvmOt3Wi5- zL)xM1XF!pzrU66+Xw%~h893<%l^a6ilQ0qOXf!dFv@8$RLF%$Fp_)Z$Y;MGz;IfeX z@~U3GW^E3^1zlyjKWsi`4G7^2FyYGCa?>_|mjy*8PQLb%oCHyqz)+a;P0GC;7!#*M 
z*M;fNn2;!&Z9RAT$7P}p%I+PCfz635XERQuffoHyrWwF}S8HYuwxv(of``^~x zYZLSYK6e+BmtWX~NEq&or7b>K>~fp^9&_ZJk-56$u)?ZkNM;Cl2CSzM=$V#&2`d}c z0u3-PaZc%|YF%eW1mC4``dN)Lc;1lX2!Z7QyD_!~_%$lO1%*7I`;rLcKSP@IOCnRi zX@=Q8vQY-x`GM{$FjMaPWKSm0jGkwa4S~62tkzADN}$!;bAK2asJCeLKN8G@s^^<3 z98DiPBu-_VBBv`yd)J;m2?pcp_|1p;j4 zAJ1pwAr`B;kl?Cvi!xNbsa)|TBe%iq8B z!FsHqQvj-V%o@z=#2J3u>;v04+NDMhyas?JkpM9=wPzh+VP+yc(EkTr z?14V^m9TO@_*XcAp2=Gr=8(A$0K$vZd2-6OSaqLH_h-hmd?h(d=2~~(J_9c-|k=| zC^GP<5r5!b5N3;D5EKB9wb=mk`KHdIf@m`Ry|gmdVSwKzQKoKg2+mO=BO z&Kl5omtFTlzaqxkkoOg@#eXGOiEh2mv+Q$P;~w*q`{iwHgVl90iZ?R3JlDk`y7E#h zA54*`1JZ|3CdQYZAb)-c|FdG?;R!h{2G>);ZJ0Ii&+`G3sX#M`)9NHYxDXmM$^W3b z3fB&l!DjCyOVD1vBZqgRt-cMw7sxgsi(hMo8DSV+7L<#7?3iEoac$+0xoZ=k5cAcD z6(gFLi&R$2O#lQIp(8kL^p~2Af$W_435`z!Y`0RYv)_1+b%dUYafi-b2r@uF*qGqe z=(|C}q$JC#j%MlnXoOmh(fx3ISyJ?xr8ch<#B-=@9dFmT=WH8lYcjhfNQ@bA(dt5J zbf3Amtl!ryUE?VCAK)jJ2?{mpWF#4_;~~|#MeWNO_n=w+t)O@cH6hvxvoKxi@Q+XU#SdjQSW2fBiB8J zk>MrxU;hMIZ{krQIcf`*>}Jr{$RWNlG1?A`6U~3!V&D0^zAh&}>cByR94+34ODC=^ zs0@z$85*10m~Q?N8e4du5};SKHHWx9<}=`%c(_M5{{H3#947NC^uL17yWl@Gou+x# z?&0;#ga%@yh9f?**%PtC%71ytLC&`*>Gbmf&N!`K&sUN_h~TmpIK%St9QO;Z3XAFV zm3CY+R&VQSx*-xFzJuB!;<(|7*f*ocAZGXCAi+~l)&W0ukKACrpYqT3IZ(eahNDV# zSvJhjSe+2v6f=TJvXBzoXx^ZFWjaQB{I;5OlN|g-V2SsK0iY$`(uyGB4MzL{6#yPQ zXX^8z@U_w=9}23h^LKr_lVwv$HOd&~TH7t%j)oXd2jg~(Ldx>||R zc*F7L7+}n~BYOXOWV4P-Ct$y{Zh5K%U-OQohR(S!gqQ(RPo~}YfiL zr1?ygKaa0<(P17h@Og-F7;Yd7-#%CK{;eM2RZHdTJM>kSn)|%$8vvwl3AYN;Ok!Ld zsKWO&kE(;h#;)6gm4t&(QlG{hpih^eVO?$Xu|xRzln+p6OYM@5yzJO?1AuEpOWuCxP%`}2K_0}J zKTr#OBLqTX%*3&Dy^($>DovK?Wi8?H7_v2rqNx% zL&A#ewAlp;QkA4!wK)FLZ7-g%JpEI_+b;A{0I~4}svxdFx2@oF>!0^+1|HDYT#ZW$ zHQBE=($3nzz>H7zTzR-L@MesCr-#QioMsdX>c=!)bDb|qJHg28?+y47w$LX~KTE`N zNcdvq`m8&|U~#66k6ahCGj2a&7@s}-DH&oqcd-iF?KW}%G$5ySv7piPL_YaWQ9Wq(GTXC z>=}AYYEBc_wgzeugaB7ywuSqB_XN>M0Gm9kiu9bcPPVm{W1-BMLe~i@v|F*#=+mDI znO~IctPqrMU?p=L!Icy+s}p5tMde-oMo47b!`hUc-)_6^?Hr+KQ=Cu zE0M&S)hi>?{*gV0Bax&oP(t@@f#<*} zyA3M~NaSCR7U(p=-Fu-{TgR8?am5zu1h2#635E_RS*wrVt>EdYBVX4Xm%W$lNPMee 
zyD62(+9K5L)~1`crp!v<0Csenh7-6xqZ+0}en2ODBs3*|_08ZvFF%BPK?!tiHt(5) z|7>3>zK4V0EWBOyxwjxcGy(9wR;iI2qifa4JRuOtD(ys5*E0e^>n?r9DB1WBWKE*< z`?Az2&iIHyiQQ9TPiIpg7YTt_J@V3wTl+si9qZ^(&4ld)RT?YQ%TMm+tp3h|^L>YB zXUMfDq$V5U7>*95B()FBuh?$#XVe%HS$GGAn54)}>=LL%F zI@z$}&j`BK#ur#RPomw5p^1=aKu#LPRvc|puDThr{f~Lhp*YFaOxod;)5$-IWu3hD z8Z*kw#(D*kFb@=G6^RB(S6v<`)K!&-UQ#YFMk^Y7>aDvH;fZF z0y(;=F9v7n+xT(Gdr9rlAH{ks5aOTRs)cV_$ia&U=q^@@j8Bsa1|-Dq)Ppf44a)6p z^Wpt%Ho-5h!7h~Zaq-bBMEQNH7;CP+PPfZMs|lEMhzcYxWi;-uEA(ojuwKN%yfoND zR;`tR_L2(NeM*~l(tYvN$NBj4tjS8)Z|LW+Ap4XQ0@rcOyb?fWo3VZgnVJ z1r;-(lo0ndwXJD~VH) zm9r1je^sJ%rrIFiAs4D3z5K9-Nq%ko7T1kgptkCHg1#K)jtl~+We5Hy0neTh--+wv z_qIN*Jx3g?bipo!%xz%0GKN+O=wo$=kwam4D(_O!3T+fwJeVyY?7nOCxoCoVjRPL_ zTOvm5ORDA3X2i^H4%?)7vTI95dLB=T&zFFFUc-f4oz$cE3M}UnuY=vT(X+}=1LG2J zLs_z}z%oO2p2d6=hky>L(79UoMxY-ntf*O2=45*-LdQ1nc08E zf^5cnhx#zWH~f8pe#@GJRw`^N|2lP~4_e{Wz2}RLrxQj9&GChLZK|5p5JEpc3Z*50 z!og03EFvAX3lc2G!rUfYJb>7KN{N1eCHokS{m?Wj3B4~{-VvaIl!^I(QSLP%P}CM# zOjonkp;m6AbGx2wK1IJm#}x(Ra5OU^_?WuI6tMFx6+J(I^tbARLi+-ES=yki-0<02 z*T8s9mkwM~fx`UMqapt#w`VZ#(oIE!+7sDl`F!EuU2z>5M4+mZ)HMg7Qe{SuzOvsf z?A92v)5m{@0>&fg^38Oso&w$t3RDh)PWE9zp)5+Czs)}3jB-mD}s=UK>Z?1&%X2CZ*#E00%4v)nmqoQuFX&ke)UI-X^#mj$I}hqYATwmOe8>2^mo^KdiI8FH47qND*e}; zUrBvExD`rY((ic=0GfM=TPJR!Wcq-BT(cKpV zo(CYT_ooX=J{Lflr{PIe4hf9k0hKW4Vi=%->b!D&t@6>5L+cvG^uCbLS4tFWCIP^( zo?=76XX~zEs=YS(o?>VL=CB)E2<)?-O9_qE@x>hj{y9T>D!XX|MOq9bdV z8$VA2bih67iACJcl%Fp^3TM#B>hrSo;7v}KeqFwYL`@FU*%_ZfGIMGhm|GRHT`>#T zJ`eT|_*Qw{$wD`>^Xp8vjPGZpGM#`>!j>LglaH8(>%(&;c+-oqkR($F8~*Hn;tomy z+sd%1RW&b`+a*Zow`mMG8VChKXJJsQX`|s9Qs84Li>zJ$*$`BqH@t_r{O_?l% z_?6^e8}t9H`*~tGp5}0`j;39Zlgp~oe-Q~QLhu69lxG^O$Z7D@1fh~6iCCb?mXXc6p^u29`~>Pj@Kp_&@QHKYOU+I1^>XW(utzlUB~G5 zHdlRum546T#M@8SMT}(opfOsl&4QK!^H+Hzit>z0JtFaN#rPTIu#E*ri8gcTnlbw@ zG8CO{qW;`va*fEFiF^d*>@)O7bL|fh#QK`WxO~o@pYUS{V;u2W=hM~ko((|uamh!- ze8ngXV9w7e*1I+<2$9GtJ4K|svr6rul2WNa~T`{aR z6=_gtw>zFDfomFTnz`@UVe?D+OX|N{PAk5D5Rc!kWPWw=Gmh>6p$xfeYMt{Qh7t@| 
zIYkkR>-EE1N!f;8DdNqf*odReWOoTFsR$3XVTNssMjMV^DklBgY@DEhns%3WuN(Ps z{fG)=F;7vHNB)s9I#V}9o86YEJMU%x^FjRkn*LKrCc^^(>O?W*a;c~;EruU zez5=RtN-(D{`rf{B0LO^Xbqa72)0^L_lrPN65Hcxx-Hl|9)nrzUP!U-m&Zu_ch`0a zY!<4ZZy=&?u+d2Et&TX3_CJ(a$`)A~W`HF?kImNwPiftgQTW&;y z;iiN?B5uyqlR=gNDcl+hpbLL}%U4F1$CRb=U#|ZDmZ)k!KyMg21Vy6q<2NvaeNZAu zncQ%>9nBN?J?mDggyz-2;E?sHaw`a!6)zA=tI$XpudchGNViN%k%4@$xu8E36!n#Z zYy@LA5;#p+`($kmt5${(<}$!MUGH0x-ctar!wCeX(QzK#OmhTgvm%1wi<;?w7+V2=B}}`+Ec;>>*6oqt z5jv)Tw@?`eh)aCXaFy5HY4QLQoXYC!_+he`F#vr`x8nu_&_iBnh>-Aks66r9JF1kR zX)*&bzm@n+B92d)w8r%5&zrWky%moZJ$ji4mU}EBC=@Q+-tfDf~ zRSx67?n?i$DgU|t@ArVomxm!qd>z`rc*T1|=@e=BO zFPiu8UsIlcyXJqr4lBk7Xkt+K=jAGD9jZ)qm`rzqzI|oA8|y&D&rsI|l%4H+8EN z2X7zmFEm-qj3{Yo=%--+UbW|i_#_Q>W(cXb30JEFy`qIyd@B?%_sq4EVOsg24hx&tC?qSVLd8$XOJd@sA){@5V`ID+z0Tj42}G!Ez~lZ=Shrnaj>a% z=k<42_51Vp(fsf&E7D~Q096dx^Y+5%9Mqy8F^u8=zdY&JNY{WbxBx;D3?t~f@dCu0 zN1*71#_Mw2USC8|b`BD@rg$w66S7rRvs?xJ5K!9Y3MJy@f{AZ_zWPGuAE)=G<_4(B zRJ9U7!N!u)l%a}dM`jQ7Q&sH0#nIzmMea0axxmSq!$ zIzWE2klsFW3cBIUD|g^2cnoBGdCt0ARX{E04+Dw_@%q)?-JaD`Jp|vnR&g1q9oR`R zG0>Wtuom{f@Y+}Dd!SK)0@KW)Kfn;;zg#D`lg+pWpbm6w237qnSiQiRRkib^VnrQ$aGZMZKSZG_28@+I9D1I=&pFBFwpFCu(`6c}8f;<2gzO?(8 z4InwTrL${qK1m2mJbdU&Ad9hJwb&d))e6JHVIxeO`9^d_{Yp;TCVv7xetM zaax%d@l!6kPLy#h$D2xR+&S0gT3CUmK__@U5|kyR*8#%`0_N;B!t|5o6oe!J+MJ;0 zG(-i7C=(cdJK4xWpMufLmTK6aGQKWH^}R^qrmNIFxw&6ro&4itPS$~Fh$m?L;SwKX zmymqc{yYUfJW&}P!0DC-mDg#Ry8;!|m0Hr`b6BpN=sO8WdYV9Py)6An;5I2{8ylb+ z6n_A&5NcGqV4&)@9(-Lzy73oBa`eyp)kVDAm@G5sLcGoNr0OhEBBA}6Kt)*CqG;HU zp*4Zk{Qgf3;W$za7_Xe@V~PC56XLFm7!6Utbd1Bg0jP@Nl><){sfWCu>D=}EmrN2% zzq2lr4Swya#9IqSYnN<2r$fW0V3+t$a4J6)SOSjmH~9 z7I#ZiroVscN2siUIBoLr>Fx~oyt+RGnz0}PFWOj*Ng3t9A)4-5AsdGn=jZ7T+C~x) z`|UA8@j?w0@(f$ngSHH9E)q&p_wxgB%_PEwRfwDVC%SZ4>JW)7U8> zha1AtHd6LlK$4^C&|ouka`vS8F7zuv1>SsO{->|S<)m4DqE}E8J{Ez01)UQ?6SX$h3PGQ50cMCgH=Z=j&RoT*+C|ENMZNT_^Z&*rr2YwoUxU-AM-q0Oo(fd z?2OoX=z~6N0A1SR(eBHD$1$D#_RKu8l-e0&@-+r40?ib4%!L6%Dj*{-Bh1r$| zj8PSyv=t>FArgi$E2bG~19l}AZP89;(`H!hWuPi@4S_J0VCE%+l& 
z0Vu4R2bg0uIo$Q>>_dDpP!2rka(`GD|2zeZQz`jSw3ciaoMX60`g@ct`de>f%p8yk1Qgo*V-e=8G4K2+wcoPBEh!L`Lmrk2)P7*_5&n% z5c}eV?Bd4#`z$8V!WnWV>1F|WXI&b*MQw;z74ygGU63A>I)d8b&}({AKY93e!N+{# z!IwxsxdfJ?(q5Ph^8%87NK?1RaGu5#k0=IR$NgX&+I0PchH2O)3&e?uIRbls9Wy#f z??oj>-~aSAw9gxaRF+UJ>_u{g+GQE4%{Mmm@s~>{t7?38xqZ>mUheOV{N#*kMRmlK zEQOiYbSN%=p7{>4*3AzY)}eXDirgLYGWt~pd7dpTuzASChHakmQpfYHO&Kye#OdSJ z7K+*3uO%+K~njP(R>Yx0~@0j#EqBnmjzEn_H@?*~yid zqUK+MY)+=-^_CD<#)yxmWY)QjQu5nH?Hy%`8~k4K9 z{Fv)mStgx=SPn#Nwm<6@wpXjr&n%#ZZ|%i#q$UDES${Hb9$j=!L`YTc@D`7gwd3sK z_G%Bo*0M7$?RBEFZq)J1Xbgc-)Hi!z@^L{@gpHMeR@&w&W03X?NUtE=NR_g|koYH9 zpaS04fZSwu6eA~9sN7d~1IutaOe-DK=EO*=2c~2)Zuxu&KjR5FT1yi5#oGn5xPI679vxAl$8o;+C|L#l_%ng-_?Y zOlbdH9tGF%V!^wWe=L@!fccU%X@kM>Z(m8*xD!PM@n6JdspoY;MH+D0Exr_N_IzZM zti%&QOLKw=oI6hvu~*-Fx?Q<-wUwVZ;~HQ-zqr;*yC7id3WTgv-*{PE+s&To2}|IG zlyu;lj^)h+8-E^mShZ6>KWK_TPF-FmXxgp2vJ@A6?SV?BKoLg#?E3i>P)#FyK$nLM zi|Pg;eNDD*TD3gEGEuwN#~%B&k^B7Ho;Il;PF&nR2FK@jIdq;)4}1}w!piZZ3=Zk7 z94-9Z$l8bzgX zcARyo)Ux6H?}MrL(|3Q|{DFaH#1wjkm8(`>CKg^(54KhMT;e%9XPvxP=sT>@J4IcL z>43_v$X>rNptEnC%7XKIu65hrEOpzQU%@Nb2xo&sr!b5QC!Rd3uA~ zhwFld^Ls47X3!N&wpZ z{2#`?I-u$-{aXc|GP?kcDWbrzaoE8@}}NgA^j--v}nVl}lH5o^q^PRT-{v%JyW_l)MB z>hDd-_2`SbG>?8fDCu~rr5+Rwa`|cXU#WDw0{BYmw67}3P9m1uxB`Ui9v*q&UH03W z^0p$7Vg-b1AL_TCet<5+ozcc+o4z|ibO+HIFEf{;H{F&{l{1<>g;TrucJ zW|pMpXyabs1*l+3tUbc)T{fVnSAw9`mJ$C$P{+?51d^_7~Hrxpyqn9 z;ka8R%DfgGWu8P2icE>nLk(Nir$H@3e~htE2r%1fSZOr@L+^MvLhrTWv_#qaE_b)N ziBhG?b~15`;|1nI_d#m@f%58AL zVNJ4rK>En7=uks!QtNQThHs^c^!y#yuE(>?1vlCD;tC?C%4JmR|2sf0KtXVG8u+yz z^?i|6E9Go=>-0%kv`iJh*0cDFSWB&Gzq_X_iug_aSEm|zFgmF!%i2Rt9XEvhhQp3W zB)rVbRvuY(E^#lGdKu~p+BbVDWR^Jp0P_pnwNifa^aymDN|8;^6g&FOtTJv7dl}#P zemO8&0WfT5`BUZspOBB3c+=xiEy?ZQ+OSp zvuqg5KlH8z&*ey_S6Aq~`lAR!Bj}wtdv{BytF~uXb9JecI7nToF9619XKccbn+7#H z(V{*|bm;p1bZ269Iesg@EalK8&UH+($>i${VhPO82^YtKM}CHKb$CmJy)Le742p0M zAEZ9K@_MMA;mhRujAZ5dktDf9r=Sl+x)GBtH321~;H)AeGpS_mVE*a0c1f+&$#wbK z9c6tRYYqQ0d{|A?x#!UE5oVyfHHM3&4)LndAjeaTHTU|P!p?klOAL8jo0z_0$<{%_tC 
zCb7p@9`|o8yFO0npe&uC)KHT+e(m^sn>h+{x1O#Vb*I=_9M#pkih`3>!e-!+Y&%Xn zhgIXJ^mxJ)^4L|Xue_YB0D%qFl1p>^(L^wJ)_xxdhaym=q0f-l+Ab68M-jqi)jQuu z5dK`jJ0cpYv^|bUXgR(+s8C5=B{-`-81@d4^Ha>|V__`EHAT<*B52Pjzf0QN5W*Tf zPG@GjY0`O{^b~g?w-7>3H`IxilkLwT0y_5mH{%uf%@Ch(k+zpW zQiD(S@3gYOLSRH#^}fKp_}f0vWu387Cs!t88YZ^t>uN~A2$>2f+eY_!3aOD0De!e_ zSP$Ur@(IkuK*V6ecsRE&p1Eh!riDsM);Qn0`2BUA^9%j$m#z2~dcQhFbeNi#!w!Q` z7%=Ld-z?nTOjJ$*uE)K_H*e4*lup)wvGKRV<0<;@&l`zKKVN4s*nKu-8Mu3bB|;$- zDIM@u=*qs>VBlE~Nq0>PD^9WW+9+rsnJ17B)XF4s4f}mH>hXnkVmsPc+42cK_tTjD z>A&xztH__lkJ z1e?hRTj$r*0OzJrQ}NdP+(`npUaqe_qU}Jj5{gnT-HaoB?15y%IW)gqoBY$Y9~K#1 zB1Q)H)AaN}d<;Eq>Y5*R1zfwiX|+Z?v8G~ciVUHj-6c;Ua#D!W5VN005AS~38iGS< z&@F7PNxs}cpfY&Y#pMC9msxUKo@57d+x684#{(vb=Fs|# z&R0wsRu8#7S(9FjMfX=H8}fRI=yEkW`yO!*HM+moI#oUXiux->33|(P61yc_nY~n4 za`W%wpJp>bL9$gMOYRFA-D>(>-M()YL*#o}DIm@CeWNQL`zfAf&*#2C zzpZqS`Uxh{9~Q9p&-nW@>qFR_XKYw;IR+5)A4M=CYS^Ju)FcB=Wo4*c5X~$~IAOBu z?8+;fmaTqK-{;C}UA?X0Wyvtvo9Qb)l)DzM-w5;Kx<4#=v>e#DEVR+Zv+J@BgV?rXa_~cgh`#lu!#RWp zWJpS#h@p5=QQ1~!=Z*cri1LW6;g|^bm%|O48*S1d9|IzTY(rQNS3_TP;%~!w2D7-@yN_M+6-~wR%n)k>f$=5 z<>QN6N8FxMR15YUGeQNTHq@r{DXcc!TLtA7>?!GqTW(nwj#0>BT*c=WNo;Qwo$ceR zC8~A)Z@K`FcY-Ukq$vN)-6^H+F#GHmgAioA9OFi;yLN;^JUiLBKiPt!yVmJy6->sW zKU*(j%xW_5t>gPu!aNd)NuaI(Ou^hg%J0f5l z1tJ0Ses_6mpzSA?DA&O*o^gSEp%@NI_y&eNBBEdAl(M9m@1l@C)^BxbuX+gvCjIHS zrQU{t)gh)dshF5*__%;>R&3eIB@tYL!njSo%7V{%#jWG4ALHNQ0#?I1At}oN2O&PQ zivXIVbW;THVK>>_y918VINCA=gS!Saujc!fG~E_3ps2UsEAFLo&)BB+Tkr;t7B7{2 zf5+c-`EkpUe&OKDw#cD-<3x~i%5oR#@K#wbBRaaMP9HB_>vcTh_Uwl-SDen;2cl1GYv6ei%mAig};d zWRP`2FYlUk(XV=?B)Ku~lHQwoO#fwnDWK5N&&s6XS;-ttm$KRLfOEG-Jl^euQiEEb zqvqYume*Vwtwh;n&V*CW8PohTWuNpu=Z1l;+RLh=RH2<{E}G}+hOgu0*G_rGfUfcL zI<99VskesZ-Qb0?V^^$gL&rW^xaVUykF(OvjiR1ZuN6m5w8(s;ud6(w&|y`5*I04Y ze&CySts{7IHOVh>rx(9?bqJ`%uhnkM|KKfYaXAsib|oy9<|oAD!(>_m9*q;z%8urL z*^>p~h$|mzcp#2D<-fhWW;9B#I97l^61MynWOeg&A_tu@1q?XHh5K!fM%&OoA%CiO zTXE(6*?jLF=YCDRbdGTA57O#=2Qoe{Eq8@G(y{5 zWFoyZY)(zosxO5%&U}5QFZPlZ{Cq{_DKhAETiI;20 zK09ItPqq5okdrCqIJ_J%5QS?^2$XafMzM$EmO};M 
z&W2wmy-m$+`v(~ZoOrAFCJH8EgVi`7jh6hQ@D zGWC8xba7)VvqG8*+ENWa?OSPw{=$7#4@2q3vS;U+oL2b_!{9wa+8kzKW!0%`n7*HY z6NAcMbL>NSU^PoZc}0ytm}~~97LT1Z4Zy&rZtkq(aV?va& zQg_-M`;N$G<%LefHDoogyz2KRHdpuC$2|8#iAn$?LR6|=mJp2-#gPk5QZ}xst^26n= z;c}XEI=twZk&an&&pP_v`#A+&gyhwVjPX>ba-OC7(*?$pn)lcSpAG`O`W-u_>~$#U zXBtVX?H*$!M;$%?&V^G9b#A}J+!N^iyr&TpCX;9Brcty^0utCQ=l&zjq#c6AYq>7J zRjVLfV3HPqs^^EdnkXJ!{-ZaWC$^l~M9b65*WTA0qMtd65AjP(fpTsTqK{*!;(H$e zDotgngt)TkDR5rbWOwebvz_f`?Bjs;u9vUr|606Lk6mM>XR8{uc!7Lx+MVtYati%& zU}y0fqEp1`CQIgO@V!XY6$2HvCxPhpZ(6T31!Z@^6``cMkSXE z;rAZooQ?FH4jZq59uC^vc6jV}wg?P1r`_2o$MRdH#&asVF!d#&IP7^A*bD81F>WBq zAkWw%^`(Aefa5c{w`Ie6+>=U<&$e!uj@HP-Y^#{Xu+ErFm?GAvgqQw4A)|fyEpHsHx#ErV-goo$oI+q19lVy`73wUySe^#jJK3ZX0eLE16HtSe6iyD~?1283yh|=_M9y9+3amNoC8U2(7b%Swd0MsY> zeYT2SG-U}(V(&^ud)p_BTw_aP+nX2|f4=*_1HsgB;PF+3D|j0DUtj9}cqdheb+x6h z^vie|UMdZro6%c@A?6n{(=X(-2ylUhLm7|f(N?D9pW%xIW{9&=Vb^xn-_Dei%l@qhWIe|$-B;|bpF z`Pw2ZRiq7|MsVQRb91=7%VRKCQF$}b=>F!%x3?Sb#0x(N`hcrI#Ax{UtoF|{o!>uu zt3)@0Dc%#;<8@59ajATRt0NYZdk;VGZsePNw^f)5&{DsN^MJ{Ywu-`=rXKFQ{-5dB zkj3ZzyxRXB{pkIP#LLSC&~O6aDrH1K^8r{s%A$Y~xAvQANj(eNK8s*{@~sR2zIF6wq`6t@9JihV<1SUB(=VLXR-|u33cBrEzy(>j zpZ3?j+aZWy&?W$I=H-xJX<$%ja4BDE&@lu~AF{0q5?)Is4QWav_%E*9WjBk9EJgj-HlRtIFSDarBSj%LpOhMkhRsUF%dME>5))m--+WWP?zL=v~1_Z9crNLb24l**X*2Qhw10Fv!TcRYx z_gQ1^f@r;&k+#-xUPQ{@0v*w+4{0m5yg*yy0Y^(Zn|2+NKilVzcSwT{x*Gt*j1Jqj zvz4-1k<0|ORaxbq9hd+3K^}riBK_nG4D`H8C$7$qEF~emG8|LwI(_93pw839VUZAG z`EB@Bi!PYYGJcZ)@{;wMDWG;^#Z9+t zK&}%BKwzw-BM;!>Y~GgT`1%GQGwrnKu;3##oHH3ne48AV>^mB}#silgYE9&}hxh8+ zz+iw_$8{sq2AHmA^kw2(b52~SF!%ugs{qsE{is#0MKLXclO_d4g2LQK|2jqgK4ODY z!CR4f=*H`w^j8Y>RU9>_zVNE5mL*Ot2vA?x-Wo9eLM065q%?jW(nU2>^gLg!IC(Xn zVlQn**YkF#oLdIS5v>madq|ren(R1JYJ-9TBb0i0u+`H6UVWKg&_B=jVwD?(r<(mI zwW@zzn75tOSB=rE-yp?ck(+P#}AHairG0=yfHKJ?I^X4c89;5|5Br5f1 zECQV7;b>kQb6_I=X0k*-)i`&a4a}aUzp&9b0O2r;+Knu@ZmEB;3zycW010W%$RJ>j z)3AkIr~@^;nWAsJNDmzCaNO?A05G>d7*TPOaKu(hw9{EG{YU1~pN^#-uRHpxsOQ04 z67#*`AK!>Gd#|-Z#lozXw;5671Hf{67APj4C-9PSgC+4}h^t-NWnLw(@8ohgscnE!Sc@l`0E 
z{(K=^6J?c!{(-)8SkCYWDm39GZuI}=8+b>55rlf^^PgY5M2qhM^r0KtIF=?5Iuq^W zpW+U{vjZS=!Eoe8VX+PP0S{FCQN(qmd6_VoLK5=Jcw&k}KhGcM?V#M!z>H zwa<5Ew0wsJKE%nAbhCzCOiQre-a4IT3Vj9ypW}=hT6B*<~!PM4sra? z-FA){^nit7r9Io2(%?cT6tK>j9J(XDo_ecM1;h+h+Zy(I$4jAT6q~zbugr^bksohI ziHBbCZW2y+gWFQ0BHKLN9n34a20(qK=g1~@yyX#%bfaTkZnmO*EU47)r;KB#^y+~e z8yd|LI7Xz-HuH<6=PZG}R}F7xHRW<^C|9=<%8Q;0I4&7@Y?c5~BCUt((CK(4ec>~( zdwt6<0Xpg)!z2K~Fgfi*H|6pF*EaQ5Nx4I2{1z=bKB%Z}xAyKZ==kxM8#sHu+nv=w zFrj+$!dV-TEYxfsOyx6TVU`2V)l=(pfWvq^0ksc<*#WHi)4^&$VMii^p6nLaQofrq z#(k*)VAk?u(E&j5 zKmg9!_%R+o{EoQ()??D5yx*sb{{|4RTjAU-uz3x2lG`PVz-bGor0G4|9h7KekEbXH z`hexG7}sUvVcE|i5kR(y)#(>_Z>(_Y$gibs*z!A>CZH{V57H^K`7Ge*^=&-LeXFpl z6+j+MsSUvo^j~5-?Esx#q$impj0;u3o*rpQZuwdwo_zv3dU56P?RNX+%(C5pYvn4U zE+FN#g1Pn^I=FY=&~v*csv`Ez_fHN-_TG+WY2a_RznQ2}xP=GlZEkyE-*sn~Th^Tf zm9)QCJNolV)t8vBCsNhfDHWeo3cS|Dp&jp}>aZ{c?};`EXUM`Nk#hU&49FM& zcP=RvPzV>nO`^lmT|!}zWq5lB0}h6P;XIo;HUO*b5BiN?#GS#n`U84|`bT(Fiwupa z{_dwwN%W$14LBS=duD1n6LQe;0u~WcF#pm5(4^EPNi@k%xN#2ErEOovELIe* zhMzV+?za*zYD@4)9ZrFe|NMA!yjS$~3ykZRV16P!L@8|vz_8LrqBceXfFpEZ(~kLi zG9S1u#u&xEW40!wnz1OCsRs3iWzDT23Gc;v`XU&AK72&LRssWf&qr!dTmOp1>tf|x zML0TIdd1Ny=u1$NjtQtG9s|?#zH00FqCezG|9Pzb<-=`V1bRq@{P)YUfe*Qc6bSJs z)sI(={}5pTn05{_JU?hA59338TLay)N+z&P1Lg1ZV-n4ba1 zNdsXj*puDpdyW=?k5k?2-o-qRrKoYvEK8h?21m4~cdTh%Ba!`fX(j#qSEiQqL*V3# z4(NOia3U8y|F^);GFEa3&(oajIA11N_rRf#9I(z^*ga#zn{lXo)a8?5`cxnvY zYag1q-VBlCf3_)sQ(2pl5+kbviQG#3eKKXx1EsK>Q+L5`Mp|@GV$&+w0bmZ(MN?b` z=o^DKd1c>?L6)6_S_x;7z#a(UMf3wnVH{e?AYI`bvco6Ixa6U9U3aB+6X?hO(v&ZTI4bMWinwnA1;% z*sV}XoxRD{vsnD3s~j$WZT)4q^ry@E*R_ILSw5G7RwJ3GyD4ZfdOe_uY|#ize6aS> z+Pm>TkmA)w)^Hs@rYj;HSoHrec{XVn0X!>QsNpJwNb_;$V>`*|4BZg-4`ZVY6BI2RofEGtn#FitMK z{|1PKTJUSltptinQ>sxczL`)r3>yNPe7)8Zx?u8QwrG+Y>WXO&(oYCnmL7fxmJBvt z2AnJ4skM&$GDIncb9T-^g<8mfsEg}$P?iC_7Ch%#$0~tvf&4hof%W}Rv9iUx-+V8O za6TU1LP@pTn2)Su9dFEhQM~%+VEnhA&#}a#sTglHzxe~=q*q9!j>uKR;2KNjL&miD zU*@Zd?V=VUUWCjBqJSh_yn{vd6V8t|SOle>6~Y=^8y}!mrAZ3FQ6ps4jrP%0pW_l7 
zBKKxq{)AWapJjQz^FG0pD<7A6FB0&u)MVd)_o$DRUik$i-MZ=e9UQ@J`K*#skz4a-T34 z0drHD2^^Y4gWsrd=d|A&YTm63<5xj9;YD>Ds7jS81zdWfN^~e3vUG-Hk>K{LbpG>Xy2`w;mp>H6h*g~I9^b?AWr>oqy6kYyD zmZXk@+P;eW>oeo-(UEZe{=c~4I3M4nsFEQZ97t6y|3i>_@xzCA{)QxsMAvRTZe@?= zW2NMzx85EX0~dr;5P5DYyV)k7kzyRs$~)Fw4zM*vPHrlt5BbUqIr?o5{rBs+TyQ2h zrr)H0OR)ROiW+Ajeg>`A4l$2EDXez95qR%}U_D-c>v=UQek3aKc8o5XLQ(N!l#OS= z)|1-!ycu?Oj3;?7l*sIrg9JX2221*)kxG767FYf>RG(kxV3%4Z<>e^RBy3)lrGr|hV_Ltw>W z;&u=QNsx=^)t4Ia|7Ixovk8H~=?Ih(Rq&cjxXgC>)g}0sCCFIc0e#oW-0Wlmjjo3X z8l-|eQw8#CiT~|Z1fQb9C50pq;x_%`spTMQ3C~`>i~CWmUB?k(*7dgBw;8mr5?9Ab zg5p=35NQ9aTOSb+PXU$KQ%0R_SyfQpjM}_|n_L6Mh%s4kTvxdPZv_s}Mr~6{{YeLO zaH@bjvdQ#0cI4;*E*&)P?OL}CvQCv%(i})NIl#1pu7S=cPNQe=`|tjTT)YU9$SmNv zgsaq}52A8E_#5Xy!AfjU^%9f8HUW%<-C}YG)~YO64iYBS5J?J=0f=--c zb$;{t-+5@3d3(7|odfX%EP<^oAmdd5G=!#GzjiHwWX57`AW5~_W|3m1{UUAWF%cJr zIZm%#jv#2!&AyjKVu^U<^M!gm-)XYgFF7tSDG}>8qudn}Zy=024YFAa&@i*g3*@T+ zq}a+C3l}CPSJh~^%I?^gBH-nK$W*==+wAgJb4hOtlCL`N8r{HS_V^(K`ovb?DL5X9 z1!GG*_ljS+(}LT8o51$=6ODQvTM-aWZ?v4N9k=B0ym5OEkGXwu%vn(t2R%S<)dDC@ zasZ!sLSW|#ih9$JGLFz^x@*ISaJ(%4*}ll=`x+aC!CqV)=awk`*iRhZK-YW*-cpg{ zKkx&Z9b}fs5?wmS?b67Qy5DVpG2{drp3QuFYY5sMS?=I`ETWuNYMDfET|Zm}w60Q6 zjE}IX*J@U4J1_cwygLS`Jx`3t(D3n}{k(VDoDE3=UC8K}*O$3CvkNyToG*XH8P1Zj z$Fyuum0N=4y32(IrG1n8iBp#!Fv{a`uJVzyAUqB?V9#m|8+2G`MlR^U-YhgW3alqzx!=Q1__pua5;mwJOdUT)LI$|H(Hh4dcJ#hoBXPM}}kf>6hIC%Ar^IX2!_~~d} zJ~;Tl%!m0)bp+$g;Uxo>W+jBrwL0LMC)?GX)+;wT1JI`;;4G%1s;mJyi3BeXwL#va zP(A?k;xOvQFiof=&$MD3`JOcpQuzBm5wBAu4M8?4Me$gjH zk}qG?U@_!?;pC#t%Q_0jTdT7c*=|_$9G}7 z6=>I0f>LDgS-R+lCq=`JED;;-KR}y`;Aj#}p7wGQ41Tsj8>I@Q9?w28sNPkCp^d@T zpC$AFXNtEVI658yK>;%(7tQz4wf7FldlA<^#2X(c;=xUBe~M$uOR?&nAl00^-%;uv zJEYT%+=O-M)l07)O`OGp`+XxRQ|EE*I-x0p?!-B3<2%!y-72s7I0>!|z(60U*Z~1! 
zts=@K%^^xPhg~q~sFT6oSFS~OfBv`IM@_JSJo;{5gbKcU=HmUm;jIy%Q`Z@0#ToVy z|1os)nQB2=sDb3`{4|nGf|JX!veL>4j>ZC=hUtFQT*VGXgDs|`=8M&x``0c?8ULdvJi^#`GAhsv*yt!iV%4N-AkN1f)qmPM6i>d&O4kx2N zpKnCuHOMXBh$7toHg0Eo1+ba{58MV|Y!Rqj0?vRRz_v#uCfl^-!1Ix|1Bgto3D&8d zcr)Ww3~zv@@Q=)BIA0t3u6*U>U~6nkac^Pl@(9rPU?K5Y3WCd7v|cVig>Gu`@xxn2 zwxrIUa>9R{Z|%5oye@VbPMM0Ww6RSPM3pGCcc2e$HD85s^ww~$RryN3m+eulPPL8x zd+=xBEHkN&d5hq`Sw%J(sEME?VyII)c60)Q1XDT@!!y)}4InMvjWgSZlP&5_jp02a zAX`~G=T61iu&hY#8RnaNTq)&KU`*fv#?~H^D{NK?O0wJ?uz4u3SRs614`TkuBMLVR z4hICl&a8k};6+V07j2McWh4{>JM!OScO)u;WS1?N$JPe2AiYp=BbWq|3f0Io^mTwn zzmt_<#S#{n`{~CIeKeiC=aIR{A%J~Xy>|sSgZ|4qAc^rp2n4e;$+A4@c$TbEs!>I^ z7)F6d#3GMQ;Q~bnpUsX1^qTJ|*&Ki-f>T1ST226;B8EM1o!EUAKX1>+=3F9^!av78 zd)n1YXb}q?c68jEpKI_;U^kBex*C_vz7^k}`8t|_32gaC^4HUF>2mYwZ*OBFBV{?H zCb?#e)8;Z!4SNFbuLq2IPw z#-n57y@AC5w&Lrvd>YF_egdCdAXCG@Zu#la*{1@pfCd6)1HT7)n@%w*lyXDQPW4oi z5|4g=aW)wb)>CEf}Z%cQ+oWh z_e+8C`T*j?u*Ff^3qL1!AW&-w6f?4P;s794JfJ>Z@@&N-&JroAwaRQf@8gBo!l342 zEF~xGv|(N==D9*E(s!`lw-81>Hhsr#xNb5KnHvz5{<^b>x#iA76tH)R7-Ym{@b0w ze?q+Bj!@rNLMig!B*6dYa*WczH%hm!4*hWt|L3cJvfYk!^}fno@aOxxmH4m!W0BGW z+b7=fO?Zf)Fak0;0N#{Tsie*5$|TQE~H z`tjNZ7cN2|${9(9F6Pbd8dkg+p*zs&|AijCA4Ou)s%#PT+tmslB7lKbIjD8MgA_RO zDxg5@Z+>|<41vEn{Q>NV|LbzLYXtW7RHbFYc!37%#HEclN(O}>1}i1$5AxsMOypND zgt$k99KXr$w(w0p{F@g;QiXQboLMx8Oy8zNTtD07+4R83EN>8iA?#nEPWTJvAs%L# znREQ(KE10ET=yxw@M8mU{=<_NY(>z3v@7TP0FWRxbdlV#4M?y}1Fyh{vEH@Xzvke7 zzNByxZ#pUJx;MjQ?}5jZWmovK@XMU^@G>xP)CA#-4Xa z4Zh}+bxy?%7n}J-R>1zS5==&>Kr)-SvnlESZ*TJ7|0y0J7=y?>erdP=v!!Srtu>ln z>j$U=H|+Q>L(V}0QV3>d@4(n|avD70`%Z`{=u6+2b-yFPu`zt#Y+tE10uCnwM{Aw-h04ct|qYt9Y5$DwJsI8QOlXinp!Ze{Vo~x zT;DjHyVK+1;Pr%-%7O};;66~_DFlh0DoDgB)_(q7k^JYUGo?ZzlI*u1f|tN!l=+uD%|s2aDQhobPvc`5VL3cjPu|ii8>Ym(9gJg z6Q~5CUZd`)9@2oK6}77>q?!4dUL`LI3_7}t-#K!F!N(5q?O9+#Y%cnhYyw9=J#Z&` zB-9s#$NC?(8vO1eRS@8OtP|t6lRn`z0cM1LEj&x|EG9GoP!nt`M^kp1G)i_rsAY92P z;rI>OQV+=EqcC+k#f+dW+c=~2rpD(a>+d-`HTPnl0X<$-uvWTXH{hj*C{~zVNg!#x 
z1UbqB9@>h0pNKDcuQL%MB**hS@6ek2kYq9Pbb-`!chi1Y47WxV0nH&#xj=mw$knBr_(8y-+$yjbcMuLSd6=P%KR) z845*O&BuZc@eeSqwg%Igx{X>pdl!(@&hniB82sw5-j(Tpd)^q`Y~H{$XiAo6gW^=- zp&^j8Q3Y4oT?XXfIXdj~)mDIVmeMRk<|9yu|Kjn^{HE>tW6RaSy)cbuFmo+#`~4YG zSq61hlXTco&LF7kHg)gY$a(#ykH(`Aup*1+Jh9EfndM%DnrjHoOTj>jtLr+9kt#TUEIL zu%eYRO)N>1KlTmY-ubPKMzI5~(G0!q4d=;UBy*`H5-}CBg z6HViDzM7lI8>DQkAvabutUhJW{X#UqfQ8gA)a?TxKlSPUi+lK>3_ z7XKwsQ`oKL;37(HA>X#8T3~{SaWx^a*5C_*u9xMKCJUwqyLzhXf@Ws1(rH&uii(3 z5#rdR^z86Apg<35cOILPY}=vH0d>z~rz_CFsi}_S$vRIvWe0kxHt{sksbwO8k7-Qi zl%MtjEJ}~@fN~eBuE}8XPMO|?U~c$Y2#aN&{WL%*ZiANYNyuku*kj?ny3HSj3GKp! z8`^OJ2TDA%n=RZhhQTf;qa$z_%z#RB%|HY0AM}J$l7gEw@WlQJTNRZzXXr;q%n8 zB;BusYAq5K7V_s{P&#)7U=Lfa?`V^XJmd3pVlU8}`{G%5;ALl^xO1LP>IK|eveM+c_#9RPxqn^V=i%~77RN~CUiwI;RGsqr zqT!Zxk>nXESAFt%AC`PA7N6m|{&~-f;=Xz;q}K!6mvr;jJB*TpfoKi^d+tr)ztv2C zK7CM(LYudQo>CYKq9HTG_$5nmkzTmpGqgMczoS}k4xmfcSgbC>_47XX_z^70?uCpU zNYCVXU!CmCXp4>O%h*0O&Qbm2mHG`Kpckkm3)kf1bV8EC;!phW!tGhNabB`A_i(au@B&31n@iq?_v(Md& zDkzZoi6W=FSjHMaF2{fXa>8dDfQvHH=d?%qmMRWw&zlVCKFM_kyb| zdYGhfzP&l&c@8XPGpDJ|Y&DxCe<1UqLS<}8#;Qm{O zH&T2p4*)yrW#&M8HdwpKqk*Q**8}%JvR*X^t@~|KvdR_S6ITw!bSn1w!$FFRXSCP# z16T|!pL2t6x`diMuLu3Y^G$KD(Y@;3hO~M?*U`{4OD6%%jh3%5nDd9rxS~Q=A;v5^ z0Xh^m)bPYTW#eq|(=7AKUWlfBo`1%!%JqQ5UVCkmqW}%8iLr+%_0#+-vm0mWzhw3# zjtFoTQwJm4amu^^4PH*#t?5J^al;W%nu#bxDN}xXC6SqE2`cRs3RlkIdKdebFM}%1 z<`XYLUg0Vl=ir=+Zz@G2%&>YNTe`XvAr}linQPW=tx}%fDw$qYLx|5 z_aw2Kk1sd1HBC(V4;}fRWNAEUpCztWAM535NIVi_G=tgoAfsL%MrFD9#8%PP3gO61 z2w*gsY7Qs>s#qdbxv{);aAS=+7}9@?@SFv2u1 z@0L=Ui)wtM^(ZnQ*OQi~%E}d|p=3N{B+w^0aawj+<$|pY#A@3X@dkBOb0nMwi?I-X zOG1Xc@*9z_G57Q8&Qz1+mTe;4Dr#QmP2=5Ol!_V2r?VbN~1#n{@B79>|btTHd&w!i8m%2`U)GOZb>+PV{Ko<^Ht3^cg{gEQP*UC<9fWGgDi5Zy*6?$J(=`aFc;Yn z{{m*FF(L3KYQ;BwaZ8>c5=?Gt%@v-lE%FQI&xL#d+^uBMwzJKqj43su_OX%^KSBn z5P1vH17ZQhj|5dtJJakY∋*CEN0xsLULS-6t|I+Q(XsZv^=mVLF+h8C_&sH}6t` z=94)rR!Z6tQdGT!RwcQIKh~_tC#>S&fkQv~{s~S)m%J%QhwgE>u;bBQ>#?l(o~%P5 z+M$`&?XE~EenuDs9Va2e1BDAkH;H)%(yp=0HvO;BZ8 
z^@?LPo6%Sx3U)b#oxO>F01m!kUac_()y!b~?z_iG>2V5RF2iJlYYEF$xJ&Yp#_I_ zpy;^T-E5%3T$Ai>);gP3ue_8??<0&Fg!q9Ey5! z6Zy&h1o=Wj418y_#ihiyy_y%gjl$H68Idx%=Vnww`15Qx~w`S4wHeKNs=BM?G zzFyO^^iQHc2n|Z+Np&?L^Z^K8Q+~?LO$wMz79nR7Xm5~eakyG3+CT?IYORR%#+Y%q z#;-G?E(l&i2l>BDRBA#R2fw)&JlVqIsVaJneo#xS;)FB^i!8>zJlM7Nbi2MW+9fnU zU;UftyM;8yq)jm6%P`@NVkB{o$BUdcV%B^>rRwc+1c{*P5+B3VT<`+R#9VA)#*hvEYV-LsI`>Z$CnXY2A zB{_G#G8%%((9~)0QEywrGB%Tv>qGj{1$vmP@*WQ#_Qjo;@WMbC~PtRsP2{PWAq zra)S>oa-SXkJmK|GGAytD~2o@o?XL+c1HUH zZcRz*Eel+(t)d~SO@v5ct6wh#o3xDWhTK!VY^%GKPD7JZuef0t$zPuA;hB}@q?yJ5 zr0`%=ntBuYIDKZ?r78>zJKZG{${>HKen)7&2DIPIiIW8$?nPsZ9*b$DDd+j|1e`mT zW^L|g#RVY5r~AA*%-@8H6i+t5DFU+PO(o@k`binnL{3PcH~?wJ3)JuUEn|w?bs&iyf#yMf9G6-s<=U z-}cr&V)0&9k1(dkq}rF8m0*NE0Cu)5pu%`C(xiPC!XZewD=h(PU z=X?)EUgvypRgQN9iyF;n91qDE$*M++jU>xK8{&q09Kolpq=(>b1PV;Y-0+xgk)K6* z1lD~r6Z=}4cTC1H8}iF$!uN<2-VQWM_Cj#Wr3ae!g)<#hRHOG|6`g5IGW4<_lh{y2 z#(JA!k)%$<{RWY?Ew?=`0oknoN7!}8Q@#KHxSbS*E>h-c3T2a#RiSLoIY>qs2^ra2 zG?X$bdnFtk;~3d94P=#_O|o}p=I`~Xd%OL@9KfSD7VH_@Ht8Z|a+!Xregwe7Nq{_`GLp)^`iRI*h(evpugJ#46-pdW4h+?B5Dr=i z9P#(<)g`PfZzo49L%EF7+;H8OUjbpCMfM`^B3C7qAg$ia2q^M~y>rMz%tnuuvZ+;a z_suUnBp0*3tDZTFUm{gwzA!+kG`wA@*@@~^WTl}fk!q$0Ad+dvG#4g(7At`F_ z;aTwH8%d2@1x!G1oi7%|umbwVzfLZRSV=nENPYAJrV3e^&G}i{u z)^#*k1wv8J4S{`0+WRd#bKXJ=O2YiW!igT*?cT@?whPP|ZQk~`G~C&9&jj7syvnUx z)Z&LrG1sh>&{q~2qNTsw!iUmal-#rW)GLQ}F|PGhzIym53*uC{=033;dhLGMyB0pR zE&ez%8`MiceEP6kFqF(C*AmNvSx#mWkT_Sec8P11`S`2tsGq_ipR&}%jAcuBO}QaxXC449{C3Z?_?vO8iLz@c^R3W!jHfb$>F(E! 
z!h1VLO8_!f_*_yh(fMez!j4FDvi%PohBaHGa}6(GzXeB*oGR(Yj~g}_9X5D9dm zE$7Z8vQ&g#ggSqUY|xtHIN>tB61$Mz=hkzmbkMt^>6MK1Nk84#o3Roc44xQM?O<7r z3IWt^y3-gbRq59-LZTMGPc0Kufr-|v(CTaAva)8rA7Y(xMmIE#_Be*1`s_>sS*-Ra z_F!T|hQJ}SJMHD3p6!e{FC98mr%__iA@a{6WZ?w(9XDx8dl& zApO5hGp|={v02PdI9)txbQg$cO9h%Sk15u_OsbEGl=W%O=)SDDP~NR#f6~Q-He-}o zsbt{p<4g22PA63K&?>WWJd1^6k0p5)a|LMe$QFHxpXcXkr;na5tGyJ*D!xG8YjBCc1ip) zUA5cC`)pC33wd8*lpljHnr-LpSKJW+o1@HkJXHpgTuh$H4!h_}NuxPYFPAgMzqdKv zs5DE-n&oCf)?3P5=d0%1l~feDE51SYUe~#(nIB^87s>QU>PF+FVqDBCIml^c^8K6t zxTQi9m-BR$n3FsZHw1K}h5d8#+S*ZLC7a!I{*fU=!hgPqI1O#WhdH;&S`9bjC9O6it6z1lzp2k3+Zl)TTz^vHxh~+hjs}o@u={ ze*EjV>`*`h`wkc)#7P{x*{%PFlm64I|0LzLE|S@RJ@73$UT0SYNoM3Js~BcsYxJeI z0Qs+3%|EaI`V*6c7fgYZRURG@+~c{x!oK?4qDv?hSU*PkSE+^7;TEtE%AJXf$Y-Ko zp=v@-ny;$U+>5N2F|}fwfZV4FhsKmn50hVkX$xI=2Z$1#Af!k7 z^@s(v7GNs|mhaANS3eBPLs0wrgMWFz-^v67FVGuxLi_PD^lfrZJAeKF!(-BnYeNu@ zHe9CWM*EjOc54Zov-zb8WlCJdJ zTI;)DRt~7kkv=&;24mj3cwKsr^GFBCTP?t38ez9m zzg`elY(4=OQ*SrtWZv18^pobFH#I_xaFMCbJ^LdYM|^t&W_BK7gI^=U-1e)r{e7tyo|7`wM^w*8 zH2J|9{nRFYABPqp8_XW6`P{jo2N`@^?aXubeSYba4Z44O0X!w3hXDsaz{oFiQs}%g z&{u3kzn0d&T>Z#O@~8Hy3Fa*3#4%jN-%(Pym}PLMD;;=CrXA7l)c4@lKUC%Ux6S)) zGZvBpBRnV=8WccxHi5QMuFy!7;e>Z@II@%@EsNg5QXaMj_8^PY_R zQqHw2&`m6&&`^#L=CxS_1>N(l<*m)9YY3MRqx!^K$_4eAX}7!!)yv;fqqdQ5(d_im z7hv6hCil&;=bl@e)!2CmN_o)G+?$U*GM>F1iwNXkxObR>gt|}i!ns*9a|Ipti zL%Gym2h69*Xvf|g_MED*H`!-LYr!){sY)Y2L7$3QZV{Q8r`QEYX@@U8_cW1r)r~F6 z+MCcme80lpG*Bmc0$QTBRjW`vUw?UfSKjb)U%GZ{3XsVqJhdaCyt4fQu}9ZEM+F2R zO1EZU2I6X`Asn8Z+i9Bnk?nZ-M=}njPAXtxJ7od%>>#w(;J?5qi)|q2P$o+ zeU7=V#9OTORI9a)X8jM5{pN25wA%8m^e^R_1y6K1L1YgjwL?hzhmtfpu_$hLwadut zy(~>V=Qk*x4I3rs=}&n(!wQL7ljGi|8?kf%lofO*wBFm50f14j2h_@HBDi$ejJSiX z%M^&_^O0VA|5F#A)h6gAnKLiAxd97#{m?18B^VbGW)x1En)0;a=_xBc`dKCj4ipZE zfdbcP@({T{tk6DeA6Ptez&3u&BsYS2!LPIa1v>)Ku5h&kNU}m#>-N(>#9BA&=X+-t zSq39#2MQ~gvB;4<1{@Nwb+|EXtHEhan6L!Ngf2`@vJ~_9&8n6fN4*@Nub1vX=h&ar-;L;U#*ZcgtB1`0f2cGwZ(*=TK}^W_L4pPC%4yKmSpXM@IDT06 zp*6;$4{Q$*x}EOqG@Py3Ac;I`=j@H$2K)ejuIgk%LO$|vbz`xEjvyBonBE_gl0OTk 
z6vjSf_7fWIt_!e*?Mdo@&{*Wct({X%D~!ufL>&O@9sBkN2sdLo0^7l*TcjmFGEa~1 zjqA*_8LCM%Om<83vIqhNF8 zXMp>MFa%LfScmlDzSmf)GpOAV+b4**P&{&h-+8w%7!uVUGBnb=@rF)A@VsA&dJ2|( zehusuj{iA{(Z-=D=yEW=EifOUaLTG^>=^yFMG~=m9xqme=VX-nQaVK&AE}gcoSEpI z0_3?Y$kV&Q`{bho4vm$mXa_@q=6cTT_4-qNAKuXGPnIQ(__#c9VOzY03&eVTMA$PT zoF6zD=~^2jqrl`jQ6LAAa8hs;yb!e;?I(RYMxa>xP4*(yi4sU)L3yRos_y14x*ht( zS+X_HL$6tDzOhPS%rykXYtj%kHlC>Wcy;ALqgDvHfn`@-eR>8QL66O6OSx1|&%HiU zE4VLF-U+&`k8v62QS(jOZuD*Emdk30xAy8he6fBFAVK;SeEctw%Gi>ZZY!F8Dxi9Ix@jHZ; zym4kiY%1xkO&80;p?$6p)ef`bU+NU= zEXLzf;*V#}FS1D{ORp1S_rENK@lK1v$ZsPcwD{@FLrB)0wNlK~Grfv{%&x%Axo%Ir zkVd2(E2KP-Yjy+|MbnBD!HHEwS{$ur2e-S^Cp074OI^ zpJCZGiBS|bl&lindwj**SMBmo3re=Qs*`*GI}B|vQk0JS%eB;L@(4(%+L7c*gZ)~v z%Y+LMLfOU2-Ca}nB~;_Al<*$nAQA4-3hC4`G@9-%Y&L629&j;Dv@4q}G^{^$?fnZs zk(7n3{9cz-#u1D-xBH_);z+_1nd!XVGBU zmN0TV)?d_6CtN+^vPqrB1K@)Fs~Jvv!SIvE)ZMRz?*d zf_5Dh#Rb%TWgjN2uAZdwPI^PCkMd>z&B_gw_)DR%$0sSkfZQYH3n-|HuRk6W6LUQb z{|HCT;X8jFHLQO32_M2lX~!WsiGj?vape6n)85y1f+KPfs1`9C!2<49zsuKgeq4*w z0Aw}0DLQ=RQT-|*hZJiCp{t*CL$jSedhbA=zu-k~>M~DUNeFs)%Z6~ZluWSt z2#H@00h@;moqcefGkOvZ)khah1~Y$iZOA4~P=V55b=~H5F9hfti&%~>ZxqGwJyFbs z-=Y#;hc$1ahxaZNv)y#wzL8ys{YtAieC9HcwU@vNq!TFQmti(z65~V>@I2a9S}^IY z9VV=Mcjz4XDbgkAUH)6cDrO1kJZMgw8tYQe_sdaE54RPcX#t{ zP|1PdW2(eq>X6*3$DD!daL+P-Mfs`0 z7sc1VjZAy9j6e)4MNEXmrKxWsfQ4A z1<0NVL0pap@~Er8%l379XUPvQiE)Q8yq))X7xyumiL=!FG&e*9kem485nF6f3?%g` zi+AYR)jtK&vJfP8iZSga9rq$^yNYZOGW*~zQn>AvkysCNQOpxl}5|h!RredDOWUy&S=`zAu`^!+!7d^yC z)TiHlucnHtXa&_}Cq!f&swOBBF2xY0anJ1f2n#d4i^xdQPk(vN=d72`pu zR}tnnAoaQgZaYmSl48RiBz|55+U#+!@birh`8^Jirji>}JAQ6z_(N}X1N?lBcV!v+ zD24OAL9xakMZ~e8%6SlWEn83KcN+lP+urL8iogW39 z9(kTy9{H)+DugJQ18nKKe22AA9kw8GJ=aF<>l(j!Fm~HjXbPo6d?!e3fUtg&rE=l2 zR*BObcIF+W+}#(DHw|hZkxdG0ZEYu9TBhD^kwoS`B}Y9^&9d)*vu5jayL4>c9$f-v zQ5TGY)Sb6Mx>}&Mk`9Er0VTtR^7YTP)1{DpZiHA=O+LJMV_za5 zl|@TeP5xSE=MM#jY)ho<$CzBZX-`f%w;l1~0X~ODv{@^ZSD1L>|)3vWt3!qou|^IK5{$ zCL%6}O+G$?&iKVm5(j-A*>Ug7QmYS7+##ovZysnW(vMKCRQ@}Xeuv=#r%(&TSOA!a zG4fLkx*WKaPx}zf{39I0JYPBSi448R)SrUEce1$txgz 
z&|;q+C>lGU1xR1lHfj0HWR|J9{gAcKmT09g#6R4qTa;PEvtr-t7d**SHD!^ zPH?h(D;vNPJ(<-S`5BgzUDs|XAH=P{Fz1w4-dONDU9Qs%Yq^-L7ICA<)qu?zi9ji+ zwuSeJk|PmF+McVrCOYb#L{?@!Q;*=XiOSLA-nYtPpj%*{-rGXnK;L@SUR9TNCo%{e z0f!zHjqA8nyMd8xH&@CNZ@g$ZHw5+P$lNARdkpG;>*Dy(aI3E}RT;TZ-nGV;SSkxo z@YpI?giUJCZjNX)NaKroXYErcUEVL1x0TkcH zjn553;sCUoYleAw*%8ZI`Zf$u+SC^;mR9TFXVlAbZ#nh(`*j+WoH90SU4uaF5^p>wr z+y=NsYqwusyF;&cFaM%%?M9~bTtmj)DHqAdIz15jA1562^kp-?C9n$wy3FYHo_B0~ ztwO8Gue?R2Mth(i8j~i=! zTMp?hEu0vtKlr_4d^s4Rbo;hE-q3-W%X#`bQ2=`|?nMYJi|XSl=qGgr0o&KApA;emX z3c+~50%};|Gr>jjo!05~0I=og4% zFgCbxT!3D1x61VTI7x(h&&%-YCY`+wb$dy-^V+`7xG(<|hWUggSv0_MOVTZ#!@kIn z=h1S8T(a5yf=ldtm70}A@JVR}aX-%3ZBMmx?00(ykc)D>F%w=18-1&pu06zJN#}-9 zRq@KRPWgZ+1exX2lY3of#Mk<7QFXiEFZ*%1Na551-hz*MVeehgq5Aipm|h;dwSaaB z#LX^6m=kYwvDc>(ajbAal}eWSNQRBaubNAuE_UUbkN7|5J&$MPlDCMn%aZ>iHJ~nN zpT=5Ss^&Y3lwJZynM{IuSQ;oREhgFv_r!T5_cDvxS(ggG6P-AbuOTjigVgk@ylHB0 z#<&QfPq!p)f=S?7U{?d#ZMyA?tf(_q#ZyB0dcVG}fea+2l4==m;ZUg)>}-24?9%s< z{~>-@>-qyJZPb1m{&bs3pKsBKlvLjHnE;;rGM9c?_tf4V1s<N~>Hv{vx9DW7UI}+|7O4wQ=iO z!CC%zz0gJyOl~K}J&5wChFU+LPF#7aW^(1D+1gfy#&k6^(j+*Xrg_F2L(i+t)(5=W zUs_su8$0vTi7FMKs5oq{gpAn&@5?^!;z_<{6Hu6P$)q;rER8n4vWT1GpgGC#2Sz5O z6`Eok1L%}X)W9bVgxuARaKk;I1&#$6G&Zo!`dX-w30+A`RX`!ZPjK2;`vGZYktQvz z29GU3DTyyKU1E-D(k36#3Xh`x8L~i^l5Xc|=B(Z{rGt*KhR9z&VQ3PP#R7yQXS!Ym zdf13cgSyE$yA1eI2>+!3eEQRdI!w9}(ZY!1$N7vJD2b{XB)8l`Su`pn{j6eH+`v>& zMO_@Px&fKt6h!MB#B~YP9z@zISt4ZKRtMJcf}z^yYH91R)>d$8IDk#m5s?s=JsfD4 zrVDH`KyTTG6PF!2!s>T}u&oyg_s5f|+@jN9az0ckvtQDqFGxxLMSfN-PU^Dt%<&*(ZuhvUHyzJV^Ow<>{{`*vcmZymu%@;82k~RlTjos1 znO;jb8GL4|+}iOs<^VM7)~KU+>0(Pp;~rcGSs~YpB=l2nFL|#C1(49{P|2Z01@9HR z@NyUvTr5+L(;+@ry41?0oQsWx1}HLI0Mhw}`lF|edwi<77|6v!pf8d!^;p)N)rXr6hcniU~R3PN%G4mo)&F}@ghCRH`9F^ZXjpu zmy!x$s{8vEyIY(RAoRyQcL%Gm9^u&Rxvj&0N;t|_;WE{|J|fAFQf>xm1AyV@O*Q}W z4EO=|l;Wa3U+;Qb$GT95lvEl0XiY+8Hrp>#^3-1)p#$Ka6$192%JYwMwr){LjFK9e zN}O-;hjRWDUI~$A8~Ih<#8NjU;aYuB6!e|D=s0Cv&dUmIn#&HJ$qpcUpxVPryIQJzG{8*l27F*=R&kW+7a6JfDhZDS-9UxYwepA!jG5LhLD#$^k#$v 
zG6IG-o7VP{lU`)+pSh^-^EW#696Gkk(`vv+Uu%||-;AHXCg#3ewV|&` zp-=HQSR8oOOVGTnVA~}#s@M%|%PpS64=Qt7*mTyM7EMI zn{FY>%WRRZ|E;c(8}mt(q*Uy)c>Vg(gdz9GeWC26B1-7C4VH;MlyqIxQY60YYr)l_ z5ewBn7xQ?C?GGd&e%#W)*349GN@F)^1IdpI`9H1dH-!p&FW46M6hSSuC!v2w9Dr!RWWd~sI7yi~N{OOqGv}mP zJ(EX`C6Z~(Q!WSkQe9p({6f$l5&B3*dH_vbmcKr*o;_PSF(QbYTefoQ<(|jn59mv- zdod}U<|^D66E-k-``x1<={oRe?E>{yw%yH6FwWKjfo>Y`Ll4Zk`Z1k-@47Vk zw#obf_E^P%R>z@?^3MCMscZ#uP4#@Wd}9|^?tWh(KV0qCzmr9=aG%S@u^(=N)GqmB#U4mt_j zB?oHA0#WuVTd2v>!I-PjzOH2kT8Z`}Cobis?K!~rI@%nZZ@DsE0C`Iq>xQE?hXhOl z0QDL6xQ{roAv!YvGan@mGB-p6W5)z6i=X%07>U-{N_e6U;9&?sbQ|MOapn7U1wPmw zca~?BWux&!sP=Hxi-(Y#1l-Fx0Xd6LJ8yuL8OoqX#Uq?Fz(WtS)kAjV6{dbIV4PZf zW@0EHX(pnbO~J(=Wf_4ubxUcFW4GM^Bsp=&r6k*EMQchhJD*;ub}sg*WQcJJ)l15s zP?YZvcrZ`L?3jNyq5!l_kZMuML_PPV2%p z6l}HGn(H%E%2hnN$^z|pvMWf0T=aNE&hHb`{UQqu{tDDMLW>QG#N#(pi^iPbK+U>SVSl^;AXParFylB}OuYEK5~+iU7mkrLrHp<@ zi4pz>!h~~pWH`PC3WzCa&5j;w7F3Ek-Qlsd;h3)i=J7o@IB>+tbvLsfSUt@aNgH(5 z&=|-<>h6yxmqVs6R|FtI4TvC<-bL~qsV^*HQTB{mN5hh}0407VIUT2V@#RPeEd>@I zqpMh-{;Vng9?L~8N=dT>TW@P7<8!}!9<$Y$H*i_SNpIr#KigQs>43Y0r^?z;7HPPL zu1)ia3v8)3`^&dBtmvaK)Xik?U(&fx7mi$qdrb*rpg<~_4lp)XtP_l)II?Ru2hYVa zH|I6*^Y6~0Q^brl#`{?GmGvy%#ad?r(5#VjO)Xz>BDib`0JG<2((hgY-@OC!@z^(y z;32!1ojgc^Aj#+lP|Oc(vX@9^&%<*z3oyAo> z?zAr+=AJHjS}svu=V{_ACA;ff-=+I*%UNipS8qI*n`cEOGuHHEM&_PEFZm)I{_XKI zprF4|!G%h1NzH@>O7vz>PzOf*Q8KW6^maiIQ|Lv#NAU&^r$u#G$uSp_+_FGKIi{TRI#{-v$7@9(w!$E#GMJ~yoV94IT0!9|$>Sbo(E~i)H z`ac0Md=+LTe10$qfbnyhR87aL~BO!9a~5kL#?HHMk&>sP7f!Q`gDjot5h+{gtRp5IXH0{u{KJXI<< zT)mH^i$2RlyyMjB3h3-MY!b6JtltWUj=FNMy5`ugZ2FfiZaog8?A#GgNT5$!Z7<(t(71tF#%87GU z8TKAsC-xYu%5!9DCMh#VBJNi}Z(M9>5yh+eDF>$vjn^l|+*&o+j8Hn<+5jgwma4X0 z&$S-PIF6JpQVG%_; zHY8faomp=v0o%4E%x4v#U`uE!#{=81j>U8RM!sLerFhvrj`iH7 zD{&JSRth3mWb^{BuDTBl4=Iv7PWD6hF-&ddz1a_OFb^yU2f-I z#xP$ZX#$u+l)^n!2HNo!>hBo^+iWaoxIm&14MLQJoXti*@G2`0UL9&$77K1m+#0Fia^1W0M@JYr)d+c(j;e8Z`f zcu*y?q9Uzee>6s%i~fYxWgyY}Zy$VVuhxoqf#zYjt4HGm@Z}pzkzhdgb}?vSqO}=Q zDvIIXOz3M@vA6V-ya*X*d36X`#oM*jWk;_!u!Lnpsv^|_HTuU7jyDsK91(SW!?AoO 
zFSaVtJCi{(8(8REE1Ezes`l^@Qvw>ldZ*bXlskX%Xwd^yML(c@M@4WYLl=a|%SJnC z1Jl&;z7xERr4sm=`!Q_z!OY)W_#~Q|TY#qY^s1{}4@R4{RJ?ERc`SOhm)w);-P)jO zxIT;dc&MpsdHPe9ouZPm-0j&Aaf%p*Zn7d=4uIqukRzYeW_*ayLOgKsrL-dcE&h8R z`1Agknx@-HW*?WqS7<-hA>^W8c9>cPy~?s1DCYw)Pqc?Y!#FS`8oCwJgq5y?oRE>~ zNHG@}$d&oAh<7e7ArnRTPv0a=XuMOyJDmjalY?=r-p{}0du9|UO-jCvuyRvnCDTLK z>0;VcjznADz($x{zct0{A~wUGa*txI39N~W!LgM{%y{Q0ZRW1&nybJ0#G6vtq;sSq zZ>L|&Z|A#!UiH}R86cQp?BVP*9_kZHs2LaL1{fBS}fA^JD_ z+3%(c{!5H$N1{4*yVvf0=j+9lMLTv^j#z*F8gfLb5q8e0Aw1))tLNm1(s>1+(KNNW zg0&x#g@UtX?31_ue5M34cv%NlE$b=NZvrOn@aOq=8;YVGlM9gvTwgy^L>v9?x$UmY zS3~I+1z<>U3Wg=NxfSBj^M9; zpw*8^7hcp!lnZ&#cZ@hnEB*N1?-0uQTiXl`5=s1YfBCOJ{9g3@_MHY-V@}Hb*2ok- z#Y+Iu+I;s^|KAMw`vSNrHM|SRE~hX3w_&U)Oo0gBA>i;+bM=3KAg?+ac%gG1(>Ep9 z|NQHZe{4d7dS=hH$-k6!%5=$E>&KrTLa7ryyxfE)w#+)8m_$JsHmSbF^EV`ce`IZYycF1d-2wN^671X67P~w|Up{j_ zTUozz#{@A=t)_-4y02HS><#(IJKJiZ6F)40?~4ZcJH9i(cEmxii1-36<1_kaMcTw}8#A))=l0|H%dS@2Yb#Z|NocCqWKi>ZZoX&tNAE}O z6D%|$b9ld{$rB9L7rne}}Pi;wTiexMEFP~zcSt?wf5Y)K7;;1o~`zG-++s|yN zZ@;m6H;ApnpVjbwdqCB)q+7{qo$@Tf@2+O_jJfD5>*w~gPSVOVRlZI&4{eUY|%sT0>T@Pcx=or9CwHQkHJAXdiAev1N$advEw$=f{(- zX*a%n&%{#D8lx`zj%>E%gfOQlYz5#MGK+ZnuPX(VH4t&dD~-ox>lx3taUh zES$Xl34Z_YC!=7gXSbwlQ*Pa0`nH?=jl9mNY-sUtm22_HT`ei(&Luj{GAAbo@^kC0 zRBT$z8|07D{4xkQ>!C#@c9$=`d*a+8G{CCsw2~Xc3NqMXzM8kgU-EY`|8+NgP#;lY z<9FNe`4T?GHDmwtbpQHe>}7IQ<-E(inndznZ_K~n=HtB~asUJH!z$z*@?7)9SCTAdw!!?q2JX-c`-DUz6HAeD1Q*~C^Xk|m#@~`LFSX# z)Y?xW{SA22B_OIHLva%Omw0Ycy4?=m#R~kz4iqP5=4*$ zRNPba_e@J=-8|^F-v?{&Twov<&(4A;WE)5O(sYkGtNX$uq~y>BIj7&PNB)RuH*{!M zlyp7_8Q0LJsy_3EVMomZE__A0))gpdK%ZCx-s>cAdLE{2o)&CD8nuUNIW*F)F%%LK zCt64Akc=AP>L3GsdJtk}2f)HC9c-m+0KW0)78BJZU?CV7-Hs>fK6@r>14C6sK7vR} z6d6Nf4?Io;b#|NB9J69sDT|qnKs84Gs}DK3Q)lHR z$tP*!5UTK7b5<0<>ciZo2s;y84m--dALo+Kxvk(A0m?4vfzgeYYnvNu#b~98+y<*< z#13!~88yS{J|1d5fK-StRr94sLtmcv%W(rTQuj55#W65JwIk_4n*Mn$xtv(nPmkmd zxtcMed>H%)n?SB>y%WxGKRlWJatTn9ns1jES?X@-N`JX{=V93x18pM0{SCIn?b1YL zbIgoqlmpV%pzA&g#0H87blcY!T^8Dxfe6$E6ju5hPx@eP*m%vzcogDdL!q(ph0cgW 
zanJIF39hmg0~08CxWivw-cStVsW6(ExzCbY-b-V1!LE-L@E7`+@r{JWbcT%-59Hqe zCVGCPIR0c;{;c`d>2Nv7;v0EiWi0Ak)J`)l)KJ!lhGBULj9y7n3V2MP^9VD2BKs$b zQ+^!lRgzm{-ir0X&R`&3yf>Wl)}to_OFok86)4XLPj#h%q)xD&86Z7-=P^PIWqdYT z8`cmb-h~Qd0Ji+xSZqo98KFL;Fea@-W@0P?QjpzRq9O z0!OcI0zxV?+|jZb;kr<^Hf<|JzN1GhYUH{yitf>2L1zm#098V5ZZI5-u z00eQ$05bssgCSDIj(gNBKX3+@k6ut{f0n4Kxhy*p5-yu^ z_fp9k(KU55IY4w75%14hp8owIR2@%R$Y4-&?n-3MgRk0Pz_+y_kW+@!s8wh`iG7{w zEqCWuS@-C;>ODeieQ22YP!NVGU%o1ZJ)wE@<=kLZano~AA6d(kFPU=zp6l_9kCGdo ziaxdK^V}+RP2tOCb({6#6ia>d0JQ7NkqOQVM-6eEI3>KIpv@crppF?E+^9R`Bby6J zlHu}09KxlrlzPmLHgvLeEP+c{@swadqHHq9dW(HBHZ{UO)x4=Bf+IL}@LjanDI2GH zX+q&~dscyrqzKt3tBJ-40o;|aH^wZ|yYVT)4ZdQ6sC2JPA&yOOSvIu@C)iQ9Q{!N~ ze}?&EcRr;ncv+tmfppT#;S;5CiGOC5;T`*oV;>h%Nvy=4ymV2V`TXBr0C^|IY}X9u zzso>=2_OWDkrqm@C7$69X2pF-a;O(J&DiygKC$@1shbn%{lJ1etAV=zT&hQvuX@B6xqniN!%NbjD^>| zm)e*t@GTMhm%Zy%ax!q@V4~A1%qwLm)Wplvo$@gj2(R(sc!3Qhs0AZ}+4#-4{0Gb0 zczwagDtH6InS__=AfPO$r$cjCufw?S?(@ngv4z+^+EoG}A)OmD(f-EuG%$>HwrFF= zw15LSd@zz@6fB}94%X&M9cHELHY<~7wX5vrv419em}8+u1?;E}M{=m*&Wjn>1XYi= zK`XRqk%KqY`3zN3YleQ`6mbn^b_JfL#Lt^IVCo|e)JlonPJcxTQ7YewVff-)0Q{4L&$$e( z1QFii9qol&`=bdSDGL)9O`?s*%dcyq@SXwWXWpfISwK2i?-RsvFl|=wv^KUK?@FXr zH+E!o{OjQ*-%@A0zHK~vg(a2Yn(qLj7ah-%TfU)Dm99a@~~YQN*pkemD&PhRBqR^4y4 z`Mes+AVLx&DJlyBq<9q?=k&%HNaV7i%>h;?^@qH!8Qt2hLMvsKlPr6x0n*@&YxJlr zu<({JnwiJ|*z!KPN#8Egvx_PN=HN7y200TEo8he`hzbB2KvGwtHFaiUyIWvnuaIbn zY_*x5%0@B}Re{s}Xnw6w4)huWXB6Shi4Pazr3wJnw3OU3hl70yhC&n+if^pPL_C=< z0SEQS*X}LiQNlM25)`wDc<6ll+{WLadO`mC-K0ZvoOZicd8=}LuH_L&gW#Kc0#Xz# zGlJ3u_*C%c1V3_@S}Vs3Wt4oDKUMV!Ih#f9gdx+|K{sY|`0!!Q>@W9M5eC+&s)U#I z3oy#@@JZYu&BIde%~9bTDybVoq0CCQT4p);;qryUCa*>s1&=2@jA6uH$(HZGyT;h7 z@@^wb@K9?&^26R^z2<(OTdLnfOZQmM8jF=@$Zzv-xd^EnZ66>@IJ! 
zHNKJ+GL%ZqFV>5O+|OqIn9$DQdo8DfBP7+2DuNpLlkBytRU#tCep$c$Q|&n8q@EcR z+|gefd3)L{ayG#KfJges+?hHzHD#fPL9OouNmUQi369|915=Dr%MT%{GcD!9IRr^CDyK4R7h zl58(dH}#R|{MM%Erj#Bv`gI+JqnE@l&yFA9L8)^9DV(W5YKh+^Ow`pBq_@MI2e{so z{nsnOdd5KZxYPft5&te7zkeZTY52|D-KfPCV2Xy}U^*fmczD}b$CsmuG zk?vCcc~Tn+blq$wdSs3C-=nF1EBPWoScmKDK*OJJw~ZoN^z}oJXd32cQP-7^+fx45 ztv;^}x6qX1+oXiwJU_1}WI60IZumDWOtjBPnHMIo#$O`OS^_x_7-2 z0QGp1XmIwwZg=%AxRxl@VWwZ-lYiE6V9vIKSDXHx9QNx){q-|M4#Kt2)I|MkUH(7! z_3=LRYsCoPJ-zLVGXOt|g$C;BhnRjLUd-~~+9!C$ZBf8PG-wv^XSUSMQc zApY3UcM>Ib0A=SoB!8A0pvdi60y@;aVs}fT7I^1ObrPZ& zQIehj-urd1H8WKfF==1|%N$vwaBmIWW&O?1q1Ajb|6Ic_f3iNZ>y!39!Xrcn_S_`K zW0iUaI{aPfKpgKnIKfvO3%y*2m%G@E9F|}8s$3aDJOGbnwn#s9U%CjinUO@-`n-D42( zHUuXEtuqb5QJ^p|gr)Hu4BF(DhW;zUI$wxYrGCv^=SPX?yAoem05xy=225>BBglYGDp@q^H{Ym@Jf!FV;eSE>maN9BWff)f_o zk-^6I{3ekRB9!<=Gn>HhhwcY@4$1fmx5K1gEs$39IIN%fIpY7g%h4yOnOX0C&paCT zLgrC3WN@-J*Cu;B;f$CcIaZiJJ&pKHNk@}5SYt+$Ef}`P(>w)l5k_I_SoT0_<#If>S_IBy}f`S|^U zoodqK;84j7v{}G2i9YvBFexUv$T>CdESb_nNNV^Z}HjOIYc?6T=?>}r>7>Jgyt2Be) zd+Sn~JB^@4ysEmF5x6Wj_vnzs$zECgA_yE*#;?k`olQpm&(xk=>;O#581yDWx-$rd zOv&u-56)g67Wf?|wKOd?@YY)!GoB{Kr-b+Y$F010ERgMtVzBT=^V3b*_9)(Tmy)UmntG!l_l>0&I zM5jcxNhf_`^2J_;GSJ+cPp1m0O(=1mYTjw?T!1Vz@E0dogiHI3vf=R!o$a&blo$Nvw3Q2d1c)9JP87l=$Q&*&!-nT#D8!qlP2S zx|Opw^t>nRkpm852T5EJ7pZ(hD7!f@rY^=vzbfMuEoDAOBI&6)#IuBMgqy+41-pKB=<(vDh-UQ|8P50Z<;Z%SzV-QX2wKr>1I{XF==X;HhI z=<)l;-TY^IIWnGGLbK@EV9eYvK6CMkDD%l`_N_h<`3c4TyWS3si{_Vu`48pU6#sUq zWtfOKVqxd zt4H+sZKEFc(a?`mB~d#fSksMP$QmJK_(NqpQ_3tvsA1|$gV`{}z6Avq}d-j9qqto7qQ z-2cVw(2?5@W0^R>lln^-)EF%OXZE+w{`5^kRt^)4<+$Gj&o5^u2G+lwoQ3~ick|<) z1Wv+OBwAyB`JOyI3|VCJgY56-_aE;fV*rvxl18=v>Q#Xy1Zzj&HPx@Cdn^RTvR}-e z=2x==(#Hq5#~ip^gqJ4SokZoAY-o8L1L>CMFZBbQqY!Ji9EYFR_KqwQY-kQmBgAL- z%rtMTRmslB*z?c5Vd!3He6wu-6>P0QXlr|gD6ijAef+iH~;ZI2MT5RE{{E{3L5%l>#%iAWw! 
zbn#NVS)qHkJMv#Cv_AU%YLuSBhY)^?6h;YT5}W1y-7lQaD_RyFgVQW zwFbtyCuMMVmfeH=as({nNLzAouBt=b9G726lYGIcp=kc{=jLShrMG(uZqf56k+)+i zgI4FF=H_D#t6S%Z@T8lt4@H_wg;A`TN4F`zz6|ppl1gG#_63FPKQDt3hy=uAy@+NQ zBxphBTBcbLb7vS(3&^QjUv^*1KXmHF_N%(-{kFp2JQT(~zKA6<<;PW^z(e9$c5>~0 zWgWD{O%R1$r0L&s^-Iy?N<>0x-afQY;l4%^DsY|l_6UuC1nyF5ArO9`*Dqd`G^wUn zO{WDXIufB;wE;0;7BnNtb0@ya?!P_A8sg4;9=n@hLK{=QsW_hHS@22hn$g;IF0wc4 z4S9BReA<2aFL<*QKIK@%xf5(Wim5AI<#)Jpkp@UpzHT=Qmt9saG?^cquP(`1#;Cu_ zacrP#X6iwTZMDycaK_Wn=++5IuYou#QaB$Ea<+xA-;HU;Bl5?Y;NFdh{t`!&I8k3b z|M&H4sG^VP<#Klg(Ft8ZpY5OA=?kb{EofL}!RkPWDU-hRZ6wH~`3@YZ?Riy}HE;R) z#M&2uKxRqR2P%PFWiyYA)|RFvuZD@NRBLYD>MYdnxOuQN?fqj%KIdgC zEG~$zb9OeVzI3tx098ZpYAeIi`-O#hm15m zpYr$C1kO=4s{*ZlmEd#o3w6gjrFwgoqmI9MATqoVt@6g(tEle?@&3zqmIsf=EUdcH z&Xs?N&T@*pwHANaT&q{#!J>FyTw&M8bHiqV4dJi6V{e=-W~eOpOxb?JY=i&6V`s0x zbt}&?BXLbWg)r0IV9(zZxvDgOb-iqGCX`!mqcJCt=0n(Zgg51)Jao z%1!o{cxOkui6VN&YBM*=|(_Q1E;9t0Zy9 zgYULG2oW>u!~!lv77#B06fwo=J45)ZHBxnkFi4AZ-n}LwZ%3gNIRKy^IE8>wW)TX- zrs?y^lF2x8K(O1^bX=46tX54nAg5w8B;$Y-?M6|KN!yFAM2X#J!71K7dvtmW$EA^@ zEkFzE2-6HC4i%?m_v7y0UmaD)baKP}mA!0@xGEjB1U%5Gefpm8$R^IdC zWQ;xN;F+B-YESzF#Mv&SF5y=Wi??KZb9sha6FMX^%%JSGB<;M{vtn-%;wUW)dUxFn zp7YE24!*h--tv|gG}&m64Lj_HBC`m%Ti*oTi9*E8k8?Uho~ACNB?v+e$3euD9AS>8 z)BwFo2m{&H4PXTg+#W%s`{q_kZo~^Y4)Z)+ajx8q^#n?9;ps(0f*HYJSqb5vwtzE1 zHi$Gj%O>HZ$*z$n4q`H|(09 z-dq@?i$|8Sj{6zXa(Z&Vyxo2w?SaC33vuU2d*dQX&tGd^lU#GPZp$rP?c?KkZoX!v z5T%0g^M2JSY!Ghwy2h_{IgD|N<_Pf%3XjT}ZK*-f8`y+qnN7-5JNvk|fE76saGChS{I_32p=l75t?8*(;EoM@D()z`=-s1G3qTv0DiU-If4>dez))b)= zT7M*6six{(I{8k(Q{YzMXSxgCNpxjwp;)+r8=a9c0M))76RAz~ppb7(>WR)I4lkD%0_bNhlKU z=P$Q3F1W8*V0UWOELuWhy4ImAA31TQEhc;ffpL|NJ(ZqCJh$FWA`*bb>915ar>iT4 ziSIQGXUHR+eh{3zNAbb_+k-})biGB2ta9r%vuB(Vmorqa(83kCnaufymlf4y`=A`R zpogNT>YBEH+J~5~i#HO0D=Ms40D@KicW~$UT^2vrPQE!KR(os^aYwEauGAjUg1vhV zTS+L_8>6RM1h0tLoJ%=JrnsZU#pVbjyBT_bGM){i;>d3gSMeNvAt9p^Z;jjAohr|O z>3N+*?r?aOx9zrK6Lkh7&z6SD@a&0KGMCIss(p7&HPV*25Q(;*emf4c)a zuI){StFqWw8fWeX@h$7C+D3_7%Y^L(LQD}G&Bhw^+l!6rp0}=UE`9}cggBFU(W1k4 
z<8BBY-IJ~|`#J8pw@vwKG!@6&EQZG^T3$xx?wG85R`^lw?*54m%Z|CVPU-#_X{{bX z2K1beK816Z!R8$S$`&a)qm9;J%m9^_gW7M-S$FV3c_rmq#YPf-j6CIh36}e){e+J` zmQ!&*!Aq&^qrp0r7nHm~GpQG~6qq?>aLQh;34f6QwfEtC0!yZ{cyT{Vmd#G%Bz?ot zdVM)*|JQ=IU##`kTdGa@AA7g$Gy9S zwMv(Z6xyYZ6r8^t-E+@8=(T~$Hb0k_;8EWUJ{!9OKD)58Sbgu|!W?e~l95-MFSc87 zj_OiA#~#8|X)=LNnxTv86W zQv&h?3ZX)y%jNe zz@&iiz^rh>Oo5-Ae>Co+4I=8AmDl_R)QbB;gF4+jY5B@id{j;uQ(53rtdD*R9`h|b zm(b=5*Ko9Z1XHOcH^+Fb#+|YpDIaHhq5!g#bbajwgDJrK9*V zPPKO%iaeU(PMP&criWjhO%qj(*XvyQaDzASsmiGn{^>NU8KBvA0vj=u3)Ru$mePt_ zA*j3zr6&|zM!=d@*95JWEW^6z4?U*og8z`Q*pOh9_m4RErbF>$JaN)g$FAKYiS<6Z zG{m0SY@v3{MzZK>5{I%;Yq68|$t(-+B9wBIPV-2;l8$|2m02Lhn(Aa_ChBB; zpe#^b;i$#R^87F=y*RL{(r?)n_9 zELNvW9<*ws@Cl3N4A;I`9Ol|}tEo3^ z2z_ejMhg30nO9y;*XNyTPJXAIX_*mbx8U3Lj#P#QiL3R9rFMyQX=h9;bin_fvZPwWz3s~WOpS!fq7_H3ZRC2;m)xCu=kYKl(IK6ApZc8K zo+b2d`xCgXbEG}SyO&XBadi>w1u|KBsf^qMnw8uwbJkw2HfTewmDdy6Jso|)Hw@eT zHW=mUgopcfkMut$kEXLLt`?a5)X^3)=oiAtJoh*%*`WEsG0(?S=AUqkelkU|&uK2m z?IUW`5IdvY?Z>#DoIq^(l0?kn4x@`)2Z1D7Y1_}v;=o1YLXs!eFFi{I$2mP5pYCyE zcK?EC`_>8(jZm~jWnLZk>sWc$=zTF$CH?8<+Y)p28SguTM=J44!wz`M>wUMO|5X53 zR!*fb*15hh-BW^+c9J*0&FmQ`i66e)rMvwYG+Q0yS2d)SJ&s!~w_CrD9#v9yPcsz? 
zRa}*g&)uTFioc-xyW)D<(`>SfNHl*A@&&2VlgNLW#NtQV_%(GgUxU|I%@!FvLjMbFx0uZsj4z zvAp-Ox$1$>t*3a-#6Ep3Oxwsi-CtDs(!*Txlp%K?ia3d%%7+}v(F zO|Nf!D%9z%4^7TKe%E-I#7L$lC5V1iQfR=w@J#%LnNNXSE4!PG}_R+lCE|NYpnHuQQEy`{9#037xMaK z6M-L;|K>!R+zX0UwmM}*?@KP!56`Zsm`tVkZJuMkvDvD2-WLmd;II@ z{#TW#H6Rl1X`f`5ojjPi@rtu-zFSY3?U8@@bP1=yjC2et7dzR0tr4+~_qQhdymFYX z*>o8CZp8vzr2uN%UjR+OYa1%;i%GPD^~9AEjgg~;t*$E?T#9;mqj_l)A6Is!uiY1TQk zu{0F9NvVe02ExTkS0gfp>;{HLDfX3==ldNVlbgR}S5cgyKiGCSPskxwO{`>l>wUTUgmVo7cGjx;v4ZW2{v$w;BH%Y6O|% zJ3w_5L~YJ`CRxA7rCfMrmKx`lp_NL}>|HTQuYRN^+~zWG0p-(Smfo5RMoJYvFP4N5 zTNmjJOwFpJb*h zBg=A#BRFG+QUm3#3~QR+G#hSR_o>7HZIfw9H9oVBT1$8xd%tMpTl5aIH3c5y{-#M`S#MFGQz?Ifg_5e>ohEiXbc3Yp+ z%b=jLnJG4>n_Sq$xj}0R{HreE4FOX2JB|dQB;RPvt80lD2jyeA1U()--1Qp=@8qra z`cre^mhM^3Tr>7k2O9k)ZygDIt+#%3WKzga;A{Y)tam=*)u+&>I>(8fQ#&}!Cto&~ zOc%*IG*^AeVOrfmMncF{ysB=D_1CYc2@WGRV(~i|^dKtdOS%T8eanJuV(GDS#fkF| zYmbm;F^?NV6~*?QNnOqDty@P1is;`tW>}jUl=5T-6Qmt~a&Ci#kHJkMJ%lGHCtnKp54$;&sygMPCj1*Q~ z6-0+|sy^w~?`Nu~-unL5)Z+c3Ee!DizNpRXKt}Q%S?HbyR?RJWZy0kL9LjxV;fe|Hd9i#l*e5iA14I~);E2a zRzuP-*u00yDap1+C0YPimk6HD$wU!_1@|dAU#1*#J(TJld~7K-}uApu3h$O zUKMuH_zK$(!$g%w=`Ib}n`N}N+zEl`%x*f4h5V>z{k_vt(QIf{Mxa>V?!HU@#a@jNRm9ebvz%R;raT&dQT;Y zE`qvALr!%mMKO*si^A)}qtxfLA~Da{wToMuN?%aC`xd1A`Pz}iohd|O3_*1gN$gat zxI(8nO=<@|iwLFb&72e>GxpKjb-d+gM8Vf;Ui?yML}yxyi!3vTH^Ae~+g@Cal2yr` zMD!IPEz@$jYY-Kvq&L;Ka_oYd_tWW2zmBY}a~YU6kC#wvxI89Z@ER9y5-Dy(QqIOs zTjBg*&ERY0Q<;e7YCMCMgGX974%-4{JG988gREEd3Vi{`)+m;ovpwD%Y3~_roOs_D z%pWd187CTjb2&@%zze=yl@k%>_R;LUWer&>i-G7&%}L$Vi`+o$JxJ6`cZ>eu{T~=g zAa!vPq}8y@WSp~C`s_+aFv-+hO)~V>@8J72>k@X&Pns8&JjtAiuCMcQ*BveEkj4ER z5g8IBnsYaTEXst`F#IiqLM0NJdu<&w`ey}MYQ8edzip2Ft%P4I=qtg(*N8T-KS^w- z>a&AeeM8`Dh4shX0yvTpOrA{p>tA6SNDTv^YVVY0C-Y6-{=Z}SuRE$m0BFfy1Mo}Zs35vuz~@96g`h!` zClPA!%S)#Kwvk)5Cg@)W@#lYp&_O4s_kaBB|IONWbIRBbzZjWfAo+{wK2W>g_eHO* z!k^Rl_6gf75W#0&{ZwN0<4>|OBH-D^)w4|R8&|bNcYnPy*;7$S*2zDO+pzz51UAHE zR{|p3$^n>rJTxK3BFN7E!jHfIuOVGS`eoPdum1QO|9avfNl1ILzNzlJN%+SjWPAe4 
z{{++b8m)h=fv-P_1_2Rwf|Z};-$>B^cOl;^aODSgZv_A23%>iS02>S?X&`3ruU7vd zq$PXk7P4LbHNYSIG?oTK`Tw89DN@bKo7KAz5#K^>TPJ!UEx|^ypRZm!Sp+EsB+N#H z$OBx-Cf>V`V_ceJ1o#X-K7KrF*V8{gRNfCQm)n5Dxj1W(>yoWf-IiuyX$2r2Tjb$X<%J4OC=+dfivsALUfJrn#Jxj@!56J-gNxhU1eG zGyU2Tel6lPi;zi`@QsZqfHUR;k;2Hhn`hGlItP3}hs8kA&(KwkPRLI5AGQ2{&MD;$ z1j$y3n>kCZ&j$ce#1P@|X_;3D=xAS|cr(yPG@xKOhgf>ebY_;WLdubcxVwNiPE)lySo8&l#Ib+!wzG-tETtpIT0|QW}a*%t%3Sa4**gu!8x}8 zN=to+&miDd3!qr92}BKT8<)>-cXrnRK1p+=${$hWL*yrgm@YbJ9myO<*wCTuT3rw} zUo2{XyBy@5D#$_fhB&sOg+0-TT>za{H+1iYOXeIK5uZX~#5R-tS)#ZcNemhERV}8% zajAn1W@he&F;!=s_Eu(>=1_N ze5I(^1Tb8Ua?97E$ZNOkE?0o`69=&;w1l*MQWj;xQ#=vQEz;zf_Bac@U4WxqeuDu` zxQS*22G=J-*pNrsI8YwSA1iWeu&{4B!O%U^X;*L;C%K>s_8a92k%q!C(M_fiv z5DQqp0wim7Tba69?v5=4(bhfy1%giet@$nBfjb7Xwa{20RYEXCYEXH>TPD}-(RP)( z#1ZU;6A|#*!z1i7K_izPyke^>yG8+TH|62r<)7k~EzEZ5u@zEuB0owMW2VO|aCW^M zu@pqOyv(ZAo6|JFt)8!aUBFRZah7!+$heqieqA|Iqei9`?~>cl)`I?OHq z#!VCTIszD~)jq;zs$O_wQsEv#?a3|87T&iUU2lMK4iHT_YE~qrqTJP$qthRFpkpf- zAhWNa{9dvH5%}t=^!7GyP4GqBgn2$GC7mn?^pg)&?@W^}2-kJ1yKLqjnBV~oSJ@;Q zs`@jO(kXkc`;J3Nf09TPJR%g{rY7(R-hV7?T;?TdO^wGP?YfZ{s0TCX4vN5zj@oJj zEQd6j;J#Jh|pjX1d<^)zi*8k5FZ7 zi=9*IfgP7HS$QWmryevXGw=r1s`zL@Z;x8dY%IgCi-y;%m_ICNa&iyS0W-MwVO})7 z^66aZ5HDe=ISDA)ydaH$T~`Y^oZnp}Ip8jMw?eP)w;rT)d$f zP1j0)e|UDFgt9M`L#HoPN7jkpf|twpMoP^0B|>95r{n@rGPw>JQVkh8ZqvzD`KHhg z)ZWle?qqv}0nY~xwNv`5NsEfi(&Op&8E4J~cIV3_sqhM}B7EXt8m0^b#KAOq9?ePf zF497}aDgTg)3Ay=R2J>G^yb^-;2+224qrd^5Ob!9?sxYEp_bio?H!a)YXIT+z>Mrm zhezv#E^*~nnsn2#V^%bY=_v==`9q&}ZiUi#M)Xcdr<^YJF7smP(@Vs>O&H6VDD0@M zMF@(*>!0rwmzr`&6~G>ZDr_@R<82B!mU&fAdIdA(NqHa2okUWhcUG5e5ZA`QYlzjE ztO(nlm?vS86&T@9KR-Q!_WEjHzo(X?IgTQj66*m~U{D^i)`9!tdKc6%9UeW?)&UeV zTPDJlWe|bPPcMx(;7_Q>sfoo@$qe&6v^kEw)o6Ux9XxSoyYyCnmQZo8e)BkPb0h z&usMY7YGrDJ+YI&|2*D?>fl&?OauY;iR%b%{fS+o<{~sBN?q9lNHD6#Ye?ROa30i1 z5+@bKu9fhz9?RTdH@!4E*QP{%FU~6(ZB?mlmYXnS_zLiF?Q-04xq)Pko zATj}7H-&f6s7uEA>1A0`SwWyn7HX{OUB#|j2};wZV3Oy!83a9#riEj|l~O|)l-U!n zI!t6jtp~v>S}32wLAfp6%N3T3ST-lm^dpgyPL+?k_Iwy}4X6b)WLuqd 
z95TtB>b_2Q@-S@^@c32UjX+<~^QGJSt&Qcu7!Uz5p$?H4R#Er!5iCl{ z##F?}4mQsaT>)mN?IN=t0CC6}OW(FtTfQ;Fzzc|PBmRd9Hz!;t<1lh<0K|3HZ4ydX zyi3Ns8Z2IMO8L#oY+j3#r;2E*4=;L};ex@cR(~uoC9LdMWeExDiQ@jX^JIFCv1IzY z_tA{yY(ACdu*E=}HZ?J&4Y=Y`4xOS2bcZXY*$l%A2U(hO*sPt=t*>nsgKg{hyaz|T zWn#d!s$VDBl(+8_fz_nbajIf#6RK|qvh?`}+;%my1!pFxd-lJwO~O1IKCL)hjbHqu zb4qz-C*IWw!8~&&-pyS|{9KuBdG;s8MA=c%kj3O9^!=( z1`%u$iacX9@|^#U9o*U1uZXF1f{`q6lT6!1BFt7&q!P>wUs%JzpO1)+;+bX2^%1rQ zPIdkZ^ry|Ey4?~(cm`Hvx0joRS_+Ai#H*PMU7}s^YWFh^s1R&kTI0@6krzQ2+PkV# zv%kghOgN&W6IiUGpVmfXCK!y$n{i=l)6CT^uN|Qrz%6!hBI^gQjO{70zug{e{bpH~ zpdYGR3VwyM!(-|qNfMHt%6ixv$-d>b>pg;2Zl3B2c&N3#6!3&LysQXv93(LAF=;mT zX8~trTaoQPo)bDrTV&$x_qQP4?6-XvZf!z3)u4;J5WUS2o;JOpkFbaOtxDP3QX(}M zGG(?#83;Ds0gWAEK|Zlt;d^QM%^Zle*~7nAM{vE)s4m|4nCQ4;hvIqXFC#`PQlVMx z@?}SI9n{L^lETmCKcd#ZR}|Q3O%e|2hlNWXPfI(d6cvf$53H~Ni1*6x7T!7#d_~yb zyeB{VTjS!J#pQt+_hO2#+7?TGJ{8ZCo{r#NNSCEt7_tsw63CwLh|71||Nejly^1?U5jdg#HgitJ?6U4M z740oL7TtIUUC#{`i?l;CY)v_wjZ+QcWPLmf`P*gP)DbT#T_^_`nntocSDX)f_MX`~ zl>y)dA^yO-ov$fp??uZvdg)B2r=MEn4JfSNfmzr~b0*E5Q+keJp+()Z zxDas(Sy!1B?hb!jvHm-G|ErsKc=uO?ygXEb6ADH}Wl(H4TmgM?$MFZNS5JG2$yh>4 zXH_|MVY7{;W7ueggpJ^QanN8@V>9!3#n4&YrZ0-fY!I*@h}Y%(rEPNE*JWF-4*U9a zyN8n`DJs`PY9OwUn12+NBTw*dEpe@7s zY~u(@U@||um5`q)^(AxW{!lyG8LY)+1#Zk($7(Hb6Q4>A*I9w}Ypp5cM&`5RKYVFT0r@2u`6osi=b>`|*|%ZR)OZ#wYZ(E=CvOj#!oG zo;|6e;5z8oT_Q1PTSLc@-&s(va%$3X8$tl|4CRIH^2Hjp9h*`Ls#WSni}<#+iWhgg z&R8h*q~C!>8CuQ4XtQN_QQb%O+={<^R5o$=D66-JSLn9W|`8pyjQ% zdTLkXdbeGxfP);b=abLdn+w5N-Gs_*dY37$sY(W%weJuM8jB7G4xim}fteniWR0@& z@C&UIMc85Y7LpY*Jnz0c^O9~0(s*yR{Mxs-Qj2%|p;cNsN&tLm(3o&Kx| zgsc*gk-%FqO0k%f!XtJj%c;1XV4l@*I5I!RIuP+o_f zDdLPesZy`c9;Femd)d#|#Todxqg+uxO=*pWir;m~uqd5Ka?oH_Ap{ld}o>F6PeZ9*{HbnYs%@<@V%?}93 zN1KX1=>)G!kVVXUz8nqELJe$rO_y`7Zg+Y!tax>;YA5Pg@FT}a@PIT=y8kuw4TOiF@-qCa_`=Ij3 z0q)+>bdL-_QIc~NF`43Qv3eL4Gqof_hYDJmxk^P&m6-&;p;oi!d_fDMDkX)^C`+4g zS0hbIv3o|dQxH(zn;gYXbV6<&iTzxw>Sd!ZyIDf#I~+GcuD#3cW7uBY&HqrPMOAGD zte_B)odtscwm++XiDGsN1X=-oki$$F@)H?ZqjD5}}?eY}*N8F4((X@*@h>og;& 
z?x|j;Co`>S(lb9Ln*lp8#(jYDxl>@L3Wwg#!up4}3vb+F)6Qh6#H&mcLb+*vd&s7G zpA~;-34UOAo!) zb0sH}e!%rOc91pdNnr2)v25N*iY9$jVOxeW8h>cPitO^?jE$1rs{;WCqEflg8%9y> zTxtzdUmW(5oSo+NKSLvs3X!w2himth%?*QlMWrCOGl$q)E}LguNw2yXXs5y&ndTq- zOgEmA&+O$4kfL?&Cn*(@wNN&uufPixzIS&x*i*?kxD^eBmv>&f$iT>Ue0f~4;0fx( znUkHYS9n7SE3&>;Wv;*RqS$U4G=2yA=)Y#qV>u%IM zC^~m6z-1FRzJ2ry%sI!_QLkY$*A>iS;dV)xV{YdPfq!se3L8=7=-+6z&rj*?YfZIc z|KV_&x-7*PWb;2L4ZR(KDmQ%&YT!*rAjs^~F2ryJo$uBx4i*u(fzC1zUur8}<+<*g%$4hB}Ku%?chJ&!|O z!5s{mo}<+bPm_-dMwsF`9#hcp1Nfhnl{1Nm8IAwUWVQqO)2-w~>1N>8fbTH41_NUk3Qxe zs1?y;Qx!uG<&kmmTMf9>mW52OLJclS5`>cy^X3EQ3sChR&8A&9TKPQoy$*UZ^v3Y` zr+z)m*7@HdKip)=(`8DWFA&$~y=->DllcFG>Y}`rNXPAF#=GlbD^Krmt$0l?7XV_&dbxmEPlen3JVq{b4WPNu- z76V0)nR-VvIr#zvXk&Ao`4aC!@`T7N4Y4oA&;5A)@oY0|W(U-#+kHuy9n8 z+;`8p)x!rVqDPN{&w@xk$YTf|+GSSym|lK!YQXHvM2d=@3Th3otUUf9!lI9bN=K6v{ULq}j_y~q->(Zw3W{2hko8PXu+B2`Q_?~i zzR)!8YSZl!wg~AC1|eL`7hUs}7g8=6Co`*1dTyQ8xuViod8P?SX|>-{d0j@Z(ZX%V zAI>CIS^FDK#=7T-+&%j)lbY;f2alk#Z1?1(mIJwXC1pmYN3M9m`FETATl7jMY@?F& zuJ1!rI?K|+we24CYu!xn)7A;VaYG%OSE=GU%V-4&Rh1HB%@mo`XXdEYWrv@5L(Np?wk-8juE5e-a2C2L2g z_hFBOxy>iF>|45fMX;NaB3~UGeYBDcsNDFWbz^y`0^7E^Nb`Mj>HaAN)^$nrV+!m0 zjwi$ZI9qJ)lAEZ+qn3;$+Znna`3W4y?h*?Y_qBO@5aZFO%bY;-GRzs|u99jV4T*G_ z2UaLWB)87kL``VNTy`&v-sBdvshhIp?q^v@y$}oKziTdQ>Ni)( zs;X0aXL5yM2^UB?lpY_&(jLCKin)MGYkLKHZf?Bj^x!wE_(xvZbDkoOO#$u<)sw^U zX1&}pbtw@^_0tyVHg_psQx>USxN}99jMI^D>)74k*rDYto5h;@jxFs%&Tbe={h4jt z!JH9G)mv&w9=yYuL(*-sn`T!sIVHeG;q1VR;Ph^?PF}%cpu$<*wac=+@x2zCtGp#!7ub+Ml)9!12w3?_j=xkqFX`T|i-{N!dixF6pf=w}3dgY@71$DS zzM4+roe^-?_Hc2t*MHX+KVAYmt>k?9Wc}Z|n6*kfn(R1=& zFmxHc0X{IXuqnClw|oEYy0DU2ZD5cj^I>*k-|br8ZenBBfE3yCBuxFPZ%hKT&AhjR zN*tu$zbJpd1K%wKg;BY{Q2s5X>$@s^!dqxqA1T(VC;36t_V>FEsYEoZvG^qH-~aX7 zVSL@1DKElUh@7Q=<~{NCHkOLuL2g%gul&1F$k##o5&?%;bu|d<-fTX(QO1M zGK>u#`o*JG_zbWP@&Wd7zwhSbo6O@t?JzvR|MB*HFP&5XDIaj5R;QmisPk`3<^6i1 zg!y!DLSvm84~zWAl0UA&T1$k88iL(yFX#%9Eg9KaeslBx8=Asf5=7^Ff{aRkW?01j z4HcQ-lR|0%SEuG*5|mw~zST!J1buXKg=<+x0z&iLyP!w1X$j^v0a+S4P 
zb1wcFNC&szunX1z<6eFAqtxy1)61A5niKjg7B=9o^j^@HJMcP+pcAhN9lF6b z7=kA)B-eH6K!`;&$1&i{hxBs&qwegc;2Y#5Y1RbWTvuv8fC*gsREmHSLKOzkj^K;E z`Bj_)f`eII=c(3r_dv-N5g@`tar?n+&94OeUXa+GdarVHtyO}eqyicRAN@mNK=w?L zN5G@9i86#PoaW6{XF<-gW(v>8<0m&k67)vo1*6%!hvi-YFn5UZ7MsH*kj)%?nT4h% zQ8Sm?f34T7S&KGL5?w7{*(SJg7r zbK^ibqrtxp5z6gH?SK2Ln-?c4Idn@7{2Bung`iq$@%gQ9;{3}?onr}PJv2NfpO2VO zCZqO)iP9H75SP)Sc^(Bml(Dfczdv~5samxHjTgH_jYmh=v%qf+(PY4UN|rW^o?;=J z^jPo8yDd*WcGWtmn=LP6IR_-GDA6+mA0AQ5w@X@rFSMZg8?_{9`T35zwd63>&ij(t z5NZ6Mmbf9}bSUVE>Fa%0+j{H6LsM+A?6?im!qTenDCta7tPVWgX>zT=(4uNLJ-<~9 z`LRxl0*K@KE(33)f3>Nc6dwh?WbUi+gsmE>P&O*fXG<#GC#$Cv^Uw@P#RuW0s_hap zj_oVRcE`@If!uXT>qbLTs+SzbkkL;@_#m+EZk$R}_r`mTN+NEZfPBB+aq|%FAM2kWxIELp?q^BK5M+ zdU$AV(mUN^)@SE$1JWStJC&-GNrtcT)P8--A0bvdJqw;0*zQ3nvdl+2hXHS2Vy?)V zZplL!D{c|alO%Y%JJZfgN7U@x(!3s*a^Tba$2Z(*iYGW*$^7pcot%@qTWfHW_3D$< z7yF(a;;dCxJTx9yW7uWja`W1r?11O$5~!0MCM{S^Y;QjyraJN%_p&)o+p*bmn6SA` zAXE}2(Ln}RIm(QtRJ=wx^9hUH;En{0xf{d~)N zDP~ch9XpALNe?i7{{vU}+avaaJt4WbO+PgnZ$aMSeys!yvDd1|Ik#riPcKR{x!``RcmtyPS#p71dZ?Dt^BFVL z?51}~XgR6<#;|V8U=C49-#H<&z0neOnZ>&7%g19aWBwxYO%;vhz%F^Uz`g-H$BK_# zS3s(68C+=-58|po9|0ca&~2q5uu*8Y-r$X%4)7ALTgovdr{!W1=Pw^_*lfepoX5rU4O3Z`;0D&}iaUc8=Q(#H$-sZ+qi`LOSKn%>h1amZfur z?VEhY>D~E_yJS9xZC2Z5;y=Exoc%2_?DFw=c+)HPS`LMeYqDsC1$`g#bj5W=q7Q=_* z7KGP&4EmwUI9LI4NnYOT{NYVD-WR<9)q&@EXQUbSp-H9{AXQFz=Q`a1dWL1>!&1wE zrIkiz0G-nf!Zz%Mh2)%NuNIOcH<8_7KVDbl82(X>VS~Ab6=>rAbj6$|Kd;;hOJ(>? 
z_wUdd9=I*%1w?6f?`6|HYxly++1l^vTe*1Vj2_rAQF`GwhX@iNdXzS~NLQ6Ya+Yos zTqDWgnQwHxvvDiN0=nFm6z5s=2nCo~yg)vf1ZEpm){k&Xe@rpj7^rLIk*+Xa8$ZCf zKmPpj8op}tYTJm!sLu`qLEua$jN#tAF@aAvFzL-(20)sFfx)0NitTtXd9!Bp84BsvsEg1A-x8nfSWQ!}` zvoT>7iH~#wh#fZBFfYbp+X_9MqA_d-#p|_WPeHZMn1VZxdcv?FtNC1Tw2@_ZvXRrM z1jS_@e0Z{X`vuH}hN9C~=roOUjMXMC64TA&yUt6aQv$;u(^ayZ-eBKDi+JUJ$@Zdu zbT>xgKn$bc+!IOq>6$Nb(yW~9_!ywZp{Or6tPmZO~eDP=>qW*j*ztN=eY5JbS0mmL_u<@~P zicOU6zr51AjYs^A+a7GCV!jP1tc*8po#8;0l3 z%QZA+F+SGQctd;G>Fv(lD|jLJf|;=44LBQCeQXgq%GBT?gW$_(M;xQ{ILHa)u}hB_ z2RA~ysh}JR4wevbcviNaYj6kj+sb7Dc&rQa8}NX!!|_J8?fV7BLc%nXA{|}l3c3qn zTxG-FfO>OljVzZBFtQKmjp}`*@~b7h=X|%!TvEh=IXrx6&e@$ejW?PKJEp9gE;iCX zqt><&YY={_q3IN^<1{Tc{L0Wwn7d_6C~sj5949_C3u3auj)nHhb9?W(benwF;WEVg zVh+kf3kAle-H$lXCg}MQ!@8%Z*rn@FAH~C+_k$0Ho4#O2#fOf@I!9B`JCGxkb;OCD zXTLcLf`ff60WAyE%j?eX4@$0hiXL7wjvSbkj~utnW4Wz){Llr&Vizx%CwzG8^hvpf zxG>v#`WT7Qk$bd#urGE4U~H7$cz?^OlTAaZ64LrnP92Jp`#1#-{bX@OFf9h8Yxvve zmI{u38OHjbtHi-eQaBqKq2XPr-k|$L1LqiRS!jx^{gs++sP9-tgho;?x5IAHUa@!f zRm|K<^M)ndOjJK=Ptdla~P~d zx`&|f7gUufaYEZ=!eQ-*@T7XremDkCuN;O5+#DW`4{8`^V7I6lq51 zPOL}n^VChew+~P2Dg2W$Oq>a^u^sr>UA^HEJ^??X9|jnIZbn&2Z;h#M&&B?IWTHNA3B-3JP(=TJ z(-7?>!>=TxCC>lscqb)2ANbQRA(q-hdS2Qw;MUJZcHqD*n%{nUndgTN+zOl7|KUgR z*Z=L+jEwJjcK!4+tf!!+(n3<*j!4IPMP51uvG73^K!5Ko75t(`?B6c@--7w^8vbv={L32tZ^8VxVE(p$z Date: Tue, 28 Mar 2023 16:40:51 +0100 Subject: [PATCH 005/346] fix: dev docker images base (#387) ### Feature or Bug-fix - Bug-fix ### Detail The latest version of dev docker images for FE and BE no-longer has `amazon-linux-extras`, this update changes the based of the docker image to use tag `2` (which is consistent with the rest of the images) instead of `latest` (which is a bad practice anyway -- see 2.4 [here](https://sysdig.com/blog/dockerfile-best-practices)) By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- backend/docker/dev/Dockerfile | 2 +- frontend/docker/dev/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/docker/dev/Dockerfile b/backend/docker/dev/Dockerfile index cf819b399..486c42c0f 100644 --- a/backend/docker/dev/Dockerfile +++ b/backend/docker/dev/Dockerfile @@ -1,4 +1,4 @@ -FROM public.ecr.aws/amazonlinux/amazonlinux:latest +FROM public.ecr.aws/amazonlinux/amazonlinux:2 ARG NODE_VERSION=16 ARG NVM_VERSION=v0.37.2 diff --git a/frontend/docker/dev/Dockerfile b/frontend/docker/dev/Dockerfile index e39a321d0..aa29c376c 100644 --- a/frontend/docker/dev/Dockerfile +++ b/frontend/docker/dev/Dockerfile @@ -1,4 +1,4 @@ -FROM public.ecr.aws/amazonlinux/amazonlinux:latest +FROM public.ecr.aws/amazonlinux/amazonlinux:2 ARG NODE_VERSION=16 ARG NGINX_VERSION=1.12 From e01e01412f05233dda6be20f730550fd3bc1620f Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Thu, 30 Mar 2023 11:44:50 +0200 Subject: [PATCH 006/346] Added missing groupUri from get credentials (#391) ### Feature or Bugfix - Bugfix ### Detail - Get credentials access token was missing groupUri input variable, as a result all users appeared as Unauthorized ### Relates - #389 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../Environment/generateEnvironmentAccessToken.js | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/frontend/src/api/Environment/generateEnvironmentAccessToken.js b/frontend/src/api/Environment/generateEnvironmentAccessToken.js index 61cf5ed71..06b8f6912 100644 --- a/frontend/src/api/Environment/generateEnvironmentAccessToken.js +++ b/frontend/src/api/Environment/generateEnvironmentAccessToken.js @@ -1,12 +1,19 @@ import { gql } from 'apollo-boost'; -const generateEnvironmentAccessToken = ({ environmentUri }) => ({ +const generateEnvironmentAccessToken = ({ environmentUri, groupUri }) => ({ variables: { - environmentUri + environmentUri, + groupUri }, query: gql` - query GenerateEnvironmentAccessToken($environmentUri: String) { - generateEnvironmentAccessToken(environmentUri: $environmentUri) + query GenerateEnvironmentAccessToken( + $environmentUri: String! + $groupUri: String + ) { + generateEnvironmentAccessToken( + environmentUri: $environmentUri + groupUri: $groupUri + ) } ` }); From 9057116265f4c6e422318fc8e627a6ebe5cdfea5 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Thu, 30 Mar 2023 13:47:36 +0200 Subject: [PATCH 007/346] 388 race condition occurs when adding folder to shared items in shares and errors out (#392) ### Feature or Bugfix - Bugfix ### Detail - The creation of S3 access points is asynchronous and can take more than 5 seconds to complete. When the share managers tries attaching the policy to the access points it fails in certain cases. This PR replaces the waiting time of 5 seconds for a while loop that checks that the access points has been created and if not it waits for 30s ### Relates - #388 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../data_sharing/share_managers/s3_share_manager.py | 10 +++++++++- tests/tasks/test_s3_share_manager.py | 5 +++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 7c70b2d6e..1323770a4 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -12,6 +12,8 @@ from ....utils.alarm_service import AlarmService logger = logging.getLogger(__name__) +ACCESS_POINT_CREATION_TIME = 30 +ACCESS_POINT_CREATION_RETRIES = 5 class S3ShareManager: @@ -196,7 +198,13 @@ def manage_access_point_and_policy(self): ) access_point_arn = S3.create_bucket_access_point(self.source_account_id, self.source_environment.region, self.bucket_name, self.access_point_name) # Access point creation is slow - time.sleep(5) + retries = 1 + while not S3.get_bucket_access_point_arn(self.source_account_id, self.source_environment.region, self.access_point_name) and retries < ACCESS_POINT_CREATION_RETRIES: + logger.info( + 'Waiting 30s for access point creation to complete..' 
+ ) + time.sleep(ACCESS_POINT_CREATION_TIME) + retries += 1 existing_policy = S3.get_access_point_policy(self.source_account_id, self.source_environment.region, self.access_point_name) # requester will use this role to access resources target_requester_id = SessionHelper.get_role_id(self.target_account_id, self.target_requester_IAMRoleName) diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index f534e29af..53c7f426b 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -657,6 +657,11 @@ def test_manage_access_point_and_policy_1( return_value="new-access-point-arn", ) + mocker.patch( + "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", + return_value="new-access-point-arn" + ) + mocker.patch( "dataall.aws.handlers.s3.S3.get_access_point_policy", return_value=None, From d191b26a60e595f0e64a4bfcdfe1d295d528c47d Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Tue, 4 Apr 2023 08:29:24 +0200 Subject: [PATCH 008/346] hotfix: Revert PR on custom url quicksight embedding sessions (#403) ### Feature or Bugfix - Bugfix ### Detail In PR #380 domains that are not custom domains are not taken into account. Reverting changes back and will continue that feature on the side. ### Relates - #400 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../dataall/api/Objects/Dashboard/resolvers.py | 7 ------- backend/dataall/aws/handlers/quicksight.py | 16 +++++++--------- deploy/pivot_role/pivotRole.yaml | 2 +- documentation/userguide/docs/environments.md | 9 +++++++++ 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/api/Objects/Dashboard/resolvers.py index a44800502..799354207 100644 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ b/backend/dataall/api/Objects/Dashboard/resolvers.py @@ -7,12 +7,6 @@ from ....db import permissions, models from ....db.api import ResourcePolicy, Glossary, Vote from ....searchproxy import indexers -from ....utils import Parameter - -param_store = Parameter() -ENVNAME = os.getenv("envname", "local") -DOMAIN_NAME = param_store.get_parameter(env=ENVNAME, path="frontend/custom_domain_name") if ENVNAME not in ["local", "dkrcompose"] else None -DOMAIN_URL = f"https://{DOMAIN_NAME}" if DOMAIN_NAME else "http://localhost:8080" def get_quicksight_reader_url(context, source, dashboardUri: str = None): @@ -39,7 +33,6 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): region=env.region, UserName=context.username, DashboardId=dash.DashboardId, - domain_name=DOMAIN_URL, ) else: shared_groups = db.api.Dashboard.query_all_user_groups_shareddashboard( diff --git a/backend/dataall/aws/handlers/quicksight.py b/backend/dataall/aws/handlers/quicksight.py index 54ca9ad5e..c468296de 100644 --- a/backend/dataall/aws/handlers/quicksight.py +++ b/backend/dataall/aws/handlers/quicksight.py @@ -234,7 +234,9 @@ def register_user_in_group(AwsAccountId, UserName, GroupName, UserRole='READER') return Quicksight.describe_user(AwsAccountId, UserName) @staticmethod - def get_reader_session(AwsAccountId, region, UserName, UserRole="READER", DashboardId=None, domain_name: str = None): + def get_reader_session( + AwsAccountId, region, UserName, UserRole='READER', DashboardId=None + ): client = 
Quicksight.get_quicksight_client(AwsAccountId, region) user = Quicksight.describe_user(AwsAccountId, UserName) @@ -243,16 +245,12 @@ def get_reader_session(AwsAccountId, region, UserName, UserRole="READER", Dashbo AwsAccountId=AwsAccountId, UserName=UserName, GroupName=DEFAULT_GROUP_NAME, UserRole=UserRole ) - response = client.generate_embed_url_for_registered_user( + response = client.get_dashboard_embed_url( AwsAccountId=AwsAccountId, + DashboardId=DashboardId, + IdentityType='QUICKSIGHT', SessionLifetimeInMinutes=120, - UserArn=user.get("Arn"), - ExperienceConfiguration={ - "Dashboard": { - "InitialDashboardId": DashboardId, - }, - }, - AllowedDomains=[domain_name], + UserArn=user.get('Arn'), ) return response.get('EmbedUrl') diff --git a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 3dc29385e..601d30f70 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -593,7 +593,7 @@ Resources: - "quicksight:DescribeDashboard" - "quicksight:DescribeUser" - "quicksight:SearchDashboards" - - "quicksight:GenerateEmbedUrlForRegisteredUser" + - "quicksight:GetDashboardEmbedUrl" - "quicksight:GenerateEmbedUrlForAnonymousUser" - "quicksight:UpdateUser" - "quicksight:ListUserGroups" diff --git a/documentation/userguide/docs/environments.md b/documentation/userguide/docs/environments.md index 9c18cbe44..9aabe13b6 100644 --- a/documentation/userguide/docs/environments.md +++ b/documentation/userguide/docs/environments.md @@ -77,6 +77,15 @@ Enterprise option as show below: ![quicksight](pictures/environments/boot_qs_2.png#zoom#shadow) +After you've successfully subscribed to QuickSight, we need to trust *data.all* domain on QuickSight +to enable Dashboard Embedding on *data.all* UI. To do that go to: + +1. Manage QuickSight +2. Domains and Embedding +3. Put *data.all* domain and check include subdomains +4. 
Save + +![quicksight_domain](pictures/environments/boot_qs_3.png#zoom#shadow) ## :material-new-box: **Link an environment** ### Necessary permissions From a17f12ac5a2b83bc6de2a8bd91a8b6a80e97706b Mon Sep 17 00:00:00 2001 From: Noah Paige <69586985+noah-paige@users.noreply.github.com> Date: Tue, 4 Apr 2023 02:31:53 -0400 Subject: [PATCH 009/346] 401 shared dbs worksheet list (#402) ### Feature or Bugfix - Bugfix ### Detail - Fix Worksheet View to only show shares to a environment-team specific to the team's IAM role (not consumption role) ### Relates - [#401 ] By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- backend/dataall/api/Objects/Environment/input_types.py | 2 +- backend/dataall/db/api/environment.py | 8 ++++++-- frontend/src/views/Worksheets/WorksheetView.js | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/backend/dataall/api/Objects/Environment/input_types.py b/backend/dataall/api/Objects/Environment/input_types.py index 0b87eec63..d47cf8f22 100644 --- a/backend/dataall/api/Objects/Environment/input_types.py +++ b/backend/dataall/api/Objects/Environment/input_types.py @@ -98,7 +98,7 @@ class EnvironmentSortField(GraphQLEnumMapper): gql.Argument('term', gql.String), gql.Argument('page', gql.Integer), gql.Argument('pageSize', gql.Integer), - gql.Argument('uniqueDatasets', gql.Boolean) + gql.Argument('uniqueShares', gql.Boolean) ], ) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index e41386024..1d8c0e68e 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -19,6 +19,8 @@ ShareableType, EnvironmentType, EnvironmentPermission, + PrincipalType + ) from ..models.Permission import PermissionType from ..paginator import Page, paginate @@ -871,6 +873,7 @@ def paginated_shared_with_environment_datasets( models.Environment.name.label('environmentName'), 
models.ShareObject.created.label('created'), models.ShareObject.principalId.label('principalId'), + models.ShareObject.principalType.label('principalType'), models.ShareObjectItem.itemType.label('itemType'), models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), models.ShareObjectItem.GlueTableName.label('GlueTableName'), @@ -941,8 +944,9 @@ def paginated_shared_with_environment_datasets( or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) ) - if data.get("uniqueDatasets", False): - q = q.distinct(models.ShareObject.datasetUri) + if data.get("uniqueShares", False): + q = q.filter(models.ShareObject.principalType != PrincipalType.ConsumptionRole.value) + q = q.distinct(models.ShareObject.shareUri) if data.get('term'): term = data.get('term') diff --git a/frontend/src/views/Worksheets/WorksheetView.js b/frontend/src/views/Worksheets/WorksheetView.js index 52f1c6121..c5e2a2ab8 100644 --- a/frontend/src/views/Worksheets/WorksheetView.js +++ b/frontend/src/views/Worksheets/WorksheetView.js @@ -161,7 +161,7 @@ const WorksheetView = () => { page: 1, pageSize: 10000, term: '', - uniqueDatasets: true, + uniqueShares: true, itemTypes: 'DatasetTable' } }) From 6460986883637b2024615fa8ed5a64c07f3eafc0 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Tue, 4 Apr 2023 18:38:04 +0200 Subject: [PATCH 010/346] Fix sharing update (#404) ### Feature or Bugfix - Bugfix ### Detail When we import a dataset stack, if the S3 location was already registered data.all does not create a storage location. The issue is that for datasets where data.all needs to create a storage location: 1. the first time that it creates the stack it detects that there is no storage location and it creates the corresponding CFN resource 2. the first time that it UPDATES the stack it detects the storage location from 1. and it deletes the CFN resource 3. the next time that it UPDATES the stack it does not detect any storage location (it was deleted in 2.) 
and it creates the CFN resource again. To fix this behavior, in V1.5 we will use Lambda custom resource to check the storage location and avoid CFN resources. But for previous versions, this PR includes: - in the method that checks the existence of an storage location, we filter by the roleArn of the location. If the roleArn is the `dataallPivotRole` then we assume that it was created by the dataset, which means that `existing_storage_location = False` I tested locally but with actual stacks being created. No additional policies are needed ### Relates By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- backend/dataall/aws/handlers/lakeformation.py | 8 +++++--- backend/dataall/cdkproxy/stacks/dataset.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/backend/dataall/aws/handlers/lakeformation.py b/backend/dataall/aws/handlers/lakeformation.py index 703b117f7..03234c939 100644 --- a/backend/dataall/aws/handlers/lakeformation.py +++ b/backend/dataall/aws/handlers/lakeformation.py @@ -13,7 +13,7 @@ def __init__(self): pass @staticmethod - def describe_resource(resource_arn, accountid, region): + def describe_resource(resource_arn, role_arn, accountid, region): """ Describes a LF data location """ @@ -23,8 +23,10 @@ def describe_resource(resource_arn, accountid, region): response = lf_client.describe_resource(ResourceArn=resource_arn) - log.info(f'LF data location already registered: {response}') - + log.info(f'LF data location already registered: {response}, checking if data.all registered it ...') + if response['ResourceInfo']['RoleArn'] == role_arn: + log.info('The existing data location was created as part of the dataset stack. 
There was no pre-existing data location.') + return False return response['ResourceInfo'] except ClientError as e: diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py index 4ee53beb1..cd5fbb4c7 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/cdkproxy/stacks/dataset.py @@ -417,6 +417,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): existing_location = LakeFormation.describe_resource( resource_arn=f'arn:aws:s3:::{dataset.S3BucketName}', + role_arn=f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}', accountid=env.AwsAccountId, region=env.region ) From 3a5e0dedb94b9f83a3fd9331061fd927422d13f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 11:50:38 +0200 Subject: [PATCH 011/346] Initialization of dataset module --- backend/dataall/modules/datasets/__init__.py | 18 ++++++++++++++++++ .../dataall/modules/datasets/api/__init__.py | 1 + 2 files changed, 19 insertions(+) create mode 100644 backend/dataall/modules/datasets/__init__.py create mode 100644 backend/dataall/modules/datasets/api/__init__.py diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py new file mode 100644 index 000000000..67298a06e --- /dev/null +++ b/backend/dataall/modules/datasets/__init__.py @@ -0,0 +1,18 @@ +"""Contains the code related to datasets""" +import logging +from dataall.modules.loader import ModuleInterface, ImportMode + +log = logging.getLogger(__name__) + + +class DatasetApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for notebook GraphQl lambda""" + + @classmethod + def is_supported(cls, modes): + return ImportMode.API in modes + + def __init__(self): + import dataall.modules.datasets.api + log.info("API of datasets has been imported") + diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py new file mode 100644 index 
000000000..13cf9331d --- /dev/null +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -0,0 +1 @@ +"""The GraphQL schema of datasets and related functionality""" From a50a02f39b05d64d1e6bc410afe956a66eeb5602 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 11:55:30 +0200 Subject: [PATCH 012/346] Refactoring of datasets Moved dataset table column to modules --- backend/dataall/api/Objects/DatasetTable/schema.py | 2 +- backend/dataall/api/Objects/__init__.py | 1 - .../datasets/api}/DatasetTableColumn/__init__.py | 2 +- .../datasets/api}/DatasetTableColumn/input_types.py | 2 +- .../datasets/api}/DatasetTableColumn/mutations.py | 7 +++++-- .../datasets/api}/DatasetTableColumn/queries.py | 4 ++-- .../datasets/api}/DatasetTableColumn/resolvers.py | 10 +++++----- .../datasets/api}/DatasetTableColumn/schema.py | 4 ++-- backend/dataall/modules/datasets/api/__init__.py | 5 +++++ 9 files changed, 22 insertions(+), 15 deletions(-) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/__init__.py (70%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/input_types.py (94%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/mutations.py (78%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/queries.py (74%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/resolvers.py (94%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetTableColumn/schema.py (93%) diff --git a/backend/dataall/api/Objects/DatasetTable/schema.py b/backend/dataall/api/Objects/DatasetTable/schema.py index dc1cffcb4..0436c0f2e 100644 --- a/backend/dataall/api/Objects/DatasetTable/schema.py +++ b/backend/dataall/api/Objects/DatasetTable/schema.py @@ -1,4 +1,4 @@ -from ..DatasetTableColumn.resolvers import list_table_columns +from dataall.modules.datasets.api.DatasetTableColumn.resolvers import list_table_columns from ... 
import gql from .resolvers import * from ...constants import GraphQLEnumMapper diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 060f2ba6e..43d5e0833 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -18,7 +18,6 @@ Environment, Activity, DatasetTable, - DatasetTableColumn, Dataset, Group, Principal, diff --git a/backend/dataall/api/Objects/DatasetTableColumn/__init__.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py similarity index 70% rename from backend/dataall/api/Objects/DatasetTableColumn/__init__.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py index dfa46b264..691b10331 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/__init__.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py @@ -1,4 +1,4 @@ -from . import ( +from dataall.modules.datasets.api.DatasetTableColumn import ( input_types, mutations, queries, diff --git a/backend/dataall/api/Objects/DatasetTableColumn/input_types.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/input_types.py similarity index 94% rename from backend/dataall/api/Objects/DatasetTableColumn/input_types.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/input_types.py index 24fbbdbca..745e7f271 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/input_types.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/input_types.py @@ -1,4 +1,4 @@ -from ... 
import gql +from dataall.api import gql DatasetTableColumnFilter = gql.InputType( name='DatasetTableColumnFilter', diff --git a/backend/dataall/api/Objects/DatasetTableColumn/mutations.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py similarity index 78% rename from backend/dataall/api/Objects/DatasetTableColumn/mutations.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py index 012d83ea7..f7b682e3d 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/mutations.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py @@ -1,5 +1,8 @@ -from ... import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.datasets.api.DatasetTableColumn.resolvers import ( + sync_table_columns, + update_table_column +) syncDatasetTableColumns = gql.MutationField( name='syncDatasetTableColumns', diff --git a/backend/dataall/api/Objects/DatasetTableColumn/queries.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py similarity index 74% rename from backend/dataall/api/Objects/DatasetTableColumn/queries.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py index 4f5f05646..0a08e37b6 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/queries.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py @@ -1,5 +1,5 @@ -from ... 
import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.datasets.api.DatasetTableColumn.resolvers import list_table_columns listDatasetTableColumns = gql.QueryField( name='listDatasetTableColumns', diff --git a/backend/dataall/api/Objects/DatasetTableColumn/resolvers.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/resolvers.py similarity index 94% rename from backend/dataall/api/Objects/DatasetTableColumn/resolvers.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/resolvers.py index 88bf2c728..a7a1bf5f4 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/resolvers.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/resolvers.py @@ -1,10 +1,10 @@ from sqlalchemy import or_ -from .... import db -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....db import paginate, permissions, models -from ....db.api import ResourcePolicy +from dataall import db +from dataall.api.context import Context +from dataall.aws.handlers.service_handlers import Worker +from dataall.db import paginate, permissions, models +from dataall.db.api import ResourcePolicy def list_table_columns( diff --git a/backend/dataall/api/Objects/DatasetTableColumn/schema.py b/backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py similarity index 93% rename from backend/dataall/api/Objects/DatasetTableColumn/schema.py rename to backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py index d571fc9a6..8772e99b7 100644 --- a/backend/dataall/api/Objects/DatasetTableColumn/schema.py +++ b/backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py @@ -1,5 +1,5 @@ -from ... 
import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.datasets.api.DatasetTableColumn.resolvers import resolve_terms DatasetTableColumn = gql.ObjectType( diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 13cf9331d..f79e9a30c 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -1 +1,6 @@ """The GraphQL schema of datasets and related functionality""" +from dataall.modules.datasets.api import ( + DatasetTableColumn +) + +__all__ = ["DatasetTableColumn"] From be1498689d48aa63aae3eba763635f8af800148b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 12:09:05 +0200 Subject: [PATCH 013/346] Refactoring of datasets Renamed table column to the python's convention format --- backend/dataall/api/Objects/DatasetTable/schema.py | 2 +- backend/dataall/modules/datasets/api/__init__.py | 4 ++-- .../api/{DatasetTableColumn => table_column}/__init__.py | 2 +- .../api/{DatasetTableColumn => table_column}/input_types.py | 0 .../api/{DatasetTableColumn => table_column}/mutations.py | 2 +- .../api/{DatasetTableColumn => table_column}/queries.py | 2 +- .../api/{DatasetTableColumn => table_column}/resolvers.py | 0 .../api/{DatasetTableColumn => table_column}/schema.py | 2 +- 8 files changed, 7 insertions(+), 7 deletions(-) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => table_column}/__init__.py (70%) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => table_column}/input_types.py (100%) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => table_column}/mutations.py (89%) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => table_column}/queries.py (80%) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => table_column}/resolvers.py (100%) rename backend/dataall/modules/datasets/api/{DatasetTableColumn => 
table_column}/schema.py (95%) diff --git a/backend/dataall/api/Objects/DatasetTable/schema.py b/backend/dataall/api/Objects/DatasetTable/schema.py index 0436c0f2e..74d413818 100644 --- a/backend/dataall/api/Objects/DatasetTable/schema.py +++ b/backend/dataall/api/Objects/DatasetTable/schema.py @@ -1,4 +1,4 @@ -from dataall.modules.datasets.api.DatasetTableColumn.resolvers import list_table_columns +from dataall.modules.datasets.api.table_column.resolvers import list_table_columns from ... import gql from .resolvers import * from ...constants import GraphQLEnumMapper diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index f79e9a30c..538df0734 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -1,6 +1,6 @@ """The GraphQL schema of datasets and related functionality""" from dataall.modules.datasets.api import ( - DatasetTableColumn + table_column ) -__all__ = ["DatasetTableColumn"] +__all__ = ["table_column"] diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py b/backend/dataall/modules/datasets/api/table_column/__init__.py similarity index 70% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py rename to backend/dataall/modules/datasets/api/table_column/__init__.py index 691b10331..070301f58 100644 --- a/backend/dataall/modules/datasets/api/DatasetTableColumn/__init__.py +++ b/backend/dataall/modules/datasets/api/table_column/__init__.py @@ -1,4 +1,4 @@ -from dataall.modules.datasets.api.DatasetTableColumn import ( +from dataall.modules.datasets.api.table_column import ( input_types, mutations, queries, diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/input_types.py b/backend/dataall/modules/datasets/api/table_column/input_types.py similarity index 100% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/input_types.py rename to 
backend/dataall/modules/datasets/api/table_column/input_types.py diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py b/backend/dataall/modules/datasets/api/table_column/mutations.py similarity index 89% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py rename to backend/dataall/modules/datasets/api/table_column/mutations.py index f7b682e3d..0fc5a7d87 100644 --- a/backend/dataall/modules/datasets/api/DatasetTableColumn/mutations.py +++ b/backend/dataall/modules/datasets/api/table_column/mutations.py @@ -1,5 +1,5 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetTableColumn.resolvers import ( +from dataall.modules.datasets.api.table_column.resolvers import ( sync_table_columns, update_table_column ) diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py b/backend/dataall/modules/datasets/api/table_column/queries.py similarity index 80% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py rename to backend/dataall/modules/datasets/api/table_column/queries.py index 0a08e37b6..2c29e94b7 100644 --- a/backend/dataall/modules/datasets/api/DatasetTableColumn/queries.py +++ b/backend/dataall/modules/datasets/api/table_column/queries.py @@ -1,5 +1,5 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetTableColumn.resolvers import list_table_columns +from dataall.modules.datasets.api.table_column.resolvers import list_table_columns listDatasetTableColumns = gql.QueryField( name='listDatasetTableColumns', diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py similarity index 100% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/resolvers.py rename to backend/dataall/modules/datasets/api/table_column/resolvers.py diff --git a/backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py 
b/backend/dataall/modules/datasets/api/table_column/schema.py similarity index 95% rename from backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py rename to backend/dataall/modules/datasets/api/table_column/schema.py index 8772e99b7..9730b70b9 100644 --- a/backend/dataall/modules/datasets/api/DatasetTableColumn/schema.py +++ b/backend/dataall/modules/datasets/api/table_column/schema.py @@ -1,5 +1,5 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetTableColumn.resolvers import resolve_terms +from dataall.modules.datasets.api.table_column.resolvers import resolve_terms DatasetTableColumn = gql.ObjectType( From 06f82ad80386f53780af735571f3b7eb66497594 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 12:16:34 +0200 Subject: [PATCH 014/346] Refactoring of datasets Added dataset module to config.json --- config.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/config.json b/config.json index e0a9d85d0..4aed5ef87 100644 --- a/config.json +++ b/config.json @@ -2,6 +2,9 @@ "modules": { "notebooks": { "active": true + }, + "datasets": { + "active": true } } } \ No newline at end of file From 38145ae637a2084c1bb198a1fa76a395bd1c39b5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 12:32:00 +0200 Subject: [PATCH 015/346] Fixed leftover in loader --- backend/dataall/modules/loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 95aa2083a..aa4a656d4 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -57,7 +57,7 @@ def load_modules(modes: List[ImportMode]) -> None: log.info(f"Module {name} is not active. 
Skipping...") continue - if active.lower() == "true" and not _import_module(name): + if not _import_module(name): raise ValueError(f"Couldn't find module {name} under modules directory") log.info(f"Module {name} is loaded") From f0e146aa2bf5cfb2156a369f9690adb9cfda9c09 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 13:56:04 +0200 Subject: [PATCH 016/346] Dataset refactoring Moved database table service --- .../api/Objects/DatasetProfiling/resolvers.py | 3 ++- .../api/Objects/DatasetTable/resolvers.py | 25 ++++++++++--------- backend/dataall/aws/handlers/glue.py | 5 ++-- backend/dataall/aws/handlers/redshift.py | 3 ++- backend/dataall/db/api/__init__.py | 1 - .../datasets/api/table_column/resolvers.py | 7 +++--- .../modules/datasets/services/__init__.py | 1 + .../datasets/services}/dataset_table.py | 9 +++---- .../subscriptions/subscription_service.py | 5 ++-- backend/dataall/tasks/tables_syncer.py | 3 ++- tests/api/test_dataset_table.py | 5 ++-- 11 files changed, 36 insertions(+), 31 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/__init__.py rename backend/dataall/{db/api => modules/datasets/services}/dataset_table.py (98%) diff --git a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py b/backend/dataall/api/Objects/DatasetProfiling/resolvers.py index 678a8cba6..c391a1a8c 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py +++ b/backend/dataall/api/Objects/DatasetProfiling/resolvers.py @@ -6,6 +6,7 @@ from ....aws.handlers.sts import SessionHelper from ....db import api, permissions, models from ....db.api import ResourcePolicy +from dataall.modules.datasets.services.dataset_table import DatasetTable log = logging.getLogger(__name__) @@ -97,7 +98,7 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): if run: if not run.results: - table = api.DatasetTable.get_dataset_table_by_uri(session, tableUri) + table = DatasetTable.get_dataset_table_by_uri(session, tableUri) 
dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) environment = api.Environment.get_environment_by_uri( session, dataset.environmentUri diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/api/Objects/DatasetTable/resolvers.py index 9ea811411..854b99de9 100644 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ b/backend/dataall/api/Objects/DatasetTable/resolvers.py @@ -13,13 +13,14 @@ from ....db.api import ResourcePolicy, Glossary from ....searchproxy import indexers from ....utils import json_utils +from dataall.modules.datasets.services.dataset_table import DatasetTable log = logging.getLogger(__name__) def create_table(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - table = db.api.DatasetTable.create_dataset_table( + table = DatasetTable.create_dataset_table( session=session, username=context.username, groups=context.groups, @@ -37,7 +38,7 @@ def list_dataset_tables(context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.DatasetTable.list_dataset_tables( + return DatasetTable.list_dataset_tables( session=session, username=context.username, groups=context.groups, @@ -49,8 +50,8 @@ def list_dataset_tables(context, source, filter: dict = None): def get_table(context, source: models.Dataset, tableUri: str = None): with context.engine.scoped_session() as session: - table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - return db.api.DatasetTable.get_dataset_table( + table = DatasetTable.get_dataset_table_by_uri(session, tableUri) + return DatasetTable.get_dataset_table( session=session, username=context.username, groups=context.groups, @@ -64,14 +65,14 @@ def get_table(context, source: models.Dataset, tableUri: str = None): def update_table(context, source, tableUri: str = None, input: dict = None): with context.engine.scoped_session() as session: 
- table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) + table = DatasetTable.get_dataset_table_by_uri(session, tableUri) dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) input['table'] = table input['tableUri'] = table.tableUri - db.api.DatasetTable.update_dataset_table( + DatasetTable.update_dataset_table( session=session, username=context.username, groups=context.groups, @@ -85,8 +86,8 @@ def update_table(context, source, tableUri: str = None, input: dict = None): def delete_table(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - db.api.DatasetTable.delete_dataset_table( + table = DatasetTable.get_dataset_table_by_uri(session, tableUri) + DatasetTable.delete_dataset_table( session=session, username=context.username, groups=context.groups, @@ -102,7 +103,7 @@ def delete_table(context, source, tableUri: str = None): def preview(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, tableUri ) dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) @@ -157,7 +158,7 @@ def get_glue_table_properties(context: Context, source: models.DatasetTable, **k if not source: return None with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, source.tableUri ) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') @@ -186,7 +187,7 @@ def resolve_glossary_terms(context: Context, source: models.DatasetTable, **kwar def publish_table_update(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: 
models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -235,7 +236,7 @@ def resolve_redshift_copy_location( def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): with context.engine.scoped_session() as session: - return db.api.DatasetTable.get_dataset_tables_shared_with_env( + return DatasetTable.get_dataset_tables_shared_with_env( session, envUri, datasetUri diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index ebeb1fca6..468268640 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,6 +6,7 @@ from .sts import SessionHelper from ... import db from ...db import models +from dataall.modules.datasets.services.dataset_table import DatasetTable log = logging.getLogger('aws:glue') @@ -84,7 +85,7 @@ def list_tables(engine, task: models.Task): tables = Glue.list_glue_database_tables( accountid, dataset.GlueDatabaseName, region ) - db.api.DatasetTable.sync(session, dataset.datasetUri, glue_tables=tables) + DatasetTable.sync(session, dataset.datasetUri, glue_tables=tables) return tables @staticmethod @@ -642,7 +643,7 @@ def get_table_columns(engine, task: models.Task): f'//{dataset_table.name} due to: ' f'{e}' ) - db.api.DatasetTable.sync_table_columns( + DatasetTable.sync_table_columns( session, dataset_table, glue_table['Table'] ) return True diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index a6a02f9e7..a1f479417 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -9,6 +9,7 @@ from .sts import SessionHelper from ... 
import db from ...db import models +from dataall.modules.datasets.services.dataset_table import DatasetTable log = logging.getLogger(__name__) @@ -446,7 +447,7 @@ def copy_data(engine, task: models.Task): session, task.payload['datasetUri'] ) - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, task.payload['tableUri'] ) diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index 01647c81b..19138f7d7 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -14,7 +14,6 @@ from .dataset import Dataset from .dataset_location import DatasetStorageLocation from .dataset_profiling_run import DatasetProfilingRun -from .dataset_table import DatasetTable from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index a7a1bf5f4..8d977403a 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -5,6 +5,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import paginate, permissions, models from dataall.db.api import ResourcePolicy +from dataall.modules.datasets.services.dataset_table import DatasetTable def list_table_columns( @@ -19,7 +20,7 @@ def list_table_columns( filter = {} with context.engine.scoped_session() as session: if not source: - source = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) + source = DatasetTable.get_dataset_table_by_uri(session, tableUri) q = ( session.query(models.DatasetTableColumn) .filter( @@ -44,7 +45,7 @@ def list_table_columns( def sync_table_columns(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: 
models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -79,7 +80,7 @@ def update_table_column( ).get(columnUri) if not column: raise db.exceptions.ObjectNotFound('Column', columnUri) - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( session, column.tableUri ) ResourcePolicy.check_user_resource_permission( diff --git a/backend/dataall/modules/datasets/services/__init__.py b/backend/dataall/modules/datasets/services/__init__.py new file mode 100644 index 000000000..03ef29863 --- /dev/null +++ b/backend/dataall/modules/datasets/services/__init__.py @@ -0,0 +1 @@ +"""Contains business logic for datasets""" diff --git a/backend/dataall/db/api/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py similarity index 98% rename from backend/dataall/db/api/dataset_table.py rename to backend/dataall/modules/datasets/services/dataset_table.py index 77ee515e3..7c46120c1 100644 --- a/backend/dataall/db/api/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -1,12 +1,11 @@ import logging -from typing import List from sqlalchemy.sql import and_ -from .. import models, api, permissions, exceptions, paginate -from . 
import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment -from ..models import Dataset -from ...utils import json_utils +from dataall.db import models, api, permissions, exceptions, paginate +from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment +from dataall.db.models import Dataset +from dataall.utils import json_utils logger = logging.getLogger(__name__) diff --git a/backend/dataall/tasks/subscriptions/subscription_service.py b/backend/dataall/tasks/subscriptions/subscription_service.py index 52aeb4e40..7b2a6d461 100644 --- a/backend/dataall/tasks/subscriptions/subscription_service.py +++ b/backend/dataall/tasks/subscriptions/subscription_service.py @@ -14,6 +14,7 @@ from ...db import models from ...tasks.subscriptions import poll_queues from ...utils import json_utils +from dataall.modules.datasets.services.dataset_table import DatasetTable root = logging.getLogger() root.setLevel(logging.INFO) @@ -64,7 +65,7 @@ def notify_consumers(engine, messages): @staticmethod def publish_table_update_message(engine, message): with engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_table_by_s3_prefix( + table: models.DatasetTable = DatasetTable.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), @@ -135,7 +136,7 @@ def publish_location_update_message(session, message): @staticmethod def store_dataquality_results(session, message): - table: models.DatasetTable = db.api.DatasetTable.get_table_by_s3_prefix( + table: models.DatasetTable = DatasetTable.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), diff --git a/backend/dataall/tasks/tables_syncer.py b/backend/dataall/tasks/tables_syncer.py index 04bafdfa5..5e6e8d48e 100644 --- a/backend/dataall/tasks/tables_syncer.py +++ b/backend/dataall/tasks/tables_syncer.py @@ -13,6 +13,7 @@ connect, ) from ..utils.alarm_service import AlarmService +from 
dataall.modules.datasets.services.dataset_table import DatasetTable root = logging.getLogger() root.setLevel(logging.INFO) @@ -63,7 +64,7 @@ def sync_tables(engine, es=None): f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' ) - db.api.DatasetTable.sync( + DatasetTable.sync( session, dataset.datasetUri, glue_tables=tables ) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 6c30e77ea..af27529d5 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -3,6 +3,7 @@ import pytest import dataall +from dataall.modules.datasets.services.dataset_table import DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -289,9 +290,7 @@ def test_sync_tables_and_columns(client, table, dataset1, db): }, ] - assert dataall.db.api.DatasetTable.sync( - session, dataset1.datasetUri, glue_tables - ) + assert DatasetTable.sync(session, dataset1.datasetUri, glue_tables) new_table: dataall.db.models.DatasetTable = ( session.query(dataall.db.models.DatasetTable) .filter(dataall.db.models.DatasetTable.name == 'new_table') From b039163449245ef6c079f3c277ce52e2a5cda579 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 14:02:44 +0200 Subject: [PATCH 017/346] Dataset refactoring Renamed DatasetTable to DatasetTableService to avoid collisions with models.DatasetTable --- .../api/Objects/DatasetProfiling/resolvers.py | 4 +-- .../api/Objects/DatasetTable/resolvers.py | 26 +++++++++---------- backend/dataall/aws/handlers/glue.py | 6 ++--- backend/dataall/aws/handlers/redshift.py | 4 +-- .../datasets/api/table_column/resolvers.py | 8 +++--- .../datasets/services/dataset_table.py | 16 ++++++------ .../subscriptions/subscription_service.py | 6 ++--- backend/dataall/tasks/tables_syncer.py | 4 +-- tests/api/test_dataset_table.py | 4 +-- 9 files changed, 39 insertions(+), 39 deletions(-) diff --git a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py 
b/backend/dataall/api/Objects/DatasetProfiling/resolvers.py index c391a1a8c..4b4684019 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py +++ b/backend/dataall/api/Objects/DatasetProfiling/resolvers.py @@ -6,7 +6,7 @@ from ....aws.handlers.sts import SessionHelper from ....db import api, permissions, models from ....db.api import ResourcePolicy -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -98,7 +98,7 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): if run: if not run.results: - table = DatasetTable.get_dataset_table_by_uri(session, tableUri) + table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) environment = api.Environment.get_environment_by_uri( session, dataset.environmentUri diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/api/Objects/DatasetTable/resolvers.py index 854b99de9..3e2b833e3 100644 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ b/backend/dataall/api/Objects/DatasetTable/resolvers.py @@ -13,14 +13,14 @@ from ....db.api import ResourcePolicy, Glossary from ....searchproxy import indexers from ....utils import json_utils -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) def create_table(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - table = DatasetTable.create_dataset_table( + table = DatasetTableService.create_dataset_table( session=session, username=context.username, groups=context.groups, @@ -38,7 +38,7 @@ def list_dataset_tables(context, source, filter: dict = None): if not filter: filter = {} with 
context.engine.scoped_session() as session: - return DatasetTable.list_dataset_tables( + return DatasetTableService.list_dataset_tables( session=session, username=context.username, groups=context.groups, @@ -50,8 +50,8 @@ def list_dataset_tables(context, source, filter: dict = None): def get_table(context, source: models.Dataset, tableUri: str = None): with context.engine.scoped_session() as session: - table = DatasetTable.get_dataset_table_by_uri(session, tableUri) - return DatasetTable.get_dataset_table( + table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) + return DatasetTableService.get_dataset_table( session=session, username=context.username, groups=context.groups, @@ -65,14 +65,14 @@ def get_table(context, source: models.Dataset, tableUri: str = None): def update_table(context, source, tableUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - table = DatasetTable.get_dataset_table_by_uri(session, tableUri) + table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) input['table'] = table input['tableUri'] = table.tableUri - DatasetTable.update_dataset_table( + DatasetTableService.update_dataset_table( session=session, username=context.username, groups=context.groups, @@ -86,8 +86,8 @@ def update_table(context, source, tableUri: str = None, input: dict = None): def delete_table(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table = DatasetTable.get_dataset_table_by_uri(session, tableUri) - DatasetTable.delete_dataset_table( + table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) + DatasetTableService.delete_dataset_table( session=session, username=context.username, groups=context.groups, @@ -103,7 +103,7 @@ def delete_table(context, source, tableUri: str = None): def preview(context, source, tableUri: str = None): with context.engine.scoped_session() as 
session: - table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) @@ -158,7 +158,7 @@ def get_glue_table_properties(context: Context, source: models.DatasetTable, **k if not source: return None with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, source.tableUri ) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') @@ -187,7 +187,7 @@ def resolve_glossary_terms(context: Context, source: models.DatasetTable, **kwar def publish_table_update(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -236,7 +236,7 @@ def resolve_redshift_copy_location( def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): with context.engine.scoped_session() as session: - return DatasetTable.get_dataset_tables_shared_with_env( + return DatasetTableService.get_dataset_tables_shared_with_env( session, envUri, datasetUri diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 468268640..88f68fc84 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,7 +6,7 @@ from .sts import SessionHelper from ... 
import db from ...db import models -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger('aws:glue') @@ -85,7 +85,7 @@ def list_tables(engine, task: models.Task): tables = Glue.list_glue_database_tables( accountid, dataset.GlueDatabaseName, region ) - DatasetTable.sync(session, dataset.datasetUri, glue_tables=tables) + DatasetTableService.sync(session, dataset.datasetUri, glue_tables=tables) return tables @staticmethod @@ -643,7 +643,7 @@ def get_table_columns(engine, task: models.Task): f'//{dataset_table.name} due to: ' f'{e}' ) - DatasetTable.sync_table_columns( + DatasetTableService.sync_table_columns( session, dataset_table, glue_table['Table'] ) return True diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index a1f479417..4d2591520 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -9,7 +9,7 @@ from .sts import SessionHelper from ... 
import db from ...db import models -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -447,7 +447,7 @@ def copy_data(engine, task: models.Task): session, task.payload['datasetUri'] ) - table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, task.payload['tableUri'] ) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 8d977403a..e5e7fd60c 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import paginate, permissions, models from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService def list_table_columns( @@ -20,7 +20,7 @@ def list_table_columns( filter = {} with context.engine.scoped_session() as session: if not source: - source = DatasetTable.get_dataset_table_by_uri(session, tableUri) + source = DatasetTableService.get_dataset_table_by_uri(session, tableUri) q = ( session.query(models.DatasetTableColumn) .filter( @@ -45,7 +45,7 @@ def list_table_columns( def sync_table_columns(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -80,7 +80,7 @@ def update_table_column( ).get(columnUri) if not column: raise db.exceptions.ObjectNotFound('Column', columnUri) - 
table: models.DatasetTable = DatasetTable.get_dataset_table_by_uri( + table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, column.tableUri ) ResourcePolicy.check_user_resource_permission( diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 7c46120c1..eeeb99f1d 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class DatasetTable: +class DatasetTableService: @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET_TABLE) @@ -107,7 +107,7 @@ def get_dataset_table( data: dict = None, check_perm: bool = False, ) -> models.DatasetTable: - return DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) + return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @@ -122,7 +122,7 @@ def update_dataset_table( ): table = data.get( 'table', - DatasetTable.get_dataset_table_by_uri(session, data['tableUri']), + DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']), ) for k in [attr for attr in data.keys() if attr != 'term']: @@ -146,7 +146,7 @@ def delete_dataset_table( data: dict = None, check_perm: bool = False, ): - table = DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) + table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() share_item = ( session.query(models.ShareObjectItem) @@ -210,7 +210,7 @@ def get_dataset_tables_shared_with_env( ): return [ {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} - for t in DatasetTable.query_dataset_tables_shared_with_env( + for t in DatasetTableService.query_dataset_tables_shared_with_env( session, 
environment_uri, dataset_uri ) ] @@ -235,7 +235,7 @@ def sync(session, datasetUri, glue_tables=None): existing_table_names = [e.GlueTableName for e in existing_tables] existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} - DatasetTable.update_existing_tables_status(existing_tables, glue_tables) + DatasetTableService.update_existing_tables_status(existing_tables, glue_tables) logger.info( f'existing_tables={glue_tables}' ) @@ -284,7 +284,7 @@ def sync(session, datasetUri, glue_tables=None): table.get('Parameters', {}) ) - DatasetTable.sync_table_columns(session, updated_table, table) + DatasetTableService.sync_table_columns(session, updated_table, table) return True @@ -300,7 +300,7 @@ def update_existing_tables_status(existing_tables, glue_tables): @staticmethod def sync_table_columns(session, dataset_table, glue_table): - DatasetTable.delete_all_table_columns(session, dataset_table) + DatasetTableService.delete_all_table_columns(session, dataset_table) columns = [ {**item, **{'columnType': 'column'}} diff --git a/backend/dataall/tasks/subscriptions/subscription_service.py b/backend/dataall/tasks/subscriptions/subscription_service.py index 7b2a6d461..bf7eded35 100644 --- a/backend/dataall/tasks/subscriptions/subscription_service.py +++ b/backend/dataall/tasks/subscriptions/subscription_service.py @@ -14,7 +14,7 @@ from ...db import models from ...tasks.subscriptions import poll_queues from ...utils import json_utils -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService root = logging.getLogger() root.setLevel(logging.INFO) @@ -65,7 +65,7 @@ def notify_consumers(engine, messages): @staticmethod def publish_table_update_message(engine, message): with engine.scoped_session() as session: - table: models.DatasetTable = DatasetTable.get_table_by_s3_prefix( + table: models.DatasetTable = DatasetTableService.get_table_by_s3_prefix( session, 
message.get('prefix'), message.get('accountid'), @@ -136,7 +136,7 @@ def publish_location_update_message(session, message): @staticmethod def store_dataquality_results(session, message): - table: models.DatasetTable = DatasetTable.get_table_by_s3_prefix( + table: models.DatasetTable = DatasetTableService.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), diff --git a/backend/dataall/tasks/tables_syncer.py b/backend/dataall/tasks/tables_syncer.py index 5e6e8d48e..7d2781ccf 100644 --- a/backend/dataall/tasks/tables_syncer.py +++ b/backend/dataall/tasks/tables_syncer.py @@ -13,7 +13,7 @@ connect, ) from ..utils.alarm_service import AlarmService -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService root = logging.getLogger() root.setLevel(logging.INFO) @@ -64,7 +64,7 @@ def sync_tables(engine, es=None): f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' ) - DatasetTable.sync( + DatasetTableService.sync( session, dataset.datasetUri, glue_tables=tables ) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index af27529d5..82548252b 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.services.dataset_table import DatasetTable +from dataall.modules.datasets.services.dataset_table import DatasetTableService @pytest.fixture(scope='module', autouse=True) @@ -290,7 +290,7 @@ def test_sync_tables_and_columns(client, table, dataset1, db): }, ] - assert DatasetTable.sync(session, dataset1.datasetUri, glue_tables) + assert DatasetTableService.sync(session, dataset1.datasetUri, glue_tables) new_table: dataall.db.models.DatasetTable = ( session.query(dataall.db.models.DatasetTable) .filter(dataall.db.models.DatasetTable.name == 'new_table') From b7922ed51c674e210c66a0bd298dae099c9eface Mon Sep 17 
00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 14:19:02 +0200 Subject: [PATCH 018/346] Dataset refactoring Moved DatasetTableColumn to modules --- backend/dataall/api/Objects/Feed/resolvers.py | 5 ++-- .../dataall/api/Objects/Glossary/resolvers.py | 5 ++-- backend/dataall/aws/handlers/glue.py | 5 ++-- backend/dataall/db/api/glossary.py | 11 +++---- backend/dataall/db/models/__init__.py | 1 - .../datasets/api/table_column/resolvers.py | 21 +++++++------- .../dataall/modules/datasets/db/__init__.py | 1 + .../datasets/db/table_column_model.py} | 4 +-- .../datasets/services/dataset_table.py | 9 +++--- tests/api/test_dataset_table.py | 29 ++++++++++--------- tests/api/test_glossary.py | 5 ++-- 11 files changed, 52 insertions(+), 44 deletions(-) create mode 100644 backend/dataall/modules/datasets/db/__init__.py rename backend/dataall/{db/models/DatasetTableColumn.py => modules/datasets/db/table_column_model.py} (91%) diff --git a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py index a6c0535de..cbde23f0d 100644 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ b/backend/dataall/api/Objects/Feed/resolvers.py @@ -2,6 +2,7 @@ from ....api.context import Context from ....db import paginate, models +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn class Feed: @@ -19,7 +20,7 @@ def targetType(self): def resolve_feed_target_type(obj, *_): - if isinstance(obj, models.DatasetTableColumn): + if isinstance(obj, DatasetTableColumn): return 'DatasetTableColumn' elif isinstance(obj, models.Worksheet): return 'Worksheet' @@ -44,7 +45,7 @@ def resolve_target(context: Context, source: Feed, **kwargs): model = { 'Dataset': models.Dataset, 'DatasetTable': models.DatasetTable, - 'DatasetTableColumn': models.DatasetTableColumn, + 'DatasetTableColumn': DatasetTableColumn, 'DatasetStorageLocation': models.DatasetStorageLocation, 'Dashboard': models.Dashboard, 'DataPipeline': models.DataPipeline, diff 
--git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index 801bd27dc..847f16ac6 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -11,6 +11,7 @@ from ....api.constants import ( GlossaryRole ) +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn def resolve_glossary_node(obj: models.GlossaryNode, *_): @@ -322,7 +323,7 @@ def get_link(context: Context, source, linkUri: str = None): def target_union_resolver(obj, *_): - if isinstance(obj, models.DatasetTableColumn): + if isinstance(obj, DatasetTableColumn): return 'DatasetTableColumn' elif isinstance(obj, models.DatasetTable): return 'DatasetTable' @@ -341,7 +342,7 @@ def resolve_link_target(context, source, **kwargs): model = { 'Dataset': models.Dataset, 'DatasetTable': models.DatasetTable, - 'Column': models.DatasetTableColumn, + 'Column': DatasetTableColumn, 'DatasetStorageLocation': models.DatasetStorageLocation, 'Dashboard': models.Dashboard, }[source.targetType] diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 88f68fc84..ca00a81f5 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -7,6 +7,7 @@ from ... 
import db from ...db import models from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn log = logging.getLogger('aws:glue') @@ -525,8 +526,8 @@ def _update_existing_crawler(glue, accountid, crawler_name, targets, database): @Worker.handler('glue.table.update_column') def update_table_columns(engine, task: models.Task): with engine.scoped_session() as session: - column: models.DatasetTableColumn = session.query( - models.DatasetTableColumn + column: DatasetTableColumn = session.query( + DatasetTableColumn ).get(task.targetUri) table: models.DatasetTable = session.query(models.DatasetTable).get( column.tableUri diff --git a/backend/dataall/db/api/glossary.py b/backend/dataall/db/api/glossary.py index 1616141c8..c6313e007 100644 --- a/backend/dataall/db/api/glossary.py +++ b/backend/dataall/db/api/glossary.py @@ -9,6 +9,7 @@ has_tenant_perm, ) from ..models.Glossary import GlossaryNodeStatus +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn logger = logging.getLogger(__name__) @@ -133,7 +134,7 @@ def link_term(session, username, groups, uri, data=None, check_perm=None): elif targetType == 'Folder': target = session.query(models.DatasetStorageLocation).get(targetUri) elif targetType == 'Column': - target = session.query(models.DatasetTableColumn).get(targetUri) + target = session.query(DatasetTableColumn).get(targetUri) elif targetType == 'Dashboard': target = session.query(models.Dashboard).get(targetUri) else: @@ -361,11 +362,11 @@ def list_term_associations( models.DatasetTable.description.label('description'), ) columns = session.query( - models.DatasetTableColumn.columnUri.label('targetUri'), + DatasetTableColumn.columnUri.label('targetUri'), literal('column').label('targetType'), - models.DatasetTableColumn.label.label('label'), - models.DatasetTableColumn.name.label('name'), - 
models.DatasetTableColumn.description.label('description'), + DatasetTableColumn.label.label('label'), + DatasetTableColumn.name.label('name'), + DatasetTableColumn.description.label('description'), ) folders = session.query( models.DatasetStorageLocation.locationUri.label('targetUri'), diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 1ce567c87..1ab4134b3 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -9,7 +9,6 @@ from .DatasetQualityRule import DatasetQualityRule from .DatasetStorageLocation import DatasetStorageLocation from .DatasetTable import DatasetTable -from .DatasetTableColumn import DatasetTableColumn from .DatasetTableProfilingJob import DatasetTableProfilingJob from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index e5e7fd60c..b958f2f7a 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -6,6 +6,7 @@ from dataall.db import paginate, permissions, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn def list_table_columns( @@ -22,21 +23,21 @@ def list_table_columns( if not source: source = DatasetTableService.get_dataset_table_by_uri(session, tableUri) q = ( - session.query(models.DatasetTableColumn) + session.query(DatasetTableColumn) .filter( - models.DatasetTableColumn.tableUri == tableUri, - models.DatasetTableColumn.deleted.is_(None), + DatasetTableColumn.tableUri == tableUri, + DatasetTableColumn.deleted.is_(None), ) - .order_by(models.DatasetTableColumn.columnType.asc()) + .order_by(DatasetTableColumn.columnType.asc()) ) term = 
filter.get('term') if term: q = q.filter( or_( - models.DatasetTableColumn.label.ilike('%' + term + '%'), - models.DatasetTableColumn.description.ilike('%' + term + '%'), + DatasetTableColumn.label.ilike('%' + term + '%'), + DatasetTableColumn.description.ilike('%' + term + '%'), ) - ).order_by(models.DatasetTableColumn.columnType.asc()) + ).order_by(DatasetTableColumn.columnType.asc()) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 65) @@ -61,7 +62,7 @@ def sync_table_columns(context: Context, source, tableUri: str = None): return list_table_columns(context, source=table, tableUri=tableUri) -def resolve_terms(context, source: models.DatasetTableColumn, **kwargs): +def resolve_terms(context, source: DatasetTableColumn, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -75,8 +76,8 @@ def update_table_column( context: Context, source, columnUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - column: models.DatasetTableColumn = session.query( - models.DatasetTableColumn + column: DatasetTableColumn = session.query( + DatasetTableColumn ).get(columnUri) if not column: raise db.exceptions.ObjectNotFound('Column', columnUri) diff --git a/backend/dataall/modules/datasets/db/__init__.py b/backend/dataall/modules/datasets/db/__init__.py new file mode 100644 index 000000000..104b49a42 --- /dev/null +++ b/backend/dataall/modules/datasets/db/__init__.py @@ -0,0 +1 @@ +"""Database logic for datasets""" diff --git a/backend/dataall/db/models/DatasetTableColumn.py b/backend/dataall/modules/datasets/db/table_column_model.py similarity index 91% rename from backend/dataall/db/models/DatasetTableColumn.py rename to backend/dataall/modules/datasets/db/table_column_model.py index f4fe1f7d6..4d3d7e009 100644 --- a/backend/dataall/db/models/DatasetTableColumn.py +++ b/backend/dataall/modules/datasets/db/table_column_model.py @@ -1,7 +1,7 @@ from sqlalchemy import Column, 
String -from .. import Base -from .. import Resource, utils +from dataall.db import Base +from dataall.db import Resource, utils class DatasetTableColumn(Resource, Base): diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index eeeb99f1d..873cbe01e 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -6,6 +6,7 @@ from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment from dataall.db.models import Dataset from dataall.utils import json_utils +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn logger = logging.getLogger(__name__) @@ -315,7 +316,7 @@ def sync_table_columns(session, dataset_table, glue_table): logger.debug(f'Found partitions {partitions} for table {dataset_table}') for col in columns + partitions: - table_col = models.DatasetTableColumn( + table_col = DatasetTableColumn( name=col['Name'], description=col.get('Comment', 'No description provided'), label=col['Name'], @@ -333,11 +334,11 @@ def sync_table_columns(session, dataset_table, glue_table): @staticmethod def delete_all_table_columns(session, dataset_table): - session.query(models.DatasetTableColumn).filter( + session.query(DatasetTableColumn).filter( and_( - models.DatasetTableColumn.GlueDatabaseName + DatasetTableColumn.GlueDatabaseName == dataset_table.GlueDatabaseName, - models.DatasetTableColumn.GlueTableName == dataset_table.GlueTableName, + DatasetTableColumn.GlueTableName == dataset_table.GlueTableName, ) ).delete() session.commit() diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 82548252b..a2fcb2add 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -4,6 +4,7 @@ import dataall from dataall.modules.datasets.services.dataset_table import DatasetTableService +from 
dataall.modules.datasets.db.table_column_model import DatasetTableColumn @pytest.fixture(scope='module', autouse=True) @@ -112,7 +113,7 @@ def test_add_columns(table, dataset1, db): .filter(dataall.db.models.DatasetTable.name == 'table1') .first() ) - table_col = dataall.db.models.DatasetTableColumn( + table_col = DatasetTableColumn( name='col1', description='None', label='col1', @@ -186,8 +187,8 @@ def test_update_dataset_table_column(client, table, dataset1, db): .first() ) column = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == table.tableUri) + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == table.tableUri) .first() ) response = client.query( @@ -208,7 +209,7 @@ def test_update_dataset_table_column(client, table, dataset1, db): response.data.updateDatasetTableColumn.description == 'My new description' ) - column = session.query(dataall.db.models.DatasetTableColumn).get( + column = session.query(DatasetTableColumn).get( column.columnUri ) assert column.description == 'My new description' @@ -235,8 +236,8 @@ def test_sync_tables_and_columns(client, table, dataset1, db): .first() ) column = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == table.tableUri) + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == table.tableUri) .first() ) glue_tables = [ @@ -298,10 +299,10 @@ def test_sync_tables_and_columns(client, table, dataset1, db): ) assert new_table assert new_table.GlueTableName == 'new_table' - columns: [dataall.db.models.DatasetTableColumn] = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == new_table.tableUri) - .order_by(dataall.db.models.DatasetTableColumn.columnType.asc()) + columns: [DatasetTableColumn] = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == new_table.tableUri) + 
.order_by(DatasetTableColumn.columnType.asc()) .all() ) assert len(columns) == 2 @@ -315,10 +316,10 @@ def test_sync_tables_and_columns(client, table, dataset1, db): ) assert existing_table assert existing_table.GlueTableName == 'table1' - columns: [dataall.db.models.DatasetTableColumn] = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == new_table.tableUri) - .order_by(dataall.db.models.DatasetTableColumn.columnType.asc()) + columns: [DatasetTableColumn] = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == new_table.tableUri) + .order_by(DatasetTableColumn.columnType.asc()) .all() ) assert len(columns) == 2 diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 157c6cd2c..8276dca8c 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,5 +1,6 @@ from typing import List from dataall.db import models +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn import pytest @@ -48,11 +49,11 @@ def _table(db, _dataset) -> models.DatasetTable: @pytest.fixture(scope='module', autouse=True) -def _columns(db, _dataset, _table) -> List[models.DatasetTableColumn]: +def _columns(db, _dataset, _table) -> List[DatasetTableColumn]: with db.scoped_session() as session: cols = [] for i in range(0, 10): - c = models.DatasetTableColumn( + c = DatasetTableColumn( datasetUri=_dataset.datasetUri, tableUri=_table.tableUri, label=f'c{i+1}', From 1771bcaa4fc590eda8ca01537e544b2e1f5fa317 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 14:28:27 +0200 Subject: [PATCH 019/346] Notebooks doesn't require tasks --- backend/dataall/modules/notebooks/tasks/__init__.py | 1 - 1 file changed, 1 deletion(-) delete mode 100644 backend/dataall/modules/notebooks/tasks/__init__.py diff --git a/backend/dataall/modules/notebooks/tasks/__init__.py b/backend/dataall/modules/notebooks/tasks/__init__.py deleted file mode 100644 index 
7da194e3b..000000000 --- a/backend/dataall/modules/notebooks/tasks/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Currently notebooks don't have tasks, but this module needed for correct loading""" From 3d1603f21806bbaa099d70f9af072c5f2d40a5e4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 14:31:07 +0200 Subject: [PATCH 020/346] Renamed tasks to handlers Currently, only async handlers require dedicated loading. Long-running tasks (scheduled tasks) might not need to have a dedicated loading mode --- backend/aws_handler.py | 2 +- backend/dataall/modules/loader.py | 8 ++++---- backend/local_graphql_server.py | 2 +- tests/conftest.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/aws_handler.py b/backend/aws_handler.py index 872c8f433..ec5382b6f 100644 --- a/backend/aws_handler.py +++ b/backend/aws_handler.py @@ -14,7 +14,7 @@ engine = get_engine(envname=ENVNAME) -load_modules(modes=[ImportMode.TASKS]) +load_modules(modes=[ImportMode.HANDLERS]) def handler(event, context=None): diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index aa4a656d4..9fa3c69bf 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -2,7 +2,7 @@ import importlib import logging from abc import ABC, abstractmethod -from enum import Enum +from enum import Enum, auto from typing import List from dataall.core.config import config @@ -19,9 +19,9 @@ class ImportMode(Enum): of functionality to be loaded, there should be different loading modes """ - API = "api" - CDK = "cdk" - TASKS = "tasks" + API = auto() + CDK = auto() + HANDLERS = auto() class ModuleInterface(ABC): diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py index 3783ba0a3..44f79a087 100644 --- a/backend/local_graphql_server.py +++ b/backend/local_graphql_server.py @@ -30,7 +30,7 @@ es = connect(envname=ENVNAME) logger.info('Connected') # create_schema_and_tables(engine, envname=ENVNAME) 
-load_modules(modes=[ImportMode.API, ImportMode.TASKS]) +load_modules(modes=[ImportMode.API, ImportMode.HANDLERS]) Base.metadata.create_all(engine.engine) CDKPROXY_URL = ( 'http://cdkproxy:2805' if ENVNAME == 'dkrcompose' else 'http://localhost:2805' diff --git a/tests/conftest.py b/tests/conftest.py index a67d6bd41..2767a66a3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import dataall from dataall.modules.loader import load_modules, ImportMode -load_modules(modes=[ImportMode.TASKS, ImportMode.API, ImportMode.CDK]) +load_modules(modes=[ImportMode.HANDLERS, ImportMode.API, ImportMode.CDK]) ENVNAME = os.environ.get('envname', 'pytest') From fb6b515103927e80f96d8248ddb95568a08b7f7c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 16:00:18 +0200 Subject: [PATCH 021/346] Dataset refactoring Extracted code from glue to glue_column_handler Added handlers importing for datasets --- backend/dataall/aws/handlers/glue.py | 99 --------------- backend/dataall/db/api/redshift_cluster.py | 6 +- backend/dataall/modules/datasets/__init__.py | 15 ++- .../modules/datasets/handlers/__init__.py | 8 ++ .../datasets/handlers/glue_column_handler.py | 113 ++++++++++++++++++ 5 files changed, 138 insertions(+), 103 deletions(-) create mode 100644 backend/dataall/modules/datasets/handlers/__init__.py create mode 100644 backend/dataall/modules/datasets/handlers/glue_column_handler.py diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index ca00a81f5..4bfda7ce3 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -7,7 +7,6 @@ from ... 
import db from ...db import models from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn log = logging.getLogger('aws:glue') @@ -522,104 +521,6 @@ def _update_existing_crawler(glue, accountid, crawler_name, targets, database): else: raise e - @staticmethod - @Worker.handler('glue.table.update_column') - def update_table_columns(engine, task: models.Task): - with engine.scoped_session() as session: - column: DatasetTableColumn = session.query( - DatasetTableColumn - ).get(task.targetUri) - table: models.DatasetTable = session.query(models.DatasetTable).get( - column.tableUri - ) - try: - aws_session = SessionHelper.remote_session(table.AWSAccountId) - - Glue.grant_pivot_role_all_table_permissions(aws_session, table) - - glue_client = aws_session.client('glue', region_name=table.region) - - original_table = glue_client.get_table( - CatalogId=table.AWSAccountId, - DatabaseName=table.GlueDatabaseName, - Name=table.name, - ) - updated_table = { - k: v - for k, v in original_table['Table'].items() - if k - not in [ - 'CatalogId', - 'VersionId', - 'DatabaseName', - 'CreateTime', - 'UpdateTime', - 'CreatedBy', - 'IsRegisteredWithLakeFormation', - ] - } - all_columns = updated_table.get('StorageDescriptor', {}).get( - 'Columns', [] - ) + updated_table.get('PartitionKeys', []) - for col in all_columns: - if col['Name'] == column.name: - col['Comment'] = column.description - log.info( - f'Found column {column.name} adding description {column.description}' - ) - response = glue_client.update_table( - DatabaseName=table.GlueDatabaseName, - TableInput=updated_table, - ) - log.info( - f'Column {column.name} updated successfully: {response}' - ) - return True - - except ClientError as e: - log.error( - f'Failed to update table column {column.name} description: {e}' - ) - raise e - - @staticmethod - def grant_pivot_role_all_table_permissions(aws_session, table): - """ - Pivot 
role needs to have all permissions - for tables managed inside dataall - :param aws_session: - :param table: - :return: - """ - try: - lf_client = aws_session.client('lakeformation', region_name=table.region) - grant_dict = dict( - Principal={ - 'DataLakePrincipalIdentifier': SessionHelper.get_delegation_role_arn( - table.AWSAccountId - ) - }, - Resource={ - 'Table': { - 'DatabaseName': table.GlueDatabaseName, - 'Name': table.name, - } - }, - Permissions=['SELECT', 'ALTER', 'DROP', 'INSERT'], - ) - response = lf_client.grant_permissions(**grant_dict) - log.error( - f'Successfully granted pivot role all table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {response}' - ) - except ClientError as e: - log.error( - f'Failed to grant pivot role all table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {e}' - ) - raise e @staticmethod @Worker.handler('glue.table.columns') diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 91e687d2b..4167a555a 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -3,13 +3,13 @@ from sqlalchemy import and_, or_, literal from .. import models, api, exceptions, paginate, permissions -from . import has_resource_perm, ResourcePolicy, DatasetTable, Environment, Dataset -from ..models.Enums import ShareItemStatus +from . 
import has_resource_perm, ResourcePolicy, Environment, Dataset from ...utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) from ...utils.slugify import slugify +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -495,7 +495,7 @@ def enable_copy_table( session, username, groups, uri, data=None, check_perm=True ) -> models.RedshiftClusterDatasetTable: cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - table = DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) + table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) table = models.RedshiftClusterDatasetTable( clusterUri=uri, datasetUri=data['datasetUri'], diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 67298a06e..cd52bc207 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -1,12 +1,14 @@ """Contains the code related to datasets""" import logging +from typing import List + from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) class DatasetApiModuleInterface(ModuleInterface): - """Implements ModuleInterface for notebook GraphQl lambda""" + """Implements ModuleInterface for dataset GraphQl lambda""" @classmethod def is_supported(cls, modes): @@ -16,3 +18,14 @@ def __init__(self): import dataall.modules.datasets.api log.info("API of datasets has been imported") + +class DatasetAsyncHandlersModuleInterface(ModuleInterface): + """Implements ModuleInterface for dataset async lambda""" + + @classmethod + def is_supported(cls, modes: List[ImportMode]): + return ImportMode.HANDLERS in modes + + def __init__(self): + import dataall.modules.datasets.handlers + log.info("Dataset handlers have been imported") diff --git a/backend/dataall/modules/datasets/handlers/__init__.py 
b/backend/dataall/modules/datasets/handlers/__init__.py new file mode 100644 index 000000000..7ed90c729 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -0,0 +1,8 @@ +""" +Contains code with the handlers that are need for async +processing in a separate lambda function +""" +from dataall.modules.datasets.handlers import glue_column_handler + +__all__ = ["glue_column_handler"] + diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py new file mode 100644 index 000000000..e7f8d358b --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -0,0 +1,113 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models +from dataall.aws.handlers.service_handlers import Worker +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn + +log = logging.getLogger(__name__) + + +class DatasetColumnGlueHandler: + """A handler for dataset table columns""" + + @staticmethod + @Worker.handler('glue.table.update_column') + def update_table_columns(engine, task: models.Task): + with engine.scoped_session() as session: + column: DatasetTableColumn = session.query( + DatasetTableColumn + ).get(task.targetUri) + table: models.DatasetTable = session.query(models.DatasetTable).get( + column.tableUri + ) + try: + aws_session = SessionHelper.remote_session(table.AWSAccountId) + + DatasetColumnGlueHandler.grant_pivot_role_all_table_permissions(aws_session, table) + + glue_client = aws_session.client('glue', region_name=table.region) + + original_table = glue_client.get_table( + CatalogId=table.AWSAccountId, + DatabaseName=table.GlueDatabaseName, + Name=table.name, + ) + updated_table = { + k: v + for k, v in original_table['Table'].items() + if k + not in [ + 'CatalogId', + 'VersionId', + 'DatabaseName', + 'CreateTime', + 'UpdateTime', + 
'CreatedBy', + 'IsRegisteredWithLakeFormation', + ] + } + all_columns = updated_table.get('StorageDescriptor', {}).get( + 'Columns', [] + ) + updated_table.get('PartitionKeys', []) + for col in all_columns: + if col['Name'] == column.name: + col['Comment'] = column.description + log.info( + f'Found column {column.name} adding description {column.description}' + ) + response = glue_client.update_table( + DatabaseName=table.GlueDatabaseName, + TableInput=updated_table, + ) + log.info( + f'Column {column.name} updated successfully: {response}' + ) + return True + + except ClientError as e: + log.error( + f'Failed to update table column {column.name} description: {e}' + ) + raise e + + @staticmethod + def grant_pivot_role_all_table_permissions(aws_session, table): + """ + Pivot role needs to have all permissions + for tables managed inside dataall + :param aws_session: + :param table: + :return: + """ + try: + lf_client = aws_session.client('lakeformation', region_name=table.region) + grant_dict = dict( + Principal={ + 'DataLakePrincipalIdentifier': SessionHelper.get_delegation_role_arn( + table.AWSAccountId + ) + }, + Resource={ + 'Table': { + 'DatabaseName': table.GlueDatabaseName, + 'Name': table.name, + } + }, + Permissions=['SELECT', 'ALTER', 'DROP', 'INSERT'], + ) + response = lf_client.grant_permissions(**grant_dict) + log.error( + f'Successfully granted pivot role all table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {response}' + ) + except ClientError as e: + log.error( + f'Failed to grant pivot role all table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {e}' + ) + raise e From e3596a553a42d42baddcb3d36bd2b71b60f101a2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 17:18:46 +0200 Subject: [PATCH 022/346] Dataset refactoring Extracted the code for dataset table handler --- backend/dataall/aws/handlers/glue.py | 15 ---------- 
.../modules/datasets/handlers/__init__.py | 7 +++-- .../datasets/handlers/glue_table_handler.py | 30 +++++++++++++++++++ 3 files changed, 35 insertions(+), 17 deletions(-) create mode 100644 backend/dataall/modules/datasets/handlers/glue_table_handler.py diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 4bfda7ce3..cc8c5cfc7 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -73,21 +73,6 @@ def database_exists(**data): log.info(f'Database {database} does not exist on account {accountid}...') return False - @staticmethod - @Worker.handler(path='glue.dataset.database.tables') - def list_tables(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( - session, task.targetUri - ) - accountid = dataset.AwsAccountId - region = dataset.region - tables = Glue.list_glue_database_tables( - accountid, dataset.GlueDatabaseName, region - ) - DatasetTableService.sync(session, dataset.datasetUri, glue_tables=tables) - return tables - @staticmethod def list_glue_database_tables(accountid, database, region): aws_session = SessionHelper.remote_session(accountid=accountid) diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 7ed90c729..19bd47297 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -2,7 +2,10 @@ Contains code with the handlers that are need for async processing in a separate lambda function """ -from dataall.modules.datasets.handlers import glue_column_handler +from dataall.modules.datasets.handlers import ( + glue_column_handler, + glue_table_handler +) -__all__ = ["glue_column_handler"] +__all__ = ["glue_column_handler", "glue_table_handler"] diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py 
b/backend/dataall/modules/datasets/handlers/glue_table_handler.py new file mode 100644 index 000000000..9bb50c501 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -0,0 +1,30 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.glue import Glue +from dataall.aws.handlers.service_handlers import Worker +from dataall.db import models +from dataall.db.api import Dataset +from dataall.modules.datasets.services.dataset_table import DatasetTableService + +log = logging.getLogger(__name__) + + +class DatasetColumnGlueHandler: + """A handler for dataset table""" + + @staticmethod + @Worker.handler(path='glue.dataset.database.tables') + def list_tables(engine, task: models.Task): + with engine.scoped_session() as session: + dataset: models.Dataset = Dataset.get_dataset_by_uri( + session, task.targetUri + ) + account_id = dataset.AwsAccountId + region = dataset.region + tables = Glue.list_glue_database_tables( + account_id, dataset.GlueDatabaseName, region + ) + DatasetTableService.sync(session, dataset.datasetUri, glue_tables=tables) + return tables From 3af2ecfb0f24342eea970b4f9fd4263dbb81fc2e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 17:28:46 +0200 Subject: [PATCH 023/346] Dataset refactoring Extracted the long-running task for datasets --- .../dataall/modules/datasets/tasks/__init__.py | 1 + .../datasets}/tasks/tables_syncer.py | 18 ++++++++---------- backend/dataall/tasks/__init__.py | 1 - deploy/stacks/container.py | 2 +- tests/tasks/test_tables_sync.py | 4 ++-- 5 files changed, 12 insertions(+), 14 deletions(-) create mode 100644 backend/dataall/modules/datasets/tasks/__init__.py rename backend/dataall/{ => modules/datasets}/tasks/tables_syncer.py (92%) diff --git a/backend/dataall/modules/datasets/tasks/__init__.py b/backend/dataall/modules/datasets/tasks/__init__.py new file mode 100644 index 000000000..da597f309 --- /dev/null +++ 
b/backend/dataall/modules/datasets/tasks/__init__.py @@ -0,0 +1 @@ +"""Code of the long-running tasks that run in ECS""" diff --git a/backend/dataall/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py similarity index 92% rename from backend/dataall/tasks/tables_syncer.py rename to backend/dataall/modules/datasets/tasks/tables_syncer.py index 7d2781ccf..27a870d60 100644 --- a/backend/dataall/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -3,16 +3,14 @@ import sys from operator import and_ -from .. import db -from ..aws.handlers.glue import Glue -from ..aws.handlers.sts import SessionHelper -from ..db import get_engine -from ..db import models -from ..searchproxy import indexers -from ..searchproxy.connect import ( - connect, -) -from ..utils.alarm_service import AlarmService +from dataall import db +from dataall.aws.handlers.glue import Glue +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import get_engine +from dataall.db import models +from dataall.searchproxy import indexers +from dataall.searchproxy.connect import connect +from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.services.dataset_table import DatasetTableService root = logging.getLogger() diff --git a/backend/dataall/tasks/__init__.py b/backend/dataall/tasks/__init__.py index 02ccaaa8b..89cb28e27 100644 --- a/backend/dataall/tasks/__init__.py +++ b/backend/dataall/tasks/__init__.py @@ -1,2 +1 @@ -from .tables_syncer import sync_tables from .catalog_indexer import index_objects diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index c313df82e..47fc3d114 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -94,7 +94,7 @@ def __init__( sync_tables_task = self.set_scheduled_task( cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.tables_syncer'], + command=['python3.8', '-m', 'dataall.modules.datasets.tasks.tables_syncer'], 
container_id=f'container', ecr_repository=ecr_repository, environment=self._create_env('INFO'), diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 812dda1bd..d4e86b83f 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -147,14 +147,14 @@ def _test_tables_sync(db, org, env, sync_dataset, table, mocker): ], ) mocker.patch( - 'dataall.tasks.tables_syncer.is_assumable_pivot_role', return_value=True + 'dataall.modules.datasets.tables_syncer.is_assumable_pivot_role', return_value=True ) mocker.patch( 'dataall.aws.handlers.glue.Glue.grant_principals_all_table_permissions', return_value=True, ) - processed_tables = dataall.tasks.tables_syncer.sync_tables(engine=db) + processed_tables = dataall.modules.datasets.tasks.tables_syncer.sync_tables(engine=db) assert len(processed_tables) == 2 with db.scoped_session() as session: saved_table: dataall.db.models.DatasetTable = ( From 1a063b2cbe020ba5ffe72587902f0872ed788db4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 18:34:12 +0200 Subject: [PATCH 024/346] Dataset refactoring Extracted the subscription service into datasets --- .../datasets/tasks}/subscription_service.py | 16 ++++++++-------- backend/dataall/tasks/subscriptions/__init__.py | 1 - deploy/stacks/container.py | 2 +- 3 files changed, 9 insertions(+), 10 deletions(-) rename backend/dataall/{tasks/subscriptions => modules/datasets/tasks}/subscription_service.py (97%) diff --git a/backend/dataall/tasks/subscriptions/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py similarity index 97% rename from backend/dataall/tasks/subscriptions/subscription_service.py rename to backend/dataall/modules/datasets/tasks/subscription_service.py index bf7eded35..8674f903a 100644 --- a/backend/dataall/tasks/subscriptions/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -6,14 +6,14 @@ from botocore.exceptions 
import ClientError from sqlalchemy import and_ -from ... import db -from ...aws.handlers.service_handlers import Worker -from ...aws.handlers.sts import SessionHelper -from ...aws.handlers.sqs import SqsQueue -from ...db import get_engine -from ...db import models -from ...tasks.subscriptions import poll_queues -from ...utils import json_utils +from dataall import db +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.aws.handlers.sqs import SqsQueue +from dataall.db import get_engine +from dataall.db import models +from dataall.tasks.subscriptions import poll_queues +from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService root = logging.getLogger() diff --git a/backend/dataall/tasks/subscriptions/__init__.py b/backend/dataall/tasks/subscriptions/__init__.py index f60ca5310..fa0214e42 100644 --- a/backend/dataall/tasks/subscriptions/__init__.py +++ b/backend/dataall/tasks/subscriptions/__init__.py @@ -1,2 +1 @@ from .sqs_poller import poll_queues -from .subscription_service import SubscriptionService diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index 47fc3d114..d3c761519 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -179,7 +179,7 @@ def __init__( command=[ 'python3.8', '-m', - 'dataall.tasks.subscriptions.subscription_service', + 'dataall.modules.datasets.tasks.subscription_service', ], container_id=f'container', ecr_repository=ecr_repository, From b7337142064d4b14a1e247eae5ff412f7db81448 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 18:39:54 +0200 Subject: [PATCH 025/346] Dataset refactoring Extracted the handler to get table columns --- backend/dataall/aws/handlers/glue.py | 30 ------------------- .../datasets/handlers/glue_column_handler.py | 29 ++++++++++++++++++ 2 files changed, 29 insertions(+), 30 deletions(-) diff --git 
a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index cc8c5cfc7..e05ce4c54 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,7 +6,6 @@ from .sts import SessionHelper from ... import db from ...db import models -from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger('aws:glue') @@ -506,35 +505,6 @@ def _update_existing_crawler(glue, accountid, crawler_name, targets, database): else: raise e - - @staticmethod - @Worker.handler('glue.table.columns') - def get_table_columns(engine, task: models.Task): - with engine.scoped_session() as session: - dataset_table: models.DatasetTable = session.query(models.DatasetTable).get( - task.targetUri - ) - aws = SessionHelper.remote_session(dataset_table.AWSAccountId) - glue_client = aws.client('glue', region_name=dataset_table.region) - glue_table = {} - try: - glue_table = glue_client.get_table( - CatalogId=dataset_table.AWSAccountId, - DatabaseName=dataset_table.GlueDatabaseName, - Name=dataset_table.name, - ) - except glue_client.exceptions.ClientError as e: - log.error( - f'Failed to get table aws://{dataset_table.AWSAccountId}' - f'//{dataset_table.GlueDatabaseName}' - f'//{dataset_table.name} due to: ' - f'{e}' - ) - DatasetTableService.sync_table_columns( - session, dataset_table, glue_table['Table'] - ) - return True - @staticmethod @Worker.handler(path='glue.job.runs') def get_job_runs(engine, task: models.Task): diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index e7f8d358b..02003eea2 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -6,6 +6,7 @@ from dataall.db import models from dataall.aws.handlers.service_handlers import Worker from dataall.modules.datasets.db.table_column_model import 
DatasetTableColumn +from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -13,6 +14,34 @@ class DatasetColumnGlueHandler: """A handler for dataset table columns""" + @staticmethod + @Worker.handler('glue.table.columns') + def get_table_columns(engine, task: models.Task): + with engine.scoped_session() as session: + dataset_table: models.DatasetTable = session.query(models.DatasetTable).get( + task.targetUri + ) + aws = SessionHelper.remote_session(dataset_table.AWSAccountId) + glue_client = aws.client('glue', region_name=dataset_table.region) + glue_table = {} + try: + glue_table = glue_client.get_table( + CatalogId=dataset_table.AWSAccountId, + DatabaseName=dataset_table.GlueDatabaseName, + Name=dataset_table.name, + ) + except glue_client.exceptions.ClientError as e: + log.error( + f'Failed to get table aws://{dataset_table.AWSAccountId}' + f'//{dataset_table.GlueDatabaseName}' + f'//{dataset_table.name} due to: ' + f'{e}' + ) + DatasetTableService.sync_table_columns( + session, dataset_table, glue_table['Table'] + ) + return True + @staticmethod @Worker.handler('glue.table.update_column') def update_table_columns(engine, task: models.Task): From 2a4e2e09caf2e520118baa1ef22f1fb262541239 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 19:06:39 +0200 Subject: [PATCH 026/346] Extracted feed registry Needed for migration for modules --- backend/dataall/api/Objects/Feed/resolvers.py | 29 ++------------- backend/dataall/core/feed/__init__.py | 1 + .../dataall/core/feed/services/__init__.py | 1 + .../core/feed/services/feed_registry.py | 36 +++++++++++++++++++ backend/dataall/modules/datasets/__init__.py | 3 ++ 5 files changed, 44 insertions(+), 26 deletions(-) create mode 100644 backend/dataall/core/feed/__init__.py create mode 100644 backend/dataall/core/feed/services/__init__.py create mode 100644 backend/dataall/core/feed/services/feed_registry.py diff --git 
a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py index cbde23f0d..0fff09053 100644 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ b/backend/dataall/api/Objects/Feed/resolvers.py @@ -2,7 +2,7 @@ from ....api.context import Context from ....db import paginate, models -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.core.feed.services.feed_registry import FeedRegistry class Feed: @@ -20,37 +20,14 @@ def targetType(self): def resolve_feed_target_type(obj, *_): - if isinstance(obj, DatasetTableColumn): - return 'DatasetTableColumn' - elif isinstance(obj, models.Worksheet): - return 'Worksheet' - elif isinstance(obj, models.DataPipeline): - return 'DataPipeline' - elif isinstance(obj, models.DatasetTable): - return 'DatasetTable' - elif isinstance(obj, models.Dataset): - return 'Dataset' - elif isinstance(obj, models.DatasetStorageLocation): - return 'DatasetStorageLocation' - elif isinstance(obj, models.Dashboard): - return 'Dashboard' - else: - return None + return FeedRegistry.find_by_model(obj) def resolve_target(context: Context, source: Feed, **kwargs): if not source: return None with context.engine.scoped_session() as session: - model = { - 'Dataset': models.Dataset, - 'DatasetTable': models.DatasetTable, - 'DatasetTableColumn': DatasetTableColumn, - 'DatasetStorageLocation': models.DatasetStorageLocation, - 'Dashboard': models.Dashboard, - 'DataPipeline': models.DataPipeline, - 'Worksheet': models.Worksheet, - }[source.targetType] + model = FeedRegistry.find(source.targetType) target = session.query(model).get(source.targetUri) return target diff --git a/backend/dataall/core/feed/__init__.py b/backend/dataall/core/feed/__init__.py new file mode 100644 index 000000000..d06f5a78f --- /dev/null +++ b/backend/dataall/core/feed/__init__.py @@ -0,0 +1 @@ +"""Contains all code related to feeds""" diff --git a/backend/dataall/core/feed/services/__init__.py 
b/backend/dataall/core/feed/services/__init__.py new file mode 100644 index 000000000..e87be7564 --- /dev/null +++ b/backend/dataall/core/feed/services/__init__.py @@ -0,0 +1 @@ +"""Contains business logic of feed""" diff --git a/backend/dataall/core/feed/services/feed_registry.py b/backend/dataall/core/feed/services/feed_registry.py new file mode 100644 index 000000000..7a382c057 --- /dev/null +++ b/backend/dataall/core/feed/services/feed_registry.py @@ -0,0 +1,36 @@ +from dataclasses import dataclass +from typing import Dict, Type +from dataall.db import Resource, models + + +@dataclass +class FeedDefinition: + target_type: str + model: Type[Resource] + + +class FeedRegistry: + """Registers feeds for different models""" + _DEFINITION: Dict[str, FeedDefinition] = {} + + @classmethod + def register(cls, feed: FeedDefinition): + cls._DEFINITION[feed.target_type] = feed + + @classmethod + def find(cls, target_type: str): + return cls._DEFINITION[target_type] + + @classmethod + def find_by_model(cls, obj: Resource): + for target_type, feed in cls._DEFINITION.items(): + if isinstance(obj, feed.model): + return target_type + return None + + +FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) +FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) +FeedRegistry.register(FeedDefinition("DatasetTable", models.DatasetTable)) +FeedRegistry.register(FeedDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) +FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index cd52bc207..0de251fee 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,6 +2,8 @@ import logging from typing import List +from dataall.core.feed.services.feed_registry import FeedRegistry, FeedDefinition +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn 
from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) @@ -16,6 +18,7 @@ def is_supported(cls, modes): def __init__(self): import dataall.modules.datasets.api + FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) log.info("API of datasets has been imported") From c15d0902da9a10a846e2c1f0231f0f0fafc6c0f0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 11 Apr 2023 19:30:07 +0200 Subject: [PATCH 027/346] Extracted feed and glossary registry and created a model registry --- backend/dataall/api/Objects/Feed/resolvers.py | 6 +-- .../dataall/api/Objects/Glossary/resolvers.py | 23 ++------- backend/dataall/core/feed/__init__.py | 1 - .../dataall/core/feed/services/__init__.py | 1 - .../core/feed/services/feed_registry.py | 36 -------------- backend/dataall/core/utils/__init__.py | 1 + backend/dataall/core/utils/model_registry.py | 47 +++++++++++++++++++ backend/dataall/modules/datasets/__init__.py | 5 +- 8 files changed, 57 insertions(+), 63 deletions(-) delete mode 100644 backend/dataall/core/feed/__init__.py delete mode 100644 backend/dataall/core/feed/services/__init__.py delete mode 100644 backend/dataall/core/feed/services/feed_registry.py create mode 100644 backend/dataall/core/utils/__init__.py create mode 100644 backend/dataall/core/utils/model_registry.py diff --git a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py index 0fff09053..1f328b1ae 100644 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ b/backend/dataall/api/Objects/Feed/resolvers.py @@ -1,8 +1,8 @@ from sqlalchemy import or_ -from ....api.context import Context -from ....db import paginate, models -from dataall.core.feed.services.feed_registry import FeedRegistry +from dataall.api.context import Context +from dataall.db import paginate, models +from dataall.core.utils.model_registry import FeedRegistry class Feed: diff --git 
a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index 847f16ac6..c6f2634d0 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -4,6 +4,7 @@ from .... import db from ....api.context import Context +from ....core.utils.model_registry import GlossaryRegistry from ....db import paginate, exceptions, models from ....searchproxy import upsert_dataset from ....searchproxy import upsert_table @@ -11,7 +12,6 @@ from ....api.constants import ( GlossaryRole ) -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn def resolve_glossary_node(obj: models.GlossaryNode, *_): @@ -323,29 +323,12 @@ def get_link(context: Context, source, linkUri: str = None): def target_union_resolver(obj, *_): - if isinstance(obj, DatasetTableColumn): - return 'DatasetTableColumn' - elif isinstance(obj, models.DatasetTable): - return 'DatasetTable' - elif isinstance(obj, models.Dataset): - return 'Dataset' - elif isinstance(obj, models.DatasetStorageLocation): - return 'DatasetStorageLocation' - elif isinstance(obj, models.Dashboard): - return 'Dashboard' - else: - return None + return GlossaryRegistry.find_by_model(obj) def resolve_link_target(context, source, **kwargs): with context.engine.scoped_session() as session: - model = { - 'Dataset': models.Dataset, - 'DatasetTable': models.DatasetTable, - 'Column': DatasetTableColumn, - 'DatasetStorageLocation': models.DatasetStorageLocation, - 'Dashboard': models.Dashboard, - }[source.targetType] + model = GlossaryRegistry.find(source.targetUri) target = session.query(model).get(source.targetUri) return target diff --git a/backend/dataall/core/feed/__init__.py b/backend/dataall/core/feed/__init__.py deleted file mode 100644 index d06f5a78f..000000000 --- a/backend/dataall/core/feed/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains all code related to feeds""" diff --git 
a/backend/dataall/core/feed/services/__init__.py b/backend/dataall/core/feed/services/__init__.py deleted file mode 100644 index e87be7564..000000000 --- a/backend/dataall/core/feed/services/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains business logic of feed""" diff --git a/backend/dataall/core/feed/services/feed_registry.py b/backend/dataall/core/feed/services/feed_registry.py deleted file mode 100644 index 7a382c057..000000000 --- a/backend/dataall/core/feed/services/feed_registry.py +++ /dev/null @@ -1,36 +0,0 @@ -from dataclasses import dataclass -from typing import Dict, Type -from dataall.db import Resource, models - - -@dataclass -class FeedDefinition: - target_type: str - model: Type[Resource] - - -class FeedRegistry: - """Registers feeds for different models""" - _DEFINITION: Dict[str, FeedDefinition] = {} - - @classmethod - def register(cls, feed: FeedDefinition): - cls._DEFINITION[feed.target_type] = feed - - @classmethod - def find(cls, target_type: str): - return cls._DEFINITION[target_type] - - @classmethod - def find_by_model(cls, obj: Resource): - for target_type, feed in cls._DEFINITION.items(): - if isinstance(obj, feed.model): - return target_type - return None - - -FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) -FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) -FeedRegistry.register(FeedDefinition("DatasetTable", models.DatasetTable)) -FeedRegistry.register(FeedDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) -FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/core/utils/__init__.py b/backend/dataall/core/utils/__init__.py new file mode 100644 index 000000000..02ed9cfb4 --- /dev/null +++ b/backend/dataall/core/utils/__init__.py @@ -0,0 +1 @@ +"""Utility functions and classes""" diff --git a/backend/dataall/core/utils/model_registry.py b/backend/dataall/core/utils/model_registry.py new file mode 100644 index 
000000000..9a4c21952 --- /dev/null +++ b/backend/dataall/core/utils/model_registry.py @@ -0,0 +1,47 @@ +from dataclasses import dataclass +from typing import Type, Dict + +from dataall.db import Resource, models + + +@dataclass +class ModelDefinition: + target_type: str + model: Type[Resource] + + +class ModelRegistry: + """Registers models for different target types""" + + def __init__(self): + self._definitions: Dict[str, ModelDefinition] = {} + + def register(self, model: ModelDefinition): + self._definitions[model.target_type] = model + + def find(self, target_type: str): + return self._definitions[target_type] + + def find_by_model(self, obj: Resource): + for target_type, definition in self._definitions.items(): + if isinstance(obj, definition.model): + return target_type + return None + + +# TODO should migrate to a proper file after the modularization +FeedRegistry = ModelRegistry() +GlossaryRegistry = ModelRegistry() + + +FeedRegistry.register(ModelDefinition("Worksheet", models.Worksheet)) +FeedRegistry.register(ModelDefinition("DataPipeline", models.DataPipeline)) +FeedRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) +FeedRegistry.register(ModelDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) +FeedRegistry.register(ModelDefinition("Dashboard", models.Dashboard)) + +GlossaryRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) +GlossaryRegistry.register(ModelDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) +GlossaryRegistry.register(ModelDefinition("Dashboard", models.Dashboard)) +GlossaryRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) +GlossaryRegistry.register(ModelDefinition("Dataset", models.Dataset)) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 0de251fee..8f30bd897 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,7 
+2,7 @@ import logging from typing import List -from dataall.core.feed.services.feed_registry import FeedRegistry, FeedDefinition +from dataall.core.utils.model_registry import ModelDefinition, FeedRegistry, GlossaryRegistry from dataall.modules.datasets.db.table_column_model import DatasetTableColumn from dataall.modules.loader import ModuleInterface, ImportMode @@ -18,7 +18,8 @@ def is_supported(cls, modes): def __init__(self): import dataall.modules.datasets.api - FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) + FeedRegistry.register(ModelDefinition("DatasetTableColumn", DatasetTableColumn)) + GlossaryRegistry.register(ModelDefinition("DatasetTableColumn", DatasetTableColumn)) log.info("API of datasets has been imported") From 052a2b1f33139dab09a8beb78d5a7cfb72387128 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 12 Apr 2023 11:12:56 +0200 Subject: [PATCH 028/346] Dataset refactoring Fixed tests and added new for dataset module --- tests/core/test_config.py | 6 +++++- tests/tasks/test_subscriptions.py | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/core/test_config.py b/tests/core/test_config.py index 3222e4144..30f69f792 100644 --- a/tests/core/test_config.py +++ b/tests/core/test_config.py @@ -25,4 +25,8 @@ def test_default_config(): assert "notebooks" in modules assert "active" in modules["notebooks"] - assert config.get_property("modules.notebooks.active") == "true" + assert "datasets" in modules + assert "active" in modules["datasets"] + + assert config.get_property("modules.notebooks.active") + assert config.get_property("modules.datasets.active") diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 25cd6178a..874b8ccab 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -134,10 +134,10 @@ def share( def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): mocker.patch( - 
'dataall.tasks.subscriptions.subscription_service.SubscriptionService.sns_call', + 'dataall.modules.datasets.tasks.subscription_service.SubscriptionService.sns_call', return_value=True, ) - subscriber = dataall.tasks.subscriptions.subscription_service.SubscriptionService() + subscriber = dataall.modules.datasets.tasks.subscription_service.SubscriptionService() messages = [ { 'prefix': 's3://dataset/testtable/csv/', From d9844834646aa7564104ef184ca95d41f0ecbbb2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 12 Apr 2023 13:12:39 +0200 Subject: [PATCH 029/346] Fixed and unignored test_tables_sync --- tests/tasks/test_tables_sync.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index d4e86b83f..9d8282e65 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -92,7 +92,13 @@ def table(org, env, db, sync_dataset): yield table -def _test_tables_sync(db, org, env, sync_dataset, table, mocker): +@pytest.fixture(scope='module', autouse=True) +def permissions(db): + with db.scoped_session() as session: + yield dataall.db.api.Permission.init_permissions(session) + + +def test_tables_sync(db, org, env, sync_dataset, table, mocker): mocker.patch( 'dataall.aws.handlers.glue.Glue.list_glue_database_tables', return_value=[ @@ -147,7 +153,7 @@ def _test_tables_sync(db, org, env, sync_dataset, table, mocker): ], ) mocker.patch( - 'dataall.modules.datasets.tables_syncer.is_assumable_pivot_role', return_value=True + 'dataall.modules.datasets.tasks.tables_syncer.is_assumable_pivot_role', return_value=True ) mocker.patch( 'dataall.aws.handlers.glue.Glue.grant_principals_all_table_permissions', From dc0c9350b242be12238c8ce37b0dc74a018ed36d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 12 Apr 2023 14:10:09 +0200 Subject: [PATCH 030/346] Split model registry into feed and glossaries Glossaries had different target types and had to be 
treated differently --- backend/dataall/api/Objects/Feed/resolvers.py | 2 +- .../dataall/api/Objects/Glossary/resolvers.py | 7 +-- backend/dataall/core/feed/__init__.py | 1 + .../dataall/core/feed/services/__init__.py | 1 + .../dataall/core/feed/services/registry.py | 36 ++++++++++++++ backend/dataall/core/glossary/__init__.py | 1 + .../core/glossary/services/__init__.py | 1 + .../core/glossary/services/registry.py | 38 +++++++++++++++ backend/dataall/core/utils/model_registry.py | 47 ------------------- backend/dataall/db/api/glossary.py | 28 ++++------- backend/dataall/modules/datasets/__init__.py | 7 +-- 11 files changed, 97 insertions(+), 72 deletions(-) create mode 100644 backend/dataall/core/feed/__init__.py create mode 100644 backend/dataall/core/feed/services/__init__.py create mode 100644 backend/dataall/core/feed/services/registry.py create mode 100644 backend/dataall/core/glossary/__init__.py create mode 100644 backend/dataall/core/glossary/services/__init__.py create mode 100644 backend/dataall/core/glossary/services/registry.py delete mode 100644 backend/dataall/core/utils/model_registry.py diff --git a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py index 1f328b1ae..598ec86e1 100644 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ b/backend/dataall/api/Objects/Feed/resolvers.py @@ -2,7 +2,7 @@ from dataall.api.context import Context from dataall.db import paginate, models -from dataall.core.utils.model_registry import FeedRegistry +from dataall.core.feed.services.registry import FeedRegistry class Feed: diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index c6f2634d0..ae8501993 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -4,7 +4,6 @@ from .... 
import db from ....api.context import Context -from ....core.utils.model_registry import GlossaryRegistry from ....db import paginate, exceptions, models from ....searchproxy import upsert_dataset from ....searchproxy import upsert_table @@ -13,6 +12,8 @@ GlossaryRole ) +from dataall.core.glossary.services.registry import GlossaryRegistry + def resolve_glossary_node(obj: models.GlossaryNode, *_): if obj.nodeType == 'G': @@ -323,12 +324,12 @@ def get_link(context: Context, source, linkUri: str = None): def target_union_resolver(obj, *_): - return GlossaryRegistry.find_by_model(obj) + return GlossaryRegistry.find_object_type(obj) def resolve_link_target(context, source, **kwargs): with context.engine.scoped_session() as session: - model = GlossaryRegistry.find(source.targetUri) + model = GlossaryRegistry.find_model(source.targetUri) target = session.query(model).get(source.targetUri) return target diff --git a/backend/dataall/core/feed/__init__.py b/backend/dataall/core/feed/__init__.py new file mode 100644 index 000000000..39f751553 --- /dev/null +++ b/backend/dataall/core/feed/__init__.py @@ -0,0 +1 @@ +"""Contains logic related to feeds""" diff --git a/backend/dataall/core/feed/services/__init__.py b/backend/dataall/core/feed/services/__init__.py new file mode 100644 index 000000000..5b130b24b --- /dev/null +++ b/backend/dataall/core/feed/services/__init__.py @@ -0,0 +1 @@ +"""Service layer of feeds""" diff --git a/backend/dataall/core/feed/services/registry.py b/backend/dataall/core/feed/services/registry.py new file mode 100644 index 000000000..a69bcdd37 --- /dev/null +++ b/backend/dataall/core/feed/services/registry.py @@ -0,0 +1,36 @@ +from dataclasses import dataclass +from typing import Type, Dict + +from dataall.db import Resource, models + + +@dataclass +class FeedDefinition: + target_type: str + model: Type[Resource] + + +class FeedRegistry: + """Registers models for different target types""" + + def __init__(self): + self._definitions: Dict[str, 
FeedDefinition] = {} + + def register(self, model: FeedDefinition): + self._definitions[model.target_type] = model + + def find(self, target_type: str): + return self._definitions[target_type] + + def find_by_model(self, obj: Resource): + for target_type, definition in self._definitions.items(): + if isinstance(obj, definition.model): + return target_type + return None + + +FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) +FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) +FeedRegistry.register(FeedDefinition("DatasetTable", models.DatasetTable)) +FeedRegistry.register(FeedDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) +FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/core/glossary/__init__.py b/backend/dataall/core/glossary/__init__.py new file mode 100644 index 000000000..aa81c1e26 --- /dev/null +++ b/backend/dataall/core/glossary/__init__.py @@ -0,0 +1 @@ +"""Contains code related to glossaries""" diff --git a/backend/dataall/core/glossary/services/__init__.py b/backend/dataall/core/glossary/services/__init__.py new file mode 100644 index 000000000..9ed65d261 --- /dev/null +++ b/backend/dataall/core/glossary/services/__init__.py @@ -0,0 +1 @@ +"""Service layer of glossaries""" diff --git a/backend/dataall/core/glossary/services/registry.py b/backend/dataall/core/glossary/services/registry.py new file mode 100644 index 000000000..7484087c4 --- /dev/null +++ b/backend/dataall/core/glossary/services/registry.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass +from typing import Type, Dict, Optional + +from dataall.db import Resource, models + + +@dataclass +class GlossaryDefinition: + target_type: str + object_type: str + model: Type[Resource] + + +class GlossaryRegistry: + _DEFINITIONS: Dict[str, GlossaryDefinition] = {} + + @classmethod + def register(cls, glossary: GlossaryDefinition) -> None: + cls._DEFINITIONS[glossary.target_type] = glossary 
+ + @classmethod + def find_model(cls, target_type: str) -> Optional[Resource]: + definition = cls._DEFINITIONS[target_type] + return definition.model if definition is not None else None + + @classmethod + def find_object_type(cls, model: Resource) -> Optional[str]: + for _, definition in cls._DEFINITIONS.items(): + if isinstance(model, definition.model): + return definition.object_type + return None + + +GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) +GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", models.DatasetStorageLocation)) +GlossaryRegistry.register(GlossaryDefinition("Dashboard", "Dashboard", models.Dashboard)) +GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) +GlossaryRegistry.register(GlossaryDefinition("Dataset", "Dataset", models.Dataset)) diff --git a/backend/dataall/core/utils/model_registry.py b/backend/dataall/core/utils/model_registry.py deleted file mode 100644 index 9a4c21952..000000000 --- a/backend/dataall/core/utils/model_registry.py +++ /dev/null @@ -1,47 +0,0 @@ -from dataclasses import dataclass -from typing import Type, Dict - -from dataall.db import Resource, models - - -@dataclass -class ModelDefinition: - target_type: str - model: Type[Resource] - - -class ModelRegistry: - """Registers models for different target types""" - - def __init__(self): - self._definitions: Dict[str, ModelDefinition] = {} - - def register(self, model: ModelDefinition): - self._definitions[model.target_type] = model - - def find(self, target_type: str): - return self._definitions[target_type] - - def find_by_model(self, obj: Resource): - for target_type, definition in self._definitions.items(): - if isinstance(obj, definition.model): - return target_type - return None - - -# TODO should migrate to a proper file after the modularization -FeedRegistry = ModelRegistry() -GlossaryRegistry = ModelRegistry() - - 
-FeedRegistry.register(ModelDefinition("Worksheet", models.Worksheet)) -FeedRegistry.register(ModelDefinition("DataPipeline", models.DataPipeline)) -FeedRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) -FeedRegistry.register(ModelDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) -FeedRegistry.register(ModelDefinition("Dashboard", models.Dashboard)) - -GlossaryRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) -GlossaryRegistry.register(ModelDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) -GlossaryRegistry.register(ModelDefinition("Dashboard", models.Dashboard)) -GlossaryRegistry.register(ModelDefinition("DatasetTable", models.DatasetTable)) -GlossaryRegistry.register(ModelDefinition("Dataset", models.Dataset)) diff --git a/backend/dataall/db/api/glossary.py b/backend/dataall/db/api/glossary.py index c6313e007..3df8d34f7 100644 --- a/backend/dataall/db/api/glossary.py +++ b/backend/dataall/db/api/glossary.py @@ -4,12 +4,12 @@ from sqlalchemy import asc, or_, and_, literal, case from sqlalchemy.orm import with_expression, aliased -from .. import models, exceptions, permissions, paginate +from .. 
import models, exceptions, permissions, paginate, Resource from .permission_checker import ( has_tenant_perm, ) from ..models.Glossary import GlossaryNodeStatus -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.core.glossary.services.registry import GlossaryRegistry logger = logging.getLogger(__name__) @@ -124,24 +124,16 @@ def link_term(session, username, groups, uri, data=None, check_perm=None): 'associations are allowed for Glossary terms only', ) - targetUri: str = data['targetUri'] - targetType: str = data['targetType'] - - if targetType == 'Dataset': - target = session.query(models.Dataset).get(targetUri) - elif targetType == 'DatasetTable': - target = session.query(models.DatasetTable).get(targetUri) - elif targetType == 'Folder': - target = session.query(models.DatasetStorageLocation).get(targetUri) - elif targetType == 'Column': - target = session.query(DatasetTableColumn).get(targetUri) - elif targetType == 'Dashboard': - target = session.query(models.Dashboard).get(targetUri) - else: + target_uri: str = data['targetUri'] + target_type: str = data['targetType'] + + target_model: Resource = GlossaryRegistry.find_model(target_type) + if not target_model: raise exceptions.InvalidInput( 'NodeType', 'term.nodeType', 'association target type is invalid' ) + target = session.query(target_model).get(target_uri) if not target: raise exceptions.ObjectNotFound('Association target', uri) @@ -150,8 +142,8 @@ def link_term(session, username, groups, uri, data=None, check_perm=None): approvedByOwner=data.get('approvedByOwner', True), approvedBySteward=data.get('approvedBySteward', True), nodeUri=uri, - targetUri=targetUri, - targetType=targetType, + targetUri=target_uri, + targetType=target_type, ) session.add(link) return link diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 8f30bd897..306778f40 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ 
b/backend/dataall/modules/datasets/__init__.py @@ -2,7 +2,8 @@ import logging from typing import List -from dataall.core.utils.model_registry import ModelDefinition, FeedRegistry, GlossaryRegistry +from dataall.core.feed.services.registry import FeedRegistry, FeedDefinition +from dataall.core.glossary.services.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.table_column_model import DatasetTableColumn from dataall.modules.loader import ModuleInterface, ImportMode @@ -18,8 +19,8 @@ def is_supported(cls, modes): def __init__(self): import dataall.modules.datasets.api - FeedRegistry.register(ModelDefinition("DatasetTableColumn", DatasetTableColumn)) - GlossaryRegistry.register(ModelDefinition("DatasetTableColumn", DatasetTableColumn)) + FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) + GlossaryRegistry.register(GlossaryDefinition("DatasetTableColumn", DatasetTableColumn)) log.info("API of datasets has been imported") From 727e3537cfc3b133aa45c0deafc9568b25280b3a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 12 Apr 2023 15:40:08 +0200 Subject: [PATCH 031/346] Abstraction for glossaries Created API for glossaries to use modularization --- .../core/glossary/services/registry.py | 16 +++++- backend/dataall/db/api/environment.py | 2 +- backend/dataall/db/api/glossary.py | 57 +++++++------------ backend/dataall/db/api/organization.py | 1 - backend/dataall/db/models/Dashboard.py | 3 + backend/dataall/db/models/Dataset.py | 4 ++ .../db/models/DatasetStorageLocation.py | 3 + backend/dataall/db/models/DatasetTable.py | 3 + .../modules/datasets/db/table_column_model.py | 3 + 9 files changed, 50 insertions(+), 42 deletions(-) diff --git a/backend/dataall/core/glossary/services/registry.py b/backend/dataall/core/glossary/services/registry.py index 7484087c4..ee3f10d41 100644 --- a/backend/dataall/core/glossary/services/registry.py +++ b/backend/dataall/core/glossary/services/registry.py @@ -1,14 
+1,22 @@ from dataclasses import dataclass -from typing import Type, Dict, Optional +from typing import Type, Dict, Optional, Protocol, Union from dataall.db import Resource, models +class Identifiable(Protocol): + def uri(self): + ... + + @dataclass class GlossaryDefinition: target_type: str object_type: str - model: Type[Resource] + model: Union[Type[Resource], Identifiable] # should be an intersection, but python typing doesn't have one yet + + def target_uri(self): + return self.model.uri() class GlossaryRegistry: @@ -30,6 +38,10 @@ def find_object_type(cls, model: Resource) -> Optional[str]: return definition.object_type return None + @classmethod + def definitions(cls): + return cls._DEFINITIONS.values() + GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", models.DatasetStorageLocation)) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 4a436bf9a..19d5de342 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -21,7 +21,7 @@ EnvironmentPermission, ) from ..models.Permission import PermissionType -from ..paginator import Page, paginate +from ..paginator import paginate from dataall.core.environment.models import EnvironmentParameter from ...core.environment.db.repositories import EnvironmentParameterRepository from ...utils.naming_convention import ( diff --git a/backend/dataall/db/api/glossary.py b/backend/dataall/db/api/glossary.py index 3df8d34f7..96c340f62 100644 --- a/backend/dataall/db/api/glossary.py +++ b/backend/dataall/db/api/glossary.py @@ -10,6 +10,7 @@ ) from ..models.Glossary import GlossaryNodeStatus from dataall.core.glossary.services.registry import GlossaryRegistry +from ..paginator import Page logger = logging.getLogger(__name__) @@ -339,46 +340,26 @@ def list_term_associations( ): source = data['source'] filter = 
data['filter'] - datasets = session.query( - models.Dataset.datasetUri.label('targetUri'), - literal('dataset').label('targetType'), - models.Dataset.label.label('label'), - models.Dataset.name.label('name'), - models.Dataset.description.label('description'), - ) - tables = session.query( - models.DatasetTable.tableUri.label('targetUri'), - literal('table').label('targetType'), - models.DatasetTable.label.label('label'), - models.DatasetTable.name.label('name'), - models.DatasetTable.description.label('description'), - ) - columns = session.query( - DatasetTableColumn.columnUri.label('targetUri'), - literal('column').label('targetType'), - DatasetTableColumn.label.label('label'), - DatasetTableColumn.name.label('name'), - DatasetTableColumn.description.label('description'), - ) - folders = session.query( - models.DatasetStorageLocation.locationUri.label('targetUri'), - literal('folder').label('targetType'), - models.DatasetStorageLocation.label.label('label'), - models.DatasetStorageLocation.name.label('name'), - models.DatasetStorageLocation.description.label('description'), - ) - dashboards = session.query( - models.Dashboard.dashboardUri.label('targetUri'), - literal('dashboard').label('targetType'), - models.Dashboard.label.label('label'), - models.Dashboard.name.label('name'), - models.Dashboard.description.label('description'), - ) + query = None + for definition in GlossaryRegistry.definitions(): + model = definition.model + subquery = session.query( + definition.target_uri().label('targetUri'), + literal(definition.target_type.lower()).label('targetType'), + model.label.label('label'), + model.name.label('name'), + model.description.label('description'), + ) + if query: + query.union(subquery) + else: + query = subquery - linked_objects = datasets.union(tables, columns, folders, dashboards).subquery( - 'linked_objects' - ) + if query is None: + return Page([], 1, 1, 0) # empty page. 
All modules are turned off + + linked_objects = query.subquery('linked_objects') path = models.GlossaryNode.path q = ( diff --git a/backend/dataall/db/api/organization.py b/backend/dataall/db/api/organization.py index 979dd1095..dd570eeae 100644 --- a/backend/dataall/db/api/organization.py +++ b/backend/dataall/db/api/organization.py @@ -8,7 +8,6 @@ from . import has_tenant_perm, ResourcePolicy, has_resource_perm from ..models import OrganizationGroup from ..models.Enums import OrganisationUserRole -from ..paginator import Page logger = logging.getLogger(__name__) diff --git a/backend/dataall/db/models/Dashboard.py b/backend/dataall/db/models/Dashboard.py index 1a24ef1cb..0b12ecd96 100644 --- a/backend/dataall/db/models/Dashboard.py +++ b/backend/dataall/db/models/Dashboard.py @@ -18,3 +18,6 @@ class Dashboard(Resource, Base): SamlGroupName = Column(String, nullable=False) userRoleForDashboard = query_expression() + + def uri(self): + return self.dashboardUri diff --git a/backend/dataall/db/models/Dataset.py b/backend/dataall/db/models/Dataset.py index 71a95fe0e..451c7da7c 100644 --- a/backend/dataall/db/models/Dataset.py +++ b/backend/dataall/db/models/Dataset.py @@ -59,3 +59,7 @@ class Dataset(Resource, Base): importedKmsKey = Column(Boolean, default=False) importedAdminRole = Column(Boolean, default=False) imported = Column(Boolean, default=False) + + def uri(self): + return self.datasetUri + diff --git a/backend/dataall/db/models/DatasetStorageLocation.py b/backend/dataall/db/models/DatasetStorageLocation.py index 33b121438..e21ae6694 100644 --- a/backend/dataall/db/models/DatasetStorageLocation.py +++ b/backend/dataall/db/models/DatasetStorageLocation.py @@ -17,3 +17,6 @@ class DatasetStorageLocation(Resource, Base): userRoleForStorageLocation = query_expression() projectPermission = query_expression() environmentEndPoint = query_expression() + + def uri(self): + return self.locationUri diff --git a/backend/dataall/db/models/DatasetTable.py 
b/backend/dataall/db/models/DatasetTable.py index a1b06b192..e97174167 100644 --- a/backend/dataall/db/models/DatasetTable.py +++ b/backend/dataall/db/models/DatasetTable.py @@ -27,3 +27,6 @@ class DatasetTable(Resource, Base): stage = Column(String, default='RAW') topics = Column(postgresql.ARRAY(String), nullable=True) confidentiality = Column(String, nullable=False, default='C1') + + def uri(self): + return self.tableUri diff --git a/backend/dataall/modules/datasets/db/table_column_model.py b/backend/dataall/modules/datasets/db/table_column_model.py index 4d3d7e009..05bc26058 100644 --- a/backend/dataall/modules/datasets/db/table_column_model.py +++ b/backend/dataall/modules/datasets/db/table_column_model.py @@ -18,3 +18,6 @@ class DatasetTableColumn(Resource, Base): columnType = Column( String, default='column' ) # can be either "column" or "partition" + + def uri(self): + return self.columnUri From 49fbb415dbdfc27c20d90366a1cf3bcd41ebea0f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 12 Apr 2023 15:47:09 +0200 Subject: [PATCH 032/346] Fixed leftovers --- .../dataall/core/feed/services/registry.py | 19 ++++++++++--------- backend/dataall/modules/datasets/__init__.py | 2 +- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/backend/dataall/core/feed/services/registry.py b/backend/dataall/core/feed/services/registry.py index a69bcdd37..07f2e77a1 100644 --- a/backend/dataall/core/feed/services/registry.py +++ b/backend/dataall/core/feed/services/registry.py @@ -12,18 +12,19 @@ class FeedDefinition: class FeedRegistry: """Registers models for different target types""" + _DEFINITIONS: Dict[str, FeedDefinition] = {} - def __init__(self): - self._definitions: Dict[str, FeedDefinition] = {} + @classmethod + def register(cls, model: FeedDefinition): + cls._DEFINITIONS[model.target_type] = model - def register(self, model: FeedDefinition): - self._definitions[model.target_type] = model + @classmethod + def find(cls, target_type: str): + return 
cls._DEFINITIONS[target_type] - def find(self, target_type: str): - return self._definitions[target_type] - - def find_by_model(self, obj: Resource): - for target_type, definition in self._definitions.items(): + @classmethod + def find_by_model(cls, obj: Resource): + for target_type, definition in cls._DEFINITIONS.items(): if isinstance(obj, definition.model): return target_type return None diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 306778f40..5b7224a48 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -20,7 +20,7 @@ def is_supported(cls, modes): def __init__(self): import dataall.modules.datasets.api FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) - GlossaryRegistry.register(GlossaryDefinition("DatasetTableColumn", DatasetTableColumn)) + GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) log.info("API of datasets has been imported") From 7d029e73046990bae939f8ff5f6b6cfaa8a83d62 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 10:07:11 +0200 Subject: [PATCH 033/346] Datasets refactoring Added and fixed tests --- backend/dataall/core/feed/services/registry.py | 4 ++-- tests/api/test_feed.py | 3 ++- tests/modules/datasets/__init__.py | 0 tests/modules/datasets/test_dataset_feed.py | 11 +++++++++++ 4 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 tests/modules/datasets/__init__.py create mode 100644 tests/modules/datasets/test_dataset_feed.py diff --git a/backend/dataall/core/feed/services/registry.py b/backend/dataall/core/feed/services/registry.py index 07f2e77a1..893f37f55 100644 --- a/backend/dataall/core/feed/services/registry.py +++ b/backend/dataall/core/feed/services/registry.py @@ -15,8 +15,8 @@ class FeedRegistry: _DEFINITIONS: Dict[str, FeedDefinition] = {} @classmethod - def register(cls, model: 
FeedDefinition): - cls._DEFINITIONS[model.target_type] = model + def register(cls, definition: FeedDefinition): + cls._DEFINITIONS[definition.target_type] = definition @classmethod def find(cls, target_type: str): diff --git a/tests/api/test_feed.py b/tests/api/test_feed.py index 11f7c4891..f1d8aaf7f 100644 --- a/tests/api/test_feed.py +++ b/tests/api/test_feed.py @@ -103,4 +103,5 @@ def test_get_target(client, worksheet): targetType='Worksheet', username='me', ) - print(response) + assert response.data.getFeed.target.worksheetUri == worksheet.worksheetUri + diff --git a/tests/modules/datasets/__init__.py b/tests/modules/datasets/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/datasets/test_dataset_feed.py b/tests/modules/datasets/test_dataset_feed.py new file mode 100644 index 000000000..52c97e990 --- /dev/null +++ b/tests/modules/datasets/test_dataset_feed.py @@ -0,0 +1,11 @@ + +from dataall.core.feed.services.registry import FeedRegistry +from dataall.modules.datasets.db.table_column_model import DatasetTableColumn + + +def test_dataset_registered(): + model = FeedRegistry.find("DatasetTableColumn") + assert model == DatasetTableColumn + + model = DatasetTableColumn() + assert "DatasetTableColumn" == FeedRegistry.find_by_model(model) From be527ebc26e50f989ca3f9a8e84814d3d316a913 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 11:40:52 +0200 Subject: [PATCH 034/346] Added runtime type registration for Union GraphQL type --- backend/dataall/api/gql/graphql_union_type.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/backend/dataall/api/gql/graphql_union_type.py b/backend/dataall/api/gql/graphql_union_type.py index a8fea7e2f..b67fe3c9a 100644 --- a/backend/dataall/api/gql/graphql_union_type.py +++ b/backend/dataall/api/gql/graphql_union_type.py @@ -1,19 +1,31 @@ +from abc import ABC + from ._cache import cache_instances from .utils import get_named_type +class 
UnionTypeRegistry(ABC): + """An abstract class that is used to provide union type in runtime""" + + @classmethod + def types(cls): + raise NotImplementedError("Types method is not implemented") + + @cache_instances class Union: _register = {} - def __init__(self, name, types=[], resolver=lambda *_, **__: None): + def __init__(self, name, types=[], type_registry=None, resolver=lambda *_, **__: None): self.name = name self.types = types + self.type_registry = type_registry self.resolver = resolver Union._register[name] = self def gql(self, *args, **kwargs): - return f"union {self.name} = {'|'.join([get_named_type(t).name for t in self.types])}" + types = self.type_registry.types() if self.type_registry else self.types + return f"union {self.name} = {'|'.join([get_named_type(t).name for t in types])}" if __name__ == '__main__': From 3daf2aab7da67320bab4474de2119a93c46840f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 11:42:51 +0200 Subject: [PATCH 035/346] Changed Feed type registration mechanism Moved FeedRegistry to gql since it's more appropriate place for this Started using registry to provide types Renaming and small fixes --- .../feed/services => api/Objects/Feed}/registry.py | 14 ++++++++++---- backend/dataall/api/Objects/Feed/resolvers.py | 6 +++--- backend/dataall/api/Objects/Feed/schema.py | 11 ++--------- backend/dataall/core/feed/__init__.py | 1 - backend/dataall/core/feed/services/__init__.py | 1 - backend/dataall/modules/datasets/__init__.py | 2 +- tests/modules/datasets/test_dataset_feed.py | 6 +++--- 7 files changed, 19 insertions(+), 22 deletions(-) rename backend/dataall/{core/feed/services => api/Objects/Feed}/registry.py (72%) delete mode 100644 backend/dataall/core/feed/__init__.py delete mode 100644 backend/dataall/core/feed/services/__init__.py diff --git a/backend/dataall/core/feed/services/registry.py b/backend/dataall/api/Objects/Feed/registry.py similarity index 72% rename from 
backend/dataall/core/feed/services/registry.py rename to backend/dataall/api/Objects/Feed/registry.py index 893f37f55..a119529ab 100644 --- a/backend/dataall/core/feed/services/registry.py +++ b/backend/dataall/api/Objects/Feed/registry.py @@ -1,6 +1,8 @@ from dataclasses import dataclass from typing import Type, Dict +from dataall.api import gql +from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models @@ -10,7 +12,7 @@ class FeedDefinition: model: Type[Resource] -class FeedRegistry: +class FeedRegistry(UnionTypeRegistry): """Registers models for different target types""" _DEFINITIONS: Dict[str, FeedDefinition] = {} @@ -19,16 +21,20 @@ def register(cls, definition: FeedDefinition): cls._DEFINITIONS[definition.target_type] = definition @classmethod - def find(cls, target_type: str): - return cls._DEFINITIONS[target_type] + def find_model(cls, target_type: str): + return cls._DEFINITIONS[target_type].model @classmethod - def find_by_model(cls, obj: Resource): + def find_target(cls, obj: Resource): for target_type, definition in cls._DEFINITIONS.items(): if isinstance(obj, definition.model): return target_type return None + @classmethod + def types(cls): + return [gql.Ref(target_type) for target_type in cls._DEFINITIONS.keys()] + FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) diff --git a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py index 598ec86e1..08de0d6b7 100644 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ b/backend/dataall/api/Objects/Feed/resolvers.py @@ -2,7 +2,7 @@ from dataall.api.context import Context from dataall.db import paginate, models -from dataall.core.feed.services.registry import FeedRegistry +from dataall.api.Objects.Feed.registry import FeedRegistry class Feed: @@ -20,14 +20,14 @@ def targetType(self): def resolve_feed_target_type(obj, *_): - 
return FeedRegistry.find_by_model(obj) + return FeedRegistry.find_target(obj) def resolve_target(context: Context, source: Feed, **kwargs): if not source: return None with context.engine.scoped_session() as session: - model = FeedRegistry.find(source.targetType) + model = FeedRegistry.find_model(source.targetType) target = session.query(model).get(source.targetUri) return target diff --git a/backend/dataall/api/Objects/Feed/schema.py b/backend/dataall/api/Objects/Feed/schema.py index d58918716..42fea86ad 100644 --- a/backend/dataall/api/Objects/Feed/schema.py +++ b/backend/dataall/api/Objects/Feed/schema.py @@ -1,18 +1,11 @@ from ... import gql from .resolvers import * +from dataall.api.Objects.Feed.registry import FeedRegistry FeedTarget = gql.Union( name='FeedTarget', - types=[ - gql.Ref('Dataset'), - gql.Ref('DatasetTable'), - gql.Ref('DatasetTableColumn'), - gql.Ref('DatasetStorageLocation'), - gql.Ref('DataPipeline'), - gql.Ref('Worksheet'), - gql.Ref('Dashboard'), - ], + type_registry=FeedRegistry, resolver=resolve_feed_target_type, ) diff --git a/backend/dataall/core/feed/__init__.py b/backend/dataall/core/feed/__init__.py deleted file mode 100644 index 39f751553..000000000 --- a/backend/dataall/core/feed/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains logic related to feeds""" diff --git a/backend/dataall/core/feed/services/__init__.py b/backend/dataall/core/feed/services/__init__.py deleted file mode 100644 index 5b130b24b..000000000 --- a/backend/dataall/core/feed/services/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Service layer of feeds""" diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 5b7224a48..6ba4a24e2 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,7 +2,7 @@ import logging from typing import List -from dataall.core.feed.services.registry import FeedRegistry, FeedDefinition +from dataall.api.Objects.Feed.registry 
import FeedRegistry, FeedDefinition from dataall.core.glossary.services.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.table_column_model import DatasetTableColumn from dataall.modules.loader import ModuleInterface, ImportMode diff --git a/tests/modules/datasets/test_dataset_feed.py b/tests/modules/datasets/test_dataset_feed.py index 52c97e990..db5ff43e2 100644 --- a/tests/modules/datasets/test_dataset_feed.py +++ b/tests/modules/datasets/test_dataset_feed.py @@ -1,11 +1,11 @@ -from dataall.core.feed.services.registry import FeedRegistry +from dataall.api.Objects.Feed.registry import FeedRegistry from dataall.modules.datasets.db.table_column_model import DatasetTableColumn def test_dataset_registered(): - model = FeedRegistry.find("DatasetTableColumn") + model = FeedRegistry.find_model("DatasetTableColumn") assert model == DatasetTableColumn model = DatasetTableColumn() - assert "DatasetTableColumn" == FeedRegistry.find_by_model(model) + assert "DatasetTableColumn" == FeedRegistry.find_target(model) From db3bfd3f51c0b7ea2ef39ab62f9176b99e5ebeb5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 11:43:56 +0200 Subject: [PATCH 036/346] Added TODO for future refactoring Solve circular dependecy for redshift. It should go away after the migration of redshift --- backend/dataall/aws/handlers/redshift.py | 1 + backend/dataall/db/api/redshift_cluster.py | 7 +++++-- deploy/stacks/container.py | 2 ++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 4d2591520..c186d5df7 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -9,6 +9,7 @@ from .sts import SessionHelper from ... 
import db from ...db import models +# TODO should be migrated in the redshift module from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 4167a555a..8ca3088bf 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -9,7 +9,6 @@ NamingConventionPattern, ) from ...utils.slugify import slugify -from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -495,7 +494,11 @@ def enable_copy_table( session, username, groups, uri, data=None, check_perm=True ) -> models.RedshiftClusterDatasetTable: cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) + + # TODO should be migrated in the redshift module + table = dataall.modules.datasets.services.dataset_table.DatasetTableService.get_dataset_table_by_uri( + session, data['tableUri'] + ) table = models.RedshiftClusterDatasetTable( clusterUri=uri, datasetUri=data['datasetUri'], diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index d3c761519..aa7be04df 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -92,6 +92,7 @@ def __init__( envname, resource_prefix, vpc, vpc_endpoints_sg ) + # TODO introduce the ability to change the deployment depending on config.json file sync_tables_task = self.set_scheduled_task( cluster=cluster, command=['python3.8', '-m', 'dataall.modules.datasets.tasks.tables_syncer'], @@ -174,6 +175,7 @@ def __init__( update_bucket_policies_task.task.security_groups ) + # TODO introduce the ability to change the deployment depending on config.json file subscriptions_task = self.set_scheduled_task( cluster=cluster, command=[ From 13b6e92918b453c3fe6ad20be7ec2d8e74194315 Mon Sep 17 00:00:00 2001 From: 
Nikita Podshivalov Date: Thu, 13 Apr 2023 15:11:50 +0200 Subject: [PATCH 037/346] Added GlossaryRegistry for Union scheme --- .../dataall/api/Objects/Glossary/__init__.py | 3 ++- .../Objects/Glossary}/registry.py | 10 ++++++- .../dataall/api/Objects/Glossary/resolvers.py | 27 ++++++++++--------- .../dataall/api/Objects/Glossary/schema.py | 9 ++----- backend/dataall/core/glossary/__init__.py | 1 - .../core/glossary/services/__init__.py | 1 - backend/dataall/db/api/glossary.py | 25 ++++++----------- backend/dataall/modules/datasets/__init__.py | 2 +- tests/api/test_glossary.py | 20 +++++++------- 9 files changed, 46 insertions(+), 52 deletions(-) rename backend/dataall/{core/glossary/services => api/Objects/Glossary}/registry.py (80%) delete mode 100644 backend/dataall/core/glossary/__init__.py delete mode 100644 backend/dataall/core/glossary/services/__init__.py diff --git a/backend/dataall/api/Objects/Glossary/__init__.py b/backend/dataall/api/Objects/Glossary/__init__.py index 0c4ec6166..30e86e17e 100644 --- a/backend/dataall/api/Objects/Glossary/__init__.py +++ b/backend/dataall/api/Objects/Glossary/__init__.py @@ -4,6 +4,7 @@ mutations, resolvers, schema, + registry, ) -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] +__all__ = ['registry', 'resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/core/glossary/services/registry.py b/backend/dataall/api/Objects/Glossary/registry.py similarity index 80% rename from backend/dataall/core/glossary/services/registry.py rename to backend/dataall/api/Objects/Glossary/registry.py index ee3f10d41..375f470e2 100644 --- a/backend/dataall/core/glossary/services/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -1,6 +1,8 @@ from dataclasses import dataclass from typing import Type, Dict, Optional, Protocol, Union +from dataall.api import gql +from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models @@ 
-11,6 +13,7 @@ def uri(self): @dataclass class GlossaryDefinition: + """Glossary's definition used for registration references of other modules""" target_type: str object_type: str model: Union[Type[Resource], Identifiable] # should be an intersection, but python typing doesn't have one yet @@ -19,7 +22,8 @@ def target_uri(self): return self.model.uri() -class GlossaryRegistry: +class GlossaryRegistry(UnionTypeRegistry): + """Registry of glossary definition and API to retrieve data""" _DEFINITIONS: Dict[str, GlossaryDefinition] = {} @classmethod @@ -42,6 +46,10 @@ def find_object_type(cls, model: Resource) -> Optional[str]: def definitions(cls): return cls._DEFINITIONS.values() + @classmethod + def types(cls): + return [gql.Ref(definition.object_type) for definition in cls._DEFINITIONS.values()] + GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", models.DatasetStorageLocation)) diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index ae8501993..15e77327f 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -2,6 +2,7 @@ from sqlalchemy import and_, or_, asc +from dataall.api.Objects.Glossary.registry import GlossaryRegistry from .... 
import db from ....api.context import Context from ....db import paginate, exceptions, models @@ -12,8 +13,6 @@ GlossaryRole ) -from dataall.core.glossary.services.registry import GlossaryRegistry - def resolve_glossary_node(obj: models.GlossaryNode, *_): if obj.nodeType == 'G': @@ -273,8 +272,6 @@ def request_link( with context.engine.scoped_session() as session: return db.api.Glossary.link_term( session=session, - username=context.username, - groups=context.groups, uri=nodeUri, data={ 'targetUri': targetUri, @@ -282,7 +279,7 @@ def request_link( 'approvedByOwner': True, 'approvedBySteward': False, }, - check_perm=True, + target_model=_target_model(targetType), ) @@ -296,8 +293,6 @@ def link_term( with context.engine.scoped_session() as session: return db.api.Glossary.link_term( session=session, - username=context.username, - groups=context.groups, uri=nodeUri, data={ 'targetUri': targetUri, @@ -305,7 +300,7 @@ def link_term( 'approvedByOwner': True, 'approvedBySteward': True, }, - check_perm=True, + target_model=_target_model(targetType), ) @@ -329,7 +324,7 @@ def target_union_resolver(obj, *_): def resolve_link_target(context, source, **kwargs): with context.engine.scoped_session() as session: - model = GlossaryRegistry.find_model(source.targetUri) + model = GlossaryRegistry.find_model(source.targetType) target = session.query(model).get(source.targetUri) return target @@ -342,11 +337,8 @@ def resolve_term_associations( with context.engine.scoped_session() as session: return db.api.Glossary.list_term_associations( session=session, - username=context.username, - groups=context.groups, - uri=None, data={'source': source, 'filter': filter}, - check_perm=True, + target_model_definitions=GlossaryRegistry.definitions() ) @@ -477,3 +469,12 @@ def reindex(context, linkUri): upsert_folder(session=session, es=context.es, locationUri=link.targetUri) elif isinstance(target, models.Dashboard): upsert_dashboard(session=session, es=context.es, dashboardUri=link.targetUri) + + 
+def _target_model(target_type: str): + target_model = GlossaryRegistry.find_model(target_type) + if not target_model: + raise exceptions.InvalidInput( + 'NodeType', 'term.nodeType', 'association target type is invalid' + ) + return target_model diff --git a/backend/dataall/api/Objects/Glossary/schema.py b/backend/dataall/api/Objects/Glossary/schema.py index 36fd1b758..9b71ae4b1 100644 --- a/backend/dataall/api/Objects/Glossary/schema.py +++ b/backend/dataall/api/Objects/Glossary/schema.py @@ -1,6 +1,7 @@ from ... import gql from .resolvers import * from ...constants import GlossaryRole +from dataall.api.Objects.Glossary.registry import GlossaryRegistry GlossaryNode = gql.Union( name='GlossaryNode', @@ -246,13 +247,7 @@ GlossaryTermLinkTarget = gql.Union( name='GlossaryTermLinkTarget', - types=[ - gql.Ref('Dataset'), - gql.Ref('DatasetTable'), - gql.Ref('DatasetStorageLocation'), - gql.Ref('DatasetTableColumn'), - gql.Ref('Dashboard'), - ], + type_registry=GlossaryRegistry, resolver=target_union_resolver, ) diff --git a/backend/dataall/core/glossary/__init__.py b/backend/dataall/core/glossary/__init__.py deleted file mode 100644 index aa81c1e26..000000000 --- a/backend/dataall/core/glossary/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains code related to glossaries""" diff --git a/backend/dataall/core/glossary/services/__init__.py b/backend/dataall/core/glossary/services/__init__.py deleted file mode 100644 index 9ed65d261..000000000 --- a/backend/dataall/core/glossary/services/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Service layer of glossaries""" diff --git a/backend/dataall/db/api/glossary.py b/backend/dataall/db/api/glossary.py index 96c340f62..2dc97c62c 100644 --- a/backend/dataall/db/api/glossary.py +++ b/backend/dataall/db/api/glossary.py @@ -5,12 +5,11 @@ from sqlalchemy.orm import with_expression, aliased from .. 
import models, exceptions, permissions, paginate, Resource -from .permission_checker import ( - has_tenant_perm, -) +from .permission_checker import has_tenant_perm from ..models.Glossary import GlossaryNodeStatus -from dataall.core.glossary.services.registry import GlossaryRegistry from ..paginator import Page +from dataall.core.permission_checker import has_tenant_permission +from dataall.core.context import get_context logger = logging.getLogger(__name__) @@ -113,8 +112,8 @@ def update_node(session, username, groups, uri, data=None, check_perm=None): return node @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def link_term(session, username, groups, uri, data=None, check_perm=None): + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def link_term(session, uri, target_model: Resource, data): term: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) if not term: raise exceptions.ObjectNotFound('Node', uri) @@ -128,18 +127,12 @@ def link_term(session, username, groups, uri, data=None, check_perm=None): target_uri: str = data['targetUri'] target_type: str = data['targetType'] - target_model: Resource = GlossaryRegistry.find_model(target_type) - if not target_model: - raise exceptions.InvalidInput( - 'NodeType', 'term.nodeType', 'association target type is invalid' - ) - target = session.query(target_model).get(target_uri) if not target: raise exceptions.ObjectNotFound('Association target', uri) link = models.TermLink( - owner=username, + owner=get_context().username, approvedByOwner=data.get('approvedByOwner', True), approvedBySteward=data.get('approvedBySteward', True), nodeUri=uri, @@ -335,14 +328,12 @@ def list_node_children(session, source, filter): ).to_dict() @staticmethod - def list_term_associations( - session, username, groups, uri, data=None, check_perm=None - ): + def list_term_associations(session, target_model_definitions, data=None): source = data['source'] filter = data['filter'] query = None - for definition in 
GlossaryRegistry.definitions(): + for definition in target_model_definitions: model = definition.model subquery = session.query( definition.target_uri().label('targetUri'), diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 6ba4a24e2..de1963bd2 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -3,7 +3,7 @@ from typing import List from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition -from dataall.core.glossary.services.registry import GlossaryRegistry, GlossaryDefinition +from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.table_column_model import DatasetTableColumn from dataall.modules.loader import ModuleInterface, ImportMode diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 8276dca8c..987ccc1a8 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,3 +1,4 @@ +from datetime import datetime from typing import List from dataall.db import models from dataall.modules.datasets.db.table_column_model import DatasetTableColumn @@ -197,7 +198,6 @@ def test_list_glossaries(client): } """ ) - print(response) assert response.data.listGlossaries.count == 1 assert response.data.listGlossaries.nodes[0].stats.categories == 2 @@ -246,7 +246,6 @@ def test_hierarchical_search(client): } """ ) - print(response) assert response.data.searchGlossary.count == 4 @@ -263,7 +262,6 @@ def test_get_glossary(client, g1): """, nodeUri=g1.nodeUri, ) - print(r) assert r.data.getGlossary.nodeUri == g1.nodeUri assert r.data.getGlossary.label == g1.label assert r.data.getGlossary.readme == g1.readme @@ -301,7 +299,6 @@ def test_get_term(client, t1): """, nodeUri=t1.nodeUri, ) - print(r) assert r.data.getTerm.nodeUri == t1.nodeUri assert r.data.getTerm.label == t1.label assert r.data.getTerm.readme == t1.readme @@ -552,7 +549,7 @@ def 
test_link_term(client, t1, _columns, group): print(r) -def test_get_term_associations(t1, client): +def test_get_term_associations(t1, db, client): r = client.query( """ query GetTerm($nodeUri:String!){ @@ -579,10 +576,13 @@ def test_get_term_associations(t1, client): nodeUri=t1.nodeUri, username='alice', ) - print(r) + assert r.data.getTerm.nodeUri == t1.nodeUri + assert r.data.getTerm.label == t1.label + assert r.data.getTerm.readme == t1.readme -def test_delete_category(client, c1, group): +def test_delete_category(client, db, c1, group): + now = datetime.now() r = client.query( """ mutation DeleteCategory( @@ -597,7 +597,9 @@ def test_delete_category(client, c1, group): username='alice', groups=[group.name], ) - print(r) + with db.scoped_session() as session: + node = session.query(models.GlossaryNode).get(c1.nodeUri) + assert node.deleted >= now def test_list_glossaries_after_delete(client): @@ -634,7 +636,6 @@ def test_list_glossaries_after_delete(client): } """ ) - print(response) assert response.data.listGlossaries.count == 1 assert response.data.listGlossaries.nodes[0].stats.categories == 0 @@ -683,5 +684,4 @@ def test_hierarchical_search_after_delete(client): } """ ) - print(response) assert response.data.searchGlossary.count == 1 From 144dfea5a1026d665eda7e6384adc5781297c5ba Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 15:15:08 +0200 Subject: [PATCH 038/346] Changed import in redshift module --- backend/dataall/db/api/redshift_cluster.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 8ca3088bf..31b795225 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -495,8 +495,9 @@ def enable_copy_table( ) -> models.RedshiftClusterDatasetTable: cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - # TODO should be migrated in the redshift module - 
table = dataall.modules.datasets.services.dataset_table.DatasetTableService.get_dataset_table_by_uri( + # TODO this dirty hack should be removed in the redshift module or after pipeline migration (circular import) + from dataall.modules.datasets.services.dataset_table import DatasetTableService + table = DatasetTableService.get_dataset_table_by_uri( session, data['tableUri'] ) table = models.RedshiftClusterDatasetTable( From d43b9b31b661287e303cfad8fc958c7f511277d2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 15:18:04 +0200 Subject: [PATCH 039/346] No need for Utils yet --- backend/dataall/core/utils/__init__.py | 1 - 1 file changed, 1 deletion(-) delete mode 100644 backend/dataall/core/utils/__init__.py diff --git a/backend/dataall/core/utils/__init__.py b/backend/dataall/core/utils/__init__.py deleted file mode 100644 index 02ed9cfb4..000000000 --- a/backend/dataall/core/utils/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Utility functions and classes""" From 39b244c9b2fc841405e53088a48f8c564850b505 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 13 Apr 2023 15:22:39 +0200 Subject: [PATCH 040/346] Fixed linting --- backend/dataall/db/models/Dataset.py | 1 - .../modules/datasets/handlers/__init__.py | 1 - .../datasets/handlers/glue_column_handler.py | 19 +++++++++---------- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/backend/dataall/db/models/Dataset.py b/backend/dataall/db/models/Dataset.py index 451c7da7c..fd65387b7 100644 --- a/backend/dataall/db/models/Dataset.py +++ b/backend/dataall/db/models/Dataset.py @@ -62,4 +62,3 @@ class Dataset(Resource, Base): def uri(self): return self.datasetUri - diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 19bd47297..a5d506712 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -8,4 +8,3 @@ ) __all__ = 
["glue_column_handler", "glue_table_handler"] - diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index 02003eea2..329b702b7 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -67,16 +67,15 @@ def update_table_columns(engine, task: models.Task): updated_table = { k: v for k, v in original_table['Table'].items() - if k - not in [ - 'CatalogId', - 'VersionId', - 'DatabaseName', - 'CreateTime', - 'UpdateTime', - 'CreatedBy', - 'IsRegisteredWithLakeFormation', - ] + if k not in [ + 'CatalogId', + 'VersionId', + 'DatabaseName', + 'CreateTime', + 'UpdateTime', + 'CreatedBy', + 'IsRegisteredWithLakeFormation', + ] } all_columns = updated_table.get('StorageDescriptor', {}).get( 'Columns', [] From cb3800a8aae649c8ff14d937440ea215e106db09 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:24:26 +0200 Subject: [PATCH 041/346] Datasets refactoring Moving datasets profiling to datasets modules --- backend/dataall/api/Objects/__init__.py | 1 - .../datasets/api}/DatasetProfiling/__init__.py | 0 .../datasets/api}/DatasetProfiling/input_types.py | 2 +- .../datasets/api}/DatasetProfiling/mutations.py | 2 +- .../datasets/api}/DatasetProfiling/queries.py | 2 +- .../datasets/api}/DatasetProfiling/resolvers.py | 10 +++++----- .../datasets/api}/DatasetProfiling/schema.py | 2 +- backend/dataall/modules/datasets/api/__init__.py | 5 +++-- 8 files changed, 12 insertions(+), 12 deletions(-) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/__init__.py (100%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/input_types.py (95%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/mutations.py (95%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/queries.py (97%) 
rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/resolvers.py (95%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetProfiling/schema.py (98%) diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 43d5e0833..80b91358a 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -29,7 +29,6 @@ Test, SagemakerStudio, RedshiftCluster, - DatasetProfiling, Glossary, AthenaQueryResult, Worksheet, diff --git a/backend/dataall/api/Objects/DatasetProfiling/__init__.py b/backend/dataall/modules/datasets/api/DatasetProfiling/__init__.py similarity index 100% rename from backend/dataall/api/Objects/DatasetProfiling/__init__.py rename to backend/dataall/modules/datasets/api/DatasetProfiling/__init__.py diff --git a/backend/dataall/api/Objects/DatasetProfiling/input_types.py b/backend/dataall/modules/datasets/api/DatasetProfiling/input_types.py similarity index 95% rename from backend/dataall/api/Objects/DatasetProfiling/input_types.py rename to backend/dataall/modules/datasets/api/DatasetProfiling/input_types.py index deb1739c5..e8e89fb16 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/input_types.py +++ b/backend/dataall/modules/datasets/api/DatasetProfiling/input_types.py @@ -1,4 +1,4 @@ -from ... import gql +from dataall.api import gql StartDatasetProfilingRunInput = gql.InputType( name='StartDatasetProfilingRunInput', diff --git a/backend/dataall/api/Objects/DatasetProfiling/mutations.py b/backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py similarity index 95% rename from backend/dataall/api/Objects/DatasetProfiling/mutations.py rename to backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py index 5876c81a7..778526048 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/mutations.py +++ b/backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py @@ -1,4 +1,4 @@ -from ... 
import gql +from dataall.api import gql from .resolvers import * startDatasetProfilingRun = gql.MutationField( diff --git a/backend/dataall/api/Objects/DatasetProfiling/queries.py b/backend/dataall/modules/datasets/api/DatasetProfiling/queries.py similarity index 97% rename from backend/dataall/api/Objects/DatasetProfiling/queries.py rename to backend/dataall/modules/datasets/api/DatasetProfiling/queries.py index 9ab3eb2bb..2225e2117 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/queries.py +++ b/backend/dataall/modules/datasets/api/DatasetProfiling/queries.py @@ -1,4 +1,4 @@ -from ... import gql +from dataall.api import gql from .resolvers import * diff --git a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py b/backend/dataall/modules/datasets/api/DatasetProfiling/resolvers.py similarity index 95% rename from backend/dataall/api/Objects/DatasetProfiling/resolvers.py rename to backend/dataall/modules/datasets/api/DatasetProfiling/resolvers.py index 4b4684019..a84d0d82f 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/DatasetProfiling/resolvers.py @@ -1,11 +1,11 @@ import json import logging -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import api, permissions, models -from ....db.api import ResourcePolicy +from dataall.api.context import Context +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import api, permissions, models +from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/api/Objects/DatasetProfiling/schema.py b/backend/dataall/modules/datasets/api/DatasetProfiling/schema.py similarity index 98% rename from backend/dataall/api/Objects/DatasetProfiling/schema.py 
rename to backend/dataall/modules/datasets/api/DatasetProfiling/schema.py index f6fe9c575..f79022a51 100644 --- a/backend/dataall/api/Objects/DatasetProfiling/schema.py +++ b/backend/dataall/modules/datasets/api/DatasetProfiling/schema.py @@ -1,4 +1,4 @@ -from ... import gql +from dataall.api import gql from .resolvers import ( resolve_dataset, get_profiling_run_status, diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 538df0734..00d4dd3b8 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -1,6 +1,7 @@ """The GraphQL schema of datasets and related functionality""" from dataall.modules.datasets.api import ( - table_column + table_column, + DatasetProfiling ) -__all__ = ["table_column"] +__all__ = ["table_column", "DatasetProfiling"] From dd8e597a2e0a301342f0fa4b406b9a37f9e445a2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:28:55 +0200 Subject: [PATCH 042/346] Datasets refactoring Renaming profiling --- backend/dataall/modules/datasets/api/__init__.py | 4 ++-- .../api/{DatasetProfiling => profiling}/__init__.py | 2 +- .../api/{DatasetProfiling => profiling}/input_types.py | 0 .../api/{DatasetProfiling => profiling}/mutations.py | 5 ++++- .../api/{DatasetProfiling => profiling}/queries.py | 7 ++++++- .../api/{DatasetProfiling => profiling}/resolvers.py | 0 .../datasets/api/{DatasetProfiling => profiling}/schema.py | 2 +- tests/api/test_dataset_profiling.py | 4 ++-- 8 files changed, 16 insertions(+), 8 deletions(-) rename backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/__init__.py (73%) rename backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/input_types.py (100%) rename backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/mutations.py (83%) rename backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/queries.py (86%) rename 
backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/resolvers.py (100%) rename backend/dataall/modules/datasets/api/{DatasetProfiling => profiling}/schema.py (96%) diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 00d4dd3b8..6bb6f8ab0 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -1,7 +1,7 @@ """The GraphQL schema of datasets and related functionality""" from dataall.modules.datasets.api import ( table_column, - DatasetProfiling + profiling ) -__all__ = ["table_column", "DatasetProfiling"] +__all__ = ["table_column", "profiling"] diff --git a/backend/dataall/modules/datasets/api/DatasetProfiling/__init__.py b/backend/dataall/modules/datasets/api/profiling/__init__.py similarity index 73% rename from backend/dataall/modules/datasets/api/DatasetProfiling/__init__.py rename to backend/dataall/modules/datasets/api/profiling/__init__.py index dfa46b264..4c5b6c491 100644 --- a/backend/dataall/modules/datasets/api/DatasetProfiling/__init__.py +++ b/backend/dataall/modules/datasets/api/profiling/__init__.py @@ -1,4 +1,4 @@ -from . 
import ( +from dataall.modules.datasets.api.profiling import ( input_types, mutations, queries, diff --git a/backend/dataall/modules/datasets/api/DatasetProfiling/input_types.py b/backend/dataall/modules/datasets/api/profiling/input_types.py similarity index 100% rename from backend/dataall/modules/datasets/api/DatasetProfiling/input_types.py rename to backend/dataall/modules/datasets/api/profiling/input_types.py diff --git a/backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py b/backend/dataall/modules/datasets/api/profiling/mutations.py similarity index 83% rename from backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py rename to backend/dataall/modules/datasets/api/profiling/mutations.py index 778526048..e4bcd62cc 100644 --- a/backend/dataall/modules/datasets/api/DatasetProfiling/mutations.py +++ b/backend/dataall/modules/datasets/api/profiling/mutations.py @@ -1,5 +1,8 @@ from dataall.api import gql -from .resolvers import * +from dataall.modules.datasets.api.profiling.resolvers import ( + start_profiling_run, + update_profiling_run_results +) startDatasetProfilingRun = gql.MutationField( name='startDatasetProfilingRun', diff --git a/backend/dataall/modules/datasets/api/DatasetProfiling/queries.py b/backend/dataall/modules/datasets/api/profiling/queries.py similarity index 86% rename from backend/dataall/modules/datasets/api/DatasetProfiling/queries.py rename to backend/dataall/modules/datasets/api/profiling/queries.py index 2225e2117..8d2fbb25c 100644 --- a/backend/dataall/modules/datasets/api/DatasetProfiling/queries.py +++ b/backend/dataall/modules/datasets/api/profiling/queries.py @@ -1,5 +1,10 @@ from dataall.api import gql -from .resolvers import * +from dataall.modules.datasets.api.profiling.resolvers import ( + get_profiling_run, + list_profiling_runs, + list_table_profiling_runs, + get_last_table_profiling_run +) getDatasetProfilingRun = gql.QueryField( diff --git 
a/backend/dataall/modules/datasets/api/DatasetProfiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py similarity index 100% rename from backend/dataall/modules/datasets/api/DatasetProfiling/resolvers.py rename to backend/dataall/modules/datasets/api/profiling/resolvers.py diff --git a/backend/dataall/modules/datasets/api/DatasetProfiling/schema.py b/backend/dataall/modules/datasets/api/profiling/schema.py similarity index 96% rename from backend/dataall/modules/datasets/api/DatasetProfiling/schema.py rename to backend/dataall/modules/datasets/api/profiling/schema.py index f79022a51..6babb61b3 100644 --- a/backend/dataall/modules/datasets/api/DatasetProfiling/schema.py +++ b/backend/dataall/modules/datasets/api/profiling/schema.py @@ -1,5 +1,5 @@ from dataall.api import gql -from .resolvers import ( +from dataall.modules.datasets.api.profiling.resolvers import ( resolve_dataset, get_profiling_run_status, get_profiling_results, diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index c5bed6d1e..ad410a610 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -129,7 +129,7 @@ def test_get_table_profiling_run( client, dataset1, env1, module_mocker, table, db, group ): module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers.get_profiling_results_from_s3', + 'dataall.modules.datasets.api.profiling.resolvers.get_profiling_results_from_s3', return_value='{"results": "yes"}', ) runs = list_profiling_runs(client, dataset1, group) @@ -169,7 +169,7 @@ def test_list_table_profiling_runs( client, dataset1, env1, module_mocker, table, db, group ): module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers.get_profiling_results_from_s3', + 'dataall.modules.datasets.api.profiling.resolvers.get_profiling_results_from_s3', return_value='{"results": "yes"}', ) module_mocker.patch('requests.post', return_value=True) From 
8ca7bea0feec8594ce6ca43f5bdadfe8951d0406 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:32:50 +0200 Subject: [PATCH 043/346] Datasets refactoring Renaming table_column_model to models to easier import other models --- backend/dataall/modules/datasets/__init__.py | 2 +- backend/dataall/modules/datasets/api/table_column/resolvers.py | 2 +- .../modules/datasets/db/{table_column_model.py => models.py} | 0 .../dataall/modules/datasets/handlers/glue_column_handler.py | 2 +- backend/dataall/modules/datasets/services/dataset_table.py | 2 +- tests/api/test_dataset_table.py | 2 +- tests/api/test_glossary.py | 2 +- tests/modules/datasets/test_dataset_feed.py | 2 +- 8 files changed, 7 insertions(+), 7 deletions(-) rename backend/dataall/modules/datasets/db/{table_column_model.py => models.py} (100%) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index de1963bd2..4620495fe 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,7 +4,7 @@ from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index b958f2f7a..8e78a042e 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -6,7 +6,7 @@ from dataall.db import paginate, permissions, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService -from 
dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn def list_table_columns( diff --git a/backend/dataall/modules/datasets/db/table_column_model.py b/backend/dataall/modules/datasets/db/models.py similarity index 100% rename from backend/dataall/modules/datasets/db/table_column_model.py rename to backend/dataall/modules/datasets/db/models.py diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index 329b702b7..df43f9dbd 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.aws.handlers.service_handlers import Worker -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 873cbe01e..cd02eadf5 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -6,7 +6,7 @@ from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment from dataall.db.models import Dataset from dataall.utils import json_utils -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn logger = logging.getLogger(__name__) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index a2fcb2add..88140b68c 100644 --- a/tests/api/test_dataset_table.py +++ 
b/tests/api/test_dataset_table.py @@ -4,7 +4,7 @@ import dataall from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 987ccc1a8..bb7f34516 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import List from dataall.db import models -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn import pytest diff --git a/tests/modules/datasets/test_dataset_feed.py b/tests/modules/datasets/test_dataset_feed.py index db5ff43e2..06ffdc8ed 100644 --- a/tests/modules/datasets/test_dataset_feed.py +++ b/tests/modules/datasets/test_dataset_feed.py @@ -1,6 +1,6 @@ from dataall.api.Objects.Feed.registry import FeedRegistry -from dataall.modules.datasets.db.table_column_model import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn def test_dataset_registered(): From e36ab3b0df34c9fa9c86de3f755dca6b547faa73 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:42:42 +0200 Subject: [PATCH 044/346] Datasets refactoring Moving DatasetProfilingRun model --- backend/dataall/aws/handlers/glue.py | 5 +- .../dataall/db/api/dataset_profiling_run.py | 47 ++++++++++--------- .../dataall/db/models/DatasetProfilingRun.py | 20 -------- backend/dataall/db/models/__init__.py | 1 - .../datasets/api/profiling/resolvers.py | 9 ++-- backend/dataall/modules/datasets/db/models.py | 21 +++++++-- tests/api/test_dataset_profiling.py | 5 +- 7 files changed, 53 insertions(+), 55 deletions(-) delete mode 100644 backend/dataall/db/models/DatasetProfilingRun.py diff --git 
a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index e05ce4c54..a61afc3a1 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,6 +6,7 @@ from .sts import SessionHelper from ... import db from ...db import models +from dataall.modules.datasets.db.models import DatasetProfilingRun log = logging.getLogger('aws:glue') @@ -526,7 +527,7 @@ def get_job_runs(engine, task: models.Task): @Worker.handler('glue.job.start_profiling_run') def start_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: - profiling: models.DatasetProfilingRun = ( + profiling: DatasetProfilingRun = ( db.api.DatasetProfilingRun.get_profiling_run( session, profilingRunUri=task.targetUri ) @@ -572,7 +573,7 @@ def run_job(**data): @Worker.handler('glue.job.profiling_run_status') def get_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: - profiling: models.DatasetProfilingRun = ( + profiling: DatasetProfilingRun = ( db.api.DatasetProfilingRun.get_profiling_run( session, profilingRunUri=task.targetUri ) diff --git a/backend/dataall/db/api/dataset_profiling_run.py b/backend/dataall/db/api/dataset_profiling_run.py index f1552bc81..5e7024843 100644 --- a/backend/dataall/db/api/dataset_profiling_run.py +++ b/backend/dataall/db/api/dataset_profiling_run.py @@ -2,6 +2,7 @@ from .. 
import paginate, models from ..exceptions import ObjectNotFound +from dataall.modules.datasets.db.models import DatasetProfilingRun as DatasetProfilingRunModel class DatasetProfilingRun: @@ -30,7 +31,7 @@ def start_profiling( if not environment: raise ObjectNotFound('Environment', dataset.environmentUri) - run = models.DatasetProfilingRun( + run = DatasetProfilingRunModel( datasetUri=dataset.datasetUri, status='RUNNING', AwsAccountId=environment.AwsAccountId, @@ -72,14 +73,14 @@ def get_profiling_run( session, profilingRunUri=None, GlueJobRunId=None, GlueTableName=None ): if profilingRunUri: - run: models.DatasetProfilingRun = session.query( - models.DatasetProfilingRun + run: DatasetProfilingRunModel = session.query( + DatasetProfilingRunModel ).get(profilingRunUri) else: - run: models.DatasetProfilingRun = ( - session.query(models.DatasetProfilingRun) - .filter(models.DatasetProfilingRun.GlueJobRunId == GlueJobRunId) - .filter(models.DatasetProfilingRun.GlueTableName == GlueTableName) + run: DatasetProfilingRunModel = ( + session.query(DatasetProfilingRunModel) + .filter(DatasetProfilingRunModel.GlueJobRunId == GlueJobRunId) + .filter(DatasetProfilingRunModel.GlueTableName == GlueTableName) .first() ) return run @@ -89,9 +90,9 @@ def list_profiling_runs(session, datasetUri, filter: dict = None): if not filter: filter = {} q = ( - session.query(models.DatasetProfilingRun) - .filter(models.DatasetProfilingRun.datasetUri == datasetUri) - .order_by(models.DatasetProfilingRun.created.desc()) + session.query(DatasetProfilingRunModel) + .filter(DatasetProfilingRunModel.datasetUri == datasetUri) + .order_by(DatasetProfilingRunModel.created.desc()) ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) @@ -102,19 +103,19 @@ def list_table_profiling_runs(session, tableUri, filter): if not filter: filter = {} q = ( - session.query(models.DatasetProfilingRun) + session.query(DatasetProfilingRunModel) .join( models.DatasetTable, - 
models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, ) .filter( and_( models.DatasetTable.tableUri == tableUri, models.DatasetTable.GlueTableName - == models.DatasetProfilingRun.GlueTableName, + == DatasetProfilingRunModel.GlueTableName, ) ) - .order_by(models.DatasetProfilingRun.created.desc()) + .order_by(DatasetProfilingRunModel.created.desc()) ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) @@ -123,34 +124,34 @@ def list_table_profiling_runs(session, tableUri, filter): @staticmethod def get_table_last_profiling_run(session, tableUri): return ( - session.query(models.DatasetProfilingRun) + session.query(DatasetProfilingRunModel) .join( models.DatasetTable, - models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, ) .filter(models.DatasetTable.tableUri == tableUri) .filter( models.DatasetTable.GlueTableName - == models.DatasetProfilingRun.GlueTableName + == DatasetProfilingRunModel.GlueTableName ) - .order_by(models.DatasetProfilingRun.created.desc()) + .order_by(DatasetProfilingRunModel.created.desc()) .first() ) @staticmethod def get_table_last_profiling_run_with_results(session, tableUri): return ( - session.query(models.DatasetProfilingRun) + session.query(DatasetProfilingRunModel) .join( models.DatasetTable, - models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, ) .filter(models.DatasetTable.tableUri == tableUri) .filter( models.DatasetTable.GlueTableName - == models.DatasetProfilingRun.GlueTableName + == DatasetProfilingRunModel.GlueTableName ) - .filter(models.DatasetProfilingRun.results.isnot(None)) - .order_by(models.DatasetProfilingRun.created.desc()) + .filter(DatasetProfilingRunModel.results.isnot(None)) + 
.order_by(DatasetProfilingRunModel.created.desc()) .first() ) diff --git a/backend/dataall/db/models/DatasetProfilingRun.py b/backend/dataall/db/models/DatasetProfilingRun.py deleted file mode 100644 index b4996db64..000000000 --- a/backend/dataall/db/models/DatasetProfilingRun.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.dialects.postgresql import JSON - -from .. import Base, Resource, utils - - -class DatasetProfilingRun(Resource, Base): - __tablename__ = 'dataset_profiling_run' - profilingRunUri = Column( - String, primary_key=True, default=utils.uuid('profilingrun') - ) - datasetUri = Column(String, nullable=False) - GlueJobName = Column(String) - GlueJobRunId = Column(String) - GlueTriggerSchedule = Column(String) - GlueTriggerName = Column(String) - GlueTableName = Column(String) - AwsAccountId = Column(String) - results = Column(JSON, default={}) - status = Column(String, default='Created') diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 1ab4134b3..f25e5f59b 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -5,7 +5,6 @@ from .DashboardShare import DashboardShare from .DashboardShare import DashboardShareStatus from .Dataset import Dataset -from .DatasetProfilingRun import DatasetProfilingRun from .DatasetQualityRule import DatasetQualityRule from .DatasetStorageLocation import DatasetStorageLocation from .DatasetTable import DatasetTable diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index a84d0d82f..05efbbab8 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -7,11 +7,12 @@ from dataall.db import api, permissions, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService 
+from dataall.modules.datasets.db.models import DatasetProfilingRun log = logging.getLogger(__name__) -def resolve_dataset(context, source: models.DatasetProfilingRun): +def resolve_dataset(context, source: DatasetProfilingRun): if not source: return None with context.engine.scoped_session() as session: @@ -49,7 +50,7 @@ def start_profiling_run(context: Context, source, input: dict = None): return run -def get_profiling_run_status(context: Context, source: models.DatasetProfilingRun): +def get_profiling_run_status(context: Context, source: DatasetProfilingRun): if not source: return None with context.engine.scoped_session() as session: @@ -61,7 +62,7 @@ def get_profiling_run_status(context: Context, source: models.DatasetProfilingRu return source.status -def get_profiling_results(context: Context, source: models.DatasetProfilingRun): +def get_profiling_results(context: Context, source: DatasetProfilingRun): if not source or source.results == {}: return None else: @@ -90,7 +91,7 @@ def get_profiling_run(context: Context, source, profilingRunUri=None): def get_last_table_profiling_run(context: Context, source, tableUri=None): with context.engine.scoped_session() as session: - run: models.DatasetProfilingRun = ( + run: DatasetProfilingRun = ( api.DatasetProfilingRun.get_table_last_profiling_run( session=session, tableUri=tableUri ) diff --git a/backend/dataall/modules/datasets/db/models.py b/backend/dataall/modules/datasets/db/models.py index 05bc26058..1ba60bea1 100644 --- a/backend/dataall/modules/datasets/db/models.py +++ b/backend/dataall/modules/datasets/db/models.py @@ -1,7 +1,6 @@ from sqlalchemy import Column, String - -from dataall.db import Base -from dataall.db import Resource, utils +from sqlalchemy.dialects.postgresql import JSON +from dataall.db import Base, Resource, utils class DatasetTableColumn(Resource, Base): @@ -21,3 +20,19 @@ class DatasetTableColumn(Resource, Base): def uri(self): return self.columnUri + + +class DatasetProfilingRun(Resource, 
Base): + __tablename__ = 'dataset_profiling_run' + profilingRunUri = Column( + String, primary_key=True, default=utils.uuid('profilingrun') + ) + datasetUri = Column(String, nullable=False) + GlueJobName = Column(String) + GlueJobRunId = Column(String) + GlueTriggerSchedule = Column(String) + GlueTriggerName = Column(String) + GlueTableName = Column(String) + AwsAccountId = Column(String) + results = Column(JSON, default={}) + status = Column(String, default='Created') diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index ad410a610..8d708e94d 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -2,6 +2,7 @@ import pytest import dataall +from dataall.modules.datasets.db.models import DatasetProfilingRun @pytest.fixture(scope='module', autouse=True) @@ -39,7 +40,7 @@ def test_add_tables(table, dataset1, db): def update_runs(db, runs): with db.scoped_session() as session: for run in runs: - run = session.query(dataall.db.models.DatasetProfilingRun).get( + run = session.query(DatasetProfilingRun).get( run['profilingRunUri'] ) run.status = 'SUCCEEDED' @@ -70,7 +71,7 @@ def test_start_profiling(org1, env1, dataset1, client, module_mocker, db, user, profiling = response.data.startDatasetProfilingRun assert profiling.profilingRunUri with db.scoped_session() as session: - profiling = session.query(dataall.db.models.DatasetProfilingRun).get( + profiling = session.query(DatasetProfilingRun).get( profiling.profilingRunUri ) profiling.GlueJobRunId = 'jr_111111111111' From 31720c254c3286e7a30c049e7c6835b5d4602c64 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:52:44 +0200 Subject: [PATCH 045/346] Datasets refactoring Moving dataset profiling service and renaming it --- backend/dataall/aws/handlers/glue.py | 7 ++++--- backend/dataall/db/api/__init__.py | 1 - .../modules/datasets/api/profiling/resolvers.py | 15 ++++++++------- .../services/dataset_profiling_service.py} | 8 
++++---- .../datasets/tasks/subscription_service.py | 3 ++- 5 files changed, 18 insertions(+), 16 deletions(-) rename backend/dataall/{db/api/dataset_profiling_run.py => modules/datasets/services/dataset_profiling_service.py} (96%) diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index a61afc3a1..567ab6967 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -7,6 +7,7 @@ from ... import db from ...db import models from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService log = logging.getLogger('aws:glue') @@ -528,7 +529,7 @@ def get_job_runs(engine, task: models.Task): def start_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: profiling: DatasetProfilingRun = ( - db.api.DatasetProfilingRun.get_profiling_run( + DatasetProfilingService.get_profiling_run( session, profilingRunUri=task.targetUri ) ) @@ -547,7 +548,7 @@ def start_profiling_run(engine, task: models.Task): ), } ) - db.api.DatasetProfilingRun.update_run( + DatasetProfilingService.update_run( session, profilingRunUri=profiling.profilingRunUri, GlueJobRunId=run['JobRunId'], @@ -574,7 +575,7 @@ def run_job(**data): def get_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: profiling: DatasetProfilingRun = ( - db.api.DatasetProfilingRun.get_profiling_run( + DatasetProfilingService.get_profiling_run( session, profilingRunUri=task.targetUri ) ) diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index 19138f7d7..a5f11d2c7 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -13,7 +13,6 @@ from .share_object import ShareObject, ShareObjectSM, ShareItemSM from .dataset import Dataset from .dataset_location import DatasetStorageLocation -from .dataset_profiling_run import DatasetProfilingRun 
from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 05efbbab8..62ff64942 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -7,6 +7,7 @@ from dataall.db import api, permissions, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.db.models import DatasetProfilingRun log = logging.getLogger(__name__) @@ -33,7 +34,7 @@ def start_profiling_run(context: Context, source, input: dict = None): ) dataset = api.Dataset.get_dataset_by_uri(session, input['datasetUri']) - run = api.DatasetProfilingRun.start_profiling( + run = DatasetProfilingService.start_profiling( session=session, datasetUri=dataset.datasetUri, tableUri=input.get('tableUri'), @@ -71,7 +72,7 @@ def get_profiling_results(context: Context, source: DatasetProfilingRun): def update_profiling_run_results(context: Context, source, profilingRunUri, results): with context.engine.scoped_session() as session: - run = api.DatasetProfilingRun.update_run( + run = DatasetProfilingService.update_run( session=session, profilingRunUri=profilingRunUri, results=results ) return run @@ -79,12 +80,12 @@ def update_profiling_run_results(context: Context, source, profilingRunUri, resu def list_profiling_runs(context: Context, source, datasetUri=None): with context.engine.scoped_session() as session: - return api.DatasetProfilingRun.list_profiling_runs(session, datasetUri) + return DatasetProfilingService.list_profiling_runs(session, datasetUri) def get_profiling_run(context: Context, source, profilingRunUri=None): with context.engine.scoped_session() 
as session: - return api.DatasetProfilingRun.get_profiling_run( + return DatasetProfilingService.get_profiling_run( session=session, profilingRunUri=profilingRunUri ) @@ -92,7 +93,7 @@ def get_profiling_run(context: Context, source, profilingRunUri=None): def get_last_table_profiling_run(context: Context, source, tableUri=None): with context.engine.scoped_session() as session: run: DatasetProfilingRun = ( - api.DatasetProfilingRun.get_table_last_profiling_run( + DatasetProfilingService.get_table_last_profiling_run( session=session, tableUri=tableUri ) ) @@ -113,7 +114,7 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): if not run.results: run_with_results = ( - api.DatasetProfilingRun.get_table_last_profiling_run_with_results( + DatasetProfilingService.get_table_last_profiling_run_with_results( session=session, tableUri=tableUri ) ) @@ -144,6 +145,6 @@ def get_profiling_results_from_s3(environment, dataset, table, run): def list_table_profiling_runs(context: Context, source, tableUri=None): with context.engine.scoped_session() as session: - return api.DatasetProfilingRun.list_table_profiling_runs( + return DatasetProfilingService.list_table_profiling_runs( session=session, tableUri=tableUri, filter={} ) diff --git a/backend/dataall/db/api/dataset_profiling_run.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py similarity index 96% rename from backend/dataall/db/api/dataset_profiling_run.py rename to backend/dataall/modules/datasets/services/dataset_profiling_service.py index 5e7024843..915a52eda 100644 --- a/backend/dataall/db/api/dataset_profiling_run.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -1,11 +1,11 @@ from sqlalchemy import and_ -from .. 
import paginate, models -from ..exceptions import ObjectNotFound +from dataall.db import paginate, models +from dataall.db.exceptions import ObjectNotFound from dataall.modules.datasets.db.models import DatasetProfilingRun as DatasetProfilingRunModel -class DatasetProfilingRun: +class DatasetProfilingService: def __init__(self): pass @@ -56,7 +56,7 @@ def update_run( GlueJobRunState=None, results=None, ): - run = DatasetProfilingRun.get_profiling_run( + run = DatasetProfilingService.get_profiling_run( session, profilingRunUri=profilingRunUri, GlueJobRunId=GlueJobRunId ) if GlueJobRunId: diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 8674f903a..74f84d7c9 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -12,6 +12,7 @@ from dataall.aws.handlers.sqs import SqsQueue from dataall.db import get_engine from dataall.db import models +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -143,7 +144,7 @@ def store_dataquality_results(session, message): message.get('region'), ) - run = db.api.DatasetProfilingRun.start_profiling( + run = DatasetProfilingService.start_profiling( session=session, datasetUri=table.datasetUri, GlueTableName=table.GlueTableName, From 8a907df924211b345b94c2a2fc3c1f166ecd5fbf Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 15:59:51 +0200 Subject: [PATCH 046/346] Datasets refactoring Extracted glue_profiling_handler --- backend/dataall/aws/handlers/glue.py | 86 ---------------- .../handlers/glue_profiling_handler.py | 98 +++++++++++++++++++ 2 files changed, 98 insertions(+), 86 deletions(-) create mode 100644 
backend/dataall/modules/datasets/handlers/glue_profiling_handler.py diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 567ab6967..e76fd4e63 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,8 +6,6 @@ from .sts import SessionHelper from ... import db from ...db import models -from dataall.modules.datasets.db.models import DatasetProfilingRun -from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService log = logging.getLogger('aws:glue') @@ -524,90 +522,6 @@ def get_job_runs(engine, task: models.Task): return [] return response['JobRuns'] - @staticmethod - @Worker.handler('glue.job.start_profiling_run') - def start_profiling_run(engine, task: models.Task): - with engine.scoped_session() as session: - profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( - session, profilingRunUri=task.targetUri - ) - ) - dataset: models.Dataset = session.query(models.Dataset).get( - profiling.datasetUri - ) - run = Glue.run_job( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'arguments': ( - {'--table': profiling.GlueTableName} - if profiling.GlueTableName - else {} - ), - } - ) - DatasetProfilingService.update_run( - session, - profilingRunUri=profiling.profilingRunUri, - GlueJobRunId=run['JobRunId'], - ) - return run - - @staticmethod - def run_job(**data): - accountid = data['accountid'] - name = data['name'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.start_job_run( - JobName=name, Arguments=data.get('arguments', {}) - ) - return response - except ClientError as e: - log.error(f'Failed to start profiling job {name} due to: {e}') - raise e - - @staticmethod - @Worker.handler('glue.job.profiling_run_status') - def get_profiling_run(engine, 
task: models.Task): - with engine.scoped_session() as session: - profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( - session, profilingRunUri=task.targetUri - ) - ) - dataset: models.Dataset = session.query(models.Dataset).get( - profiling.datasetUri - ) - glue_run = Glue.get_job_run( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'run_id': profiling.GlueJobRunId, - } - ) - profiling.status = glue_run['JobRun']['JobRunState'] - session.commit() - return profiling.status - - @staticmethod - def get_job_run(**data): - accountid = data['accountid'] - name = data['name'] - run_id = data['run_id'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.get_job_run(JobName=name, RunId=run_id) - return response - except ClientError as e: - log.error(f'Failed to get job run {run_id} due to: {e}') - raise e - @staticmethod def grant_principals_all_table_permissions( table: models.DatasetTable, principals: [str], client=None diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py new file mode 100644 index 000000000..d15607733 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -0,0 +1,98 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models +from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService + +log = logging.getLogger(__name__) + + +class DatasetProfilingGlueHandler: + """A handler for dataset profiling""" + + @staticmethod + @Worker.handler('glue.job.profiling_run_status') + 
def get_profiling_run(engine, task: models.Task): + with engine.scoped_session() as session: + profiling: DatasetProfilingRun = ( + DatasetProfilingService.get_profiling_run( + session, profilingRunUri=task.targetUri + ) + ) + dataset: models.Dataset = session.query(models.Dataset).get( + profiling.datasetUri + ) + glue_run = DatasetProfilingGlueHandler.get_job_run( + **{ + 'accountid': dataset.AwsAccountId, + 'name': dataset.GlueProfilingJobName, + 'region': dataset.region, + 'run_id': profiling.GlueJobRunId, + } + ) + profiling.status = glue_run['JobRun']['JobRunState'] + session.commit() + return profiling.status + + @staticmethod + @Worker.handler('glue.job.start_profiling_run') + def start_profiling_run(engine, task: models.Task): + with engine.scoped_session() as session: + profiling: DatasetProfilingRun = ( + DatasetProfilingService.get_profiling_run( + session, profilingRunUri=task.targetUri + ) + ) + dataset: models.Dataset = session.query(models.Dataset).get( + profiling.datasetUri + ) + run = DatasetProfilingGlueHandler.run_job( + **{ + 'accountid': dataset.AwsAccountId, + 'name': dataset.GlueProfilingJobName, + 'region': dataset.region, + 'arguments': ( + {'--table': profiling.GlueTableName} + if profiling.GlueTableName + else {} + ), + } + ) + DatasetProfilingService.update_run( + session, + profilingRunUri=profiling.profilingRunUri, + GlueJobRunId=run['JobRunId'], + ) + return run + + @staticmethod + def get_job_run(**data): + accountid = data['accountid'] + name = data['name'] + run_id = data['run_id'] + try: + session = SessionHelper.remote_session(accountid=accountid) + client = session.client('glue', region_name=data.get('region', 'eu-west-1')) + response = client.get_job_run(JobName=name, RunId=run_id) + return response + except ClientError as e: + log.error(f'Failed to get job run {run_id} due to: {e}') + raise e + + @staticmethod + def run_job(**data): + accountid = data['accountid'] + name = data['name'] + try: + session = 
SessionHelper.remote_session(accountid=accountid) + client = session.client('glue', region_name=data.get('region', 'eu-west-1')) + response = client.start_job_run( + JobName=name, Arguments=data.get('arguments', {}) + ) + return response + except ClientError as e: + log.error(f'Failed to start profiling job {name} due to: {e}') + raise e From 561da72a94c1eefe4547eede1a03582b2ee14f79 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 14 Apr 2023 16:08:24 +0200 Subject: [PATCH 047/346] Datasets refactoring Deleted DatasetTableProfilingJob since could not find any usage of it --- .../db/models/DatasetTableProfilingJob.py | 18 ------------------ backend/dataall/db/models/__init__.py | 1 - 2 files changed, 19 deletions(-) delete mode 100644 backend/dataall/db/models/DatasetTableProfilingJob.py diff --git a/backend/dataall/db/models/DatasetTableProfilingJob.py b/backend/dataall/db/models/DatasetTableProfilingJob.py deleted file mode 100644 index ea0fedbf0..000000000 --- a/backend/dataall/db/models/DatasetTableProfilingJob.py +++ /dev/null @@ -1,18 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import Resource, utils - - -class DatasetTableProfilingJob(Resource, Base): - __tablename__ = 'dataset_table_profiling_job' - tableUri = Column(String, nullable=False) - jobUri = Column(String, primary_key=True, default=utils.uuid('profilingjob')) - AWSAccountId = Column(String, nullable=False) - RunCommandId = Column(String, nullable=True) - GlueDatabaseName = Column(String, nullable=False) - GlueTableName = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - status = Column(String, default='') - userRoleForJob = query_expression() diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index f25e5f59b..0af480d79 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -8,7 +8,6 @@ from .DatasetQualityRule import DatasetQualityRule from .DatasetStorageLocation import DatasetStorageLocation from .DatasetTable import DatasetTable -from .DatasetTableProfilingJob import DatasetTableProfilingJob from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup from .FeedMessage import FeedMessage From 73c8150a363b3eafc6715d658f97dc6449db2dbd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 10:29:51 +0200 Subject: [PATCH 048/346] Datasets refactoring Moved dataset storage location into modules --- backend/dataall/api/Objects/__init__.py | 1 - .../api}/DatasetStorageLocation/__init__.py | 2 +- .../api}/DatasetStorageLocation/input_types.py | 2 +- .../api}/DatasetStorageLocation/mutations.py | 13 +++++++++---- .../datasets/api}/DatasetStorageLocation/queries.py | 4 ++-- .../api}/DatasetStorageLocation/resolvers.py | 12 ++++++------ .../datasets/api}/DatasetStorageLocation/schema.py | 7 +++++-- backend/dataall/modules/datasets/api/__init__.py | 5 +++-- 8 files changed, 27 insertions(+), 19 deletions(-) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/__init__.py (69%) rename 
backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/input_types.py (97%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/mutations.py (76%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/queries.py (66%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/resolvers.py (94%) rename backend/dataall/{api/Objects => modules/datasets/api}/DatasetStorageLocation/schema.py (95%) diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 80b91358a..7c064fb1f 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -24,7 +24,6 @@ Dashboard, ShareObject, Organization, - DatasetStorageLocation, Stack, Test, SagemakerStudio, diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/__init__.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py similarity index 69% rename from backend/dataall/api/Objects/DatasetStorageLocation/__init__.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py index dfa46b264..e00ffe36f 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/__init__.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py @@ -1,4 +1,4 @@ -from . 
import ( +from dataall.modules.datasets.api.DatasetStorageLocation import ( input_types, mutations, queries, diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/input_types.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/input_types.py similarity index 97% rename from backend/dataall/api/Objects/DatasetStorageLocation/input_types.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/input_types.py index f948bebad..4e4bf10e4 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/input_types.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/input_types.py @@ -1,4 +1,4 @@ -from ... import gql +from dataall.api import gql NewDatasetStorageLocationInput = gql.InputType( name='NewDatasetStorageLocationInput', diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/mutations.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py similarity index 76% rename from backend/dataall/api/Objects/DatasetStorageLocation/mutations.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py index 5b89cc6c1..10fc2ec40 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/mutations.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py @@ -1,10 +1,15 @@ -from ... 
import gql -from .input_types import ( +from dataall.api import gql +from dataall.modules.datasets.api.DatasetStorageLocation.input_types import ( ModifyDatasetFolderInput, NewDatasetStorageLocationInput, ) -from .resolvers import * -from .schema import DatasetStorageLocation +from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import ( + create_storage_location, + update_storage_location, + remove_storage_location, + publish_location_update +) +from dataall.modules.datasets.api.DatasetStorageLocation.schema import DatasetStorageLocation createDatasetStorageLocation = gql.MutationField( name='createDatasetStorageLocation', diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/queries.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py similarity index 66% rename from backend/dataall/api/Objects/DatasetStorageLocation/queries.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py index 1baa5a7f9..447225cfd 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/queries.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py @@ -1,5 +1,5 @@ -from ... 
import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import get_storage_location getDatasetStorageLocation = gql.QueryField( name='getDatasetStorageLocation', diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/resolvers.py similarity index 94% rename from backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/resolvers.py index 1a4171444..5f73c468c 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/resolvers.py @@ -1,15 +1,15 @@ -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.s3 import S3 -from ....db import permissions, models -from ....db.api import ( +from dataall.api.context import Context +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.s3 import S3 +from dataall.db import permissions, models +from dataall.db.api import ( ResourcePolicy, Glossary, DatasetStorageLocation, Dataset, Environment, ) -from ....searchproxy import indexers +from dataall.searchproxy import indexers def create_storage_location( diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/schema.py b/backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py similarity index 95% rename from backend/dataall/api/Objects/DatasetStorageLocation/schema.py rename to backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py index d05309f0b..fab6c3bd1 100644 --- a/backend/dataall/api/Objects/DatasetStorageLocation/schema.py +++ b/backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py @@ -1,5 +1,8 @@ -from ... 
import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import ( + resolve_glossary_terms, + resolve_dataset +) DatasetStorageLocation = gql.ObjectType( name='DatasetStorageLocation', diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 6bb6f8ab0..4657365df 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -1,7 +1,8 @@ """The GraphQL schema of datasets and related functionality""" from dataall.modules.datasets.api import ( table_column, - profiling + profiling, + DatasetStorageLocation ) -__all__ = ["table_column", "profiling"] +__all__ = ["table_column", "profiling", "DatasetStorageLocation"] From 56a3610116ae136579ead39bf70ba962b7592f8a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 10:31:21 +0200 Subject: [PATCH 049/346] Datasets refactoring Renamed dataset storage location --- backend/dataall/modules/datasets/api/__init__.py | 4 ++-- .../__init__.py | 2 +- .../input_types.py | 0 .../mutations.py | 6 +++--- .../{DatasetStorageLocation => storage_location}/queries.py | 2 +- .../resolvers.py | 0 .../{DatasetStorageLocation => storage_location}/schema.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => storage_location}/__init__.py (69%) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => storage_location}/input_types.py (100%) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => storage_location}/mutations.py (84%) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => storage_location}/queries.py (74%) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => storage_location}/resolvers.py (100%) rename backend/dataall/modules/datasets/api/{DatasetStorageLocation => 
storage_location}/schema.py (97%) diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 4657365df..4c279340e 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -2,7 +2,7 @@ from dataall.modules.datasets.api import ( table_column, profiling, - DatasetStorageLocation + storage_location ) -__all__ = ["table_column", "profiling", "DatasetStorageLocation"] +__all__ = ["table_column", "profiling", "storage_location"] diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py b/backend/dataall/modules/datasets/api/storage_location/__init__.py similarity index 69% rename from backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py rename to backend/dataall/modules/datasets/api/storage_location/__init__.py index e00ffe36f..e878410f5 100644 --- a/backend/dataall/modules/datasets/api/DatasetStorageLocation/__init__.py +++ b/backend/dataall/modules/datasets/api/storage_location/__init__.py @@ -1,4 +1,4 @@ -from dataall.modules.datasets.api.DatasetStorageLocation import ( +from dataall.modules.datasets.api.storage_location import ( input_types, mutations, queries, diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/input_types.py b/backend/dataall/modules/datasets/api/storage_location/input_types.py similarity index 100% rename from backend/dataall/modules/datasets/api/DatasetStorageLocation/input_types.py rename to backend/dataall/modules/datasets/api/storage_location/input_types.py diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py b/backend/dataall/modules/datasets/api/storage_location/mutations.py similarity index 84% rename from backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py rename to backend/dataall/modules/datasets/api/storage_location/mutations.py index 10fc2ec40..14aafddc7 100644 --- 
a/backend/dataall/modules/datasets/api/DatasetStorageLocation/mutations.py +++ b/backend/dataall/modules/datasets/api/storage_location/mutations.py @@ -1,15 +1,15 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetStorageLocation.input_types import ( +from dataall.modules.datasets.api.storage_location.input_types import ( ModifyDatasetFolderInput, NewDatasetStorageLocationInput, ) -from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import ( +from dataall.modules.datasets.api.storage_location.resolvers import ( create_storage_location, update_storage_location, remove_storage_location, publish_location_update ) -from dataall.modules.datasets.api.DatasetStorageLocation.schema import DatasetStorageLocation +from dataall.modules.datasets.api.storage_location.schema import DatasetStorageLocation createDatasetStorageLocation = gql.MutationField( name='createDatasetStorageLocation', diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py b/backend/dataall/modules/datasets/api/storage_location/queries.py similarity index 74% rename from backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py rename to backend/dataall/modules/datasets/api/storage_location/queries.py index 447225cfd..ea129a37d 100644 --- a/backend/dataall/modules/datasets/api/DatasetStorageLocation/queries.py +++ b/backend/dataall/modules/datasets/api/storage_location/queries.py @@ -1,5 +1,5 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import get_storage_location +from dataall.modules.datasets.api.storage_location.resolvers import get_storage_location getDatasetStorageLocation = gql.QueryField( name='getDatasetStorageLocation', diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py similarity index 100% rename from 
backend/dataall/modules/datasets/api/DatasetStorageLocation/resolvers.py rename to backend/dataall/modules/datasets/api/storage_location/resolvers.py diff --git a/backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py b/backend/dataall/modules/datasets/api/storage_location/schema.py similarity index 97% rename from backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py rename to backend/dataall/modules/datasets/api/storage_location/schema.py index fab6c3bd1..c9853a22f 100644 --- a/backend/dataall/modules/datasets/api/DatasetStorageLocation/schema.py +++ b/backend/dataall/modules/datasets/api/storage_location/schema.py @@ -1,5 +1,5 @@ from dataall.api import gql -from dataall.modules.datasets.api.DatasetStorageLocation.resolvers import ( +from dataall.modules.datasets.api.storage_location.resolvers import ( resolve_glossary_terms, resolve_dataset ) From 47a38ccbaea0b3368b3cd5003208067081ccd1a6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 10:37:27 +0200 Subject: [PATCH 050/346] Datasets refactoring Returned the name to model after renaming the service --- .../services/dataset_profiling_service.py | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 915a52eda..5b6ca8d41 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -2,7 +2,7 @@ from dataall.db import paginate, models from dataall.db.exceptions import ObjectNotFound -from dataall.modules.datasets.db.models import DatasetProfilingRun as DatasetProfilingRunModel +from dataall.modules.datasets.db.models import DatasetProfilingRun class DatasetProfilingService: @@ -31,7 +31,7 @@ def start_profiling( if not environment: raise ObjectNotFound('Environment', 
dataset.environmentUri) - run = DatasetProfilingRunModel( + run = DatasetProfilingRun( datasetUri=dataset.datasetUri, status='RUNNING', AwsAccountId=environment.AwsAccountId, @@ -73,14 +73,14 @@ def get_profiling_run( session, profilingRunUri=None, GlueJobRunId=None, GlueTableName=None ): if profilingRunUri: - run: DatasetProfilingRunModel = session.query( - DatasetProfilingRunModel + run: DatasetProfilingRun = session.query( + DatasetProfilingRun ).get(profilingRunUri) else: - run: DatasetProfilingRunModel = ( - session.query(DatasetProfilingRunModel) - .filter(DatasetProfilingRunModel.GlueJobRunId == GlueJobRunId) - .filter(DatasetProfilingRunModel.GlueTableName == GlueTableName) + run: DatasetProfilingRun = ( + session.query(DatasetProfilingRun) + .filter(DatasetProfilingRun.GlueJobRunId == GlueJobRunId) + .filter(DatasetProfilingRun.GlueTableName == GlueTableName) .first() ) return run @@ -90,9 +90,9 @@ def list_profiling_runs(session, datasetUri, filter: dict = None): if not filter: filter = {} q = ( - session.query(DatasetProfilingRunModel) - .filter(DatasetProfilingRunModel.datasetUri == datasetUri) - .order_by(DatasetProfilingRunModel.created.desc()) + session.query(DatasetProfilingRun) + .filter(DatasetProfilingRun.datasetUri == datasetUri) + .order_by(DatasetProfilingRun.created.desc()) ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) @@ -103,19 +103,19 @@ def list_table_profiling_runs(session, tableUri, filter): if not filter: filter = {} q = ( - session.query(DatasetProfilingRunModel) + session.query(DatasetProfilingRun) .join( models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) .filter( and_( models.DatasetTable.tableUri == tableUri, models.DatasetTable.GlueTableName - == DatasetProfilingRunModel.GlueTableName, + == DatasetProfilingRun.GlueTableName, ) ) - 
.order_by(DatasetProfilingRunModel.created.desc()) + .order_by(DatasetProfilingRun.created.desc()) ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) @@ -124,34 +124,34 @@ def list_table_profiling_runs(session, tableUri, filter): @staticmethod def get_table_last_profiling_run(session, tableUri): return ( - session.query(DatasetProfilingRunModel) + session.query(DatasetProfilingRun) .join( models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) .filter(models.DatasetTable.tableUri == tableUri) .filter( models.DatasetTable.GlueTableName - == DatasetProfilingRunModel.GlueTableName + == DatasetProfilingRun.GlueTableName ) - .order_by(DatasetProfilingRunModel.created.desc()) + .order_by(DatasetProfilingRun.created.desc()) .first() ) @staticmethod def get_table_last_profiling_run_with_results(session, tableUri): return ( - session.query(DatasetProfilingRunModel) + session.query(DatasetProfilingRun) .join( models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRunModel.datasetUri, + models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) .filter(models.DatasetTable.tableUri == tableUri) .filter( models.DatasetTable.GlueTableName - == DatasetProfilingRunModel.GlueTableName + == DatasetProfilingRun.GlueTableName ) - .filter(DatasetProfilingRunModel.results.isnot(None)) - .order_by(DatasetProfilingRunModel.created.desc()) + .filter(DatasetProfilingRun.results.isnot(None)) + .order_by(DatasetProfilingRun.created.desc()) .first() ) From dbb55179baefc20027578bf89747a82ce192ee13 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 10:54:08 +0200 Subject: [PATCH 051/346] Datasets refactoring Moved DatasetStorageLocation into modules --- backend/dataall/api/Objects/Feed/registry.py | 1 - .../dataall/api/Objects/Glossary/registry.py | 1 - .../dataall/api/Objects/Glossary/resolvers.py | 4 +- 
.../api/Objects/ShareObject/resolvers.py | 3 +- backend/dataall/cdkproxy/stacks/dataset.py | 15 ++++---- backend/dataall/db/api/dataset.py | 21 ++++++----- backend/dataall/db/api/dataset_location.py | 37 ++++++++++--------- backend/dataall/db/api/environment.py | 20 +++++----- backend/dataall/db/api/share_object.py | 23 ++++++------ .../db/models/DatasetStorageLocation.py | 22 ----------- backend/dataall/db/models/__init__.py | 1 - backend/dataall/modules/datasets/__init__.py | 7 +++- .../api/storage_location/resolvers.py | 5 ++- backend/dataall/modules/datasets/db/models.py | 22 ++++++++++- .../datasets/tasks/subscription_service.py | 3 +- backend/dataall/searchproxy/indexers.py | 31 ++++++++-------- .../dataall/tasks/bucket_policy_updater.py | 15 ++++---- .../share_managers/s3_share_manager.py | 3 +- .../share_processors/s3_process_share.py | 7 ++-- backend/dataall/utils/alarm_service.py | 5 ++- tests/api/conftest.py | 5 ++- tests/api/test_dataset.py | 3 +- tests/searchproxy/test_indexers.py | 3 +- tests/tasks/conftest.py | 9 ++--- tests/tasks/test_s3_share_manager.py | 37 ++++++++++--------- 25 files changed, 161 insertions(+), 142 deletions(-) delete mode 100644 backend/dataall/db/models/DatasetStorageLocation.py diff --git a/backend/dataall/api/Objects/Feed/registry.py b/backend/dataall/api/Objects/Feed/registry.py index a119529ab..6a01a488a 100644 --- a/backend/dataall/api/Objects/Feed/registry.py +++ b/backend/dataall/api/Objects/Feed/registry.py @@ -39,5 +39,4 @@ def types(cls): FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) FeedRegistry.register(FeedDefinition("DatasetTable", models.DatasetTable)) -FeedRegistry.register(FeedDefinition("DatasetStorageLocation", models.DatasetStorageLocation)) FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/api/Objects/Glossary/registry.py 
b/backend/dataall/api/Objects/Glossary/registry.py index 375f470e2..7c42e4f4c 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -52,7 +52,6 @@ def types(cls): GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) -GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", models.DatasetStorageLocation)) GlossaryRegistry.register(GlossaryDefinition("Dashboard", "Dashboard", models.Dashboard)) GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) GlossaryRegistry.register(GlossaryDefinition("Dataset", "Dataset", models.Dataset)) diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index 15e77327f..959578600 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -13,6 +13,8 @@ GlossaryRole ) +from dataall.modules.datasets.db.models import DatasetStorageLocation + def resolve_glossary_node(obj: models.GlossaryNode, *_): if obj.nodeType == 'G': @@ -465,7 +467,7 @@ def reindex(context, linkUri): upsert_dataset(session=session, es=context.es, datasetUri=link.targetUri) elif isinstance(target, models.DatasetTable): upsert_table(session=session, es=context.es, tableUri=link.targetUri) - elif isinstance(target, models.DatasetStorageLocation): + elif isinstance(target, DatasetStorageLocation): upsert_folder(session=session, es=context.es, locationUri=link.targetUri) elif isinstance(target, models.Dashboard): upsert_dashboard(session=session, es=context.es, dashboardUri=link.targetUri) diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/api/Objects/ShareObject/resolvers.py index 6bbb64bf4..16e4e1353 100644 --- a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ b/backend/dataall/api/Objects/ShareObject/resolvers.py @@ -7,6 +7,7 @@ 
from ....api.context import Context from ....aws.handlers.service_handlers import Worker from ....db import models +from dataall.modules.datasets.db.models import DatasetStorageLocation log = logging.getLogger(__name__) @@ -266,7 +267,7 @@ def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): def union_resolver(object, *_): if isinstance(object, models.DatasetTable): return 'DatasetTable' - elif isinstance(object, models.DatasetStorageLocation): + elif isinstance(object, DatasetStorageLocation): return 'DatasetStorageLocation' diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py index 133b5f928..852cba66b 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/cdkproxy/stacks/dataset.py @@ -28,6 +28,7 @@ from ...db.api import Environment from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -110,14 +111,14 @@ def get_shared_tables(self) -> typing.List[models.ShareObjectItem]: logger.info(f'found {len(tables)} shared tables') return tables - def get_shared_folders(self) -> typing.List[models.DatasetStorageLocation]: + def get_shared_folders(self) -> typing.List[DatasetStorageLocation]: engine = self.get_engine() with engine.scoped_session() as session: locations = ( session.query( - models.DatasetStorageLocation.locationUri.label('locationUri'), - models.DatasetStorageLocation.S3BucketName.label('S3BucketName'), - models.DatasetStorageLocation.S3Prefix.label('S3Prefix'), + DatasetStorageLocation.locationUri.label('locationUri'), + DatasetStorageLocation.S3BucketName.label('S3BucketName'), + DatasetStorageLocation.S3Prefix.label('S3Prefix'), models.Environment.AwsAccountId.label('AwsAccountId'), models.Environment.region.label('region'), ) @@ -125,7 +126,7 @@ def get_shared_folders(self) -> 
typing.List[models.DatasetStorageLocation]: models.ShareObjectItem, and_( models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri + == DatasetStorageLocation.locationUri ), ) .join( @@ -139,8 +140,8 @@ def get_shared_folders(self) -> typing.List[models.DatasetStorageLocation]: ) .filter( and_( - models.DatasetStorageLocation.datasetUri == self.target_uri, - models.DatasetStorageLocation.deleted.is_(None), + DatasetStorageLocation.datasetUri == self.target_uri, + DatasetStorageLocation.deleted.is_(None), models.ShareObjectItem.status.in_(self.shared_states) ) ) diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index 8fdbb72b7..f78d92eae 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -20,6 +20,7 @@ NamingConventionService, NamingConventionPattern, ) +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -266,17 +267,17 @@ def paginated_user_datasets( def paginated_dataset_locations( session, username, groups, uri, data=None, check_perm=None ) -> dict: - query = session.query(models.DatasetStorageLocation).filter( - models.DatasetStorageLocation.datasetUri == uri + query = session.query(DatasetStorageLocation).filter( + DatasetStorageLocation.datasetUri == uri ) if data and data.get('term'): query = query.filter( or_( *[ - models.DatasetStorageLocation.name.ilike( + DatasetStorageLocation.name.ilike( '%' + data.get('term') + '%' ), - models.DatasetStorageLocation.S3Prefix.ilike( + DatasetStorageLocation.S3Prefix.ilike( '%' + data.get('term') + '%' ), ] @@ -489,8 +490,8 @@ def get_dataset_tables(session, dataset_uri): def get_dataset_folders(session, dataset_uri): """return the dataset folders""" return ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == dataset_uri) + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) 
.all() ) @@ -634,10 +635,10 @@ def _delete_dataset_tables(session, dataset_uri) -> bool: @staticmethod def _delete_dataset_locations(session, dataset_uri) -> bool: locations = ( - session.query(models.DatasetStorageLocation) + session.query(DatasetStorageLocation) .filter( and_( - models.DatasetStorageLocation.datasetUri == dataset_uri, + DatasetStorageLocation.datasetUri == dataset_uri, ) ) .all() @@ -675,7 +676,7 @@ def count_dataset_tables(session, dataset_uri): @staticmethod def count_dataset_locations(session, dataset_uri): return ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == dataset_uri) + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) .count() ) diff --git a/backend/dataall/db/api/dataset_location.py b/backend/dataall/db/api/dataset_location.py index ef9f085f3..e19f1dfb0 100644 --- a/backend/dataall/db/api/dataset_location.py +++ b/backend/dataall/db/api/dataset_location.py @@ -6,6 +6,7 @@ from . import has_tenant_perm, has_resource_perm, Glossary from .. 
import models, api, paginate, permissions, exceptions from .dataset import Dataset +from dataall.modules.datasets.db.models import DatasetStorageLocation as DatasetStorageLocationModel logger = logging.getLogger(__name__) @@ -21,14 +22,14 @@ def create_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DatasetStorageLocation: + ) -> DatasetStorageLocationModel: dataset = Dataset.get_dataset_by_uri(session, uri) exists = ( - session.query(models.DatasetStorageLocation) + session.query(DatasetStorageLocationModel) .filter( and_( - models.DatasetStorageLocation.datasetUri == dataset.datasetUri, - models.DatasetStorageLocation.S3Prefix == data['prefix'], + DatasetStorageLocationModel.datasetUri == dataset.datasetUri, + DatasetStorageLocationModel.S3Prefix == data['prefix'], ) ) .count() @@ -40,7 +41,7 @@ def create_dataset_location( message=f'Folder: {data["prefix"]} already exist on dataset {uri}', ) - location = models.DatasetStorageLocation( + location = DatasetStorageLocationModel( datasetUri=dataset.datasetUri, label=data.get('label'), description=data.get('description', 'No description provided'), @@ -77,14 +78,14 @@ def list_dataset_locations( check_perm: bool = False, ) -> dict: query = ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == uri) - .order_by(models.DatasetStorageLocation.created.desc()) + session.query(DatasetStorageLocationModel) + .filter(DatasetStorageLocationModel.datasetUri == uri) + .order_by(DatasetStorageLocationModel.created.desc()) ) if data.get('term'): term = data.get('term') query = query.filter( - models.DatasetStorageLocation.label.ilike('%' + term + '%') + DatasetStorageLocationModel.label.ilike('%' + term + '%') ) return paginate( query, page=data.get('page', 1), page_size=data.get('pageSize', 10) @@ -100,7 +101,7 @@ def get_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DatasetStorageLocation: + ) -> 
DatasetStorageLocationModel: return DatasetStorageLocation.get_location_by_uri(session, data['locationUri']) @staticmethod @@ -113,7 +114,7 @@ def update_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DatasetStorageLocation: + ) -> DatasetStorageLocationModel: location = data.get( 'location', @@ -176,9 +177,9 @@ def delete_dataset_location( return True @staticmethod - def get_location_by_uri(session, location_uri) -> models.DatasetStorageLocation: + def get_location_by_uri(session, location_uri) -> DatasetStorageLocationModel: location: DatasetStorageLocation = session.query( - models.DatasetStorageLocation + DatasetStorageLocationModel ).get(location_uri) if not location: raise exceptions.ObjectNotFound('Folder', location_uri) @@ -186,13 +187,13 @@ def get_location_by_uri(session, location_uri) -> models.DatasetStorageLocation: @staticmethod def get_location_by_s3_prefix(session, s3_prefix, accountid, region): - location: models.DatasetStorageLocation = ( - session.query(models.DatasetStorageLocation) + location: DatasetStorageLocationModel = ( + session.query(DatasetStorageLocationModel) .filter( and_( - models.DatasetStorageLocation.S3Prefix.startswith(s3_prefix), - models.DatasetStorageLocation.AWSAccountId == accountid, - models.DatasetStorageLocation.region == region, + DatasetStorageLocationModel.S3Prefix.startswith(s3_prefix), + DatasetStorageLocationModel.AWSAccountId == accountid, + DatasetStorageLocationModel.region == region, ) ) .first() diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 19d5de342..ac3777e5d 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -29,6 +29,8 @@ NamingConventionPattern, ) +from dataall.modules.datasets.db.models import DatasetStorageLocation + log = logging.getLogger(__name__) @@ -905,7 +907,7 @@ def paginated_shared_with_environment_datasets( ( models.ShareObjectItem.itemType == 
ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), + func.concat(DatasetStorageLocation.name), ), ], else_='XXX XXXX', @@ -933,9 +935,9 @@ def paginated_shared_with_environment_datasets( models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, ) .outerjoin( - models.DatasetStorageLocation, + DatasetStorageLocation, models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, + == DatasetStorageLocation.locationUri, ) .filter( and_( @@ -1001,7 +1003,7 @@ def paginated_shared_with_environment_group_datasets( ( models.ShareObjectItem.itemType == ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), + func.concat(DatasetStorageLocation.name), ), ], else_='XXX XXXX', @@ -1029,9 +1031,9 @@ def paginated_shared_with_environment_group_datasets( models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, ) .outerjoin( - models.DatasetStorageLocation, + DatasetStorageLocation, models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, + == DatasetStorageLocation.locationUri, ) .filter( and_( @@ -1122,7 +1124,7 @@ def paginated_environment_data_items( ( models.ShareObjectItem.itemType == ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), + func.concat(DatasetStorageLocation.name), ), ], else_='XXX XXXX', @@ -1150,9 +1152,9 @@ def paginated_environment_data_items( models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, ) .outerjoin( - models.DatasetStorageLocation, + DatasetStorageLocation, models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, + == DatasetStorageLocation.locationUri, ) .filter( and_( diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index ff1c426d7..bd0215190 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -10,6 +10,7 @@ from .. import api, utils from .. 
import models, exceptions, permissions, paginate from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -419,7 +420,7 @@ def create_share_object( item = None if itemType: if itemType == ShareableType.StorageLocation.value: - item = session.query(models.DatasetStorageLocation).get(itemUri) + item = session.query(DatasetStorageLocation).get(itemUri) if itemType == ShareableType.Table.value: item = session.query(models.DatasetTable).get(itemUri) @@ -718,7 +719,7 @@ def get_share_item( if share_item.itemType == ShareableType.Table.value: return session.query(models.DatasetTable).get(share_item.itemUri) if share_item.itemType == ShareableType.StorageLocation: - return session.Query(models.DatasetStorageLocation).get(share_item.itemUri) + return session.Query(DatasetStorageLocation).get(share_item.itemUri) @staticmethod def get_share_by_uri(session, uri): @@ -771,7 +772,7 @@ def add_share_object_item( ) elif itemType == ShareableType.StorageLocation.value: - item = session.query(models.DatasetStorageLocation).get(itemUri) + item = session.query(DatasetStorageLocation).get(itemUri) if not item: raise exceptions.ObjectNotFound('ShareObjectItem', itemUri) @@ -971,10 +972,10 @@ def list_shareable_items( # marking the folder as part of the shareObject locations = ( session.query( - models.DatasetStorageLocation.locationUri.label('itemUri'), + DatasetStorageLocation.locationUri.label('itemUri'), func.coalesce('DatasetStorageLocation').label('itemType'), - models.DatasetStorageLocation.S3Prefix.label('itemName'), - models.DatasetStorageLocation.description.label('description'), + DatasetStorageLocation.S3Prefix.label('itemName'), + DatasetStorageLocation.description.label('description'), models.ShareObjectItem.shareItemUri.label('shareItemUri'), models.ShareObjectItem.status.label('status'), 
case( @@ -986,11 +987,11 @@ def list_shareable_items( models.ShareObjectItem, and_( models.ShareObjectItem.shareUri == share.shareUri, - models.DatasetStorageLocation.locationUri + DatasetStorageLocation.locationUri == models.ShareObjectItem.itemUri, ), ) - .filter(models.DatasetStorageLocation.datasetUri == datasetUri) + .filter(DatasetStorageLocation.datasetUri == datasetUri) ) if data: if data.get("isRevokable"): @@ -1162,7 +1163,7 @@ def find_share_item_by_table( def find_share_item_by_folder( session, share: models.ShareObject, - folder: models.DatasetStorageLocation, + folder: DatasetStorageLocation, ) -> models.ShareObjectItem: share_item: models.ShareObjectItem = ( session.query(models.ShareObjectItem) @@ -1268,10 +1269,10 @@ def get_share_data_items(session, share_uri, status): ) folders = ( - session.query(models.DatasetStorageLocation) + session.query(DatasetStorageLocation) .join( models.ShareObjectItem, - models.ShareObjectItem.itemUri == models.DatasetStorageLocation.locationUri, + models.ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, ) .join( models.ShareObject, diff --git a/backend/dataall/db/models/DatasetStorageLocation.py b/backend/dataall/db/models/DatasetStorageLocation.py deleted file mode 100644 index e21ae6694..000000000 --- a/backend/dataall/db/models/DatasetStorageLocation.py +++ /dev/null @@ -1,22 +0,0 @@ -from sqlalchemy import Boolean, Column, String -from sqlalchemy.orm import query_expression - -from .. 
import Base, Resource, utils - - -class DatasetStorageLocation(Resource, Base): - __tablename__ = 'dataset_storage_location' - datasetUri = Column(String, nullable=False) - locationUri = Column(String, primary_key=True, default=utils.uuid('location')) - AWSAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - S3Prefix = Column(String, nullable=False) - S3AccessPoint = Column(String, nullable=True) - region = Column(String, default='eu-west-1') - locationCreated = Column(Boolean, default=False) - userRoleForStorageLocation = query_expression() - projectPermission = query_expression() - environmentEndPoint = query_expression() - - def uri(self): - return self.locationUri diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 0af480d79..c288527cf 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -6,7 +6,6 @@ from .DashboardShare import DashboardShareStatus from .Dataset import Dataset from .DatasetQualityRule import DatasetQualityRule -from .DatasetStorageLocation import DatasetStorageLocation from .DatasetTable import DatasetTable from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 4620495fe..842eba82b 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,7 +4,7 @@ from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) @@ -19,8 +19,13 @@ def is_supported(cls, modes): def 
__init__(self): import dataall.modules.datasets.api + FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) + FeedRegistry.register(FeedDefinition("DatasetStorageLocation", DatasetStorageLocation)) + GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) + GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", DatasetStorageLocation)) + log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 5f73c468c..2eb18198c 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -10,6 +10,7 @@ Environment, ) from dataall.searchproxy import indexers +from dataall.modules.datasets.db.models import DatasetStorageLocation def create_storage_location( @@ -92,7 +93,7 @@ def remove_storage_location(context, source, locationUri: str = None): return True -def resolve_dataset(context, source: models.DatasetStorageLocation, **kwargs): +def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -129,7 +130,7 @@ def publish_location_update(context: Context, source, locationUri: str = None): def resolve_glossary_terms( - context: Context, source: models.DatasetStorageLocation, **kwargs + context: Context, source: DatasetStorageLocation, **kwargs ): if not source: return None diff --git a/backend/dataall/modules/datasets/db/models.py b/backend/dataall/modules/datasets/db/models.py index 1ba60bea1..2dfee26ec 100644 --- a/backend/dataall/modules/datasets/db/models.py +++ b/backend/dataall/modules/datasets/db/models.py @@ -1,5 +1,6 @@ -from sqlalchemy import Column, String +from sqlalchemy import Boolean, Column, String from sqlalchemy.dialects.postgresql import JSON 
+from sqlalchemy.orm import query_expression from dataall.db import Base, Resource, utils @@ -36,3 +37,22 @@ class DatasetProfilingRun(Resource, Base): AwsAccountId = Column(String) results = Column(JSON, default={}) status = Column(String, default='Created') + + +class DatasetStorageLocation(Resource, Base): + __tablename__ = 'dataset_storage_location' + datasetUri = Column(String, nullable=False) + locationUri = Column(String, primary_key=True, default=utils.uuid('location')) + AWSAccountId = Column(String, nullable=False) + S3BucketName = Column(String, nullable=False) + S3Prefix = Column(String, nullable=False) + S3AccessPoint = Column(String, nullable=True) + region = Column(String, default='eu-west-1') + locationCreated = Column(Boolean, default=False) + userRoleForStorageLocation = query_expression() + projectPermission = query_expression() + environmentEndPoint = query_expression() + + def uri(self): + return self.locationUri + diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 74f84d7c9..7df028914 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -16,6 +16,7 @@ from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.db.models import DatasetStorageLocation root = logging.getLogger() root.setLevel(logging.INFO) @@ -103,7 +104,7 @@ def publish_table_update_message(engine, message): @staticmethod def publish_location_update_message(session, message): - location: models.DatasetStorageLocation = ( + location: DatasetStorageLocation = ( db.api.DatasetStorageLocation.get_location_by_s3_prefix( session, message.get('prefix'), diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 
78886716d..7361c2150 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -6,6 +6,7 @@ from .upsert import upsert from .. import db from ..db import models +from dataall.modules.datasets.db.models import DatasetStorageLocation log = logging.getLogger(__name__) @@ -184,14 +185,14 @@ def upsert_table(session, es, tableUri: str): def upsert_folder(session, es, locationUri: str): folder = ( session.query( - models.DatasetStorageLocation.datasetUri.label('datasetUri'), - models.DatasetStorageLocation.locationUri.label('uri'), - models.DatasetStorageLocation.name.label('name'), - models.DatasetStorageLocation.owner.label('owner'), - models.DatasetStorageLocation.label.label('label'), - models.DatasetStorageLocation.description.label('description'), - models.DatasetStorageLocation.tags.label('tags'), - models.DatasetStorageLocation.region.label('region'), + DatasetStorageLocation.datasetUri.label('datasetUri'), + DatasetStorageLocation.locationUri.label('uri'), + DatasetStorageLocation.name.label('name'), + DatasetStorageLocation.owner.label('owner'), + DatasetStorageLocation.label.label('label'), + DatasetStorageLocation.description.label('description'), + DatasetStorageLocation.tags.label('tags'), + DatasetStorageLocation.region.label('region'), models.Organization.organizationUri.label('orgUri'), models.Organization.name.label('orgName'), models.Environment.environmentUri.label('envUri'), @@ -200,13 +201,13 @@ def upsert_folder(session, es, locationUri: str): models.Dataset.S3BucketName.label('source'), models.Dataset.topics.label('topics'), models.Dataset.confidentiality.label('classification'), - models.DatasetStorageLocation.created, - models.DatasetStorageLocation.updated, - models.DatasetStorageLocation.deleted, + DatasetStorageLocation.created, + DatasetStorageLocation.updated, + DatasetStorageLocation.deleted, ) .join( models.Dataset, - models.Dataset.datasetUri == models.DatasetStorageLocation.datasetUri, + 
models.Dataset.datasetUri == DatasetStorageLocation.datasetUri, ) .join( models.Organization, @@ -216,7 +217,7 @@ def upsert_folder(session, es, locationUri: str): models.Environment, models.Dataset.environmentUri == models.Environment.environmentUri, ) - .filter(models.DatasetStorageLocation.locationUri == locationUri) + .filter(DatasetStorageLocation.locationUri == locationUri) .first() ) if folder: @@ -349,8 +350,8 @@ def remove_deleted_tables(session, es, datasetUri: str): def upsert_dataset_folders(session, es, datasetUri: str): folders = ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == datasetUri) + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == datasetUri) .all() ) for folder in folders: diff --git a/backend/dataall/tasks/bucket_policy_updater.py b/backend/dataall/tasks/bucket_policy_updater.py index 5b8f322be..9932f53ae 100644 --- a/backend/dataall/tasks/bucket_policy_updater.py +++ b/backend/dataall/tasks/bucket_policy_updater.py @@ -10,6 +10,7 @@ from ..aws.handlers.sts import SessionHelper from ..db import get_engine from ..db import models, api +from dataall.modules.datasets.db.models import DatasetStorageLocation root = logging.getLogger() root.setLevel(logging.INFO) @@ -201,13 +202,13 @@ def get_shared_tables(self, dataset) -> typing.List[models.ShareObjectItem]: ).all() return tables - def get_shared_folders(self, dataset) -> typing.List[models.DatasetStorageLocation]: + def get_shared_folders(self, dataset) -> typing.List[DatasetStorageLocation]: with self.engine.scoped_session() as session: locations = ( session.query( - models.DatasetStorageLocation.locationUri.label('locationUri'), - models.DatasetStorageLocation.S3BucketName.label('S3BucketName'), - models.DatasetStorageLocation.S3Prefix.label('S3Prefix'), + DatasetStorageLocation.locationUri.label('locationUri'), + DatasetStorageLocation.S3BucketName.label('S3BucketName'), + 
DatasetStorageLocation.S3Prefix.label('S3Prefix'), models.Environment.AwsAccountId.label('AwsAccountId'), models.Environment.region.label('region'), ) @@ -215,7 +216,7 @@ def get_shared_folders(self, dataset) -> typing.List[models.DatasetStorageLocati models.ShareObjectItem, and_( models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri + == DatasetStorageLocation.locationUri ), ) .join( @@ -229,8 +230,8 @@ def get_shared_folders(self, dataset) -> typing.List[models.DatasetStorageLocati ) .filter( and_( - models.DatasetStorageLocation.datasetUri == dataset.datasetUri, - models.DatasetStorageLocation.deleted.is_(None), + DatasetStorageLocation.datasetUri == dataset.datasetUri, + DatasetStorageLocation.deleted.is_(None), models.ShareObjectItem.status == models.Enums.ShareObjectStatus.Approved.value, ) diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 1323770a4..f0ea4e162 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -10,6 +10,7 @@ from ....aws.handlers.iam import IAM from ....utils.alarm_service import AlarmService +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) ACCESS_POINT_CREATION_TIME = 30 @@ -22,7 +23,7 @@ def __init__( session, dataset: models.Dataset, share: models.ShareObject, - target_folder: models.DatasetStorageLocation, + target_folder: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, source_env_group: models.EnvironmentGroup, diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 6940d3392..96b608338 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ 
b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -2,6 +2,7 @@ from ....db import models, api from ..share_managers import S3ShareManager +from dataall.modules.datasets.db.models import DatasetStorageLocation log = logging.getLogger(__name__) @@ -13,7 +14,7 @@ def __init__( session, dataset: models.Dataset, share: models.ShareObject, - share_folder: models.DatasetStorageLocation, + share_folder: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, source_env_group: models.EnvironmentGroup, @@ -37,7 +38,7 @@ def process_approved_shares( session, dataset: models.Dataset, share: models.ShareObject, - share_folders: [models.DatasetStorageLocation], + share_folders: [DatasetStorageLocation], source_environment: models.Environment, target_environment: models.Environment, source_env_group: models.EnvironmentGroup, @@ -104,7 +105,7 @@ def process_revoked_shares( session, dataset: models.Dataset, share: models.ShareObject, - revoke_folders: [models.DatasetStorageLocation], + revoke_folders: [DatasetStorageLocation], source_environment: models.Environment, target_environment: models.Environment, source_env_group: models.EnvironmentGroup, diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py index 838029d3e..436d5a701 100644 --- a/backend/dataall/utils/alarm_service.py +++ b/backend/dataall/utils/alarm_service.py @@ -11,6 +11,7 @@ from ..aws.handlers.sts import SessionHelper from ..db import models +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -74,7 +75,7 @@ def trigger_table_sharing_failure_alarm( def trigger_folder_sharing_failure_alarm( self, - folder: models.DatasetStorageLocation, + folder: DatasetStorageLocation, share: models.ShareObject, target_environment: models.Environment, ): @@ -101,7 +102,7 @@ def trigger_folder_sharing_failure_alarm( def trigger_revoke_folder_sharing_failure_alarm( 
self, - folder: models.DatasetStorageLocation, + folder: DatasetStorageLocation, share: models.ShareObject, target_environment: models.Environment, ): diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 65dc6934b..8334f7700 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,6 +1,7 @@ from .client import * from dataall.db import models from dataall.api import constants +from dataall.modules.datasets.db.models import DatasetStorageLocation @pytest.fixture(scope='module', autouse=True) @@ -521,12 +522,12 @@ def factory( def location(db): cache = {} - def factory(dataset: models.Dataset, name, username) -> models.DatasetStorageLocation: + def factory(dataset: models.Dataset, name, username) -> DatasetStorageLocation: key = f'{dataset.datasetUri}-{name}' if cache.get(key): return cache.get(key) with db.scoped_session() as session: - ds_location = models.DatasetStorageLocation( + ds_location = DatasetStorageLocation( name=name, label=name, owner=username, diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 057ff66a3..359a780b4 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -3,6 +3,7 @@ import pytest import dataall +from dataall.modules.datasets.db.models import DatasetStorageLocation @pytest.fixture(scope='module', autouse=True) @@ -235,7 +236,7 @@ def test_add_locations(location, dataset1, db): location(dataset=dataset1, name=f'unstructured{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetStorageLocation).count() + nb = session.query(DatasetStorageLocation).count() assert nb == 10 diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index 478c8bf3c..fcdb18bb0 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -4,6 +4,7 @@ import dataall from dataall.searchproxy import indexers +from dataall.modules.datasets.db.models import DatasetStorageLocation 
@pytest.fixture(scope='module', autouse=True) @@ -89,7 +90,7 @@ def table(org, env, db, dataset): @pytest.fixture(scope='module', autouse=True) def folder(org, env, db, dataset): with db.scoped_session() as session: - location = dataall.db.models.DatasetStorageLocation( + location = DatasetStorageLocation( datasetUri=dataset.datasetUri, AWSAccountId='12345678901', S3Prefix='S3prefix', diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index 826ae524f..7e6f0d71a 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -1,9 +1,8 @@ -import boto3 -import os import pytest from dataall.db import models from dataall.api import constants +from dataall.modules.datasets.db.models import DatasetStorageLocation @pytest.fixture(scope="module") @@ -128,10 +127,10 @@ def factory( @pytest.fixture(scope="module") def location(db): - def factory(dataset: models.Dataset, label: str) -> models.DatasetStorageLocation: + def factory(dataset: models.Dataset, label: str) -> DatasetStorageLocation: with db.scoped_session() as session: - ds_location = models.DatasetStorageLocation( + ds_location = DatasetStorageLocation( name=label, label=label, owner=dataset.owner, @@ -198,7 +197,7 @@ def factory( def share_item_folder(db): def factory( share: models.ShareObject, - location: models.DatasetStorageLocation, + location: DatasetStorageLocation, ) -> models.ShareObjectItem: with db.scoped_session() as session: share_item = models.ShareObjectItem( diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index 53c7f426b..2841be87e 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -7,6 +7,7 @@ from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager from dataall.utils.alarm_service import AlarmService +from dataall.modules.datasets.db.models import DatasetStorageLocation SOURCE_ENV_ACCOUNT = "111111111111" @@ -68,7 +69,7 @@ def dataset1(dataset: Callable, org1: 
models.Organization, source_environment: m @pytest.fixture(scope="module") -def location1(location: Callable, dataset1: models.Dataset) -> models.DatasetStorageLocation: +def location1(location: Callable, dataset1: models.Dataset) -> DatasetStorageLocation: yield location(dataset1, "location1") @@ -81,7 +82,7 @@ def share1(share: Callable, dataset1: models.Dataset, @pytest.fixture(scope="module") -def share_item_folder1(share_item_folder: Callable, share1: models.ShareObject, location1: models.DatasetStorageLocation): +def share_item_folder1(share_item_folder: Callable, share1: models.ShareObject, location1: DatasetStorageLocation): share_item_folder1 = share_item_folder(share1, location1) return share_item_folder1 @@ -383,7 +384,7 @@ def test_grant_target_role_access_policy_test_no_policy( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -445,7 +446,7 @@ def test_update_dataset_bucket_key_policy_with_env_admin( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -562,7 +563,7 @@ def test_update_dataset_bucket_key_policy_without_env_admin( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -642,7 +643,7 @@ def test_manage_access_point_and_policy_1( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -733,7 
+734,7 @@ def test_manage_access_point_and_policy_2( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -807,7 +808,7 @@ def test_manage_access_point_and_policy_3( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -878,7 +879,7 @@ def test_delete_access_point_policy_with_env_admin_one_prefix( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -950,7 +951,7 @@ def test_delete_access_point_policy_with_env_admin_multiple_prefix( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1017,7 +1018,7 @@ def test_dont_delete_access_point_with_policy( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1063,7 +1064,7 @@ def test_delete_access_point_without_policy( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1109,7 +1110,7 @@ def test_delete_target_role_access_policy_no_remaining_statement( db, share1: models.ShareObject, 
share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1174,7 +1175,7 @@ def test_delete_target_role_access_policy_with_remaining_statement( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1260,7 +1261,7 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_additional_target db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1351,7 +1352,7 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_tar db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1424,7 +1425,7 @@ def test_handle_share_failure( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): @@ -1459,7 +1460,7 @@ def test_handle_revoke_failure( db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, + location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, ): From b2566786250928b9328b2f4b89cfa725e34e5793 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 11:48:11 +0200 Subject: [PATCH 052/346] Datasets refactoring Moved 
DatasetStorageLocation into dataset services --- backend/dataall/aws/handlers/s3.py | 3 +- backend/dataall/db/api/__init__.py | 1 - .../api/storage_location/resolvers.py | 20 +++---- .../datasets/services}/dataset_location.py | 52 +++++++++---------- .../datasets/tasks/subscription_service.py | 3 +- 5 files changed, 40 insertions(+), 39 deletions(-) rename backend/dataall/{db/api => modules/datasets/services}/dataset_location.py (78%) diff --git a/backend/dataall/aws/handlers/s3.py b/backend/dataall/aws/handlers/s3.py index bcd0ad440..0be215ae3 100755 --- a/backend/dataall/aws/handlers/s3.py +++ b/backend/dataall/aws/handlers/s3.py @@ -4,6 +4,7 @@ from ...db import models from .service_handlers import Worker from .sts import SessionHelper +from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService log = logging.getLogger(__name__) @@ -13,7 +14,7 @@ class S3: @Worker.handler(path='s3.prefix.create') def create_dataset_location(engine, task: models.Task): with engine.scoped_session() as session: - location = db.api.DatasetStorageLocation.get_location_by_uri( + location = DatasetStorageLocationService.get_location_by_uri( session, task.targetUri ) S3.create_bucket_prefix(location) diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index a5f11d2c7..7bf8e0a4b 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -12,7 +12,6 @@ from .vote import Vote from .share_object import ShareObject, ShareObjectSM, ShareItemSM from .dataset import Dataset -from .dataset_location import DatasetStorageLocation from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 2eb18198c..e66e767a9 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -5,19 +5,19 @@ from dataall.db.api import ( ResourcePolicy, Glossary, - DatasetStorageLocation, Dataset, Environment, ) from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService def create_storage_location( context, source, datasetUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetStorageLocation.create_dataset_location( + location = DatasetStorageLocationService.create_dataset_location( session=session, username=context.username, groups=context.groups, @@ -40,15 +40,15 @@ def list_dataset_locations(context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetStorageLocation.list_dataset_locations( + return DatasetStorageLocationService.list_dataset_locations( session=session, uri=source.datasetUri, data=filter, check_perm=True ) def get_storage_location(context, source, locationUri=None): with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - return DatasetStorageLocation.get_dataset_location( + location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) + return DatasetStorageLocationService.get_dataset_location( session=session, username=context.username, groups=context.groups, @@ -62,10 +62,10 @@ def update_storage_location( context, source, locationUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) + location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) input['location'] = location input['locationUri'] = location.locationUri - DatasetStorageLocation.update_dataset_location( + 
DatasetStorageLocationService.update_dataset_location( session=session, username=context.username, groups=context.groups, @@ -80,8 +80,8 @@ def update_storage_location( def remove_storage_location(context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - DatasetStorageLocation.delete_dataset_location( + location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) + DatasetStorageLocationService.delete_dataset_location( session=session, username=context.username, groups=context.groups, @@ -103,7 +103,7 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): def publish_location_update(context: Context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) + location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) ResourcePolicy.check_user_resource_permission( session=session, username=context.username, diff --git a/backend/dataall/db/api/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py similarity index 78% rename from backend/dataall/db/api/dataset_location.py rename to backend/dataall/modules/datasets/services/dataset_location.py index e19f1dfb0..640f0a037 100644 --- a/backend/dataall/db/api/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -3,15 +3,15 @@ from sqlalchemy import and_, or_ -from . import has_tenant_perm, has_resource_perm, Glossary -from .. 
import models, api, paginate, permissions, exceptions -from .dataset import Dataset -from dataall.modules.datasets.db.models import DatasetStorageLocation as DatasetStorageLocationModel +from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary +from dataall.db import models, api, paginate, permissions, exceptions +from dataall.db.api.dataset import Dataset +from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) -class DatasetStorageLocation: +class DatasetStorageLocationService: @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET_FOLDER) @@ -22,14 +22,14 @@ def create_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> DatasetStorageLocationModel: + ) -> DatasetStorageLocation: dataset = Dataset.get_dataset_by_uri(session, uri) exists = ( - session.query(DatasetStorageLocationModel) + session.query(DatasetStorageLocation) .filter( and_( - DatasetStorageLocationModel.datasetUri == dataset.datasetUri, - DatasetStorageLocationModel.S3Prefix == data['prefix'], + DatasetStorageLocation.datasetUri == dataset.datasetUri, + DatasetStorageLocation.S3Prefix == data['prefix'], ) ) .count() @@ -41,7 +41,7 @@ def create_dataset_location( message=f'Folder: {data["prefix"]} already exist on dataset {uri}', ) - location = DatasetStorageLocationModel( + location = DatasetStorageLocation( datasetUri=dataset.datasetUri, label=data.get('label'), description=data.get('description', 'No description provided'), @@ -78,14 +78,14 @@ def list_dataset_locations( check_perm: bool = False, ) -> dict: query = ( - session.query(DatasetStorageLocationModel) - .filter(DatasetStorageLocationModel.datasetUri == uri) - .order_by(DatasetStorageLocationModel.created.desc()) + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == uri) + .order_by(DatasetStorageLocation.created.desc()) ) if data.get('term'): term = 
data.get('term') query = query.filter( - DatasetStorageLocationModel.label.ilike('%' + term + '%') + DatasetStorageLocation.label.ilike('%' + term + '%') ) return paginate( query, page=data.get('page', 1), page_size=data.get('pageSize', 10) @@ -101,8 +101,8 @@ def get_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> DatasetStorageLocationModel: - return DatasetStorageLocation.get_location_by_uri(session, data['locationUri']) + ) -> DatasetStorageLocation: + return DatasetStorageLocationService.get_location_by_uri(session, data['locationUri']) @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @@ -114,11 +114,11 @@ def update_dataset_location( uri: str, data: dict = None, check_perm: bool = False, - ) -> DatasetStorageLocationModel: + ) -> DatasetStorageLocation: location = data.get( 'location', - DatasetStorageLocation.get_location_by_uri(session, data['locationUri']), + DatasetStorageLocationService.get_location_by_uri(session, data['locationUri']), ) for k in data.keys(): @@ -145,7 +145,7 @@ def delete_dataset_location( data: dict = None, check_perm: bool = False, ): - location = DatasetStorageLocation.get_location_by_uri( + location = DatasetStorageLocationService.get_location_by_uri( session, data['locationUri'] ) share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() @@ -177,9 +177,9 @@ def delete_dataset_location( return True @staticmethod - def get_location_by_uri(session, location_uri) -> DatasetStorageLocationModel: + def get_location_by_uri(session, location_uri) -> DatasetStorageLocation: location: DatasetStorageLocation = session.query( - DatasetStorageLocationModel + DatasetStorageLocation ).get(location_uri) if not location: raise exceptions.ObjectNotFound('Folder', location_uri) @@ -187,13 +187,13 @@ def get_location_by_uri(session, location_uri) -> DatasetStorageLocationModel: @staticmethod def get_location_by_s3_prefix(session, s3_prefix, accountid, region): - location: 
DatasetStorageLocationModel = ( - session.query(DatasetStorageLocationModel) + location: DatasetStorageLocation = ( + session.query(DatasetStorageLocation) .filter( and_( - DatasetStorageLocationModel.S3Prefix.startswith(s3_prefix), - DatasetStorageLocationModel.AWSAccountId == accountid, - DatasetStorageLocationModel.region == region, + DatasetStorageLocation.S3Prefix.startswith(s3_prefix), + DatasetStorageLocation.AWSAccountId == accountid, + DatasetStorageLocation.region == region, ) ) .first() diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 7df028914..94339d0f7 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -16,6 +16,7 @@ from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService from dataall.modules.datasets.db.models import DatasetStorageLocation root = logging.getLogger() @@ -105,7 +106,7 @@ def publish_table_update_message(engine, message): @staticmethod def publish_location_update_message(session, message): location: DatasetStorageLocation = ( - db.api.DatasetStorageLocation.get_location_by_s3_prefix( + DatasetStorageLocationService.get_location_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), From 66b5ddbf8093f4c58f6de46552064dff3db57bf0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 11:58:32 +0200 Subject: [PATCH 053/346] Datasets refactoring Extracted s3_location_handler --- backend/dataall/aws/handlers/s3.py | 35 -------------- .../modules/datasets/handlers/__init__.py | 6 ++- .../datasets/handlers/s3_location_handler.py | 48 +++++++++++++++++++ 3 files changed, 52 insertions(+), 37 deletions(-) create mode 
100644 backend/dataall/modules/datasets/handlers/s3_location_handler.py diff --git a/backend/dataall/aws/handlers/s3.py b/backend/dataall/aws/handlers/s3.py index 0be215ae3..2352ef791 100755 --- a/backend/dataall/aws/handlers/s3.py +++ b/backend/dataall/aws/handlers/s3.py @@ -1,51 +1,16 @@ import logging -from ... import db -from ...db import models -from .service_handlers import Worker from .sts import SessionHelper -from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService log = logging.getLogger(__name__) class S3: - @staticmethod - @Worker.handler(path='s3.prefix.create') - def create_dataset_location(engine, task: models.Task): - with engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri( - session, task.targetUri - ) - S3.create_bucket_prefix(location) - return location - @staticmethod def client(account_id: str, region: str, client_type: str): session = SessionHelper.remote_session(accountid=account_id) return session.client(client_type, region_name=region) - @staticmethod - def create_bucket_prefix(location): - try: - accountid = location.AWSAccountId - region = location.region - s3cli = S3.client(account_id=accountid, region=region, client_type='s3') - response = s3cli.put_object( - Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' - ) - log.info( - 'Creating S3 Prefix `{}`({}) on AWS #{}'.format( - location.S3BucketName, accountid, response - ) - ) - location.locationCreated = True - except Exception as e: - log.error( - f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' - ) - raise e - @staticmethod def create_bucket_policy(account_id: str, region: str, bucket_name: str, policy: str): try: diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index a5d506712..382f052a9 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ 
b/backend/dataall/modules/datasets/handlers/__init__.py @@ -4,7 +4,9 @@ """ from dataall.modules.datasets.handlers import ( glue_column_handler, - glue_table_handler + glue_table_handler, + glue_profiling_handler, + s3_location_handler ) -__all__ = ["glue_column_handler", "glue_table_handler"] +__all__ = ["glue_column_handler", "glue_table_handler", "glue_profiling_handler", "s3_location_handler"] diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_location_handler.py new file mode 100644 index 000000000..431a4cecd --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/s3_location_handler.py @@ -0,0 +1,48 @@ +import logging + +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models +from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService + +log = logging.getLogger(__name__) + + +class S3DatasetLocationHandler: + """Handles async requests related to s3 for dataset storage location""" + + @staticmethod + def client(account_id: str, region: str, client_type: str): + session = SessionHelper.remote_session(accountid=account_id) + return session.client(client_type, region_name=region) + + @staticmethod + @Worker.handler(path='s3.prefix.create') + def create_dataset_location(engine, task: models.Task): + with engine.scoped_session() as session: + location = DatasetStorageLocationService.get_location_by_uri( + session, task.targetUri + ) + S3DatasetLocationHandler.create_bucket_prefix(location) + return location + + @staticmethod + def create_bucket_prefix(location): + try: + account_id = location.AWSAccountId + region = location.region + s3cli = S3DatasetLocationHandler.client(account_id=account_id, region=region, client_type='s3') + response = s3cli.put_object( + Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' + ) + log.info( + 'Creating S3 Prefix 
`{}`({}) on AWS #{}'.format( + location.S3BucketName, account_id, response + ) + ) + location.locationCreated = True + except Exception as e: + log.error( + f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' + ) + raise e From 352d82485aa4edbcfcaa972f3603ee2bdb0e0d96 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 12:12:58 +0200 Subject: [PATCH 054/346] Datasets refactoring Moved the dataset stack into modules --- backend/dataall/cdkproxy/stacks/__init__.py | 2 -- backend/dataall/modules/datasets/__init__.py | 12 +++++++++++ .../dataall/modules/datasets/cdk/__init__.py | 3 +++ .../datasets/cdk/dataset_stack.py} | 20 +++++++++---------- tests/cdkproxy/test_dataset_stack.py | 8 ++++---- 5 files changed, 29 insertions(+), 16 deletions(-) create mode 100644 backend/dataall/modules/datasets/cdk/__init__.py rename backend/dataall/{cdkproxy/stacks/dataset.py => modules/datasets/cdk/dataset_stack.py} (97%) diff --git a/backend/dataall/cdkproxy/stacks/__init__.py b/backend/dataall/cdkproxy/stacks/__init__.py index 3857b30c0..fb4674754 100644 --- a/backend/dataall/cdkproxy/stacks/__init__.py +++ b/backend/dataall/cdkproxy/stacks/__init__.py @@ -1,4 +1,3 @@ -from .dataset import Dataset from .environment import EnvironmentSetup from .pipeline import PipelineStack from .manager import stack, instanciate_stack, StackManager @@ -7,7 +6,6 @@ __all__ = [ 'EnvironmentSetup', - 'Dataset', 'StackManager', 'stack', 'StackManager', diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 842eba82b..f0bac92d2 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -39,3 +39,15 @@ def is_supported(cls, modes: List[ImportMode]): def __init__(self): import dataall.modules.datasets.handlers log.info("Dataset handlers have been imported") + + +class DatasetCdkModuleInterface(ModuleInterface): + """Loads dataset 
cdk stacks """ + + @classmethod + def is_supported(cls, modes: List[ImportMode]): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.datasets.cdk + log.info("Dataset stacks have been imported") diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py new file mode 100644 index 000000000..5642d8a40 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.datasets.cdk import dataset_stack + +__all__ = ["dataset_stack"] diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py similarity index 97% rename from backend/dataall/cdkproxy/stacks/dataset.py rename to backend/dataall/modules/datasets/cdk/dataset_stack.py index 852cba66b..e99b43b0c 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -19,22 +19,22 @@ from aws_cdk.aws_glue import CfnCrawler from sqlalchemy import and_, or_ -from .manager import stack -from ... 
import db -from ...aws.handlers.quicksight import Quicksight -from ...aws.handlers.lakeformation import LakeFormation -from ...aws.handlers.sts import SessionHelper -from ...db import models -from ...db.api import Environment -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil +from dataall.cdkproxy.stacks.manager import stack +from dataall import db +from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.lakeformation import LakeFormation +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models +from dataall.db.api import Environment +from dataall.utils.cdk_nag_utils import CDKNagUtil +from dataall.utils.runtime_stacks_tagging import TagsUtil from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @stack(stack='dataset') -class Dataset(Stack): +class DatasetStack(Stack): module_name = __file__ def get_engine(self) -> db.Engine: diff --git a/tests/cdkproxy/test_dataset_stack.py b/tests/cdkproxy/test_dataset_stack.py index 19a30d513..34f495056 100644 --- a/tests/cdkproxy/test_dataset_stack.py +++ b/tests/cdkproxy/test_dataset_stack.py @@ -3,14 +3,14 @@ import pytest from aws_cdk import App -from dataall.cdkproxy.stacks import Dataset +from dataall.modules.datasets.cdk.dataset_stack import DatasetStack @pytest.fixture(scope='function', autouse=True) def patch_methods(mocker, db, dataset, env, org): - mocker.patch('dataall.cdkproxy.stacks.dataset.Dataset.get_engine', return_value=db) + mocker.patch('dataall.cdkproxy.stacks.dataset.DatasetStack.get_engine', return_value=db) mocker.patch( - 'dataall.cdkproxy.stacks.dataset.Dataset.get_target', return_value=dataset + 'dataall.cdkproxy.stacks.dataset.DatasetStack.get_target', return_value=dataset ) mocker.patch( 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', @@ -41,7 +41,7 @@ def patch_methods(mocker, db, dataset, env, org): 
@pytest.fixture(scope='function', autouse=True) def template(dataset): app = App() - Dataset(app, 'Dataset', target_uri=dataset.datasetUri) + DatasetStack(app, 'Dataset', target_uri=dataset.datasetUri) return json.dumps(app.synth().get_stack_by_name('Dataset').template) From 9934a9cf81a52ce95287965c12c25397dad329cd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 17 Apr 2023 13:42:22 +0200 Subject: [PATCH 055/346] Datasets refactoring Moved indexing into GlossaryRegistry --- .../dataall/api/Objects/Glossary/registry.py | 39 +++++++++++++++---- .../dataall/api/Objects/Glossary/resolvers.py | 16 +------- backend/dataall/modules/datasets/__init__.py | 8 +++- 3 files changed, 41 insertions(+), 22 deletions(-) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 7c42e4f4c..0f0cdb61f 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -1,9 +1,12 @@ -from dataclasses import dataclass -from typing import Type, Dict, Optional, Protocol, Union +from dataclasses import dataclass, field +from typing import Type, Dict, Optional, Protocol, Union, Callable, Any + +from opensearchpy import OpenSearch from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models +from dataall.searchproxy.indexers import upsert_dashboard, upsert_table, upsert_dataset class Identifiable(Protocol): @@ -17,13 +20,14 @@ class GlossaryDefinition: target_type: str object_type: str model: Union[Type[Resource], Identifiable] # should be an intersection, but python typing doesn't have one yet + reindexer: Callable[[Any, OpenSearch, str], None] = None # a callback to reindex glossaries in open search def target_uri(self): return self.model.uri() class GlossaryRegistry(UnionTypeRegistry): - """Registry of glossary definition and API to retrieve data""" + """Registry of glossary definition and API 
to retrieve and reindex data""" _DEFINITIONS: Dict[str, GlossaryDefinition] = {} @classmethod @@ -50,8 +54,29 @@ def definitions(cls): def types(cls): return [gql.Ref(definition.object_type) for definition in cls._DEFINITIONS.values()] + @classmethod + def reindex(cls, session, es: OpenSearch, target_type: str, target_uri: str): + definition = cls._DEFINITIONS[target_type] + if definition.reindexer: + definition.reindexer(session, es, target_uri) + + +GlossaryRegistry.register(GlossaryDefinition( + target_type="Dashboard", + object_type="Dashboard", + model=models.Dashboard, + reindexer=upsert_dashboard +)) -GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) -GlossaryRegistry.register(GlossaryDefinition("Dashboard", "Dashboard", models.Dashboard)) -GlossaryRegistry.register(GlossaryDefinition("DatasetTable", "DatasetTable", models.DatasetTable)) -GlossaryRegistry.register(GlossaryDefinition("Dataset", "Dataset", models.Dataset)) +GlossaryRegistry.register(GlossaryDefinition( + target_type="DatasetTable", + object_type="DatasetTable", + model=models.DatasetTable, + reindexer=upsert_table +)) +GlossaryRegistry.register(GlossaryDefinition( + target_type="Dataset", + object_type="Dataset", + model=models.Dataset, + reindexer=upsert_dataset +)) diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index 959578600..fdc4c3eea 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -6,15 +6,10 @@ from .... 
import db from ....api.context import Context from ....db import paginate, exceptions, models -from ....searchproxy import upsert_dataset -from ....searchproxy import upsert_table -from ....searchproxy.indexers import upsert_folder, upsert_dashboard from ....api.constants import ( GlossaryRole ) -from dataall.modules.datasets.db.models import DatasetStorageLocation - def resolve_glossary_node(obj: models.GlossaryNode, *_): if obj.nodeType == 'G': @@ -462,15 +457,8 @@ def reindex(context, linkUri): link: models.TermLink = session.query(models.TermLink).get(linkUri) if not link: return - target = resolve_link_target(context, source=link) - if isinstance(target, models.Dataset): - upsert_dataset(session=session, es=context.es, datasetUri=link.targetUri) - elif isinstance(target, models.DatasetTable): - upsert_table(session=session, es=context.es, tableUri=link.targetUri) - elif isinstance(target, DatasetStorageLocation): - upsert_folder(session=session, es=context.es, locationUri=link.targetUri) - elif isinstance(target, models.Dashboard): - upsert_dashboard(session=session, es=context.es, dashboardUri=link.targetUri) + + GlossaryRegistry.reindex(session, context.es, link.targetType, link.targetUri) def _target_model(target_type: str): diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index f0bac92d2..3e50f37fa 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -6,6 +6,7 @@ from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation from dataall.modules.loader import ModuleInterface, ImportMode +from dataall.searchproxy.indexers import upsert_folder log = logging.getLogger(__name__) @@ -24,7 +25,12 @@ def __init__(self): FeedRegistry.register(FeedDefinition("DatasetStorageLocation", DatasetStorageLocation)) 
GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) - GlossaryRegistry.register(GlossaryDefinition("Folder", "DatasetStorageLocation", DatasetStorageLocation)) + GlossaryRegistry.register(GlossaryDefinition( + target_type="Folder", + object_type="DatasetStorageLocation", + model=DatasetStorageLocation, + reindexer=upsert_folder + )) log.info("API of datasets has been imported") From 228c1753039667181edcd97af3a50a0915fd396e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 18 Apr 2023 14:25:34 +0200 Subject: [PATCH 056/346] Datasets refactoring Removed dead code --- backend/dataall/db/api/environment.py | 92 --------------------------- 1 file changed, 92 deletions(-) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index ac3777e5d..8642287db 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -1089,98 +1089,6 @@ def paginated_environment_networks( page_size=data.get('pageSize', 10), ).to_dict() - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) - def paginated_environment_data_items( - session, username, groups, uri, data=None, check_perm=None - ): - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - 
models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - DatasetStorageLocation, - models.ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == uri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - @staticmethod def validate_invite_params(data): if not data: From 263d10cd4bb3b1c83b5392fd6ee0b0138e59977b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 19 Apr 2023 10:08:11 +0200 Subject: [PATCH 
057/346] Datasets refactoring Extracted dataset share service --- .../api/Objects/Environment/resolvers.py | 4 +- .../dataall/api/Objects/Group/resolvers.py | 4 +- backend/dataall/db/api/environment.py | 194 +---------------- .../services/dataset_share_service.py | 204 ++++++++++++++++++ 4 files changed, 211 insertions(+), 195 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/dataset_share_service.py diff --git a/backend/dataall/api/Objects/Environment/resolvers.py b/backend/dataall/api/Objects/Environment/resolvers.py index 60af060a7..86f251f59 100644 --- a/backend/dataall/api/Objects/Environment/resolvers.py +++ b/backend/dataall/api/Objects/Environment/resolvers.py @@ -21,6 +21,8 @@ NamingConventionPattern, ) +from dataall.modules.datasets.services.dataset_share_service import DatasetShareService + log = logging.getLogger() @@ -391,7 +393,7 @@ def list_shared_with_environment_data_items( if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.Environment.paginated_shared_with_environment_datasets( + return DatasetShareService.paginated_shared_with_environment_datasets( session=session, username=context.username, groups=context.groups, diff --git a/backend/dataall/api/Objects/Group/resolvers.py b/backend/dataall/api/Objects/Group/resolvers.py index 9192b6b59..11de0da1b 100644 --- a/backend/dataall/api/Objects/Group/resolvers.py +++ b/backend/dataall/api/Objects/Group/resolvers.py @@ -4,7 +4,7 @@ from ....db import exceptions from ....db.models import Group from ....aws.handlers.cognito import Cognito - +from ....modules.datasets.services.dataset_share_service import DatasetShareService log = logging.getLogger() @@ -66,7 +66,7 @@ def list_data_items_shared_with_env_group( if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.Environment.paginated_shared_with_environment_group_datasets( + return DatasetShareService.paginated_shared_with_environment_group_datasets( 
session=session, username=context.username, groups=context.groups, diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 8642287db..d1c7a67fa 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -1,11 +1,11 @@ import logging import re -from sqlalchemy import or_, case, func +from sqlalchemy import or_ from sqlalchemy.orm import Query from sqlalchemy.sql import and_ -from .. import exceptions, permissions, models, api +from .. import exceptions, permissions, models from . import ( has_resource_perm, has_tenant_perm, @@ -16,7 +16,6 @@ from ..api.organization import Organization from ..models import EnvironmentGroup from ..models.Enums import ( - ShareableType, EnvironmentType, EnvironmentPermission, ) @@ -29,8 +28,6 @@ NamingConventionPattern, ) -from dataall.modules.datasets.db.models import DatasetStorageLocation - log = logging.getLogger(__name__) @@ -871,195 +868,8 @@ def paginated_environment_group_datasets( page_size=data.get('pageSize', 10), ).to_dict() - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - def paginated_shared_with_environment_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - 
models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - DatasetStorageLocation, - models.ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == uri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - - if data.get("uniqueDatasets", False): - q = q.distinct(models.ShareObject.datasetUri) - - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def 
paginated_shared_with_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - DatasetStorageLocation, - models.ShareObjectItem.itemUri 
- == DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == envUri, - models.ShareObject.principalId == groupUri, - ) - ) - ) - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() @staticmethod def query_environment_networks(session, username, groups, uri, filter) -> Query: diff --git a/backend/dataall/modules/datasets/services/dataset_share_service.py b/backend/dataall/modules/datasets/services/dataset_share_service.py new file mode 100644 index 000000000..9ca84a1cf --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_share_service.py @@ -0,0 +1,204 @@ +import logging +import re + +from sqlalchemy import or_, case, func +from sqlalchemy.sql import and_ + +from dataall.api.constants import ShareableType +from dataall.db import models, permissions +from dataall.db.api import has_resource_perm, ShareItemSM +from dataall.db.paginator import paginate +from dataall.modules.datasets.db.models import DatasetStorageLocation + + +class DatasetShareService: + + @staticmethod + @has_resource_perm(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + def paginated_shared_with_environment_datasets( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + q = ( + session.query( + models.ShareObjectItem.shareUri.label('shareUri'), + models.Dataset.datasetUri.label('datasetUri'), + models.Dataset.name.label('datasetName'), + 
models.Dataset.description.label('datasetDescription'), + models.Environment.environmentUri.label('environmentUri'), + models.Environment.name.label('environmentName'), + models.ShareObject.created.label('created'), + models.ShareObject.principalId.label('principalId'), + models.ShareObjectItem.itemType.label('itemType'), + models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + models.ShareObjectItem.GlueTableName.label('GlueTableName'), + models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + models.Organization.organizationUri.label('organizationUri'), + models.Organization.name.label('organizationName'), + case( + [ + ( + models.ShareObjectItem.itemType + == ShareableType.Table.value, + func.concat( + models.DatasetTable.GlueDatabaseName, + '.', + models.DatasetTable.GlueTableName, + ), + ), + ( + models.ShareObjectItem.itemType + == ShareableType.StorageLocation.value, + func.concat(DatasetStorageLocation.name), + ), + ], + else_='XXX XXXX', + ).label('itemAccess'), + ) + .join( + models.ShareObject, + models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ) + .join( + models.Dataset, + models.ShareObject.datasetUri == models.Dataset.datasetUri, + ) + .join( + models.Environment, + models.Environment.environmentUri == models.Dataset.environmentUri, + ) + .join( + models.Organization, + models.Organization.organizationUri + == models.Environment.organizationUri, + ) + .outerjoin( + models.DatasetTable, + models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + ) + .outerjoin( + DatasetStorageLocation, + models.ShareObjectItem.itemUri + == DatasetStorageLocation.locationUri, + ) + .filter( + and_( + models.ShareObjectItem.status.in_(share_item_shared_states), + models.ShareObject.environmentUri == uri, + ) + ) + ) + + if data.get('datasetUri'): + datasetUri = data.get('datasetUri') + q = q.filter(models.ShareObject.datasetUri == datasetUri) + + if data.get('itemTypes', None): + itemTypes = data.get('itemTypes') + 
q = q.filter( + or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) + ) + + if data.get("uniqueDatasets", False): + q = q.distinct(models.ShareObject.datasetUri) + + if data.get('term'): + term = data.get('term') + q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) + + return paginate( + query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + @staticmethod + def paginated_shared_with_environment_group_datasets( + session, username, groups, envUri, groupUri, data=None, check_perm=None + ) -> dict: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + q = ( + session.query( + models.ShareObjectItem.shareUri.label('shareUri'), + models.Dataset.datasetUri.label('datasetUri'), + models.Dataset.name.label('datasetName'), + models.Dataset.description.label('datasetDescription'), + models.Environment.environmentUri.label('environmentUri'), + models.Environment.name.label('environmentName'), + models.ShareObject.created.label('created'), + models.ShareObject.principalId.label('principalId'), + models.ShareObjectItem.itemType.label('itemType'), + models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + models.ShareObjectItem.GlueTableName.label('GlueTableName'), + models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + models.Organization.organizationUri.label('organizationUri'), + models.Organization.name.label('organizationName'), + case( + [ + ( + models.ShareObjectItem.itemType + == ShareableType.Table.value, + func.concat( + models.DatasetTable.GlueDatabaseName, + '.', + models.DatasetTable.GlueTableName, + ), + ), + ( + models.ShareObjectItem.itemType + == ShareableType.StorageLocation.value, + func.concat(DatasetStorageLocation.name), + ), + ], + else_='XXX XXXX', + ).label('itemAccess'), + ) + .join( + models.ShareObject, + models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ) + .join( + models.Dataset, + models.ShareObject.datasetUri == 
models.Dataset.datasetUri, + ) + .join( + models.Environment, + models.Environment.environmentUri == models.Dataset.environmentUri, + ) + .join( + models.Organization, + models.Organization.organizationUri + == models.Environment.organizationUri, + ) + .outerjoin( + models.DatasetTable, + models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + ) + .outerjoin( + DatasetStorageLocation, + models.ShareObjectItem.itemUri + == DatasetStorageLocation.locationUri, + ) + .filter( + and_( + models.ShareObjectItem.status.in_(share_item_shared_states), + models.ShareObject.environmentUri == envUri, + models.ShareObject.principalId == groupUri, + ) + ) + ) + + if data.get('datasetUri'): + datasetUri = data.get('datasetUri') + q = q.filter(models.ShareObject.datasetUri == datasetUri) + + if data.get('itemTypes', None): + itemTypes = data.get('itemTypes') + q = q.filter( + or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) + ) + if data.get('term'): + term = data.get('term') + q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) + + return paginate( + query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() From 417e6e50fcd120c16bc8207c76a5733e7005ab04 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 19 Apr 2023 11:21:46 +0200 Subject: [PATCH 058/346] Datasets refactoring Solved broken reference --- .../dataall/modules/datasets/api/storage_location/resolvers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index e66e767a9..32cfcfcac 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -8,6 +8,7 @@ Dataset, Environment, ) +from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler from dataall.searchproxy import 
indexers from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService @@ -26,7 +27,7 @@ def create_storage_location( check_perm=True, ) - S3.create_bucket_prefix(location) + S3DatasetLocationHandler.create_bucket_prefix(location) indexers.upsert_folder( session=session, es=context.es, locationUri=location.locationUri From 7aaff5b113182485b9b163a51c83f1234043c5a5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 19 Apr 2023 16:30:09 +0200 Subject: [PATCH 059/346] Introduced Indexers --- .../api/Objects/Dashboard/resolvers.py | 5 +- .../dataall/api/Objects/Dataset/resolvers.py | 14 +- .../api/Objects/DatasetTable/resolvers.py | 5 +- .../dataall/api/Objects/Glossary/registry.py | 13 +- backend/dataall/api/Objects/Vote/resolvers.py | 7 +- backend/dataall/modules/datasets/__init__.py | 4 +- .../api/storage_location/resolvers.py | 7 +- backend/dataall/searchproxy/__init__.py | 5 - backend/dataall/searchproxy/indexers.py | 565 +++++++++--------- backend/dataall/searchproxy/upsert.py | 68 ++- backend/dataall/tasks/catalog_indexer.py | 8 +- tests/api/conftest.py | 9 +- tests/searchproxy/test_indexers.py | 15 +- tests/tasks/test_catalog_indexer.py | 2 +- 14 files changed, 380 insertions(+), 347 deletions(-) diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/api/Objects/Dashboard/resolvers.py index 714b6c4b9..84a2a1bcc 100644 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ b/backend/dataall/api/Objects/Dashboard/resolvers.py @@ -8,6 +8,7 @@ from ....db.api import ResourcePolicy, Glossary, Vote from ....searchproxy import indexers from ....utils import Parameter +from dataall.searchproxy.indexers import DashboardIndexer param_store = Parameter() ENVNAME = os.getenv("envname", "local") @@ -146,7 +147,7 @@ def import_dashboard(context: Context, source, input: dict = None): check_perm=True, ) - 
indexers.upsert_dashboard(session, context.es, dashboard.dashboardUri) + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) return dashboard @@ -166,7 +167,7 @@ def update_dashboard(context, source, input: dict = None): check_perm=True, ) - indexers.upsert_dashboard(session, context.es, dashboard.dashboardUri) + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) return dashboard diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index a03b0647f..fcbc2d6f3 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -13,11 +13,11 @@ from ....aws.handlers.glue import Glue from ....aws.handlers.service_handlers import Worker from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.sns import Sns from ....db import paginate, exceptions, permissions, models from ....db.api import Dataset, Environment, ShareObject, ResourcePolicy from ....db.api.organization import Organization -from ....searchproxy import indexers +from dataall.searchproxy import indexers +from dataall.searchproxy.indexers import DatasetIndexer log = logging.getLogger(__name__) @@ -34,8 +34,8 @@ def create_dataset(context: Context, source, input=None): ) Dataset.create_dataset_stack(session, dataset) - indexers.upsert_dataset( - session=session, es=context.es, datasetUri=dataset.datasetUri + DatasetIndexer.upsert( + session=session, dataset_uri=dataset.datasetUri ) stack_helper.deploy_dataset_stack(dataset) @@ -72,8 +72,8 @@ def import_dataset(context: Context, source, input=None): Dataset.create_dataset_stack(session, dataset) - indexers.upsert_dataset( - session=session, es=context.es, datasetUri=dataset.datasetUri + DatasetIndexer.upsert( + session=session, dataset_uri=dataset.datasetUri ) stack_helper.deploy_dataset_stack(dataset) @@ -220,7 +220,7 @@ def update_dataset(context, source, datasetUri: str = None, input: dict = 
None): data=input, check_perm=True, ) - indexers.upsert_dataset(session, context.es, datasetUri) + DatasetIndexer.upsert(session, dataset_uri=datasetUri) stack_helper.deploy_dataset_stack(updated_dataset) diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/api/Objects/DatasetTable/resolvers.py index 3e2b833e3..567985348 100644 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ b/backend/dataall/api/Objects/DatasetTable/resolvers.py @@ -13,6 +13,7 @@ from ....db.api import ResourcePolicy, Glossary from ....searchproxy import indexers from ....utils import json_utils +from dataall.searchproxy.indexers import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -28,7 +29,7 @@ def create_table(context, source, datasetUri: str = None, input: dict = None): data=input, check_perm=True, ) - indexers.upsert_table(session, context.es, table.tableUri) + DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @@ -80,7 +81,7 @@ def update_table(context, source, tableUri: str = None, input: dict = None): data=input, check_perm=True, ) - indexers.upsert_table(session, context.es, table.tableUri) + DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 0f0cdb61f..cb82bf208 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -6,7 +6,8 @@ from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models -from dataall.searchproxy.indexers import upsert_dashboard, upsert_table, upsert_dataset +from dataall.searchproxy.indexers import DashboardIndexer, DatasetTableIndexer, DatasetIndexer +from dataall.searchproxy.upsert import BaseIndexer class Identifiable(Protocol): @@ 
-20,7 +21,7 @@ class GlossaryDefinition: target_type: str object_type: str model: Union[Type[Resource], Identifiable] # should be an intersection, but python typing doesn't have one yet - reindexer: Callable[[Any, OpenSearch, str], None] = None # a callback to reindex glossaries in open search + reindexer: Type[BaseIndexer] = None # a callback to reindex glossaries in open search def target_uri(self): return self.model.uri() @@ -58,25 +59,25 @@ def types(cls): def reindex(cls, session, es: OpenSearch, target_type: str, target_uri: str): definition = cls._DEFINITIONS[target_type] if definition.reindexer: - definition.reindexer(session, es, target_uri) + definition.reindexer.upsert(session, target_uri) GlossaryRegistry.register(GlossaryDefinition( target_type="Dashboard", object_type="Dashboard", model=models.Dashboard, - reindexer=upsert_dashboard + reindexer=DashboardIndexer )) GlossaryRegistry.register(GlossaryDefinition( target_type="DatasetTable", object_type="DatasetTable", model=models.DatasetTable, - reindexer=upsert_table + reindexer=DatasetTableIndexer )) GlossaryRegistry.register(GlossaryDefinition( target_type="Dataset", object_type="Dataset", model=models.Dataset, - reindexer=upsert_dataset + reindexer=DatasetIndexer )) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index da41462cd..34dcd9f05 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -1,7 +1,6 @@ from .... 
import db from ....api.context import Context -from ....searchproxy.indexers import upsert_dashboard -from ....searchproxy.indexers import upsert_dataset +from dataall.searchproxy.indexers import DatasetIndexer, DashboardIndexer def count_upvotes( @@ -34,9 +33,9 @@ def upvote(context: Context, source, input=None): def reindex(session, es, vote): if vote.targetType == 'dataset': - upsert_dataset(session=session, es=es, datasetUri=vote.targetUri) + DatasetIndexer.upsert(session=session, dataset_uri=vote.targetUri) elif vote.targetType == 'dashboard': - upsert_dashboard(session=session, es=es, dashboardUri=vote.targetUri) + DashboardIndexer.upsert(session=session, dashboard_uri=vote.targetUri) def get_vote(context: Context, source, targetUri: str = None, targetType: str = None): diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 3e50f37fa..a2f600f68 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -6,7 +6,7 @@ from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation from dataall.modules.loader import ModuleInterface, ImportMode -from dataall.searchproxy.indexers import upsert_folder +from dataall.searchproxy.indexers import DatasetLocationIndexer log = logging.getLogger(__name__) @@ -29,7 +29,7 @@ def __init__(self): target_type="Folder", object_type="DatasetStorageLocation", model=DatasetStorageLocation, - reindexer=upsert_folder + reindexer=DatasetLocationIndexer )) log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 32cfcfcac..4aebc1458 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -12,6 +12,7 @@ from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService +from dataall.searchproxy.indexers import DatasetLocationIndexer def create_storage_location( @@ -29,9 +30,7 @@ def create_storage_location( S3DatasetLocationHandler.create_bucket_prefix(location) - indexers.upsert_folder( - session=session, es=context.es, locationUri=location.locationUri - ) + DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) return location @@ -74,7 +73,7 @@ def update_storage_location( data=input, check_perm=True, ) - indexers.upsert_folder(session, context.es, location.locationUri) + DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) return location diff --git a/backend/dataall/searchproxy/__init__.py b/backend/dataall/searchproxy/__init__.py index 1a69dac6c..8b648babe 100644 --- a/backend/dataall/searchproxy/__init__.py +++ b/backend/dataall/searchproxy/__init__.py @@ -1,15 +1,10 @@ from .connect import connect -from .indexers import upsert_dataset -from .indexers import upsert_table from .indexers import upsert_dataset_tables from .search import run_query -from .upsert import upsert __all__ = [ 'connect', 'run_query', 'upsert', - 'upsert_dataset', - 'upsert_table', 'upsert_dataset_tables', ] diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 7361c2150..34c1c3d17 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -1,319 +1,300 @@ import logging from sqlalchemy import and_ -from sqlalchemy.orm import with_expression -from .upsert import upsert from .. 
import db from ..db import models +from dataall.searchproxy.upsert import BaseIndexer from dataall.modules.datasets.db.models import DatasetStorageLocation log = logging.getLogger(__name__) -def get_target_glossary_terms(session, targetUri): - q = ( - session.query(models.TermLink) - .options( - with_expression(models.TermLink.path, models.GlossaryNode.path), - with_expression(models.TermLink.label, models.GlossaryNode.label), - with_expression(models.TermLink.readme, models.GlossaryNode.readme), +class DatasetIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, dataset_uri: str): + dataset = ( + session.query( + models.Dataset.datasetUri.label('datasetUri'), + models.Dataset.name.label('name'), + models.Dataset.owner.label('owner'), + models.Dataset.label.label('label'), + models.Dataset.description.label('description'), + models.Dataset.confidentiality.label('classification'), + models.Dataset.tags.label('tags'), + models.Dataset.topics.label('topics'), + models.Dataset.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.GlueDatabaseName.label('database'), + models.Dataset.S3BucketName.label('source'), + models.Dataset.created, + models.Dataset.updated, + models.Dataset.deleted, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(models.Dataset.datasetUri == dataset_uri) + .first() ) - .join( - models.GlossaryNode, models.GlossaryNode.nodeUri == models.TermLink.nodeUri + count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) + count_folders = db.api.Dataset.count_dataset_locations(session, dataset_uri) + count_upvotes = 
db.api.Vote.count_upvotes( + session, None, None, dataset_uri, {'targetType': 'dataset'} ) - .filter( - and_( - models.TermLink.targetUri == targetUri, - models.TermLink.approvedBySteward.is_(True), + + if dataset: + glossary = BaseIndexer._get_target_glossary_terms(session, dataset_uri) + BaseIndexer._index( + doc_id=dataset_uri, + doc={ + 'name': dataset.name, + 'owner': dataset.owner, + 'label': dataset.label, + 'admins': dataset.admins, + 'database': dataset.database, + 'source': dataset.source, + 'resourceKind': 'dataset', + 'description': dataset.description, + 'classification': dataset.classification, + 'tags': [t.replace('-', '') for t in dataset.tags or []], + 'topics': dataset.topics, + 'region': dataset.region.replace('-', ''), + 'environmentUri': dataset.envUri, + 'environmentName': dataset.envName, + 'organizationUri': dataset.orgUri, + 'organizationName': dataset.orgName, + 'created': dataset.created, + 'updated': dataset.updated, + 'deleted': dataset.deleted, + 'glossary': glossary, + 'tables': count_tables, + 'folders': count_folders, + 'upvotes': count_upvotes, + }, ) - ) - ) - return [t.path for t in q] + return dataset -def upsert_dataset(session, es, datasetUri: str): - dataset = ( - session.query( - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('name'), - models.Dataset.owner.label('owner'), - models.Dataset.label.label('label'), - models.Dataset.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.Dataset.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - 
models.Dataset.created, - models.Dataset.updated, - models.Dataset.deleted, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.Dataset.datasetUri == datasetUri) - .first() - ) - count_tables = db.api.Dataset.count_dataset_tables(session, datasetUri) - count_folders = db.api.Dataset.count_dataset_locations(session, datasetUri) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, datasetUri, {'targetType': 'dataset'} - ) +class DatasetTableIndexer(BaseIndexer): - if dataset: - glossary = get_target_glossary_terms(session, datasetUri) - upsert( - es=es, - index='dataall-index', - id=datasetUri, - doc={ - 'name': dataset.name, - 'owner': dataset.owner, - 'label': dataset.label, - 'admins': dataset.admins, - 'database': dataset.database, - 'source': dataset.source, - 'resourceKind': 'dataset', - 'description': dataset.description, - 'classification': dataset.classification, - 'tags': [t.replace('-', '') for t in dataset.tags or []], - 'topics': dataset.topics, - 'region': dataset.region.replace('-', ''), - 'environmentUri': dataset.envUri, - 'environmentName': dataset.envName, - 'organizationUri': dataset.orgUri, - 'organizationName': dataset.orgName, - 'created': dataset.created, - 'updated': dataset.updated, - 'deleted': dataset.deleted, - 'glossary': glossary, - 'tables': count_tables, - 'folders': count_folders, - 'upvotes': count_upvotes, - }, + @classmethod + def upsert(cls, session, table_uri: str): + table = ( + session.query( + models.DatasetTable.datasetUri.label('datasetUri'), + models.DatasetTable.tableUri.label('uri'), + models.DatasetTable.name.label('name'), + models.DatasetTable.owner.label('owner'), + models.DatasetTable.label.label('label'), + models.DatasetTable.description.label('description'), + models.Dataset.confidentiality.label('classification'), + 
models.DatasetTable.tags.label('tags'), + models.Dataset.topics.label('topics'), + models.Dataset.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.GlueDatabaseName.label('database'), + models.Dataset.S3BucketName.label('source'), + models.DatasetTable.created, + models.DatasetTable.updated, + models.DatasetTable.deleted, + ) + .join( + models.Dataset, + models.Dataset.datasetUri == models.DatasetTable.datasetUri, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(models.DatasetTable.tableUri == table_uri) + .first() ) - return dataset - -def upsert_table(session, es, tableUri: str): - table = ( - session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('uri'), - models.DatasetTable.name.label('name'), - models.DatasetTable.owner.label('owner'), - models.DatasetTable.label.label('label'), - models.DatasetTable.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.DatasetTable.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.DatasetTable.created, - models.DatasetTable.updated, - models.DatasetTable.deleted, - ) - .join( - models.Dataset, - 
models.Dataset.datasetUri == models.DatasetTable.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.DatasetTable.tableUri == tableUri) - .first() - ) + if table: + glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) + tags = table.tags if table.tags else [] + BaseIndexer._index( + doc_id=table_uri, + doc={ + 'name': table.name, + 'admins': table.admins, + 'owner': table.owner, + 'label': table.label, + 'resourceKind': 'table', + 'description': table.description, + 'database': table.database, + 'source': table.source, + 'classification': table.classification, + 'tags': [t.replace('-', '') for t in tags or []], + 'topics': table.topics, + 'region': table.region.replace('-', ''), + 'datasetUri': table.datasetUri, + 'environmentUri': table.envUri, + 'environmentName': table.envName, + 'organizationUri': table.orgUri, + 'organizationName': table.orgName, + 'created': table.created, + 'updated': table.updated, + 'deleted': table.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=table.datasetUri) + return table - if table: - glossary = get_target_glossary_terms(session, tableUri) - tags = table.tags if table.tags else [] - upsert( - es=es, - index='dataall-index', - id=tableUri, - doc={ - 'name': table.name, - 'admins': table.admins, - 'owner': table.owner, - 'label': table.label, - 'resourceKind': 'table', - 'description': table.description, - 'database': table.database, - 'source': table.source, - 'classification': table.classification, - 'tags': [t.replace('-', '') for t in tags or []], - 'topics': table.topics, - 'region': table.region.replace('-', ''), - 'datasetUri': table.datasetUri, - 'environmentUri': table.envUri, - 'environmentName': table.envName, - 'organizationUri': table.orgUri, - 'organizationName': 
table.orgName, - 'created': table.created, - 'updated': table.updated, - 'deleted': table.deleted, - 'glossary': glossary, - }, - ) - upsert_dataset(session, es, table.datasetUri) - return table +class DatasetLocationIndexer(BaseIndexer): -def upsert_folder(session, es, locationUri: str): - folder = ( - session.query( - DatasetStorageLocation.datasetUri.label('datasetUri'), - DatasetStorageLocation.locationUri.label('uri'), - DatasetStorageLocation.name.label('name'), - DatasetStorageLocation.owner.label('owner'), - DatasetStorageLocation.label.label('label'), - DatasetStorageLocation.description.label('description'), - DatasetStorageLocation.tags.label('tags'), - DatasetStorageLocation.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.topics.label('topics'), - models.Dataset.confidentiality.label('classification'), - DatasetStorageLocation.created, - DatasetStorageLocation.updated, - DatasetStorageLocation.deleted, - ) - .join( - models.Dataset, - models.Dataset.datasetUri == DatasetStorageLocation.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(DatasetStorageLocation.locationUri == locationUri) - .first() - ) - if folder: - glossary = get_target_glossary_terms(session, locationUri) - upsert( - es=es, - index='dataall-index', - id=locationUri, - doc={ - 'name': folder.name, - 'admins': folder.admins, - 'owner': folder.owner, - 'label': folder.label, - 'resourceKind': 'folder', - 'description': folder.description, - 'source': folder.source, - 'classification': folder.classification, - 'tags': 
[f.replace('-', '') for f in folder.tags or []], - 'topics': folder.topics, - 'region': folder.region.replace('-', ''), - 'datasetUri': folder.datasetUri, - 'environmentUri': folder.envUri, - 'environmentName': folder.envName, - 'organizationUri': folder.orgUri, - 'organizationName': folder.orgName, - 'created': folder.created, - 'updated': folder.updated, - 'deleted': folder.deleted, - 'glossary': glossary, - }, + @classmethod + def upsert(cls, session, folder_uri: str): + folder = ( + session.query( + DatasetStorageLocation.datasetUri.label('datasetUri'), + DatasetStorageLocation.locationUri.label('uri'), + DatasetStorageLocation.name.label('name'), + DatasetStorageLocation.owner.label('owner'), + DatasetStorageLocation.label.label('label'), + DatasetStorageLocation.description.label('description'), + DatasetStorageLocation.tags.label('tags'), + DatasetStorageLocation.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.S3BucketName.label('source'), + models.Dataset.topics.label('topics'), + models.Dataset.confidentiality.label('classification'), + DatasetStorageLocation.created, + DatasetStorageLocation.updated, + DatasetStorageLocation.deleted, + ) + .join( + models.Dataset, + models.Dataset.datasetUri == DatasetStorageLocation.datasetUri, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(DatasetStorageLocation.locationUri == folder_uri) + .first() ) - upsert_dataset(session, es, folder.datasetUri) - return folder + if folder: + glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) + BaseIndexer._index( + doc_id=folder_uri, + doc={ + 
'name': folder.name, + 'admins': folder.admins, + 'owner': folder.owner, + 'label': folder.label, + 'resourceKind': 'folder', + 'description': folder.description, + 'source': folder.source, + 'classification': folder.classification, + 'tags': [f.replace('-', '') for f in folder.tags or []], + 'topics': folder.topics, + 'region': folder.region.replace('-', ''), + 'datasetUri': folder.datasetUri, + 'environmentUri': folder.envUri, + 'environmentName': folder.envName, + 'organizationUri': folder.orgUri, + 'organizationName': folder.orgName, + 'created': folder.created, + 'updated': folder.updated, + 'deleted': folder.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=folder.datasetUri) + return folder -def upsert_dashboard(session, es, dashboardUri: str): - dashboard = ( - session.query( - models.Dashboard.dashboardUri.label('uri'), - models.Dashboard.name.label('name'), - models.Dashboard.owner.label('owner'), - models.Dashboard.label.label('label'), - models.Dashboard.description.label('description'), - models.Dashboard.tags.label('tags'), - models.Dashboard.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dashboard.SamlGroupName.label('admins'), - models.Dashboard.created, - models.Dashboard.updated, - models.Dashboard.deleted, - ) - .join( - models.Organization, - models.Dashboard.organizationUri == models.Dashboard.organizationUri, - ) - .join( - models.Environment, - models.Dashboard.environmentUri == models.Environment.environmentUri, - ) - .filter(models.Dashboard.dashboardUri == dashboardUri) - .first() - ) - if dashboard: - glossary = get_target_glossary_terms(session, dashboardUri) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, dashboardUri, {'targetType': 'dashboard'} - ) - upsert( - es=es, - index='dataall-index', 
- id=dashboardUri, - doc={ - 'name': dashboard.name, - 'admins': dashboard.admins, - 'owner': dashboard.owner, - 'label': dashboard.label, - 'resourceKind': 'dashboard', - 'description': dashboard.description, - 'tags': [f.replace('-', '') for f in dashboard.tags or []], - 'topics': [], - 'region': dashboard.region.replace('-', ''), - 'environmentUri': dashboard.envUri, - 'environmentName': dashboard.envName, - 'organizationUri': dashboard.orgUri, - 'organizationName': dashboard.orgName, - 'created': dashboard.created, - 'updated': dashboard.updated, - 'deleted': dashboard.deleted, - 'glossary': glossary, - 'upvotes': count_upvotes, - }, +class DashboardIndexer(BaseIndexer): + @classmethod + def upsert(cls, session, dashboard_uri: str): + dashboard = ( + session.query( + models.Dashboard.dashboardUri.label('uri'), + models.Dashboard.name.label('name'), + models.Dashboard.owner.label('owner'), + models.Dashboard.label.label('label'), + models.Dashboard.description.label('description'), + models.Dashboard.tags.label('tags'), + models.Dashboard.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dashboard.SamlGroupName.label('admins'), + models.Dashboard.created, + models.Dashboard.updated, + models.Dashboard.deleted, + ) + .join( + models.Organization, + models.Dashboard.organizationUri == models.Dashboard.organizationUri, + ) + .join( + models.Environment, + models.Dashboard.environmentUri == models.Environment.environmentUri, + ) + .filter(models.Dashboard.dashboardUri == dashboard_uri) + .first() ) - return dashboard + if dashboard: + glossary = BaseIndexer._get_target_glossary_terms(session, dashboard_uri) + count_upvotes = db.api.Vote.count_upvotes( + session, None, None, dashboard_uri, {'targetType': 'dashboard'} + ) + BaseIndexer._index( + doc_id=dashboard_uri, + doc={ + 'name': 
dashboard.name, + 'admins': dashboard.admins, + 'owner': dashboard.owner, + 'label': dashboard.label, + 'resourceKind': 'dashboard', + 'description': dashboard.description, + 'tags': [f.replace('-', '') for f in dashboard.tags or []], + 'topics': [], + 'region': dashboard.region.replace('-', ''), + 'environmentUri': dashboard.envUri, + 'environmentName': dashboard.envName, + 'organizationUri': dashboard.orgUri, + 'organizationName': dashboard.orgName, + 'created': dashboard.created, + 'updated': dashboard.updated, + 'deleted': dashboard.deleted, + 'glossary': glossary, + 'upvotes': count_upvotes, + }, + ) + return dashboard def upsert_dataset_tables(session, es, datasetUri: str): @@ -328,7 +309,7 @@ def upsert_dataset_tables(session, es, datasetUri: str): .all() ) for table in tables: - upsert_table(session, es, table.tableUri) + DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) return tables @@ -355,7 +336,7 @@ def upsert_dataset_folders(session, es, datasetUri: str): .all() ) for folder in folders: - upsert_folder(session, es, folder.locationUri) + DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) return folders diff --git a/backend/dataall/searchproxy/upsert.py b/backend/dataall/searchproxy/upsert.py index 0fd9735e5..9eb2e3125 100644 --- a/backend/dataall/searchproxy/upsert.py +++ b/backend/dataall/searchproxy/upsert.py @@ -1,15 +1,65 @@ import logging +import os +from abc import ABC, abstractmethod from datetime import datetime +from operator import and_ + +from sqlalchemy.orm import with_expression + +from dataall.db import models +from dataall.searchproxy import connect log = logging.getLogger(__name__) -def upsert(es, index, id, doc): - doc['_indexed'] = datetime.now() - if es: - res = es.index(index=index, id=id, body=doc) - log.info(f'doc {doc} for id {id} indexed with response {res}') - return True - else: - log.error(f'ES config is missing doc {doc} for id {id} was not indexed') - return False +class 
BaseIndexer(ABC): + """API to work with OpenSearch""" + _INDEX = 'dataall-index' + _es = None + + @classmethod + def es(cls): + """Lazy creation of the OpenSearch connection""" + if cls._es is None: + cls._es = connect(envname=os.getenv('envname', 'local')) + + return cls._es + + @staticmethod + @abstractmethod + def upsert(session, target_id): + raise NotImplementedError("Method upsert is not implemented") + + @classmethod + def _index(cls, doc_id, doc): + es = cls.es() + doc['_indexed'] = datetime.now() + if es: + res = es.index(index=BaseIndexer._INDEX, id=doc_id, body=doc) + log.info(f'doc {doc} for id {doc_id} indexed with response {res}') + return True + else: + log.error(f'ES config is missing doc {doc} for id {doc_id} was not indexed') + return False + + @staticmethod + def _get_target_glossary_terms(session, target_uri): + q = ( + session.query(models.TermLink) + .options( + with_expression(models.TermLink.path, models.GlossaryNode.path), + with_expression(models.TermLink.label, models.GlossaryNode.label), + with_expression(models.TermLink.readme, models.GlossaryNode.readme), + ) + .join( + models.GlossaryNode, models.GlossaryNode.nodeUri == models.TermLink.nodeUri + ) + .filter( + and_( + models.TermLink.targetUri == target_uri, + models.TermLink.approvedBySteward.is_(True), + ) + ) + ) + return [t.path for t in q] + diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 2a53880c8..9c70ce9ca 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -5,7 +5,7 @@ from .. 
import db from ..db import get_engine, exceptions from ..db import models -from ..searchproxy import indexers +from dataall.searchproxy.indexers import upsert_dataset_tables, upsert_dataset_folders, DashboardIndexer from ..searchproxy.connect import ( connect, ) @@ -33,8 +33,8 @@ def index_objects(engine, es): log.info(f'Found {len(all_datasets)} datasets') dataset: models.Dataset for dataset in all_datasets: - tables = indexers.upsert_dataset_tables(session, es, dataset.datasetUri) - folders = indexers.upsert_dataset_folders( + tables = upsert_dataset_tables(session, es, dataset.datasetUri) + folders = upsert_dataset_folders( session, es, dataset.datasetUri ) indexed_objects_counter = ( @@ -45,7 +45,7 @@ def index_objects(engine, es): log.info(f'Found {len(all_dashboards)} dashboards') dashboard: models.Dashboard for dashboard in all_dashboards: - indexers.upsert_dashboard(session, es, dashboard.dashboardUri) + DashboardIndexer.upsert(session=session, dashboard_uri=dashboard.dashboardUri) indexed_objects_counter = indexed_objects_counter + 1 log.info(f'Successfully indexed {indexed_objects_counter} objects') diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 8334f7700..4a160d818 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,3 +1,4 @@ +import dataall.searchproxy.indexers from .client import * from dataall.db import models from dataall.api import constants @@ -29,10 +30,10 @@ def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.search', return_value={}) module_mocker.patch('dataall.searchproxy.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset_tables', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_table', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_folder', return_value={}) - 
module_mocker.patch('dataall.searchproxy.indexers.upsert_dashboard', return_value={}) + module_mocker.patch('dataall.searchproxy.indexers.DatasetIndexer.upsert', return_value={}) + module_mocker.patch('dataall.searchproxy.indexers.DatasetTableIndexer.upsert', return_value={}) + module_mocker.patch('dataall.searchproxy.indexers.DatasetLocationIndexer.upsert', return_value={}) + module_mocker.patch('dataall.searchproxy.indexers.DashboardIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.delete_doc', return_value={}) diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index fcdb18bb0..b9c823957 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -5,6 +5,11 @@ import dataall from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.searchproxy.indexers import ( + DatasetIndexer, + DatasetTableIndexer, + DatasetLocationIndexer, +) @pytest.fixture(scope='module', autouse=True) @@ -124,8 +129,8 @@ def test_es_request(): def test_upsert_dataset(db, dataset, env, mocker): mocker.patch('dataall.searchproxy.upsert', return_value={}) with db.scoped_session() as session: - dataset_indexed = indexers.upsert_dataset( - session, es={}, datasetUri=dataset.datasetUri + dataset_indexed = DatasetIndexer.upsert( + session, dataset_uri=dataset.datasetUri ) assert dataset_indexed.datasetUri == dataset.datasetUri @@ -133,15 +138,15 @@ def test_upsert_dataset(db, dataset, env, mocker): def test_upsert_table(db, dataset, env, mocker, table): mocker.patch('dataall.searchproxy.upsert', return_value={}) with db.scoped_session() as session: - table_indexed = indexers.upsert_table(session, es={}, tableUri=table.tableUri) + table_indexed = DatasetTableIndexer.upsert(session, table_uri=table.tableUri) assert table_indexed.uri == table.tableUri def test_upsert_folder(db, dataset, env, mocker, folder): 
mocker.patch('dataall.searchproxy.upsert', return_value={}) with db.scoped_session() as session: - folder_indexed = indexers.upsert_folder( - session, es={}, locationUri=folder.locationUri + folder_indexed = DatasetLocationIndexer.upsert( + session=session, folder_uri=folder.locationUri ) assert folder_indexed.uri == folder.locationUri diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 77090b2d4..d6e73e4c6 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -86,7 +86,7 @@ def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): 'dataall.searchproxy.indexers.upsert_dataset_tables', return_value=[table] ) mocker.patch( - 'dataall.searchproxy.indexers.upsert_dataset', return_value=sync_dataset + 'dataall.searchproxy.indexers.DatasetIndexer.upsert', return_value=sync_dataset ) indexed_objects_counter = dataall.tasks.catalog_indexer.index_objects( engine=db, es=True From 4e31b991e7b09137a31559e1e3abc77d7820d283 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 19 Apr 2023 16:41:48 +0200 Subject: [PATCH 060/346] Extracted upsert_dataset_folders into DatasetLocationIndexer and renamed it --- backend/dataall/searchproxy/indexers.py | 22 +++++++++++----------- backend/dataall/tasks/catalog_indexer.py | 6 ++---- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 34c1c3d17..a58d832eb 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -233,6 +233,17 @@ def upsert(cls, session, folder_uri: str): DatasetIndexer.upsert(session=session, dataset_uri=folder.datasetUri) return folder + @classmethod + def upsert_all(cls, session, dataset_uri: str): + folders = ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .all() + ) + for folder in folders: + 
DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) + return folders + class DashboardIndexer(BaseIndexer): @classmethod @@ -329,17 +340,6 @@ def remove_deleted_tables(session, es, datasetUri: str): return tables -def upsert_dataset_folders(session, es, datasetUri: str): - folders = ( - session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == datasetUri) - .all() - ) - for folder in folders: - DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) - return folders - - def delete_doc(es, doc_id, index='dataall-index'): es.delete(index=index, id=doc_id, ignore=[400, 404]) return True diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 9c70ce9ca..b951e7e83 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -5,7 +5,7 @@ from .. import db from ..db import get_engine, exceptions from ..db import models -from dataall.searchproxy.indexers import upsert_dataset_tables, upsert_dataset_folders, DashboardIndexer +from dataall.searchproxy.indexers import upsert_dataset_tables, DashboardIndexer, DatasetLocationIndexer from ..searchproxy.connect import ( connect, ) @@ -34,9 +34,7 @@ def index_objects(engine, es): dataset: models.Dataset for dataset in all_datasets: tables = upsert_dataset_tables(session, es, dataset.datasetUri) - folders = upsert_dataset_folders( - session, es, dataset.datasetUri - ) + folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) indexed_objects_counter = ( indexed_objects_counter + len(tables) + len(folders) + 1 ) From b7728125afdce3a3e42d75f96593b3aeae55665e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 19 Apr 2023 16:51:39 +0200 Subject: [PATCH 061/346] Moved DatasetLocationIndexer into the dataset module --- backend/dataall/modules/datasets/__init__.py | 2 +- .../api/storage_location/resolvers.py | 3 +- 
.../modules/datasets/indexers/__init__.py | 1 + .../datasets/indexers/location_indexer.py | 89 +++++++++++++++++++ backend/dataall/searchproxy/indexers.py | 84 ----------------- backend/dataall/tasks/catalog_indexer.py | 3 +- tests/api/conftest.py | 5 +- tests/searchproxy/test_indexers.py | 2 +- 8 files changed, 99 insertions(+), 90 deletions(-) create mode 100644 backend/dataall/modules/datasets/indexers/__init__.py create mode 100644 backend/dataall/modules/datasets/indexers/location_indexer.py diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index a2f600f68..b976764ce 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -5,8 +5,8 @@ from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.loader import ModuleInterface, ImportMode -from dataall.searchproxy.indexers import DatasetLocationIndexer log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 4aebc1458..1b6dcdb92 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -1,6 +1,5 @@ from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.s3 import S3 from dataall.db import permissions, models from dataall.db.api import ( ResourcePolicy, @@ -9,10 +8,10 @@ Environment, ) from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler +from 
dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService -from dataall.searchproxy.indexers import DatasetLocationIndexer def create_storage_location( diff --git a/backend/dataall/modules/datasets/indexers/__init__.py b/backend/dataall/modules/datasets/indexers/__init__.py new file mode 100644 index 000000000..faf66363b --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/__init__.py @@ -0,0 +1 @@ +"""Contains dataset related indexers for OpenSearch""" diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py new file mode 100644 index 000000000..9a3147e12 --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -0,0 +1,89 @@ +"""Indexes DatasetStorageLocation in OpenSearch""" +from dataall.modules.datasets.db.models import DatasetStorageLocation + +from dataall.db import models +from dataall.searchproxy.indexers import DatasetIndexer +from dataall.searchproxy.upsert import BaseIndexer + + +class DatasetLocationIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, folder_uri: str): + folder = ( + session.query( + DatasetStorageLocation.datasetUri.label('datasetUri'), + DatasetStorageLocation.locationUri.label('uri'), + DatasetStorageLocation.name.label('name'), + DatasetStorageLocation.owner.label('owner'), + DatasetStorageLocation.label.label('label'), + DatasetStorageLocation.description.label('description'), + DatasetStorageLocation.tags.label('tags'), + DatasetStorageLocation.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + 
models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.S3BucketName.label('source'), + models.Dataset.topics.label('topics'), + models.Dataset.confidentiality.label('classification'), + DatasetStorageLocation.created, + DatasetStorageLocation.updated, + DatasetStorageLocation.deleted, + ) + .join( + models.Dataset, + models.Dataset.datasetUri == DatasetStorageLocation.datasetUri, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(DatasetStorageLocation.locationUri == folder_uri) + .first() + ) + if folder: + glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) + BaseIndexer._index( + doc_id=folder_uri, + doc={ + 'name': folder.name, + 'admins': folder.admins, + 'owner': folder.owner, + 'label': folder.label, + 'resourceKind': 'folder', + 'description': folder.description, + 'source': folder.source, + 'classification': folder.classification, + 'tags': [f.replace('-', '') for f in folder.tags or []], + 'topics': folder.topics, + 'region': folder.region.replace('-', ''), + 'datasetUri': folder.datasetUri, + 'environmentUri': folder.envUri, + 'environmentName': folder.envName, + 'organizationUri': folder.orgUri, + 'organizationName': folder.orgName, + 'created': folder.created, + 'updated': folder.updated, + 'deleted': folder.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=folder.datasetUri) + return folder + + @classmethod + def upsert_all(cls, session, dataset_uri: str): + folders = ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .all() + ) + for folder in folders: + DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) + return folders diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 
a58d832eb..601945509 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,7 +5,6 @@ from .. import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer -from dataall.modules.datasets.db.models import DatasetStorageLocation log = logging.getLogger(__name__) @@ -162,89 +161,6 @@ def upsert(cls, session, table_uri: str): return table -class DatasetLocationIndexer(BaseIndexer): - - @classmethod - def upsert(cls, session, folder_uri: str): - folder = ( - session.query( - DatasetStorageLocation.datasetUri.label('datasetUri'), - DatasetStorageLocation.locationUri.label('uri'), - DatasetStorageLocation.name.label('name'), - DatasetStorageLocation.owner.label('owner'), - DatasetStorageLocation.label.label('label'), - DatasetStorageLocation.description.label('description'), - DatasetStorageLocation.tags.label('tags'), - DatasetStorageLocation.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.topics.label('topics'), - models.Dataset.confidentiality.label('classification'), - DatasetStorageLocation.created, - DatasetStorageLocation.updated, - DatasetStorageLocation.deleted, - ) - .join( - models.Dataset, - models.Dataset.datasetUri == DatasetStorageLocation.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(DatasetStorageLocation.locationUri == folder_uri) - .first() - ) - if folder: - glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) - BaseIndexer._index( - doc_id=folder_uri, - doc={ - 'name': folder.name, - 
'admins': folder.admins, - 'owner': folder.owner, - 'label': folder.label, - 'resourceKind': 'folder', - 'description': folder.description, - 'source': folder.source, - 'classification': folder.classification, - 'tags': [f.replace('-', '') for f in folder.tags or []], - 'topics': folder.topics, - 'region': folder.region.replace('-', ''), - 'datasetUri': folder.datasetUri, - 'environmentUri': folder.envUri, - 'environmentName': folder.envName, - 'organizationUri': folder.orgUri, - 'organizationName': folder.orgName, - 'created': folder.created, - 'updated': folder.updated, - 'deleted': folder.deleted, - 'glossary': glossary, - }, - ) - DatasetIndexer.upsert(session=session, dataset_uri=folder.datasetUri) - return folder - - @classmethod - def upsert_all(cls, session, dataset_uri: str): - folders = ( - session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == dataset_uri) - .all() - ) - for folder in folders: - DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) - return folders - - class DashboardIndexer(BaseIndexer): @classmethod def upsert(cls, session, dashboard_uri: str): diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index b951e7e83..e3d80458e 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -2,10 +2,11 @@ import os import sys +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from .. 
import db from ..db import get_engine, exceptions from ..db import models -from dataall.searchproxy.indexers import upsert_dataset_tables, DashboardIndexer, DatasetLocationIndexer +from dataall.searchproxy.indexers import upsert_dataset_tables, DashboardIndexer from ..searchproxy.connect import ( connect, ) diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 4a160d818..f61ad46ef 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -32,7 +32,10 @@ def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset_tables', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.DatasetIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.DatasetTableIndexer.upsert', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.DatasetLocationIndexer.upsert', return_value={}) + module_mocker.patch( + 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', + return_value={} + ) module_mocker.patch('dataall.searchproxy.indexers.DashboardIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.delete_doc', return_value={}) diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index b9c823957..eda5a7dd7 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -3,12 +3,12 @@ import pytest import dataall +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.searchproxy.indexers import ( DatasetIndexer, DatasetTableIndexer, - DatasetLocationIndexer, ) From cd798e2c5537f546a3f8a7fcac3f4e07358ddb4c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 10:26:46 +0200 Subject: [PATCH 062/346] Moved DatasetStorageLocation methods to the service --- 
.../dataall/api/Objects/Dataset/resolvers.py | 7 ++- backend/dataall/db/api/dataset.py | 60 +------------------ .../datasets/services/dataset_location.py | 57 ++++++++++++++++++ backend/dataall/searchproxy/indexers.py | 3 +- 4 files changed, 65 insertions(+), 62 deletions(-) diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index fcbc2d6f3..63b9a47b4 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -18,6 +18,7 @@ from ....db.api.organization import Organization from dataall.searchproxy import indexers from dataall.searchproxy.indexers import DatasetIndexer +from ....modules.datasets.services.dataset_location import DatasetStorageLocationService log = logging.getLogger(__name__) @@ -160,7 +161,7 @@ def list_locations(context, source: models.Dataset, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_locations( + return DatasetStorageLocationService.paginated_dataset_locations( session=session, username=context.username, groups=context.groups, @@ -232,7 +233,7 @@ def get_dataset_statistics(context: Context, source: models.Dataset, **kwargs): return None with context.engine.scoped_session() as session: count_tables = db.api.Dataset.count_dataset_tables(session, source.datasetUri) - count_locations = db.api.Dataset.count_dataset_locations( + count_locations = DatasetStorageLocationService.count_dataset_locations( session, source.datasetUri ) count_upvotes = db.api.Vote.count_upvotes( @@ -557,7 +558,7 @@ def delete_dataset( for uri in tables: indexers.delete_doc(es=context.es, doc_id=uri) - folders = [f.locationUri for f in Dataset.get_dataset_folders(session, datasetUri)] + folders = [f.locationUri for f in DatasetStorageLocationService.get_dataset_folders(session, datasetUri)] for uri in folders: indexers.delete_doc(es=context.es, 
doc_id=uri) diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index f78d92eae..27b5f59d0 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -16,11 +16,11 @@ from . import Organization from .. import models, api, exceptions, permissions, paginate from ..models.Enums import Language, ConfidentialityClassification +from ...modules.datasets.services.dataset_location import DatasetStorageLocationService from ...utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) -from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -263,30 +263,6 @@ def paginated_user_datasets( page_size=data.get('pageSize', 10), ).to_dict() - @staticmethod - def paginated_dataset_locations( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - query = session.query(DatasetStorageLocation).filter( - DatasetStorageLocation.datasetUri == uri - ) - if data and data.get('term'): - query = query.filter( - or_( - *[ - DatasetStorageLocation.name.ilike( - '%' + data.get('term') + '%' - ), - DatasetStorageLocation.S3Prefix.ilike( - '%' + data.get('term') + '%' - ), - ] - ) - ) - return paginate( - query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) - ).to_dict() - @staticmethod def paginated_dataset_tables( session, username, groups, uri, data=None, check_perm=None @@ -486,15 +462,6 @@ def get_dataset_tables(session, dataset_uri): .all() ) - @staticmethod - def get_dataset_folders(session, dataset_uri): - """return the dataset folders""" - return ( - session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == dataset_uri) - .all() - ) - @staticmethod def query_dataset_shares(session, dataset_uri) -> Query: return session.query(models.ShareObject).filter( @@ -549,7 +516,7 @@ def delete_dataset( Dataset._delete_dataset_shares_with_no_shared_items(session, uri) 
Dataset._delete_dataset_term_links(session, uri) Dataset._delete_dataset_tables(session, dataset.datasetUri) - Dataset._delete_dataset_locations(session, dataset.datasetUri) + DatasetStorageLocationService.delete_dataset_locations(session, dataset.datasetUri) KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') Vote.delete_votes(session, dataset.datasetUri, 'dataset') session.delete(dataset) @@ -632,21 +599,6 @@ def _delete_dataset_tables(session, dataset_uri) -> bool: table.deleted = datetime.now() return tables - @staticmethod - def _delete_dataset_locations(session, dataset_uri) -> bool: - locations = ( - session.query(DatasetStorageLocation) - .filter( - and_( - DatasetStorageLocation.datasetUri == dataset_uri, - ) - ) - .all() - ) - for location in locations: - session.delete(location) - return True - @staticmethod def list_all_datasets(session) -> [models.Dataset]: return session.query(models.Dataset).all() @@ -672,11 +624,3 @@ def count_dataset_tables(session, dataset_uri): .filter(models.DatasetTable.datasetUri == dataset_uri) .count() ) - - @staticmethod - def count_dataset_locations(session, dataset_uri): - return ( - session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == dataset_uri) - .count() - ) diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index 640f0a037..4d82ec2d3 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -203,3 +203,60 @@ def get_location_by_s3_prefix(session, s3_prefix, accountid, region): else: logging.info(f'Found location {location.locationUri}|{location.S3Prefix}') return location + + @staticmethod + def count_dataset_locations(session, dataset_uri): + return ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .count() + ) + + @staticmethod + def 
delete_dataset_locations(session, dataset_uri) -> bool: + locations = ( + session.query(DatasetStorageLocation) + .filter( + and_( + DatasetStorageLocation.datasetUri == dataset_uri, + ) + ) + .all() + ) + for location in locations: + session.delete(location) + return True + + @staticmethod + def get_dataset_folders(session, dataset_uri): + """return the dataset folders""" + return ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def paginated_dataset_locations( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + query = session.query(DatasetStorageLocation).filter( + DatasetStorageLocation.datasetUri == uri + ) + if data and data.get('term'): + query = query.filter( + or_( + *[ + DatasetStorageLocation.name.ilike( + '%' + data.get('term') + '%' + ), + DatasetStorageLocation.S3Prefix.ilike( + '%' + data.get('term') + '%' + ), + ] + ) + ) + return paginate( + query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) + ).to_dict() + diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 601945509..f157deae2 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,6 +5,7 @@ from .. 
import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer +from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService log = logging.getLogger(__name__) @@ -47,7 +48,7 @@ def upsert(cls, session, dataset_uri: str): .first() ) count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) - count_folders = db.api.Dataset.count_dataset_locations(session, dataset_uri) + count_folders = DatasetStorageLocationService.count_dataset_locations(session, dataset_uri) count_upvotes = db.api.Vote.count_upvotes( session, None, None, dataset_uri, {'targetType': 'dataset'} ) From b0e6a62a9449edfd908066bdfe5fb17b2e4ffbb2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 10:27:14 +0200 Subject: [PATCH 063/346] Renamed the service --- .../dataall/api/Objects/Dataset/resolvers.py | 8 ++++---- backend/dataall/db/api/dataset.py | 4 ++-- .../api/storage_location/resolvers.py | 20 +++++++++---------- .../datasets/handlers/s3_location_handler.py | 4 ++-- .../datasets/services/dataset_location.py | 8 ++++---- .../datasets/tasks/subscription_service.py | 4 ++-- backend/dataall/searchproxy/indexers.py | 4 ++-- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index 63b9a47b4..1b522cd94 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -18,7 +18,7 @@ from ....db.api.organization import Organization from dataall.searchproxy import indexers from dataall.searchproxy.indexers import DatasetIndexer -from ....modules.datasets.services.dataset_location import DatasetStorageLocationService +from ....modules.datasets.services.dataset_location import DatasetLocationService log = logging.getLogger(__name__) @@ -161,7 +161,7 @@ def list_locations(context, source: models.Dataset, filter: dict = None): if not filter: filter = {'page': 
1, 'pageSize': 5} with context.engine.scoped_session() as session: - return DatasetStorageLocationService.paginated_dataset_locations( + return DatasetLocationService.paginated_dataset_locations( session=session, username=context.username, groups=context.groups, @@ -233,7 +233,7 @@ def get_dataset_statistics(context: Context, source: models.Dataset, **kwargs): return None with context.engine.scoped_session() as session: count_tables = db.api.Dataset.count_dataset_tables(session, source.datasetUri) - count_locations = DatasetStorageLocationService.count_dataset_locations( + count_locations = DatasetLocationService.count_dataset_locations( session, source.datasetUri ) count_upvotes = db.api.Vote.count_upvotes( @@ -558,7 +558,7 @@ def delete_dataset( for uri in tables: indexers.delete_doc(es=context.es, doc_id=uri) - folders = [f.locationUri for f in DatasetStorageLocationService.get_dataset_folders(session, datasetUri)] + folders = [f.locationUri for f in DatasetLocationService.get_dataset_folders(session, datasetUri)] for uri in folders: indexers.delete_doc(es=context.es, doc_id=uri) diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index 27b5f59d0..c9f2bd581 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -16,7 +16,7 @@ from . import Organization from .. 
import models, api, exceptions, permissions, paginate from ..models.Enums import Language, ConfidentialityClassification -from ...modules.datasets.services.dataset_location import DatasetStorageLocationService +from ...modules.datasets.services.dataset_location import DatasetLocationService from ...utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -516,7 +516,7 @@ def delete_dataset( Dataset._delete_dataset_shares_with_no_shared_items(session, uri) Dataset._delete_dataset_term_links(session, uri) Dataset._delete_dataset_tables(session, dataset.datasetUri) - DatasetStorageLocationService.delete_dataset_locations(session, dataset.datasetUri) + DatasetLocationService.delete_dataset_locations(session, dataset.datasetUri) KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') Vote.delete_votes(session, dataset.datasetUri, 'dataset') session.delete(dataset) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 1b6dcdb92..09cf4b14a 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -11,14 +11,14 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService +from dataall.modules.datasets.services.dataset_location import DatasetLocationService def create_storage_location( context, source, datasetUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetStorageLocationService.create_dataset_location( + location = DatasetLocationService.create_dataset_location( session=session, username=context.username, groups=context.groups, @@ -39,15 +39,15 
@@ def list_dataset_locations(context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetStorageLocationService.list_dataset_locations( + return DatasetLocationService.list_dataset_locations( session=session, uri=source.datasetUri, data=filter, check_perm=True ) def get_storage_location(context, source, locationUri=None): with context.engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) - return DatasetStorageLocationService.get_dataset_location( + location = DatasetLocationService.get_location_by_uri(session, locationUri) + return DatasetLocationService.get_dataset_location( session=session, username=context.username, groups=context.groups, @@ -61,10 +61,10 @@ def update_storage_location( context, source, locationUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) + location = DatasetLocationService.get_location_by_uri(session, locationUri) input['location'] = location input['locationUri'] = location.locationUri - DatasetStorageLocationService.update_dataset_location( + DatasetLocationService.update_dataset_location( session=session, username=context.username, groups=context.groups, @@ -79,8 +79,8 @@ def update_storage_location( def remove_storage_location(context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) - DatasetStorageLocationService.delete_dataset_location( + location = DatasetLocationService.get_location_by_uri(session, locationUri) + DatasetLocationService.delete_dataset_location( session=session, username=context.username, groups=context.groups, @@ -102,7 +102,7 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): def publish_location_update(context: 
Context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri(session, locationUri) + location = DatasetLocationService.get_location_by_uri(session, locationUri) ResourcePolicy.check_user_resource_permission( session=session, username=context.username, diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_location_handler.py index 431a4cecd..ba8cf6eda 100644 --- a/backend/dataall/modules/datasets/handlers/s3_location_handler.py +++ b/backend/dataall/modules/datasets/handlers/s3_location_handler.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService +from dataall.modules.datasets.services.dataset_location import DatasetLocationService log = logging.getLogger(__name__) @@ -20,7 +20,7 @@ def client(account_id: str, region: str, client_type: str): @Worker.handler(path='s3.prefix.create') def create_dataset_location(engine, task: models.Task): with engine.scoped_session() as session: - location = DatasetStorageLocationService.get_location_by_uri( + location = DatasetLocationService.get_location_by_uri( session, task.targetUri ) S3DatasetLocationHandler.create_bucket_prefix(location) diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index 4d82ec2d3..d0e8f0936 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class DatasetStorageLocationService: +class DatasetLocationService: @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) 
@has_resource_perm(permissions.CREATE_DATASET_FOLDER) @@ -102,7 +102,7 @@ def get_dataset_location( data: dict = None, check_perm: bool = False, ) -> DatasetStorageLocation: - return DatasetStorageLocationService.get_location_by_uri(session, data['locationUri']) + return DatasetLocationService.get_location_by_uri(session, data['locationUri']) @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @@ -118,7 +118,7 @@ def update_dataset_location( location = data.get( 'location', - DatasetStorageLocationService.get_location_by_uri(session, data['locationUri']), + DatasetLocationService.get_location_by_uri(session, data['locationUri']), ) for k in data.keys(): @@ -145,7 +145,7 @@ def delete_dataset_location( data: dict = None, check_perm: bool = False, ): - location = DatasetStorageLocationService.get_location_by_uri( + location = DatasetLocationService.get_location_by_uri( session, data['locationUri'] ) share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 94339d0f7..901865812 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -16,7 +16,7 @@ from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService +from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.db.models import DatasetStorageLocation root = logging.getLogger() @@ -106,7 +106,7 @@ def publish_table_update_message(engine, message): @staticmethod def publish_location_update_message(session, message): location: DatasetStorageLocation = ( - 
DatasetStorageLocationService.get_location_by_s3_prefix( + DatasetLocationService.get_location_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index f157deae2..12d1da90d 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,7 +5,7 @@ from .. import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer -from dataall.modules.datasets.services.dataset_location import DatasetStorageLocationService +from dataall.modules.datasets.services.dataset_location import DatasetLocationService log = logging.getLogger(__name__) @@ -48,7 +48,7 @@ def upsert(cls, session, dataset_uri: str): .first() ) count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) - count_folders = DatasetStorageLocationService.count_dataset_locations(session, dataset_uri) + count_folders = DatasetLocationService.count_dataset_locations(session, dataset_uri) count_upvotes = db.api.Vote.count_upvotes( session, None, None, dataset_uri, {'targetType': 'dataset'} ) From 27c6d79559460090732a3f27f756f87d58a9e177 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 10:39:04 +0200 Subject: [PATCH 064/346] Moved DatasetIndexer to modules --- .../dataall/api/Objects/Dataset/resolvers.py | 4 +- .../dataall/api/Objects/Glossary/registry.py | 10 +-- backend/dataall/api/Objects/Vote/resolvers.py | 3 +- backend/dataall/modules/datasets/__init__.py | 9 ++ .../datasets/indexers/dataset_indexer.py | 82 +++++++++++++++++++ .../datasets/indexers/location_indexer.py | 2 +- backend/dataall/searchproxy/indexers.py | 78 +----------------- tests/api/conftest.py | 2 +- .../modules/notebooks/test_notebook_stack.py | 2 +- .../notebooks/test_sagemaker_notebook.py | 1 - tests/searchproxy/test_indexers.py | 6 +- tests/tasks/test_catalog_indexer.py | 2 +- 12 files changed, 104 insertions(+), 97 deletions(-) 
create mode 100644 backend/dataall/modules/datasets/indexers/dataset_indexer.py diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index 1b522cd94..2acdfa1fc 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -17,8 +17,8 @@ from ....db.api import Dataset, Environment, ShareObject, ResourcePolicy from ....db.api.organization import Organization from dataall.searchproxy import indexers -from dataall.searchproxy.indexers import DatasetIndexer -from ....modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer log = logging.getLogger(__name__) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index cb82bf208..147f97a0e 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -6,7 +6,7 @@ from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models -from dataall.searchproxy.indexers import DashboardIndexer, DatasetTableIndexer, DatasetIndexer +from dataall.searchproxy.indexers import DashboardIndexer, DatasetTableIndexer from dataall.searchproxy.upsert import BaseIndexer @@ -74,10 +74,4 @@ def reindex(cls, session, es: OpenSearch, target_type: str, target_uri: str): object_type="DatasetTable", model=models.DatasetTable, reindexer=DatasetTableIndexer -)) -GlossaryRegistry.register(GlossaryDefinition( - target_type="Dataset", - object_type="Dataset", - model=models.Dataset, - reindexer=DatasetIndexer -)) +)) \ No newline at end of file diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index 34dcd9f05..42f5c20f5 
100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -1,6 +1,7 @@ from .... import db from ....api.context import Context -from dataall.searchproxy.indexers import DatasetIndexer, DashboardIndexer +from dataall.searchproxy.indexers import DashboardIndexer +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer def count_upvotes( diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index b976764ce..03cd58cdc 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,7 +4,9 @@ from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition +from dataall.db import models from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.loader import ModuleInterface, ImportMode @@ -32,6 +34,13 @@ def __init__(self): reindexer=DatasetLocationIndexer )) + GlossaryRegistry.register(GlossaryDefinition( + target_type="Dataset", + object_type="Dataset", + model=models.Dataset, + reindexer=DatasetIndexer + )) + log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py new file mode 100644 index 000000000..8cb0b7873 --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -0,0 +1,82 @@ +"""Indexes Datasets in OpenSearch""" + +from dataall import db +from dataall.db import models +from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.searchproxy.upsert import BaseIndexer + + +class 
DatasetIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, dataset_uri: str): + dataset = ( + session.query( + models.Dataset.datasetUri.label('datasetUri'), + models.Dataset.name.label('name'), + models.Dataset.owner.label('owner'), + models.Dataset.label.label('label'), + models.Dataset.description.label('description'), + models.Dataset.confidentiality.label('classification'), + models.Dataset.tags.label('tags'), + models.Dataset.topics.label('topics'), + models.Dataset.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.GlueDatabaseName.label('database'), + models.Dataset.S3BucketName.label('source'), + models.Dataset.created, + models.Dataset.updated, + models.Dataset.deleted, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(models.Dataset.datasetUri == dataset_uri) + .first() + ) + count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) + count_folders = DatasetLocationService.count_dataset_locations(session, dataset_uri) + count_upvotes = db.api.Vote.count_upvotes( + session, None, None, dataset_uri, {'targetType': 'dataset'} + ) + + if dataset: + glossary = BaseIndexer._get_target_glossary_terms(session, dataset_uri) + BaseIndexer._index( + doc_id=dataset_uri, + doc={ + 'name': dataset.name, + 'owner': dataset.owner, + 'label': dataset.label, + 'admins': dataset.admins, + 'database': dataset.database, + 'source': dataset.source, + 'resourceKind': 'dataset', + 'description': dataset.description, + 'classification': dataset.classification, + 'tags': [t.replace('-', '') for t in dataset.tags or []], + 'topics': 
dataset.topics, + 'region': dataset.region.replace('-', ''), + 'environmentUri': dataset.envUri, + 'environmentName': dataset.envName, + 'organizationUri': dataset.orgUri, + 'organizationName': dataset.orgName, + 'created': dataset.created, + 'updated': dataset.updated, + 'deleted': dataset.deleted, + 'glossary': glossary, + 'tables': count_tables, + 'folders': count_folders, + 'upvotes': count_upvotes, + }, + ) + return dataset diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 9a3147e12..72495b51c 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -2,7 +2,7 @@ from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.db import models -from dataall.searchproxy.indexers import DatasetIndexer +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.upsert import BaseIndexer diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 12d1da90d..fa91cb4eb 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,87 +5,11 @@ from .. 
import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer log = logging.getLogger(__name__) -class DatasetIndexer(BaseIndexer): - - @classmethod - def upsert(cls, session, dataset_uri: str): - dataset = ( - session.query( - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('name'), - models.Dataset.owner.label('owner'), - models.Dataset.label.label('label'), - models.Dataset.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.Dataset.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.created, - models.Dataset.updated, - models.Dataset.deleted, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.Dataset.datasetUri == dataset_uri) - .first() - ) - count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) - count_folders = DatasetLocationService.count_dataset_locations(session, dataset_uri) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, dataset_uri, {'targetType': 'dataset'} - ) - - if dataset: - glossary = BaseIndexer._get_target_glossary_terms(session, dataset_uri) - BaseIndexer._index( - doc_id=dataset_uri, - doc={ - 'name': dataset.name, - 'owner': dataset.owner, - 'label': 
dataset.label, - 'admins': dataset.admins, - 'database': dataset.database, - 'source': dataset.source, - 'resourceKind': 'dataset', - 'description': dataset.description, - 'classification': dataset.classification, - 'tags': [t.replace('-', '') for t in dataset.tags or []], - 'topics': dataset.topics, - 'region': dataset.region.replace('-', ''), - 'environmentUri': dataset.envUri, - 'environmentName': dataset.envName, - 'organizationUri': dataset.orgUri, - 'organizationName': dataset.orgName, - 'created': dataset.created, - 'updated': dataset.updated, - 'deleted': dataset.deleted, - 'glossary': glossary, - 'tables': count_tables, - 'folders': count_folders, - 'upvotes': count_upvotes, - }, - ) - return dataset - - class DatasetTableIndexer(BaseIndexer): @classmethod diff --git a/tests/api/conftest.py b/tests/api/conftest.py index f61ad46ef..9eb50e050 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -30,7 +30,7 @@ def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.search', return_value={}) module_mocker.patch('dataall.searchproxy.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset_tables', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.DatasetIndexer.upsert', return_value={}) + module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.DatasetTableIndexer.upsert', return_value={}) module_mocker.patch( 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', diff --git a/tests/modules/notebooks/test_notebook_stack.py b/tests/modules/notebooks/test_notebook_stack.py index fc65e9af4..1c41c46c6 100644 --- a/tests/modules/notebooks/test_notebook_stack.py +++ b/tests/modules/notebooks/test_notebook_stack.py @@ -15,4 +15,4 @@ def test_notebook_stack(client, sgm_notebook, group): username="alice", groups=[group.name], ) - assert 
response.data.updateStack.targetUri == sgm_notebook.notebookUri \ No newline at end of file + assert response.data.updateStack.targetUri == sgm_notebook.notebookUri diff --git a/tests/modules/notebooks/test_sagemaker_notebook.py b/tests/modules/notebooks/test_sagemaker_notebook.py index 5fd4e4d16..8b2aa9792 100644 --- a/tests/modules/notebooks/test_sagemaker_notebook.py +++ b/tests/modules/notebooks/test_sagemaker_notebook.py @@ -12,7 +12,6 @@ def get_notebook_instance_status(self): return "INSERVICE" - @pytest.fixture(scope='module') def org1(org, user, group, tenant): org1 = org('testorg', user.userName, group.name) diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index eda5a7dd7..ffc0370c1 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -6,10 +6,8 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.searchproxy.indexers import ( - DatasetIndexer, - DatasetTableIndexer, -) +from dataall.searchproxy.indexers import DatasetTableIndexer +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @pytest.fixture(scope='module', autouse=True) diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index d6e73e4c6..f8901933b 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -86,7 +86,7 @@ def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): 'dataall.searchproxy.indexers.upsert_dataset_tables', return_value=[table] ) mocker.patch( - 'dataall.searchproxy.indexers.DatasetIndexer.upsert', return_value=sync_dataset + 'dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value=sync_dataset ) indexed_objects_counter = dataall.tasks.catalog_indexer.index_objects( engine=db, es=True From 
0e730ac3945b53f4ee3cedaf2cd819d34e8d80b6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 10:50:08 +0200 Subject: [PATCH 065/346] Created a dataset repository. There is a few of circular imports. It's a first attempt to solve it --- backend/dataall/db/api/dataset.py | 6 ++---- .../modules/datasets/db/dataset_repository.py | 13 +++++++++++++ .../modules/datasets/services/dataset_location.py | 4 ++-- 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 backend/dataall/modules/datasets/db/dataset_repository.py diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index c9f2bd581..3c2c8de10 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -16,6 +16,7 @@ from . import Organization from .. import models, api, exceptions, permissions, paginate from ..models.Enums import Language, ConfidentialityClassification +from ...modules.datasets.db.dataset_repository import DatasetRepository from ...modules.datasets.services.dataset_location import DatasetLocationService from ...utils.naming_convention import ( NamingConventionService, @@ -210,10 +211,7 @@ def get_dataset( @staticmethod def get_dataset_by_uri(session, dataset_uri) -> models.Dataset: - dataset: Dataset = session.query(models.Dataset).get(dataset_uri) - if not dataset: - raise exceptions.ObjectNotFound('Dataset', dataset_uri) - return dataset + return DatasetRepository.get_dataset_by_uri(session, dataset_uri) @staticmethod def query_user_datasets(session, username, groups, filter) -> Query: diff --git a/backend/dataall/modules/datasets/db/dataset_repository.py b/backend/dataall/modules/datasets/db/dataset_repository.py new file mode 100644 index 000000000..d58c3a7a1 --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_repository.py @@ -0,0 +1,13 @@ +from dataall.db import exceptions +from dataall.db.models import Dataset + + +class DatasetRepository: + """DAO layer for Datasets""" + + @staticmethod 
+ def get_dataset_by_uri(session, dataset_uri) -> Dataset: + dataset: Dataset = session.query(Dataset).get(dataset_uri) + if not dataset: + raise exceptions.ObjectNotFound('Dataset', dataset_uri) + return dataset diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index d0e8f0936..f1d8b5eaf 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -5,7 +5,7 @@ from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary from dataall.db import models, api, paginate, permissions, exceptions -from dataall.db.api.dataset import Dataset +from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -23,7 +23,7 @@ def create_dataset_location( data: dict = None, check_perm: bool = False, ) -> DatasetStorageLocation: - dataset = Dataset.get_dataset_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, uri) exists = ( session.query(DatasetStorageLocation) .filter( From 9ac79644614a6f9bf2fb5b9d003abc07dc07569f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 11:04:38 +0200 Subject: [PATCH 066/346] Moved DatasetTableIndexer --- .../dataall/api/Objects/Dataset/resolvers.py | 5 +- .../api/Objects/DatasetTable/resolvers.py | 2 +- .../datasets/indexers/table_indexer.py | 98 +++++++++++++++++++ .../modules/datasets/tasks/tables_syncer.py | 3 +- backend/dataall/searchproxy/__init__.py | 2 - backend/dataall/searchproxy/indexers.py | 93 ------------------ backend/dataall/tasks/catalog_indexer.py | 5 +- tests/api/conftest.py | 5 +- tests/searchproxy/test_indexers.py | 6 +- tests/tasks/test_catalog_indexer.py | 3 +- 10 files changed, 116 insertions(+), 106 deletions(-) create mode 100644 
backend/dataall/modules/datasets/indexers/table_indexer.py diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index 2acdfa1fc..79e306c9e 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -19,6 +19,7 @@ from dataall.searchproxy import indexers from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer log = logging.getLogger(__name__) @@ -324,8 +325,8 @@ def sync_tables(context: Context, source, datasetUri: str = None): session.add(task) Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) with context.engine.scoped_session() as session: - indexers.upsert_dataset_tables( - session=session, es=context.es, datasetUri=dataset.datasetUri + DatasetTableIndexer.upsert_all( + session=session, dataset_uri=dataset.datasetUri ) indexers.remove_deleted_tables( session=session, es=context.es, datasetUri=dataset.datasetUri diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/api/Objects/DatasetTable/resolvers.py index 567985348..7df3d8cba 100644 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ b/backend/dataall/api/Objects/DatasetTable/resolvers.py @@ -13,7 +13,7 @@ from ....db.api import ResourcePolicy, Glossary from ....searchproxy import indexers from ....utils import json_utils -from dataall.searchproxy.indexers import DatasetTableIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py new file mode 100644 index 
000000000..1eab70a87 --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -0,0 +1,98 @@ +"""Indexes DatasetTable in OpenSearch""" +from operator import and_ + +from dataall.db import models +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.searchproxy.upsert import BaseIndexer + + +class DatasetTableIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, table_uri: str): + table = ( + session.query( + models.DatasetTable.datasetUri.label('datasetUri'), + models.DatasetTable.tableUri.label('uri'), + models.DatasetTable.name.label('name'), + models.DatasetTable.owner.label('owner'), + models.DatasetTable.label.label('label'), + models.DatasetTable.description.label('description'), + models.Dataset.confidentiality.label('classification'), + models.DatasetTable.tags.label('tags'), + models.Dataset.topics.label('topics'), + models.Dataset.region.label('region'), + models.Organization.organizationUri.label('orgUri'), + models.Organization.name.label('orgName'), + models.Environment.environmentUri.label('envUri'), + models.Environment.name.label('envName'), + models.Dataset.SamlAdminGroupName.label('admins'), + models.Dataset.GlueDatabaseName.label('database'), + models.Dataset.S3BucketName.label('source'), + models.DatasetTable.created, + models.DatasetTable.updated, + models.DatasetTable.deleted, + ) + .join( + models.Dataset, + models.Dataset.datasetUri == models.DatasetTable.datasetUri, + ) + .join( + models.Organization, + models.Dataset.organizationUri == models.Organization.organizationUri, + ) + .join( + models.Environment, + models.Dataset.environmentUri == models.Environment.environmentUri, + ) + .filter(models.DatasetTable.tableUri == table_uri) + .first() + ) + + if table: + glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) + tags = table.tags if table.tags else [] + BaseIndexer._index( + doc_id=table_uri, + doc={ + 'name': table.name, + 'admins': table.admins, 
+ 'owner': table.owner, + 'label': table.label, + 'resourceKind': 'table', + 'description': table.description, + 'database': table.database, + 'source': table.source, + 'classification': table.classification, + 'tags': [t.replace('-', '') for t in tags or []], + 'topics': table.topics, + 'region': table.region.replace('-', ''), + 'datasetUri': table.datasetUri, + 'environmentUri': table.envUri, + 'environmentName': table.envName, + 'organizationUri': table.orgUri, + 'organizationName': table.orgName, + 'created': table.created, + 'updated': table.updated, + 'deleted': table.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=table.datasetUri) + return table + + @classmethod + def upsert_all(cls, session, dataset_uri: str): + tables = ( + session.query(models.DatasetTable) + .filter( + and_( + models.DatasetTable.datasetUri == dataset_uri, + models.DatasetTable.LastGlueTableStatus != 'Deleted', + ) + ) + .all() + ) + for table in tables: + DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) + return tables diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 27a870d60..7ae104cc9 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -8,6 +8,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.searchproxy import indexers from dataall.searchproxy.connect import connect from dataall.utils.alarm_service import AlarmService @@ -87,7 +88,7 @@ def sync_tables(engine, es=None): processed_tables.extend(tables) if es: - indexers.upsert_dataset_tables(session, es, dataset.datasetUri) + DatasetTableIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) except Exception as e: log.error( f'Failed to 
sync tables for dataset ' diff --git a/backend/dataall/searchproxy/__init__.py b/backend/dataall/searchproxy/__init__.py index 8b648babe..78493adb6 100644 --- a/backend/dataall/searchproxy/__init__.py +++ b/backend/dataall/searchproxy/__init__.py @@ -1,10 +1,8 @@ from .connect import connect -from .indexers import upsert_dataset_tables from .search import run_query __all__ = [ 'connect', 'run_query', 'upsert', - 'upsert_dataset_tables', ] diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index fa91cb4eb..13ba44eea 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,87 +5,10 @@ from .. import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer -from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer log = logging.getLogger(__name__) -class DatasetTableIndexer(BaseIndexer): - - @classmethod - def upsert(cls, session, table_uri: str): - table = ( - session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('uri'), - models.DatasetTable.name.label('name'), - models.DatasetTable.owner.label('owner'), - models.DatasetTable.label.label('label'), - models.DatasetTable.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.DatasetTable.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.DatasetTable.created, - models.DatasetTable.updated, - models.DatasetTable.deleted, - ) - .join( - models.Dataset, - models.Dataset.datasetUri == 
models.DatasetTable.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.DatasetTable.tableUri == table_uri) - .first() - ) - - if table: - glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) - tags = table.tags if table.tags else [] - BaseIndexer._index( - doc_id=table_uri, - doc={ - 'name': table.name, - 'admins': table.admins, - 'owner': table.owner, - 'label': table.label, - 'resourceKind': 'table', - 'description': table.description, - 'database': table.database, - 'source': table.source, - 'classification': table.classification, - 'tags': [t.replace('-', '') for t in tags or []], - 'topics': table.topics, - 'region': table.region.replace('-', ''), - 'datasetUri': table.datasetUri, - 'environmentUri': table.envUri, - 'environmentName': table.envName, - 'organizationUri': table.orgUri, - 'organizationName': table.orgName, - 'created': table.created, - 'updated': table.updated, - 'deleted': table.deleted, - 'glossary': glossary, - }, - ) - DatasetIndexer.upsert(session=session, dataset_uri=table.datasetUri) - return table - - class DashboardIndexer(BaseIndexer): @classmethod def upsert(cls, session, dashboard_uri: str): @@ -149,22 +72,6 @@ def upsert(cls, session, dashboard_uri: str): return dashboard -def upsert_dataset_tables(session, es, datasetUri: str): - tables = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.datasetUri == datasetUri, - models.DatasetTable.LastGlueTableStatus != 'Deleted', - ) - ) - .all() - ) - for table in tables: - DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) - return tables - - def remove_deleted_tables(session, es, datasetUri: str): tables = ( session.query(models.DatasetTable) diff --git a/backend/dataall/tasks/catalog_indexer.py 
b/backend/dataall/tasks/catalog_indexer.py index e3d80458e..5d32800c7 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -3,10 +3,11 @@ import sys from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from .. import db from ..db import get_engine, exceptions from ..db import models -from dataall.searchproxy.indexers import upsert_dataset_tables, DashboardIndexer +from dataall.searchproxy.indexers import DashboardIndexer from ..searchproxy.connect import ( connect, ) @@ -34,7 +35,7 @@ def index_objects(engine, es): log.info(f'Found {len(all_datasets)} datasets') dataset: models.Dataset for dataset in all_datasets: - tables = upsert_dataset_tables(session, es, dataset.datasetUri) + tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) indexed_objects_counter = ( indexed_objects_counter + len(tables) + len(folders) + 1 diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 9eb50e050..619559b5c 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -29,7 +29,10 @@ def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) module_mocker.patch('dataall.searchproxy.search', return_value={}) module_mocker.patch('dataall.searchproxy.upsert', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset_tables', return_value={}) + module_mocker.patch( + 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.indexers.upsert_all', + return_value={} + ) module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.indexers.DatasetTableIndexer.upsert', return_value={}) module_mocker.patch( diff --git 
a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index ffc0370c1..fd31506f1 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -4,9 +4,9 @@ import dataall from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.searchproxy.indexers import DatasetTableIndexer from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -152,7 +152,7 @@ def test_upsert_folder(db, dataset, env, mocker, folder): def test_upsert_tables(db, dataset, env, mocker, folder): mocker.patch('dataall.searchproxy.upsert', return_value={}) with db.scoped_session() as session: - tables = indexers.upsert_dataset_tables( - session, es={}, datasetUri=dataset.datasetUri + tables = DatasetTableIndexer.upsert_all( + session, dataset_uri=dataset.datasetUri ) assert len(tables) == 1 diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index f8901933b..31b0f14d4 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -83,7 +83,8 @@ def table(org, env, db, sync_dataset): def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): mocker.patch( - 'dataall.searchproxy.indexers.upsert_dataset_tables', return_value=[table] + 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', + return_value=[table] ) mocker.patch( 'dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value=sync_dataset From a1825ba4797bdaf8468868d675aa3bcc54193688 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 11:42:15 +0200 Subject: [PATCH 067/346] Fixed test mocking --- tests/api/conftest.py | 4 ++-- tests/api/test_dataset_location.py | 3 ++- 
tests/cdkproxy/test_dataset_stack.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 619559b5c..f959be417 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -30,11 +30,11 @@ def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.search', return_value={}) module_mocker.patch('dataall.searchproxy.upsert', return_value={}) module_mocker.patch( - 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.indexers.upsert_all', + 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', return_value={} ) module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.DatasetTableIndexer.upsert', return_value={}) + module_mocker.patch('dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert', return_value={}) module_mocker.patch( 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', return_value={} diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index 128d21bbb..2977a8baf 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -71,7 +71,8 @@ def test_get_dataset(client, dataset1, env1, user, group): def test_create_location(client, dataset1, env1, user, group, patch_es, module_mocker): module_mocker.patch( - 'dataall.aws.handlers.s3.S3.create_bucket_prefix', return_value=True + 'dataall.modules.datasets.handlers.s3_location_handler.S3DatasetLocationHandler.create_bucket_prefix', + return_value=True ) response = client.query( """ diff --git a/tests/cdkproxy/test_dataset_stack.py b/tests/cdkproxy/test_dataset_stack.py index 34f495056..a9a84fc4e 100644 --- a/tests/cdkproxy/test_dataset_stack.py +++ b/tests/cdkproxy/test_dataset_stack.py @@ -8,9 +8,9 @@ @pytest.fixture(scope='function', autouse=True) def 
patch_methods(mocker, db, dataset, env, org): - mocker.patch('dataall.cdkproxy.stacks.dataset.DatasetStack.get_engine', return_value=db) + mocker.patch('dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_engine', return_value=db) mocker.patch( - 'dataall.cdkproxy.stacks.dataset.DatasetStack.get_target', return_value=dataset + 'dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_target', return_value=dataset ) mocker.patch( 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', From d2954853f5f4d894b219d79a5f5e876e84afb669 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 11:43:01 +0200 Subject: [PATCH 068/346] Fixed circular import while half of the module is not migrate --- backend/dataall/api/Objects/Glossary/registry.py | 9 +-------- backend/dataall/modules/datasets/__init__.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 147f97a0e..36fea6cf0 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -6,7 +6,7 @@ from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models -from dataall.searchproxy.indexers import DashboardIndexer, DatasetTableIndexer +from dataall.searchproxy.indexers import DashboardIndexer from dataall.searchproxy.upsert import BaseIndexer @@ -67,11 +67,4 @@ def reindex(cls, session, es: OpenSearch, target_type: str, target_uri: str): object_type="Dashboard", model=models.Dashboard, reindexer=DashboardIndexer -)) - -GlossaryRegistry.register(GlossaryDefinition( - target_type="DatasetTable", - object_type="DatasetTable", - model=models.DatasetTable, - reindexer=DatasetTableIndexer )) \ No newline at end of file diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 
03cd58cdc..e02a9d9bf 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,12 +2,11 @@ import logging from typing import List -from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition -from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition from dataall.db import models from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) @@ -21,6 +20,9 @@ def is_supported(cls, modes): return ImportMode.API in modes def __init__(self): + from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition + from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition + import dataall.modules.datasets.api FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) @@ -41,6 +43,13 @@ def __init__(self): reindexer=DatasetIndexer )) + GlossaryRegistry.register(GlossaryDefinition( + target_type="DatasetTable", + object_type="DatasetTable", + model=models.DatasetTable, + reindexer=DatasetTableIndexer + )) + log.info("API of datasets has been imported") From 005a5e7201f1e5218dd5d8c4039fdcd593cde772 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 20 Apr 2023 14:23:31 +0200 Subject: [PATCH 069/346] Removed not used alarms --- .../share_managers/s3_share_manager.py | 12 +--- backend/dataall/utils/alarm_service.py | 55 --------------- tests/tasks/test_s3_share_manager.py | 70 ------------------- 3 files changed, 2 insertions(+), 135 deletions(-) diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py 
b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index f0ea4e162..30c72a60e 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -398,7 +398,7 @@ def delete_dataset_bucket_key_policy( json.dumps(policy) ) - def handle_share_failure(self, error: Exception) -> bool: + def handle_share_failure(self, error: Exception) -> None: """ Handles share failure by raising an alarm to alarmsTopic Returns @@ -411,12 +411,8 @@ def handle_share_failure(self, error: Exception) -> bool: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_folder_sharing_failure_alarm( - self.target_folder, self.share, self.target_environment - ) - return True - def handle_revoke_failure(self, error: Exception) -> bool: + def handle_revoke_failure(self, error: Exception) -> None: """ Handles share failure by raising an alarm to alarmsTopic Returns @@ -429,7 +425,3 @@ def handle_revoke_failure(self, error: Exception) -> bool: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_revoke_folder_sharing_failure_alarm( - self.target_folder, self.share, self.target_environment - ) - return True diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py index 436d5a701..b414e1ed0 100644 --- a/backend/dataall/utils/alarm_service.py +++ b/backend/dataall/utils/alarm_service.py @@ -11,7 +11,6 @@ from ..aws.handlers.sts import SessionHelper from ..db import models -from dataall.modules.datasets.db.models import DatasetStorageLocation logger = logging.getLogger(__name__) @@ -73,60 +72,6 @@ def trigger_table_sharing_failure_alarm( """ return self.publish_message_to_alarms_topic(subject, message) - def trigger_folder_sharing_failure_alarm( - self, - folder: 
DatasetStorageLocation, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: S3 Folder sharing failure - - Timestamp: {datetime.now()} - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {folder.AWSAccountId} - - Region: {folder.region} - - S3 Bucket: {folder.S3BucketName} - - S3 Folder: {folder.S3Prefix} - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} -""" - - def trigger_revoke_folder_sharing_failure_alarm( - self, - folder: DatasetStorageLocation, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Revoke Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. 
-Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: S3 Folder sharing Revoke failure - - Timestamp: {datetime.now()} - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {folder.AWSAccountId} - - Region: {folder.region} - - S3 Bucket: {folder.S3BucketName} - - S3 Folder: {folder.S3Prefix} - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} -""" - def trigger_revoke_table_sharing_failure_alarm( self, table: models.DatasetTable, diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index 2841be87e..14f61fefd 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -1415,73 +1415,3 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_tar # Then kms_put_key_policy_mock.assert_called() kms_put_key_policy_mock.assert_called_with(source_environment.AwsAccountId, 'eu-central-1', kms_get_key_mock.return_value, "default", json.dumps(remaining_policy)) - - -def test_handle_share_failure( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_folder_sharing_failure_alarm") - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - error = Exception - # When - manager.handle_share_failure(error) - - # Then - alarm_service_mock.assert_called() - - -def test_handle_revoke_failure( - mocker, - source_environment_group: models.EnvironmentGroup, - 
target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_revoke_folder_sharing_failure_alarm") - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - error = Exception - # When - manager.handle_revoke_failure(error) - - # Then - alarm_service_mock.assert_called() From 0fd7c02f4498e1ab6a22610f64448411e84e7ecc Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 21 Apr 2023 10:19:15 +0200 Subject: [PATCH 070/346] Moved dataset table GraphQL api in modules --- backend/dataall/api/Objects/__init__.py | 1 - .../dataall/modules/datasets/api/__init__.py | 5 +++-- .../datasets/api/table}/__init__.py | 2 +- .../datasets/api/table}/input_types.py | 4 ++-- .../datasets/api/table}/mutations.py | 11 ++++++++--- .../datasets/api/table}/queries.py | 12 ++++++++---- .../datasets/api/table}/resolvers.py | 18 +++++++++--------- .../datasets/api/table}/schema.py | 12 +++++++++--- 8 files changed, 40 insertions(+), 25 deletions(-) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/__init__.py (75%) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/input_types.py (93%) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/mutations.py (82%) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/queries.py (81%) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/resolvers.py (95%) rename backend/dataall/{api/Objects/DatasetTable => modules/datasets/api/table}/schema.py (94%) 
diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 7c064fb1f..5cc73fbdf 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -17,7 +17,6 @@ DataPipeline, Environment, Activity, - DatasetTable, Dataset, Group, Principal, diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 4c279340e..7fe2d06a1 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -2,7 +2,8 @@ from dataall.modules.datasets.api import ( table_column, profiling, - storage_location + storage_location, + table ) -__all__ = ["table_column", "profiling", "storage_location"] +__all__ = ["table_column", "profiling", "storage_location", "table"] diff --git a/backend/dataall/api/Objects/DatasetTable/__init__.py b/backend/dataall/modules/datasets/api/table/__init__.py similarity index 75% rename from backend/dataall/api/Objects/DatasetTable/__init__.py rename to backend/dataall/modules/datasets/api/table/__init__.py index dfa46b264..3aaba05cf 100644 --- a/backend/dataall/api/Objects/DatasetTable/__init__.py +++ b/backend/dataall/modules/datasets/api/table/__init__.py @@ -1,4 +1,4 @@ -from . import ( +from dataall.modules.datasets.api.table import ( input_types, mutations, queries, diff --git a/backend/dataall/api/Objects/DatasetTable/input_types.py b/backend/dataall/modules/datasets/api/table/input_types.py similarity index 93% rename from backend/dataall/api/Objects/DatasetTable/input_types.py rename to backend/dataall/modules/datasets/api/table/input_types.py index a5bd07998..2e6649515 100644 --- a/backend/dataall/api/Objects/DatasetTable/input_types.py +++ b/backend/dataall/modules/datasets/api/table/input_types.py @@ -1,5 +1,5 @@ -from ... 
import gql -from ....api.constants import SortDirection, GraphQLEnumMapper +from dataall.api import gql +from dataall.api.constants import SortDirection, GraphQLEnumMapper NewDatasetTableInput = gql.InputType( diff --git a/backend/dataall/api/Objects/DatasetTable/mutations.py b/backend/dataall/modules/datasets/api/table/mutations.py similarity index 82% rename from backend/dataall/api/Objects/DatasetTable/mutations.py rename to backend/dataall/modules/datasets/api/table/mutations.py index 532605cff..7a26a6c15 100644 --- a/backend/dataall/api/Objects/DatasetTable/mutations.py +++ b/backend/dataall/modules/datasets/api/table/mutations.py @@ -1,9 +1,14 @@ -from ... import gql -from .input_types import ( +from dataall.api import gql +from dataall.modules.datasets.api.table.input_types import ( ModifyDatasetTableInput, NewDatasetTableInput, ) -from .resolvers import * +from dataall.modules.datasets.api.table.resolvers import ( + create_table, + update_table, + delete_table, + publish_table_update +) createDatasetTable = gql.MutationField( name='createDatasetTable', diff --git a/backend/dataall/api/Objects/DatasetTable/queries.py b/backend/dataall/modules/datasets/api/table/queries.py similarity index 81% rename from backend/dataall/api/Objects/DatasetTable/queries.py rename to backend/dataall/modules/datasets/api/table/queries.py index 8f7809e62..a6d8d48cf 100644 --- a/backend/dataall/api/Objects/DatasetTable/queries.py +++ b/backend/dataall/modules/datasets/api/table/queries.py @@ -1,7 +1,11 @@ -from ... 
import gql -from .input_types import DatasetTableFilter -from .resolvers import * -from .schema import ( +from dataall.api import gql +from dataall.modules.datasets.api.table.input_types import DatasetTableFilter +from dataall.modules.datasets.api.table.resolvers import ( + get_table, + list_shared_tables_by_env_dataset, + preview +) +from dataall.modules.datasets.api.table.schema import ( DatasetTable, DatasetTableSearchResult, ) diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py similarity index 95% rename from backend/dataall/api/Objects/DatasetTable/resolvers.py rename to backend/dataall/modules/datasets/api/table/resolvers.py index 7df3d8cba..f4d7f4ea1 100644 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -4,15 +4,15 @@ from botocore.exceptions import ClientError from pyathena import connect -from .... import db -from ..Dataset.resolvers import get_dataset -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import permissions, models -from ....db.api import ResourcePolicy, Glossary -from ....searchproxy import indexers -from ....utils import json_utils +from dataall import db +from dataall.api.Objects.Dataset.resolvers import get_dataset +from dataall.api.context import Context +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import permissions, models +from dataall.db.api import ResourcePolicy, Glossary +from dataall.searchproxy import indexers +from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService diff --git a/backend/dataall/api/Objects/DatasetTable/schema.py 
b/backend/dataall/modules/datasets/api/table/schema.py similarity index 94% rename from backend/dataall/api/Objects/DatasetTable/schema.py rename to backend/dataall/modules/datasets/api/table/schema.py index 74d413818..666bf7e35 100644 --- a/backend/dataall/api/Objects/DatasetTable/schema.py +++ b/backend/dataall/modules/datasets/api/table/schema.py @@ -1,7 +1,13 @@ from dataall.modules.datasets.api.table_column.resolvers import list_table_columns -from ... import gql -from .resolvers import * -from ...constants import GraphQLEnumMapper +from dataall.api import gql +from dataall.modules.datasets.api.table.resolvers import ( + resolve_dataset, + get_glue_table_properties, + resolve_redshift_copy_location, + resolve_glossary_terms, + resolve_redshift_copy_schema +) +from dataall.api.constants import GraphQLEnumMapper TablePermission = gql.ObjectType( name='TablePermission', From 7030c8226c6a741078ee6f0cb9c8e217be406fc4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 21 Apr 2023 10:53:55 +0200 Subject: [PATCH 071/346] Moved DatasetTable model to modules --- backend/dataall/api/Objects/Feed/registry.py | 1 - .../api/Objects/ShareObject/resolvers.py | 4 +- backend/dataall/aws/handlers/glue.py | 3 +- backend/dataall/aws/handlers/redshift.py | 3 +- backend/dataall/db/api/dataset.py | 33 ++++++------ backend/dataall/db/api/redshift_cluster.py | 25 +++++----- backend/dataall/db/api/share_object.py | 26 +++++----- backend/dataall/db/models/DatasetTable.py | 32 ------------ backend/dataall/db/models/__init__.py | 1 - backend/dataall/modules/datasets/__init__.py | 5 +- .../modules/datasets/api/table/resolvers.py | 17 ++++--- .../datasets/api/table_column/resolvers.py | 8 +-- .../modules/datasets/cdk/dataset_stack.py | 16 +++--- backend/dataall/modules/datasets/db/models.py | 31 +++++++++++- .../datasets/handlers/glue_column_handler.py | 12 ++--- .../datasets/indexers/table_indexer.py | 31 ++++++------ .../services/dataset_profiling_service.py | 28 +++++------ 
.../services/dataset_share_service.py | 18 +++---- .../datasets/services/dataset_table.py | 50 +++++++++---------- .../datasets/tasks/subscription_service.py | 10 ++-- .../modules/datasets/tasks/tables_syncer.py | 5 +- backend/dataall/searchproxy/indexers.py | 7 +-- .../dataall/tasks/bucket_policy_updater.py | 20 ++++---- .../share_managers/lf_share_manager.py | 23 +++++---- .../lf_process_cross_account_share.py | 5 +- .../lf_process_same_account_share.py | 7 +-- backend/dataall/utils/alarm_service.py | 5 +- ...215e_backfill_dataset_table_permissions.py | 5 +- tests/api/conftest.py | 8 +-- tests/api/test_dataset.py | 4 +- tests/api/test_dataset_profiling.py | 12 ++--- tests/api/test_dataset_table.py | 34 ++++++------- tests/api/test_glossary.py | 6 +-- tests/api/test_share.py | 11 ++-- tests/cdkproxy/conftest.py | 5 +- tests/searchproxy/test_indexers.py | 4 +- tests/tasks/conftest.py | 8 +-- tests/tasks/test_catalog_indexer.py | 3 +- tests/tasks/test_lf_share_manager.py | 35 ++++++------- tests/tasks/test_policies.py | 3 +- tests/tasks/test_subscriptions.py | 3 +- tests/tasks/test_tables_sync.py | 9 ++-- 42 files changed, 292 insertions(+), 284 deletions(-) delete mode 100644 backend/dataall/db/models/DatasetTable.py diff --git a/backend/dataall/api/Objects/Feed/registry.py b/backend/dataall/api/Objects/Feed/registry.py index 6a01a488a..4fedd252a 100644 --- a/backend/dataall/api/Objects/Feed/registry.py +++ b/backend/dataall/api/Objects/Feed/registry.py @@ -38,5 +38,4 @@ def types(cls): FeedRegistry.register(FeedDefinition("Worksheet", models.Worksheet)) FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) -FeedRegistry.register(FeedDefinition("DatasetTable", models.DatasetTable)) FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/api/Objects/ShareObject/resolvers.py index 16e4e1353..49f20fc17 100644 --- 
a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ b/backend/dataall/api/Objects/ShareObject/resolvers.py @@ -7,7 +7,7 @@ from ....api.context import Context from ....aws.handlers.service_handlers import Worker from ....db import models -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable log = logging.getLogger(__name__) @@ -265,7 +265,7 @@ def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): def union_resolver(object, *_): - if isinstance(object, models.DatasetTable): + if isinstance(object, DatasetTable): return 'DatasetTable' elif isinstance(object, DatasetStorageLocation): return 'DatasetStorageLocation' diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index e76fd4e63..c2a7ecf21 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -6,6 +6,7 @@ from .sts import SessionHelper from ... 
import db from ...db import models +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger('aws:glue') @@ -524,7 +525,7 @@ def get_job_runs(engine, task: models.Task): @staticmethod def grant_principals_all_table_permissions( - table: models.DatasetTable, principals: [str], client=None + table: DatasetTable, principals: [str], client=None ): """ Update the table permissions on Lake Formation diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index c186d5df7..1fe6c738c 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -11,6 +11,7 @@ from ...db import models # TODO should be migrated in the redshift module from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -448,7 +449,7 @@ def copy_data(engine, task: models.Task): session, task.payload['datasetUri'] ) - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, task.payload['tableUri'] ) diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index 3c2c8de10..d328dddb5 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -16,9 +16,10 @@ from . import Organization from .. 
import models, api, exceptions, permissions, paginate from ..models.Enums import Language, ConfidentialityClassification -from ...modules.datasets.db.dataset_repository import DatasetRepository -from ...modules.datasets.services.dataset_location import DatasetLocationService -from ...utils.naming_convention import ( +from dataall.modules.datasets.db.dataset_repository import DatasetRepository +from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) @@ -266,21 +267,21 @@ def paginated_dataset_tables( session, username, groups, uri, data=None, check_perm=None ) -> dict: query = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .filter( and_( - models.DatasetTable.datasetUri == uri, - models.DatasetTable.LastGlueTableStatus != 'Deleted', + DatasetTable.datasetUri == uri, + DatasetTable.LastGlueTableStatus != 'Deleted', ) ) - .order_by(models.DatasetTable.created.desc()) + .order_by(DatasetTable.created.desc()) ) if data and data.get('term'): query = query.filter( or_( *[ - models.DatasetTable.name.ilike('%' + data.get('term') + '%'), - models.DatasetTable.GlueTableName.ilike( + DatasetTable.name.ilike('%' + data.get('term') + '%'), + DatasetTable.GlueTableName.ilike( '%' + data.get('term') + '%' ), ] @@ -379,7 +380,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): group=new_stewards, permissions=permissions.DATASET_TABLE_READ, resource_uri=tableUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) dataset_shares = ( @@ -455,8 +456,8 @@ def update_glue_database_status(session, dataset_uri): def get_dataset_tables(session, dataset_uri): """return the dataset tables""" return ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset_uri) + session.query(DatasetTable) 
+ .filter(DatasetTable.datasetUri == dataset_uri) .all() ) @@ -585,10 +586,10 @@ def _delete_dataset_term_links(session, uri): @staticmethod def _delete_dataset_tables(session, dataset_uri) -> bool: tables = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .filter( and_( - models.DatasetTable.datasetUri == dataset_uri, + DatasetTable.datasetUri == dataset_uri, ) ) .all() @@ -618,7 +619,7 @@ def get_dataset_by_bucket_name(session, bucket) -> [models.Dataset]: @staticmethod def count_dataset_tables(session, dataset_uri): return ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset_uri) + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) .count() ) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 31b795225..de5799180 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -4,11 +4,12 @@ from .. import models, api, exceptions, paginate, permissions from . 
import has_resource_perm, ResourcePolicy, Environment, Dataset -from ...utils.naming_convention import ( +from dataall.modules.datasets.db.models import DatasetTable +from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) -from ...utils.slugify import slugify +from dataall.utils.slugify import slugify log = logging.getLogger(__name__) @@ -334,13 +335,13 @@ def list_available_cluster_tables( ) created = ( session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('tableUri'), + DatasetTable.datasetUri.label('datasetUri'), + DatasetTable.tableUri.label('tableUri'), models.RedshiftCluster.clusterUri.label('clusterUri'), ) .join( models.Dataset, - models.DatasetTable.datasetUri == models.Dataset.datasetUri, + DatasetTable.datasetUri == models.Dataset.datasetUri, ) .filter( and_( @@ -354,8 +355,8 @@ def list_available_cluster_tables( ) ) .group_by( - models.DatasetTable.datasetUri, - models.DatasetTable.tableUri, + DatasetTable.datasetUri, + DatasetTable.tableUri, models.RedshiftCluster.clusterUri, ) ) @@ -363,10 +364,10 @@ def list_available_cluster_tables( 'all_group_tables_sub_query' ) query = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .join( all_group_tables_sub_query, - all_group_tables_sub_query.c.tableUri == models.DatasetTable.tableUri, + all_group_tables_sub_query.c.tableUri == DatasetTable.tableUri, ) .filter( models.RedshiftCluster.clusterUri == cluster.clusterUri, @@ -541,18 +542,18 @@ def list_copy_enabled_tables( session, username, groups, uri, data=None, check_perm=True ) -> [models.RedshiftClusterDatasetTable]: q = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .join( models.RedshiftClusterDatasetTable, models.RedshiftClusterDatasetTable.tableUri - == models.DatasetTable.tableUri, + == DatasetTable.tableUri, ) .filter(models.RedshiftClusterDatasetTable.clusterUri == uri) ) if data.get('term'): term = data.get('term') q 
= q.filter( - models.DatasetTable.label.ilike('%' + term + '%'), + DatasetTable.label.ilike('%' + term + '%'), ) return paginate( q, page=data.get('page', 1), page_size=data.get('pageSize', 20) diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index bd0215190..4fddda5e9 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -10,7 +10,7 @@ from .. import api, utils from .. import models, exceptions, permissions, paginate from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable logger = logging.getLogger(__name__) @@ -422,7 +422,7 @@ def create_share_object( if itemType == ShareableType.StorageLocation.value: item = session.query(DatasetStorageLocation).get(itemUri) if itemType == ShareableType.Table.value: - item = session.query(models.DatasetTable).get(itemUri) + item = session.query(DatasetTable).get(itemUri) share_item = ( session.query(models.ShareObjectItem) @@ -605,7 +605,7 @@ def approve_share_object( group=share.principalId, permissions=permissions.DATASET_TABLE_READ, resource_uri=table.itemUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) api.Notification.notify_share_object_approval(session, username, dataset, share) @@ -717,7 +717,7 @@ def get_share_item( ShareObject.get_share_item_by_uri(session, data['shareItemUri']), ) if share_item.itemType == ShareableType.Table.value: - return session.query(models.DatasetTable).get(share_item.itemUri) + return session.query(DatasetTable).get(share_item.itemUri) if share_item.itemType == ShareableType.StorageLocation: return session.Query(DatasetStorageLocation).get(share_item.itemUri) @@ -762,7 +762,7 @@ def add_share_object_item( Share_SM.update_state(session, 
share, new_share_state) if itemType == ShareableType.Table.value: - item: models.DatasetTable = session.query(models.DatasetTable).get(itemUri) + item: DatasetTable = session.query(DatasetTable).get(itemUri) if item and item.region != target_environment.region: raise exceptions.UnauthorizedOperation( action=permissions.ADD_ITEM, @@ -944,10 +944,10 @@ def list_shareable_items( # marking the table as part of the shareObject tables = ( session.query( - models.DatasetTable.tableUri.label('itemUri'), + DatasetTable.tableUri.label('itemUri'), func.coalesce('DatasetTable').label('itemType'), - models.DatasetTable.GlueTableName.label('itemName'), - models.DatasetTable.description.label('description'), + DatasetTable.GlueTableName.label('itemName'), + DatasetTable.description.label('description'), models.ShareObjectItem.shareItemUri.label('shareItemUri'), models.ShareObjectItem.status.label('status'), case( @@ -959,10 +959,10 @@ def list_shareable_items( models.ShareObjectItem, and_( models.ShareObjectItem.shareUri == share.shareUri, - models.DatasetTable.tableUri == models.ShareObjectItem.itemUri, + DatasetTable.tableUri == models.ShareObjectItem.itemUri, ), ) - .filter(models.DatasetTable.datasetUri == datasetUri) + .filter(DatasetTable.datasetUri == datasetUri) ) if data: if data.get("isRevokable"): @@ -1145,7 +1145,7 @@ def update_share_item_status_batch( def find_share_item_by_table( session, share: models.ShareObject, - table: models.DatasetTable, + table: DatasetTable, ) -> models.ShareObjectItem: share_item: models.ShareObjectItem = ( session.query(models.ShareObjectItem) @@ -1247,10 +1247,10 @@ def get_share_data_items(session, share_uri, status): raise exceptions.ObjectNotFound('Share', share_uri) tables = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .join( models.ShareObjectItem, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + models.ShareObjectItem.itemUri == DatasetTable.tableUri, ) .join( models.ShareObject, diff 
--git a/backend/dataall/db/models/DatasetTable.py b/backend/dataall/db/models/DatasetTable.py deleted file mode 100644 index e97174167..000000000 --- a/backend/dataall/db/models/DatasetTable.py +++ /dev/null @@ -1,32 +0,0 @@ -from sqlalchemy import Column, String, Text -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. import Base -from .. import Resource, utils - - -class DatasetTable(Resource, Base): - __tablename__ = 'dataset_table' - datasetUri = Column(String, nullable=False) - tableUri = Column(String, primary_key=True, default=utils.uuid('table')) - AWSAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - S3Prefix = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=False) - GlueTableName = Column(String, nullable=False) - GlueTableConfig = Column(Text) - GlueTableProperties = Column(postgresql.JSON, default={}) - LastGlueTableStatus = Column(String, default='InSync') - region = Column(String, default='eu-west-1') - # LastGeneratedPreviewDate= Column(DateTime, default=None) - confidentiality = Column(String, nullable=True) - userRoleForTable = query_expression() - projectPermission = query_expression() - redshiftClusterPermission = query_expression() - stage = Column(String, default='RAW') - topics = Column(postgresql.ARRAY(String), nullable=True) - confidentiality = Column(String, nullable=False, default='C1') - - def uri(self): - return self.tableUri diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index c288527cf..123547f8c 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -6,7 +6,6 @@ from .DashboardShare import DashboardShareStatus from .Dataset import Dataset from .DatasetQualityRule import DatasetQualityRule -from .DatasetTable import DatasetTable from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup from .FeedMessage 
import FeedMessage diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index e02a9d9bf..4f8964016 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -3,7 +3,7 @@ from typing import List from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -27,6 +27,7 @@ def __init__(self): FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) FeedRegistry.register(FeedDefinition("DatasetStorageLocation", DatasetStorageLocation)) + FeedRegistry.register(FeedDefinition("DatasetTable", DatasetTable)) GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) GlossaryRegistry.register(GlossaryDefinition( @@ -46,7 +47,7 @@ def __init__(self): GlossaryRegistry.register(GlossaryDefinition( target_type="DatasetTable", object_type="DatasetTable", - model=models.DatasetTable, + model=DatasetTable, reindexer=DatasetTableIndexer )) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index f4d7f4ea1..ea16cae79 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -11,6 +11,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary +from dataall.modules.datasets.db.models import DatasetTable from dataall.searchproxy import indexers from dataall.utils import 
json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -104,7 +105,7 @@ def delete_table(context, source, tableUri: str = None): def preview(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) @@ -155,17 +156,17 @@ def preview(context, source, tableUri: str = None): return {'rows': rows, 'fields': fields} -def get_glue_table_properties(context: Context, source: models.DatasetTable, **kwargs): +def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): if not source: return None with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, source.tableUri ) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') -def resolve_dataset(context, source: models.DatasetTable, **kwargs): +def resolve_dataset(context, source: DatasetTable, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -177,7 +178,7 @@ def resolve_dataset(context, source: models.DatasetTable, **kwargs): return dataset_with_role -def resolve_glossary_terms(context: Context, source: models.DatasetTable, **kwargs): +def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -188,7 +189,7 @@ def resolve_glossary_terms(context: Context, source: models.DatasetTable, **kwar def publish_table_update(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: 
DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -217,7 +218,7 @@ def publish_table_update(context: Context, source, tableUri: str = None): return True -def resolve_redshift_copy_schema(context, source: models.DatasetTable, clusterUri: str): +def resolve_redshift_copy_schema(context, source: DatasetTable, clusterUri: str): if not source: return None with context.engine.scoped_session() as session: @@ -227,7 +228,7 @@ def resolve_redshift_copy_schema(context, source: models.DatasetTable, clusterUr def resolve_redshift_copy_location( - context, source: models.DatasetTable, clusterUri: str + context, source: DatasetTable, clusterUri: str ): with context.engine.scoped_session() as session: return db.api.RedshiftCluster.get_cluster_dataset_table( diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 8e78a042e..b27a99dd3 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -6,12 +6,12 @@ from dataall.db import paginate, permissions, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable def list_table_columns( context: Context, - source: models.DatasetTable, + source: DatasetTable, tableUri: str = None, filter: dict = None, ): @@ -46,7 +46,7 @@ def list_table_columns( def sync_table_columns(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) 
ResourcePolicy.check_user_resource_permission( @@ -81,7 +81,7 @@ def update_table_column( ).get(columnUri) if not column: raise db.exceptions.ObjectNotFound('Column', columnUri) - table: models.DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, column.tableUri ) ResourcePolicy.check_user_resource_permission( diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index e99b43b0c..517b32893 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -28,7 +28,7 @@ from dataall.db.api import Environment from dataall.utils.cdk_nag_utils import CDKNagUtil from dataall.utils.runtime_stacks_tagging import TagsUtil -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable logger = logging.getLogger(__name__) @@ -77,17 +77,17 @@ def get_shared_tables(self) -> typing.List[models.ShareObjectItem]: with engine.scoped_session() as session: tables = ( session.query( - models.DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), - models.DatasetTable.GlueTableName.label('GlueTableName'), - models.DatasetTable.AWSAccountId.label('SourceAwsAccountId'), - models.DatasetTable.region.label('SourceRegion'), + DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), + DatasetTable.GlueTableName.label('GlueTableName'), + DatasetTable.AWSAccountId.label('SourceAwsAccountId'), + DatasetTable.region.label('SourceRegion'), models.Environment.AwsAccountId.label('TargetAwsAccountId'), models.Environment.region.label('TargetRegion'), ) .join( models.ShareObjectItem, and_( - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri + models.ShareObjectItem.itemUri == DatasetTable.tableUri ), ) .join( @@ -101,8 +101,8 @@ def get_shared_tables(self) -> 
typing.List[models.ShareObjectItem]: ) .filter( and_( - models.DatasetTable.datasetUri == self.target_uri, - models.DatasetTable.deleted.is_(None), + DatasetTable.datasetUri == self.target_uri, + DatasetTable.deleted.is_(None), models.ShareObjectItem.status.in_(self.shared_states) ) ) diff --git a/backend/dataall/modules/datasets/db/models.py b/backend/dataall/modules/datasets/db/models.py index 2dfee26ec..a25978bef 100644 --- a/backend/dataall/modules/datasets/db/models.py +++ b/backend/dataall/modules/datasets/db/models.py @@ -1,5 +1,5 @@ -from sqlalchemy import Boolean, Column, String -from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy import Boolean, Column, String, Text +from sqlalchemy.dialects.postgresql import JSON, ARRAY from sqlalchemy.orm import query_expression from dataall.db import Base, Resource, utils @@ -56,3 +56,30 @@ class DatasetStorageLocation(Resource, Base): def uri(self): return self.locationUri + +class DatasetTable(Resource, Base): + __tablename__ = 'dataset_table' + datasetUri = Column(String, nullable=False) + tableUri = Column(String, primary_key=True, default=utils.uuid('table')) + AWSAccountId = Column(String, nullable=False) + S3BucketName = Column(String, nullable=False) + S3Prefix = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=False) + GlueTableName = Column(String, nullable=False) + GlueTableConfig = Column(Text) + GlueTableProperties = Column(JSON, default={}) + LastGlueTableStatus = Column(String, default='InSync') + region = Column(String, default='eu-west-1') + # LastGeneratedPreviewDate= Column(DateTime, default=None) + confidentiality = Column(String, nullable=True) + userRoleForTable = query_expression() + projectPermission = query_expression() + redshiftClusterPermission = query_expression() + stage = Column(String, default='RAW') + topics = Column(ARRAY(String), nullable=True) + confidentiality = Column(String, nullable=False, default='C1') + + def uri(self): + return 
self.tableUri + + diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index df43f9dbd..f41784959 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.aws.handlers.service_handlers import Worker -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -18,7 +18,7 @@ class DatasetColumnGlueHandler: @Worker.handler('glue.table.columns') def get_table_columns(engine, task: models.Task): with engine.scoped_session() as session: - dataset_table: models.DatasetTable = session.query(models.DatasetTable).get( + dataset_table: DatasetTable = session.query(DatasetTable).get( task.targetUri ) aws = SessionHelper.remote_session(dataset_table.AWSAccountId) @@ -46,12 +46,8 @@ def get_table_columns(engine, task: models.Task): @Worker.handler('glue.table.update_column') def update_table_columns(engine, task: models.Task): with engine.scoped_session() as session: - column: DatasetTableColumn = session.query( - DatasetTableColumn - ).get(task.targetUri) - table: models.DatasetTable = session.query(models.DatasetTable).get( - column.tableUri - ) + column: DatasetTableColumn = session.query(DatasetTableColumn).get(task.targetUri) + table: DatasetTable = session.query(DatasetTable).get(column.tableUri) try: aws_session = SessionHelper.remote_session(table.AWSAccountId) diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index 1eab70a87..4c96eea6d 100644 --- 
a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -2,6 +2,7 @@ from operator import and_ from dataall.db import models +from dataall.modules.datasets.db.models import DatasetTable from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.upsert import BaseIndexer @@ -12,14 +13,14 @@ class DatasetTableIndexer(BaseIndexer): def upsert(cls, session, table_uri: str): table = ( session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('uri'), - models.DatasetTable.name.label('name'), - models.DatasetTable.owner.label('owner'), - models.DatasetTable.label.label('label'), - models.DatasetTable.description.label('description'), + DatasetTable.datasetUri.label('datasetUri'), + DatasetTable.tableUri.label('uri'), + DatasetTable.name.label('name'), + DatasetTable.owner.label('owner'), + DatasetTable.label.label('label'), + DatasetTable.description.label('description'), models.Dataset.confidentiality.label('classification'), - models.DatasetTable.tags.label('tags'), + DatasetTable.tags.label('tags'), models.Dataset.topics.label('topics'), models.Dataset.region.label('region'), models.Organization.organizationUri.label('orgUri'), @@ -29,13 +30,13 @@ def upsert(cls, session, table_uri: str): models.Dataset.SamlAdminGroupName.label('admins'), models.Dataset.GlueDatabaseName.label('database'), models.Dataset.S3BucketName.label('source'), - models.DatasetTable.created, - models.DatasetTable.updated, - models.DatasetTable.deleted, + DatasetTable.created, + DatasetTable.updated, + DatasetTable.deleted, ) .join( models.Dataset, - models.Dataset.datasetUri == models.DatasetTable.datasetUri, + models.Dataset.datasetUri == DatasetTable.datasetUri, ) .join( models.Organization, @@ -45,7 +46,7 @@ def upsert(cls, session, table_uri: str): models.Environment, models.Dataset.environmentUri == 
models.Environment.environmentUri, ) - .filter(models.DatasetTable.tableUri == table_uri) + .filter(DatasetTable.tableUri == table_uri) .first() ) @@ -84,11 +85,11 @@ def upsert(cls, session, table_uri: str): @classmethod def upsert_all(cls, session, dataset_uri: str): tables = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .filter( and_( - models.DatasetTable.datasetUri == dataset_uri, - models.DatasetTable.LastGlueTableStatus != 'Deleted', + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus != 'Deleted', ) ) .all() diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 5b6ca8d41..01bc3dc57 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -2,7 +2,7 @@ from dataall.db import paginate, models from dataall.db.exceptions import ObjectNotFound -from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable class DatasetProfilingService: @@ -18,7 +18,7 @@ def start_profiling( raise ObjectNotFound('Dataset', datasetUri) if tableUri and not GlueTableName: - table: models.DatasetTable = session.query(models.DatasetTable).get( + table: DatasetTable = session.query(DatasetTable).get( tableUri ) if not table: @@ -105,13 +105,13 @@ def list_table_profiling_runs(session, tableUri, filter): q = ( session.query(DatasetProfilingRun) .join( - models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) .filter( and_( - models.DatasetTable.tableUri == tableUri, - models.DatasetTable.GlueTableName + DatasetTable.tableUri == tableUri, + DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName, ) ) @@ -126,12 +126,12 @@ def 
get_table_last_profiling_run(session, tableUri): return ( session.query(DatasetProfilingRun) .join( - models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) - .filter(models.DatasetTable.tableUri == tableUri) + .filter(DatasetTable.tableUri == tableUri) .filter( - models.DatasetTable.GlueTableName + DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName ) .order_by(DatasetProfilingRun.created.desc()) @@ -143,12 +143,12 @@ def get_table_last_profiling_run_with_results(session, tableUri): return ( session.query(DatasetProfilingRun) .join( - models.DatasetTable, - models.DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) - .filter(models.DatasetTable.tableUri == tableUri) + .filter(DatasetTable.tableUri == tableUri) .filter( - models.DatasetTable.GlueTableName + DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName ) .filter(DatasetProfilingRun.results.isnot(None)) diff --git a/backend/dataall/modules/datasets/services/dataset_share_service.py b/backend/dataall/modules/datasets/services/dataset_share_service.py index 9ca84a1cf..3503e86fe 100644 --- a/backend/dataall/modules/datasets/services/dataset_share_service.py +++ b/backend/dataall/modules/datasets/services/dataset_share_service.py @@ -8,7 +8,7 @@ from dataall.db import models, permissions from dataall.db.api import has_resource_perm, ShareItemSM from dataall.db.paginator import paginate -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable class DatasetShareService: @@ -41,9 +41,9 @@ def paginated_shared_with_environment_datasets( models.ShareObjectItem.itemType == ShareableType.Table.value, func.concat( - models.DatasetTable.GlueDatabaseName, + DatasetTable.GlueDatabaseName, '.', - 
models.DatasetTable.GlueTableName, + DatasetTable.GlueTableName, ), ), ( @@ -73,8 +73,8 @@ def paginated_shared_with_environment_datasets( == models.Environment.organizationUri, ) .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + DatasetTable, + models.ShareObjectItem.itemUri == DatasetTable.tableUri, ) .outerjoin( DatasetStorageLocation, @@ -137,9 +137,9 @@ def paginated_shared_with_environment_group_datasets( models.ShareObjectItem.itemType == ShareableType.Table.value, func.concat( - models.DatasetTable.GlueDatabaseName, + DatasetTable.GlueDatabaseName, '.', - models.DatasetTable.GlueTableName, + DatasetTable.GlueTableName, ), ), ( @@ -169,8 +169,8 @@ def paginated_shared_with_environment_group_datasets( == models.Environment.organizationUri, ) .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + DatasetTable, + models.ShareObjectItem.itemUri == DatasetTable.tableUri, ) .outerjoin( DatasetStorageLocation, diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index cd02eadf5..7776aa2ef 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -6,7 +6,7 @@ from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment from dataall.db.models import Dataset from dataall.utils import json_utils -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable logger = logging.getLogger(__name__) @@ -22,14 +22,14 @@ def create_dataset_table( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DatasetTable: + ) -> DatasetTable: dataset = api.Dataset.get_dataset_by_uri(session, uri) exists = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .filter( and_( - 
models.DatasetTable.datasetUri == uri, - models.DatasetTable.GlueTableName == data['name'], + DatasetTable.datasetUri == uri, + DatasetTable.GlueTableName == data['name'], ) ) .count() @@ -41,7 +41,7 @@ def create_dataset_table( message=f'table: {data["name"]} already exist on dataset {uri}', ) - table = models.DatasetTable( + table = DatasetTable( datasetUri=uri, label=data['name'], name=data['name'], @@ -72,7 +72,7 @@ def create_dataset_table( group=group, permissions=permissions.DATASET_TABLE_READ, resource_uri=table.tableUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) return table @@ -87,13 +87,13 @@ def list_dataset_tables( check_perm: bool = False, ) -> dict: query = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == uri) - .order_by(models.DatasetTable.created.desc()) + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == uri) + .order_by(DatasetTable.created.desc()) ) if data.get('term'): term = data.get('term') - query = query.filter(models.DatasetTable.label.ilike('%' + term + '%')) + query = query.filter(DatasetTable.label.ilike('%' + term + '%')) return paginate( query, page=data.get('page', 1), page_size=data.get('pageSize', 10) ).to_dict() @@ -107,7 +107,7 @@ def get_dataset_table( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DatasetTable: + ) -> DatasetTable: return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) @staticmethod @@ -183,10 +183,10 @@ def query_dataset_tables_shared_with_env( """ share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() env_tables_shared = ( - session.query(models.DatasetTable) # all tables + session.query(DatasetTable) # all tables .join( models.ShareObjectItem, # found in ShareObjectItem - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, + models.ShareObjectItem.itemUri == DatasetTable.tableUri, ) .join( models.ShareObject, # jump to share object @@ 
-218,7 +218,7 @@ def get_dataset_tables_shared_with_env( @staticmethod def get_dataset_table_by_uri(session, table_uri): - table: models.DatasetTable = session.query(models.DatasetTable).get(table_uri) + table: DatasetTable = session.query(DatasetTable).get(table_uri) if not table: raise exceptions.ObjectNotFound('DatasetTable', table_uri) return table @@ -229,8 +229,8 @@ def sync(session, datasetUri, glue_tables=None): dataset: Dataset = session.query(Dataset).get(datasetUri) if dataset: existing_tables = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == datasetUri) + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == datasetUri) .all() ) existing_table_names = [e.GlueTableName for e in existing_tables] @@ -245,7 +245,7 @@ def sync(session, datasetUri, glue_tables=None): logger.info( f'Storing new table: {table} for dataset db {dataset.GlueDatabaseName}' ) - updated_table = models.DatasetTable( + updated_table = DatasetTable( datasetUri=dataset.datasetUri, label=table['Name'], name=table['Name'], @@ -272,13 +272,13 @@ def sync(session, datasetUri, glue_tables=None): group=group, permissions=permissions.DATASET_TABLE_READ, resource_uri=updated_table.tableUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) else: logger.info( f'Updating table: {table} for dataset db {dataset.GlueDatabaseName}' ) - updated_table: models.DatasetTable = ( + updated_table: DatasetTable = ( existing_dataset_tables_map.get(table['Name']) ) updated_table.GlueTableProperties = json_utils.to_json( @@ -345,13 +345,13 @@ def delete_all_table_columns(session, dataset_table): @staticmethod def get_table_by_s3_prefix(session, s3_prefix, accountid, region): - table: models.DatasetTable = ( - session.query(models.DatasetTable) + table: DatasetTable = ( + session.query(DatasetTable) .filter( and_( - models.DatasetTable.S3Prefix.startswith(s3_prefix), - models.DatasetTable.AWSAccountId == accountid, - 
models.DatasetTable.region == region, + DatasetTable.S3Prefix.startswith(s3_prefix), + DatasetTable.AWSAccountId == accountid, + DatasetTable.region == region, ) ) .first() diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 901865812..ae1c522e0 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -17,7 +17,7 @@ from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable root = logging.getLogger() root.setLevel(logging.INFO) @@ -68,7 +68,7 @@ def notify_consumers(engine, messages): @staticmethod def publish_table_update_message(engine, message): with engine.scoped_session() as session: - table: models.DatasetTable = DatasetTableService.get_table_by_s3_prefix( + table: DatasetTable = DatasetTableService.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), @@ -139,7 +139,7 @@ def publish_location_update_message(session, message): @staticmethod def store_dataquality_results(session, message): - table: models.DatasetTable = DatasetTableService.get_table_by_s3_prefix( + table: DatasetTable = DatasetTableService.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), @@ -207,7 +207,7 @@ def set_columns_type(quality_results, message): @staticmethod def publish_sns_message( - engine, message, dataset, share_items, prefix, table: models.DatasetTable = None + engine, message, dataset, share_items, prefix, table: DatasetTable = None ): with engine.scoped_session() as session: for item in share_items: @@ -290,7 +290,7 @@ def redshift_copy( 
message, dataset: models.Dataset, environment: models.Environment, - table: models.DatasetTable, + table: DatasetTable, ): log.info( f'Redshift copy starting ' diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 7ae104cc9..0974df585 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -8,6 +8,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models +from dataall.modules.datasets.db.models import DatasetTable from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.searchproxy import indexers from dataall.searchproxy.connect import connect @@ -68,8 +69,8 @@ def sync_tables(engine, es=None): ) tables = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset.datasetUri) + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset.datasetUri) .all() ) diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 13ba44eea..9140cf3aa 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -5,6 +5,7 @@ from .. 
import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -74,11 +75,11 @@ def upsert(cls, session, dashboard_uri: str): def remove_deleted_tables(session, es, datasetUri: str): tables = ( - session.query(models.DatasetTable) + session.query(DatasetTable) .filter( and_( - models.DatasetTable.datasetUri == datasetUri, - models.DatasetTable.LastGlueTableStatus == 'Deleted', + DatasetTable.datasetUri == datasetUri, + DatasetTable.LastGlueTableStatus == 'Deleted', ) ) .all() diff --git a/backend/dataall/tasks/bucket_policy_updater.py b/backend/dataall/tasks/bucket_policy_updater.py index 9932f53ae..6cb4c51ea 100644 --- a/backend/dataall/tasks/bucket_policy_updater.py +++ b/backend/dataall/tasks/bucket_policy_updater.py @@ -9,8 +9,8 @@ from ..aws.handlers.sts import SessionHelper from ..db import get_engine -from ..db import models, api -from dataall.modules.datasets.db.models import DatasetStorageLocation +from ..db import models +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable root = logging.getLogger() root.setLevel(logging.INFO) @@ -168,18 +168,18 @@ def get_shared_tables(self, dataset) -> typing.List[models.ShareObjectItem]: with self.engine.scoped_session() as session: tables = ( session.query( - models.DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), - models.DatasetTable.GlueTableName.label('GlueTableName'), - models.DatasetTable.S3Prefix.label('S3Prefix'), - models.DatasetTable.AWSAccountId.label('SourceAwsAccountId'), - models.DatasetTable.region.label('SourceRegion'), + DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), + DatasetTable.GlueTableName.label('GlueTableName'), + DatasetTable.S3Prefix.label('S3Prefix'), + DatasetTable.AWSAccountId.label('SourceAwsAccountId'), + DatasetTable.region.label('SourceRegion'), models.Environment.AwsAccountId.label('TargetAwsAccountId'), 
models.Environment.region.label('TargetRegion'), ) .join( models.ShareObjectItem, and_( - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri + models.ShareObjectItem.itemUri == DatasetTable.tableUri ), ) .join( @@ -193,8 +193,8 @@ def get_shared_tables(self, dataset) -> typing.List[models.ShareObjectItem]: ) .filter( and_( - models.DatasetTable.datasetUri == dataset.datasetUri, - models.DatasetTable.deleted.is_(None), + DatasetTable.datasetUri == dataset.datasetUri, + DatasetTable.deleted.is_(None), models.ShareObjectItem.status == models.Enums.ShareObjectStatus.Approved.value, ) diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index b74e34e93..2b7eaf20a 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -11,7 +11,8 @@ from ....aws.handlers.sts import SessionHelper from ....aws.handlers.ram import Ram from ....db import api, exceptions, models -from ....utils.alarm_service import AlarmService +from dataall.modules.datasets.db.models import DatasetTable +from dataall.utils.alarm_service import AlarmService logger = logging.getLogger(__name__) @@ -22,8 +23,8 @@ def __init__( session, dataset: models.Dataset, share: models.ShareObject, - shared_tables: [models.DatasetTable], - revoked_tables: [models.DatasetTable], + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], source_environment: models.Environment, target_environment: models.Environment, env_group: models.EnvironmentGroup, @@ -82,7 +83,7 @@ def build_shared_db_name(self) -> str: """ return (self.dataset.GlueDatabaseName + '_shared_' + self.share.shareUri)[:254] - def build_share_data(self, table: models.DatasetTable) -> dict: + def build_share_data(self, table: DatasetTable) -> dict: """ Build aws dict for boto3 operations on Glue and LF from share data Parameters @@ 
-110,7 +111,7 @@ def build_share_data(self, table: models.DatasetTable) -> dict: return data def check_share_item_exists_on_glue_catalog( - self, share_item: models.ShareObjectItem, table: models.DatasetTable + self, share_item: models.ShareObjectItem, table: DatasetTable ) -> None: """ Checks if a table in the share request @@ -271,12 +272,12 @@ def create_resource_link(cls, **data) -> dict: ) raise e - def revoke_table_resource_link_access(self, table: models.DatasetTable, principals: [str]): + def revoke_table_resource_link_access(self, table: DatasetTable, principals: [str]): """ Revokes access to glue table resource link Parameters ---------- - table : models.DatasetTable + table : DatasetTable principals: List of strings. IAM role arn and Quicksight groups Returns @@ -332,7 +333,7 @@ def revoke_source_table_access(self, table, principals: [str]): Revokes access to the source glue table Parameters ---------- - table : models.DatasetTable + table : DatasetTable Returns ------- @@ -366,7 +367,7 @@ def revoke_source_table_access(self, table, principals: [str]): ) return True - def delete_resource_link_table(self, table: models.DatasetTable): + def delete_resource_link_table(self, table: DatasetTable): logger.info(f'Deleting shared table {table.GlueTableName}') if not Glue.table_exists( @@ -502,7 +503,7 @@ def delete_ram_resource_shares(self, resource_arn: str) -> [dict]: def handle_share_failure( self, - table: models.DatasetTable, + table: DatasetTable, share_item: models.ShareObjectItem, error: Exception, ) -> bool: @@ -532,7 +533,7 @@ def handle_share_failure( def handle_revoke_failure( self, - table: models.DatasetTable, + table: DatasetTable, share_item: models.ShareObjectItem, error: Exception, ) -> bool: diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py index ffdf7d487..dfceec978 100644 --- 
a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py @@ -4,6 +4,7 @@ from ..share_managers import LFShareManager from ....aws.handlers.ram import Ram from ....db import models, api +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -14,8 +15,8 @@ def __init__( session, dataset: models.Dataset, share: models.ShareObject, - shared_tables: [models.DatasetTable], - revoked_tables: [models.DatasetTable], + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], source_environment: models.Environment, target_environment: models.Environment, env_group: models.EnvironmentGroup, diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py index 4b5ad4096..3ea939b4f 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py @@ -1,7 +1,8 @@ import logging from ..share_managers import LFShareManager -from ....db import models, api +from dataall.db import models, api +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -12,8 +13,8 @@ def __init__( session, dataset: models.Dataset, share: models.ShareObject, - shared_tables: [models.DatasetTable], - revoked_tables: [models.DatasetTable], + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], source_environment: models.Environment, target_environment: models.Environment, env_group: models.EnvironmentGroup, diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py index b414e1ed0..a1d0a6d5b 100644 --- a/backend/dataall/utils/alarm_service.py +++ b/backend/dataall/utils/alarm_service.py @@ -11,6 +11,7 @@ from 
..aws.handlers.sts import SessionHelper from ..db import models +from dataall.modules.datasets.db.models import DatasetTable logger = logging.getLogger(__name__) @@ -42,7 +43,7 @@ def trigger_stack_deployment_failure_alarm(self, stack: models.Stack): def trigger_table_sharing_failure_alarm( self, - table: models.DatasetTable, + table: DatasetTable, share: models.ShareObject, target_environment: models.Environment, ): @@ -74,7 +75,7 @@ def trigger_table_sharing_failure_alarm( def trigger_revoke_table_sharing_failure_alarm( self, - table: models.DatasetTable, + table: DatasetTable, share: models.ShareObject, target_environment: models.Environment, ): diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index d75e7d6cc..32ca6abe0 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -6,7 +6,6 @@ """ from alembic import op -import sqlalchemy as sa from sqlalchemy import orm, Column, String, Text, DateTime, and_ from sqlalchemy.orm import query_expression from sqlalchemy.dialects import postgresql @@ -95,7 +94,7 @@ def upgrade(): resource_uri=table.tableUri, group=group, permissions=permissions.DATASET_TABLE_READ, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) print('dataset table permissions updated successfully for owners/stewards') except Exception as e: @@ -120,7 +119,7 @@ def upgrade(): group=share.principalId, permissions=permissions.DATASET_TABLE_READ, resource_uri=shared_table.itemUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) print('dataset table permissions updated for all shared tables') except Exception as e: diff --git a/tests/api/conftest.py b/tests/api/conftest.py index f959be417..aff658520 100644 --- a/tests/api/conftest.py +++ 
b/tests/api/conftest.py @@ -2,7 +2,7 @@ from .client import * from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -506,7 +506,7 @@ def factory( def share_item(db): def factory( share: models.ShareObject, - table: models.DatasetTable, + table: DatasetTable, status: str ) -> models.ShareObjectItem: with db.scoped_session() as session: @@ -554,12 +554,12 @@ def factory(dataset: models.Dataset, name, username) -> DatasetStorageLocation: def table(db): cache = {} - def factory(dataset: models.Dataset, name, username) -> models.DatasetTable: + def factory(dataset: models.Dataset, name, username) -> DatasetTable: key = f'{dataset.datasetUri}-{name}' if cache.get(key): return cache.get(key) with db.scoped_session() as session: - table = models.DatasetTable( + table = DatasetTable( name=name, label=name, owner=username, diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 359a780b4..9dc5d37f5 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -227,7 +227,7 @@ def test_add_tables(table, dataset1, db): table(dataset=dataset1, name=f'table{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetTable).count() + nb = session.query(DatasetTable).count() assert nb == 10 diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index 8d708e94d..8f7b1bc84 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -2,7 +2,7 @@ import pytest import dataall -from 
dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -33,7 +33,7 @@ def test_add_tables(table, dataset1, db): table(dataset=dataset1, name=f'table{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetTable).count() + nb = session.query(DatasetTable).count() assert nb == 10 @@ -141,8 +141,8 @@ def test_get_table_profiling_run( table = table(dataset=dataset1, name='table1', username=dataset1.owner) with db.scoped_session() as session: table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.GlueTableName == 'table1') + session.query(DatasetTable) + .filter(DatasetTable.GlueTableName == 'table1') .first() ) response = client.query( @@ -178,8 +178,8 @@ def test_list_table_profiling_runs( table1000 = table(dataset=dataset1, name='table1000', username=dataset1.owner) with db.scoped_session() as session: table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.GlueTableName == 'table1') + session.query(DatasetTable) + .filter(DatasetTable.GlueTableName == 'table1') .first() ) module_mocker.patch( diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 88140b68c..7ed3732a4 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -4,7 +4,7 @@ import dataall from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -76,7 +76,7 @@ def test_add_tables(table, dataset1, db): table(dataset=dataset1, name=f'table{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = 
session.query(dataall.db.models.DatasetTable).count() + nb = session.query(DatasetTable).count() assert nb == 10 @@ -109,8 +109,8 @@ def test_update_table(client, env1, table, dataset1, db, user, group): def test_add_columns(table, dataset1, db): with db.scoped_session() as session: table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') .first() ) table_col = DatasetTableColumn( @@ -182,8 +182,8 @@ def test_list_dataset_tables(client, dataset1): def test_update_dataset_table_column(client, table, dataset1, db): with db.scoped_session() as session: table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') .first() ) column = ( @@ -231,8 +231,8 @@ def test_update_dataset_table_column(client, table, dataset1, db): def test_sync_tables_and_columns(client, table, dataset1, db): with db.scoped_session() as session: table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') .first() ) column = ( @@ -292,9 +292,9 @@ def test_sync_tables_and_columns(client, table, dataset1, db): ] assert DatasetTableService.sync(session, dataset1.datasetUri, glue_tables) - new_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'new_table') + new_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'new_table') .first() ) assert new_table @@ -309,9 +309,9 @@ def test_sync_tables_and_columns(client, table, dataset1, db): assert columns[0].columnType == 'column' assert columns[1].columnType == 'partition_0' - existing_table: dataall.db.models.DatasetTable = ( - 
session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') + existing_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') .first() ) assert existing_table @@ -326,9 +326,9 @@ def test_sync_tables_and_columns(client, table, dataset1, db): assert columns[0].columnType == 'column' assert columns[1].columnType == 'partition_0' - deleted_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table2') + deleted_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table2') .first() ) assert deleted_table.LastGlueTableStatus == 'Deleted' diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index bb7f34516..1aa15ce73 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import List from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable import pytest @@ -32,9 +32,9 @@ def _dataset(db, _env, _org, group, user, dataset) -> models.Dataset: @pytest.fixture(scope='module', autouse=True) -def _table(db, _dataset) -> models.DatasetTable: +def _table(db, _dataset) -> DatasetTable: with db.scoped_session() as session: - t = models.DatasetTable( + t = DatasetTable( datasetUri=_dataset.datasetUri, label='table', AWSAccountId=_dataset.AwsAccountId, diff --git a/tests/api/test_share.py b/tests/api/test_share.py index 58309aa01..d951a15f8 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -3,6 +3,7 @@ import pytest import dataall +from dataall.modules.datasets.db.models import DatasetTable def random_table_name(): @@ -64,7 +65,7 @@ def tables1(table: typing.Callable, dataset1: dataall.db.models.Dataset): @pytest.fixture(scope="module", autouse=True) def 
table1(table: typing.Callable, dataset1: dataall.db.models.Dataset, - user: dataall.db.models.User) -> dataall.db.models.DatasetTable: + user: dataall.db.models.User) -> DatasetTable: yield table( dataset=dataset1, name="table1", @@ -112,7 +113,7 @@ def tables2(table, dataset2): @pytest.fixture(scope="module", autouse=True) def table2(table: typing.Callable, dataset2: dataall.db.models.Dataset, - user2: dataall.db.models.User) -> dataall.db.models.DatasetTable: + user2: dataall.db.models.User) -> DatasetTable: yield table( dataset=dataset2, name="table2", @@ -195,7 +196,7 @@ def share1_draft( def share1_item_pa( share_item: typing.Callable, share1_draft: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable + table1: DatasetTable ) -> dataall.db.models.ShareObjectItem: # Cleaned up with share1_draft yield share_item( @@ -270,7 +271,7 @@ def share2_submitted( def share2_item_pa( share_item: typing.Callable, share2_submitted: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable + table1: DatasetTable ) -> dataall.db.models.ShareObjectItem: # Cleaned up with share2 yield share_item( @@ -345,7 +346,7 @@ def share3_processed( def share3_item_shared( share_item: typing.Callable, share3_processed: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable + table1:DatasetTable ) -> dataall.db.models.ShareObjectItem: # Cleaned up with share3 yield share_item( diff --git a/tests/cdkproxy/conftest.py b/tests/cdkproxy/conftest.py index c83d0028b..c223f4a37 100644 --- a/tests/cdkproxy/conftest.py +++ b/tests/cdkproxy/conftest.py @@ -1,6 +1,7 @@ import pytest from dataall.db import models, api +from dataall.modules.datasets.db.models import DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -121,9 +122,9 @@ def dataset(db, env: models.Environment) -> models.Dataset: @pytest.fixture(scope='module', autouse=True) -def table(db, dataset: models.Dataset) -> models.DatasetTable: +def table(db, dataset: models.Dataset) -> 
DatasetTable: with db.scoped_session() as session: - table = models.DatasetTable( + table = DatasetTable( label='thistable', owner='me', datasetUri=dataset.datasetUri, diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index fd31506f1..4fad9e6d2 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -6,7 +6,7 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.searchproxy import indexers -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -74,7 +74,7 @@ def dataset(org, env, db): @pytest.fixture(scope='module', autouse=True) def table(org, env, db, dataset): with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( + table = DatasetTable( datasetUri=dataset.datasetUri, AWSAccountId='12345678901', S3Prefix='S3prefix', diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index 7e6f0d71a..267d3ef73 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -2,7 +2,7 @@ from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable @pytest.fixture(scope="module") @@ -148,10 +148,10 @@ def factory(dataset: models.Dataset, label: str) -> DatasetStorageLocation: @pytest.fixture(scope='module') def table(db): - def factory(dataset: models.Dataset, label: str) -> models.DatasetTable: + def factory(dataset: models.Dataset, label: str) -> DatasetTable: with db.scoped_session() as session: - table = models.DatasetTable( + table = DatasetTable( name=label, label=label, owner=dataset.owner, @@ -218,7 
+218,7 @@ def factory( def share_item_table(db): def factory( share: models.ShareObject, - table: models.DatasetTable, + table: DatasetTable, status: str, ) -> models.ShareObjectItem: with db.scoped_session() as session: diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 31b0f14d4..8da53e3d2 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -1,5 +1,6 @@ import pytest import dataall +from dataall.modules.datasets.db.models import DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -65,7 +66,7 @@ def sync_dataset(org, env, db): @pytest.fixture(scope='module', autouse=True) def table(org, env, db, sync_dataset): with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( + table = DatasetTable( datasetUri=sync_dataset.datasetUri, AWSAccountId='12345678901', S3Prefix='S3prefix', diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index bee190258..1ff99ba43 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -10,6 +10,7 @@ from dataall.db import models from dataall.api import constants +from dataall.modules.datasets.db.models import DatasetTable from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare @@ -94,7 +95,7 @@ def dataset1(dataset: Callable, org1: models.Organization, source_environment: m @pytest.fixture(scope="module") -def table1(table: Callable, dataset1: models.Dataset) -> models.DatasetTable: +def table1(table: Callable, dataset1: models.Dataset) -> DatasetTable: yield table( dataset=dataset1, label="table1" @@ -102,7 +103,7 @@ def table1(table: Callable, dataset1: models.Dataset) -> models.DatasetTable: @pytest.fixture(scope="module") -def table2(table: Callable, dataset1: models.Dataset) -> 
models.DatasetTable: +def table2(table: Callable, dataset1: models.Dataset) -> DatasetTable: yield table( dataset=dataset1, label="table2" @@ -133,7 +134,7 @@ def share_cross_account( @pytest.fixture(scope="module") def share_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table1: models.DatasetTable) -> models.ShareObjectItem: + table1: DatasetTable) -> models.ShareObjectItem: yield share_item_table( share=share_same_account, table=table1, @@ -142,7 +143,7 @@ def share_item_same_account(share_item_table: Callable, share_same_account: mode @pytest.fixture(scope="module") def revoke_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table2: models.DatasetTable) -> models.ShareObjectItem: + table2: DatasetTable) -> models.ShareObjectItem: yield share_item_table( share=share_same_account, table=table2, @@ -151,7 +152,7 @@ def revoke_item_same_account(share_item_table: Callable, share_same_account: mod @pytest.fixture(scope="module") def share_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table1: models.DatasetTable) -> models.ShareObjectItem: + table1: DatasetTable) -> models.ShareObjectItem: yield share_item_table( share=share_cross_account, table=table1, @@ -160,7 +161,7 @@ def share_item_cross_account(share_item_table: Callable, share_cross_account: mo @pytest.fixture(scope="module") def revoke_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table2: models.DatasetTable) -> models.ShareObjectItem: + table2: DatasetTable) -> models.ShareObjectItem: yield share_item_table( share=share_cross_account, table=table2, @@ -294,7 +295,7 @@ def test_check_share_item_exists_on_glue_catalog( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - table1: models.DatasetTable, + table1: DatasetTable, share_item_same_account: models.ShareObjectItem, 
share_item_cross_account: models.ShareObjectItem, mocker, @@ -332,7 +333,7 @@ def test_build_share_data( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table1: models.DatasetTable, + table1: DatasetTable, ): data_same_account = { 'source': { @@ -380,7 +381,7 @@ def test_create_resource_link( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table1: models.DatasetTable, + table1: DatasetTable, mocker, ): sts_mock = mocker.patch( @@ -463,7 +464,7 @@ def test_revoke_table_resource_link_access( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table2: models.DatasetTable, + table2: DatasetTable, mocker, ): glue_mock = mocker.patch( @@ -511,7 +512,7 @@ def test_revoke_source_table_access( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table2: models.DatasetTable, + table2: DatasetTable, mocker, ): glue_mock = mocker.patch( @@ -554,7 +555,7 @@ def test_delete_resource_link_table( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table2: models.DatasetTable, + table2: DatasetTable, mocker, ): glue_mock = mocker.patch( @@ -596,7 +597,7 @@ def test_delete_shared_database( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table1: models.DatasetTable, + table1: DatasetTable, mocker, ): glue_mock = mocker.patch( @@ -625,8 +626,8 @@ def test_revoke_external_account_access_on_source_account( source_environment: models.Environment, target_environment: models.Environment, dataset1: models.Dataset, - table1: models.DatasetTable, - table2: models.DatasetTable, + table1: DatasetTable, + table2: DatasetTable, mocker, ): lf_mock = mocker.patch( @@ -649,7 +650,7 @@ def test_handle_share_failure( processor_cross_account: 
ProcessLFCrossAccountShare, share_item_same_account: models.ShareObjectItem, share_item_cross_account: models.ShareObjectItem, - table1: models.DatasetTable, + table1: DatasetTable, mocker, ): @@ -678,7 +679,7 @@ def test_handle_revoke_failure( processor_cross_account: ProcessLFCrossAccountShare, revoke_item_same_account: models.ShareObjectItem, revoke_item_cross_account: models.ShareObjectItem, - table1: models.DatasetTable, + table1: DatasetTable, mocker, ): # Given diff --git a/tests/tasks/test_policies.py b/tests/tasks/test_policies.py index d51cc2ac7..ca8c259c6 100644 --- a/tests/tasks/test_policies.py +++ b/tests/tasks/test_policies.py @@ -1,4 +1,5 @@ from dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets.db.models import DatasetTable from dataall.tasks.bucket_policy_updater import BucketPoliciesUpdater import pytest import dataall @@ -68,7 +69,7 @@ def sync_dataset(org, env, db): @pytest.fixture(scope='module', autouse=True) def table(org, env, db, sync_dataset): with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( + table = DatasetTable( datasetUri=sync_dataset.datasetUri, AWSAccountId='12345678901', S3Prefix='S3prefix', diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 874b8ccab..61c70d174 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -2,6 +2,7 @@ import dataall from dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets.db.models import DatasetTable @pytest.fixture(scope='module') @@ -93,7 +94,7 @@ def share( ): with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( + table = DatasetTable( label='foo', name='foo', owner='alice', diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 9d8282e65..ff6f8271e 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -1,6 +1,7 @@ import pytest import dataall from 
dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets.db.models import DatasetTable @pytest.fixture(scope='module', autouse=True) @@ -76,7 +77,7 @@ def sync_dataset(org, env, db): @pytest.fixture(scope='module', autouse=True) def table(org, env, db, sync_dataset): with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( + table = DatasetTable( datasetUri=sync_dataset.datasetUri, AWSAccountId='12345678901', S3Prefix='S3prefix', @@ -163,9 +164,9 @@ def test_tables_sync(db, org, env, sync_dataset, table, mocker): processed_tables = dataall.modules.datasets.tasks.tables_syncer.sync_tables(engine=db) assert len(processed_tables) == 2 with db.scoped_session() as session: - saved_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.GlueTableName == 'table1') + saved_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.GlueTableName == 'table1') .first() ) assert saved_table From ba45ca50bc422df343aea81749ab010b91743a71 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 21 Apr 2023 11:18:49 +0200 Subject: [PATCH 072/346] Moved delete_doc to BaseIndexer --- .../api/Objects/Dashboard/resolvers.py | 2 +- .../dataall/api/Objects/Dataset/resolvers.py | 10 ++++---- .../api/storage_location/resolvers.py | 2 +- .../modules/datasets/api/table/resolvers.py | 2 +- .../datasets/indexers/table_indexer.py | 16 +++++++++++++ backend/dataall/searchproxy/indexers.py | 24 ------------------- backend/dataall/searchproxy/upsert.py | 6 +++++ tests/api/conftest.py | 2 +- 8 files changed, 30 insertions(+), 34 deletions(-) diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/api/Objects/Dashboard/resolvers.py index 84a2a1bcc..94372f5d1 100644 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ b/backend/dataall/api/Objects/Dashboard/resolvers.py @@ -311,7 +311,7 @@ def delete_dashboard(context: 
Context, source, dashboardUri: str = None): data=None, check_perm=True, ) - indexers.delete_doc(es=context.es, doc_id=dashboardUri) + DashboardIndexer.delete_doc(doc_id=dashboardUri) return True diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py index 79e306c9e..c17deef76 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/api/Objects/Dataset/resolvers.py @@ -328,9 +328,7 @@ def sync_tables(context: Context, source, datasetUri: str = None): DatasetTableIndexer.upsert_all( session=session, dataset_uri=dataset.datasetUri ) - indexers.remove_deleted_tables( - session=session, es=context.es, datasetUri=dataset.datasetUri - ) + DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) return Dataset.paginated_dataset_tables( session=session, username=context.username, @@ -557,13 +555,13 @@ def delete_dataset( tables = [t.tableUri for t in Dataset.get_dataset_tables(session, datasetUri)] for uri in tables: - indexers.delete_doc(es=context.es, doc_id=uri) + DatasetIndexer.delete_doc(doc_id=uri) folders = [f.locationUri for f in DatasetLocationService.get_dataset_folders(session, datasetUri)] for uri in folders: - indexers.delete_doc(es=context.es, doc_id=uri) + DatasetIndexer.delete_doc(doc_id=uri) - indexers.delete_doc(es=context.es, doc_id=datasetUri) + DatasetIndexer.delete_doc(doc_id=datasetUri) Dataset.delete_dataset( session=session, diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 09cf4b14a..6f8d82e43 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -88,7 +88,7 @@ def remove_storage_location(context, source, locationUri: str = None): data={'locationUri': location.locationUri}, check_perm=True, ) - indexers.delete_doc(es=context.es, 
doc_id=location.locationUri) + DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) return True diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index ea16cae79..ce884bcbe 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -99,7 +99,7 @@ def delete_table(context, source, tableUri: str = None): }, check_perm=True, ) - indexers.delete_doc(es=context.es, doc_id=tableUri) + DatasetTableIndexer.delete_doc(doc_id=tableUri) return True diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index 4c96eea6d..2fe9451e1 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -97,3 +97,19 @@ def upsert_all(cls, session, dataset_uri: str): for table in tables: DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) return tables + + @classmethod + def remove_all_deleted(cls, session, dataset_uri: str): + tables = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus == 'Deleted', + ) + ) + .all() + ) + for table in tables: + cls.delete_doc(doc_id=table.tableUri) + return tables diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 9140cf3aa..eba878fa2 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -1,11 +1,8 @@ import logging -from sqlalchemy import and_ - from .. 
import db from ..db import models from dataall.searchproxy.upsert import BaseIndexer -from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -71,24 +68,3 @@ def upsert(cls, session, dashboard_uri: str): }, ) return dashboard - - -def remove_deleted_tables(session, es, datasetUri: str): - tables = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == datasetUri, - DatasetTable.LastGlueTableStatus == 'Deleted', - ) - ) - .all() - ) - for table in tables: - delete_doc(es, doc_id=table.tableUri) - return tables - - -def delete_doc(es, doc_id, index='dataall-index'): - es.delete(index=index, id=doc_id, ignore=[400, 404]) - return True diff --git a/backend/dataall/searchproxy/upsert.py b/backend/dataall/searchproxy/upsert.py index 9eb2e3125..a787032dd 100644 --- a/backend/dataall/searchproxy/upsert.py +++ b/backend/dataall/searchproxy/upsert.py @@ -30,6 +30,12 @@ def es(cls): def upsert(session, target_id): raise NotImplementedError("Method upsert is not implemented") + @classmethod + def delete_doc(cls, doc_id): + es = cls.es() + es.delete(index=cls._INDEX, id=doc_id, ignore=[400, 404]) + return True + @classmethod def _index(cls, doc_id, doc): es = cls.es() diff --git a/tests/api/conftest.py b/tests/api/conftest.py index aff658520..37fef4f10 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -40,7 +40,7 @@ def patch_es(module_mocker): return_value={} ) module_mocker.patch('dataall.searchproxy.indexers.DashboardIndexer.upsert', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.delete_doc', return_value={}) + module_mocker.patch('dataall.searchproxy.upsert.BaseIndexer.delete_doc', return_value={}) @pytest.fixture(scope='module', autouse=True) From dc8ff72a817e4403cd3cb1481908228a7709efaa Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 21 Apr 2023 11:28:38 +0200 Subject: [PATCH 073/346] Lazy creation of connection to OpenSearch --- backend/api_handler.py | 6 
+----- .../dataall/api/Objects/Glossary/registry.py | 8 +++----- .../dataall/api/Objects/Glossary/resolvers.py | 2 +- backend/dataall/api/Objects/Vote/resolvers.py | 4 ++-- backend/dataall/api/context.py | 2 -- backend/dataall/core/context.py | 2 -- .../modules/datasets/tasks/tables_syncer.py | 10 +++------- backend/dataall/tasks/catalog_indexer.py | 17 ++++------------- backend/local_graphql_server.py | 3 +-- 9 files changed, 15 insertions(+), 39 deletions(-) diff --git a/backend/api_handler.py b/backend/api_handler.py index 890235347..714e107b2 100644 --- a/backend/api_handler.py +++ b/backend/api_handler.py @@ -15,7 +15,6 @@ from dataall.core.context import set_context, dispose_context, RequestContext from dataall.db import init_permissions, get_engine, api, permissions from dataall.modules.loader import load_modules, ImportMode -from dataall.searchproxy import connect logger = logging.getLogger() logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) @@ -30,7 +29,6 @@ TYPE_DEFS = gql(SCHEMA.gql(with_directives=False)) ENVNAME = os.getenv('envname', 'local') ENGINE = get_engine(envname=ENVNAME) -ES = connect(envname=ENVNAME) Worker.queue = SqsQueue.send init_permissions(ENGINE) @@ -99,7 +97,6 @@ def handler(event, context): log.info('Lambda Event %s', event) log.debug('Env name %s', ENVNAME) - log.debug('ElasticSearch %s', ES) log.debug('Engine %s', ENGINE.engine.url) if event['httpMethod'] == 'OPTIONS': @@ -137,11 +134,10 @@ def handler(event, context): print(f'Error managing groups due to: {e}') groups = [] - set_context(RequestContext(ENGINE, username, groups, ES)) + set_context(RequestContext(ENGINE, username, groups)) app_context = { 'engine': ENGINE, - 'es': ES, 'username': username, 'groups': groups, 'schema': SCHEMA, diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 36fea6cf0..ef006a777 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ 
b/backend/dataall/api/Objects/Glossary/registry.py @@ -1,7 +1,5 @@ -from dataclasses import dataclass, field -from typing import Type, Dict, Optional, Protocol, Union, Callable, Any - -from opensearchpy import OpenSearch +from dataclasses import dataclass +from typing import Type, Dict, Optional, Protocol, Union from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry @@ -56,7 +54,7 @@ def types(cls): return [gql.Ref(definition.object_type) for definition in cls._DEFINITIONS.values()] @classmethod - def reindex(cls, session, es: OpenSearch, target_type: str, target_uri: str): + def reindex(cls, session, target_type: str, target_uri: str): definition = cls._DEFINITIONS[target_type] if definition.reindexer: definition.reindexer.upsert(session, target_uri) diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py index fdc4c3eea..42fae88ce 100644 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ b/backend/dataall/api/Objects/Glossary/resolvers.py @@ -458,7 +458,7 @@ def reindex(context, linkUri): if not link: return - GlossaryRegistry.reindex(session, context.es, link.targetType, link.targetUri) + GlossaryRegistry.reindex(session, link.targetType, link.targetUri) def _target_model(target_type: str): diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index 42f5c20f5..d9f739872 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -28,11 +28,11 @@ def upvote(context: Context, source, input=None): data=input, check_perm=True, ) - reindex(session, context.es, vote) + reindex(session, vote) return vote -def reindex(session, es, vote): +def reindex(session, vote): if vote.targetType == 'dataset': DatasetIndexer.upsert(session=session, dataset_uri=vote.targetUri) elif vote.targetType == 'dashboard': diff --git a/backend/dataall/api/context.py 
b/backend/dataall/api/context.py index a210dc0a1..238627a81 100644 --- a/backend/dataall/api/context.py +++ b/backend/dataall/api/context.py @@ -2,11 +2,9 @@ class Context: def __init__( self, engine=None, - es=None, username=None, groups=None, ): self.engine = engine - self.es = es self.username = username self.groups = groups diff --git a/backend/dataall/core/context.py b/backend/dataall/core/context.py index dcf594896..a6cc2d4ba 100644 --- a/backend/dataall/core/context.py +++ b/backend/dataall/core/context.py @@ -12,7 +12,6 @@ from dataall.db.connection import Engine from threading import local -import opensearchpy _request_storage = local() @@ -24,7 +23,6 @@ class RequestContext: db_engine: Engine username: str groups: List[str] - es_engine: opensearchpy.OpenSearch def get_context() -> RequestContext: diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 0974df585..4ed22425e 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -10,8 +10,6 @@ from dataall.db import models from dataall.modules.datasets.db.models import DatasetTable from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.searchproxy import indexers -from dataall.searchproxy.connect import connect from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -22,7 +20,7 @@ log = logging.getLogger(__name__) -def sync_tables(engine, es=None): +def sync_tables(engine): with engine.scoped_session() as session: processed_tables = [] all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( @@ -88,8 +86,7 @@ def sync_tables(engine, es=None): processed_tables.extend(tables) - if es: - DatasetTableIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) + DatasetTableIndexer.upsert_all(session, 
dataset_uri=dataset.datasetUri) except Exception as e: log.error( f'Failed to sync tables for dataset ' @@ -113,5 +110,4 @@ def is_assumable_pivot_role(env: models.Environment): if __name__ == '__main__': ENVNAME = os.environ.get('envname', 'local') ENGINE = get_engine(envname=ENVNAME) - ES = connect(envname=ENVNAME) - sync_tables(engine=ENGINE, es=ES) + sync_tables(engine=ENGINE) diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 5d32800c7..945bdd214 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -5,13 +5,9 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from .. import db -from ..db import get_engine, exceptions -from ..db import models +from dataall.db import get_engine, models from dataall.searchproxy.indexers import DashboardIndexer -from ..searchproxy.connect import ( - connect, -) -from ..utils.alarm_service import AlarmService +from dataall.utils.alarm_service import AlarmService root = logging.getLogger() root.setLevel(logging.INFO) @@ -20,12 +16,8 @@ log = logging.getLogger(__name__) -def index_objects(engine, es): +def index_objects(engine): try: - if not es: - raise exceptions.AWSResourceNotFound( - action='CATALOG_INDEXER_TASK', message='ES configuration not found' - ) indexed_objects_counter = 0 with engine.scoped_session() as session: @@ -58,5 +50,4 @@ def index_objects(engine, es): if __name__ == '__main__': ENVNAME = os.environ.get('envname', 'local') ENGINE = get_engine(envname=ENVNAME) - ES = connect(envname=ENVNAME) - index_objects(engine=ENGINE, es=ES) + index_objects(engine=ENGINE) diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py index 44f79a087..98e99cd73 100644 --- a/backend/local_graphql_server.py +++ b/backend/local_graphql_server.py @@ -86,12 +86,11 @@ def request_context(headers, 
mock=False): tenant_name='dataall', ) - set_context(RequestContext(engine, username, groups, es)) + set_context(RequestContext(engine, username, groups)) # TODO: remove when the migration to a new RequestContext API is complete. Used only for backward compatibility context = Context( engine=engine, - es=es, schema=schema, username=username, groups=groups, From c99ed58a07e5c0c599bb6de1587d0ccd3aadc4e7 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 21 Apr 2023 16:50:52 +0200 Subject: [PATCH 074/346] Moved dataset GraphQL API to modules --- backend/dataall/api/Objects/__init__.py | 1 - .../dataall/modules/datasets/api/__init__.py | 5 +++-- .../datasets/api/dataset}/__init__.py | 2 +- .../datasets/api/dataset}/input_types.py | 4 ++-- .../datasets/api/dataset}/mutations.py | 16 +++++++++++--- .../datasets/api/dataset}/queries.py | 16 ++++++++++---- .../datasets/api/dataset}/resolvers.py | 21 +++++++++---------- .../datasets/api/dataset}/schema.py | 18 +++++++++++++--- .../modules/datasets/api/table/resolvers.py | 3 +-- 9 files changed, 57 insertions(+), 29 deletions(-) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/__init__.py (74%) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/input_types.py (97%) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/mutations.py (86%) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/queries.py (83%) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/resolvers.py (97%) rename backend/dataall/{api/Objects/Dataset => modules/datasets/api/dataset}/schema.py (93%) diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 5cc73fbdf..38d8bfe21 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -17,7 +17,6 @@ DataPipeline, Environment, Activity, - Dataset, Group, Principal, Dashboard, diff --git 
a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py index 7fe2d06a1..cd34a2ac4 100644 --- a/backend/dataall/modules/datasets/api/__init__.py +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -3,7 +3,8 @@ table_column, profiling, storage_location, - table + table, + dataset ) -__all__ = ["table_column", "profiling", "storage_location", "table"] +__all__ = ["table_column", "profiling", "storage_location", "table", "dataset"] diff --git a/backend/dataall/api/Objects/Dataset/__init__.py b/backend/dataall/modules/datasets/api/dataset/__init__.py similarity index 74% rename from backend/dataall/api/Objects/Dataset/__init__.py rename to backend/dataall/modules/datasets/api/dataset/__init__.py index dfa46b264..d286e1a7d 100644 --- a/backend/dataall/api/Objects/Dataset/__init__.py +++ b/backend/dataall/modules/datasets/api/dataset/__init__.py @@ -1,4 +1,4 @@ -from . import ( +from dataall.modules.datasets.api.dataset import ( input_types, mutations, queries, diff --git a/backend/dataall/api/Objects/Dataset/input_types.py b/backend/dataall/modules/datasets/api/dataset/input_types.py similarity index 97% rename from backend/dataall/api/Objects/Dataset/input_types.py rename to backend/dataall/modules/datasets/api/dataset/input_types.py index 16609814d..373dc3834 100644 --- a/backend/dataall/api/Objects/Dataset/input_types.py +++ b/backend/dataall/modules/datasets/api/dataset/input_types.py @@ -1,5 +1,5 @@ -from ... 
import gql -from ....api.constants import GraphQLEnumMapper, SortDirection +from dataall.api import gql +from dataall.api.constants import GraphQLEnumMapper, SortDirection class DatasetSortField(GraphQLEnumMapper): diff --git a/backend/dataall/api/Objects/Dataset/mutations.py b/backend/dataall/modules/datasets/api/dataset/mutations.py similarity index 86% rename from backend/dataall/api/Objects/Dataset/mutations.py rename to backend/dataall/modules/datasets/api/dataset/mutations.py index cc26c219c..a006797ce 100644 --- a/backend/dataall/api/Objects/Dataset/mutations.py +++ b/backend/dataall/modules/datasets/api/dataset/mutations.py @@ -1,10 +1,20 @@ -from ... import gql -from .input_types import ( +from dataall.api import gql +from dataall.modules.datasets.api.dataset.input_types import ( ModifyDatasetInput, NewDatasetInput, ImportDatasetInput, ) -from .resolvers import * +from dataall.modules.datasets.api.dataset.resolvers import ( + create_dataset, + update_dataset, + sync_tables, + generate_dataset_access_token, + save_dataset_summary, + delete_dataset, + import_dataset, + publish_dataset_update, + start_crawler +) createDataset = gql.MutationField( name='createDataset', diff --git a/backend/dataall/api/Objects/Dataset/queries.py b/backend/dataall/modules/datasets/api/dataset/queries.py similarity index 83% rename from backend/dataall/api/Objects/Dataset/queries.py rename to backend/dataall/modules/datasets/api/dataset/queries.py index c71408a1c..d48a78e90 100644 --- a/backend/dataall/api/Objects/Dataset/queries.py +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -1,7 +1,15 @@ -from ... 
import gql -from .input_types import DatasetFilter -from .resolvers import * -from .schema import DatasetSearchResult +from dataall.api import gql +from dataall.modules.datasets.api.dataset.input_types import DatasetFilter +from dataall.modules.datasets.api.dataset.resolvers import ( + get_dataset, + list_datasets, + get_dataset_assume_role_url, + get_dataset_etl_credentials, + get_dataset_summary, + get_file_upload_presigned_url, + list_dataset_share_objects +) +from dataall.modules.datasets.api.dataset.schema import DatasetSearchResult getDataset = gql.QueryField( name='getDataset', diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py similarity index 97% rename from backend/dataall/api/Objects/Dataset/resolvers.py rename to backend/dataall/modules/datasets/api/dataset/resolvers.py index c17deef76..d41163a1b 100644 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -4,19 +4,18 @@ from botocore.config import Config from botocore.exceptions import ClientError -from ..Stack import stack_helper -from .... 
import db -from ....api.constants import ( +from dataall.api.Objects.Stack import stack_helper +from dataall import db +from dataall.api.constants import ( DatasetRole, ) -from ....api.context import Context -from ....aws.handlers.glue import Glue -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import paginate, exceptions, permissions, models -from ....db.api import Dataset, Environment, ShareObject, ResourcePolicy -from ....db.api.organization import Organization -from dataall.searchproxy import indexers +from dataall.api.context import Context +from dataall.aws.handlers.glue import Glue +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import paginate, exceptions, permissions, models +from dataall.db.api import Dataset, Environment, ShareObject, ResourcePolicy +from dataall.db.api.organization import Organization from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer diff --git a/backend/dataall/api/Objects/Dataset/schema.py b/backend/dataall/modules/datasets/api/dataset/schema.py similarity index 93% rename from backend/dataall/api/Objects/Dataset/schema.py rename to backend/dataall/modules/datasets/api/dataset/schema.py index bede581a6..766537e46 100644 --- a/backend/dataall/api/Objects/Dataset/schema.py +++ b/backend/dataall/modules/datasets/api/dataset/schema.py @@ -1,6 +1,18 @@ -from ... 
import gql -from .resolvers import * -from ...constants import DatasetRole, EnvironmentPermission +from dataall.api import gql +from dataall.modules.datasets.api.dataset.resolvers import ( + get_dataset_environment, + get_dataset_organization, + get_dataset_owners_group, + get_dataset_stewards_group, + list_tables, + list_locations, + resolve_user_role, + get_dataset_statistics, + list_dataset_share_objects, + get_dataset_glossary_terms, + resolve_redshift_copy_enabled +) +from dataall.api.constants import DatasetRole, EnvironmentPermission DatasetStatistics = gql.ObjectType( name='DatasetStatistics', diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index ce884bcbe..50878ec44 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -5,14 +5,13 @@ from pyathena import connect from dataall import db -from dataall.api.Objects.Dataset.resolvers import get_dataset +from dataall.modules.datasets.api.dataset.resolvers import get_dataset from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets.db.models import DatasetTable -from dataall.searchproxy import indexers from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService From 9ddbaf33a56bd29659d27f50c74212417e9084e8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 24 Apr 2023 10:56:13 +0200 Subject: [PATCH 075/346] Migrated DatasetService --- .../api/Objects/ShareObject/resolvers.py | 9 +-- backend/dataall/api/Objects/__init__.py | 1 - backend/dataall/aws/handlers/glue.py | 5 +- 
backend/dataall/aws/handlers/redshift.py | 5 +- backend/dataall/aws/handlers/sns.py | 3 +- backend/dataall/cdkproxy/stacks/pipeline.py | 5 +- backend/dataall/db/api/__init__.py | 1 - backend/dataall/db/api/redshift_cluster.py | 5 +- backend/dataall/db/api/share_object.py | 11 ++-- .../modules/datasets/api/dataset/resolvers.py | 55 ++++++++++--------- .../modules/datasets/api/dataset/schema.py | 3 +- .../datasets/api/profiling/resolvers.py | 7 ++- .../api/storage_location/resolvers.py | 5 +- .../modules/datasets/api/table/resolvers.py | 7 ++- .../datasets/handlers/glue_table_handler.py | 4 +- .../datasets/indexers/dataset_indexer.py | 3 +- .../datasets/services/dataset_service.py} | 44 +++++++-------- .../datasets/services/dataset_table.py | 3 +- .../modules/datasets/tasks/tables_syncer.py | 3 +- backend/dataall/tasks/catalog_indexer.py | 4 +- backend/dataall/tasks/stacks_updater.py | 3 +- ...215e_backfill_dataset_table_permissions.py | 4 +- tests/api/client.py | 5 +- tests/api/test_dataset.py | 3 +- tests/api/test_redshift_cluster.py | 3 +- tests/db/test_permission.py | 3 +- tests/tasks/test_catalog_indexer.py | 2 +- 27 files changed, 110 insertions(+), 96 deletions(-) rename backend/dataall/{db/api/dataset.py => modules/datasets/services/dataset_service.py} (93%) diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/api/Objects/ShareObject/resolvers.py index 49f20fc17..4455b0019 100644 --- a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ b/backend/dataall/api/Objects/ShareObject/resolvers.py @@ -8,6 +8,7 @@ from ....aws.handlers.service_handlers import Worker from ....db import models from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -32,7 +33,7 @@ def create_share_object( ): with context.engine.scoped_session() as session: - dataset: models.Dataset = 
db.api.Dataset.get_dataset_by_uri(session, datasetUri) + dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) environment: models.Environment = db.api.Environment.get_environment_by_uri( session, input['environmentUri'] ) @@ -222,7 +223,7 @@ def resolve_user_role(context: Context, source: models.ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) + dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if dataset and dataset.stewards in context.groups: return ShareObjectPermission.Approvers.value if ( @@ -250,7 +251,7 @@ def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - ds: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) + ds: models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if ds: env: models.Environment = db.api.Environment.get_environment_by_uri(session, ds.environmentUri) return { @@ -292,7 +293,7 @@ def resolve_consumption_data(context: Context, source: models.ShareObject, **kwa if not source: return None with context.engine.scoped_session() as session: - ds: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) + ds: models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if ds: S3AccessPointName = utils.slugify( source.datasetUri + '-' + source.principalId, diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 38d8bfe21..94a2ed2ba 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -82,7 +82,6 @@ def adapted(obj, info, **kwargs): response = resolver( context=Namespace( engine=info.context['engine'], - es=info.context['es'], username=info.context['username'], 
groups=info.context['groups'], schema=info.context['schema'], diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index c2a7ecf21..c965db520 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -7,6 +7,7 @@ from ... import db from ...db import models from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger('aws:glue') @@ -374,7 +375,7 @@ def batch_delete_tables(**data): @Worker.handler(path='glue.dataset.crawler.create') def create_crawler(engine, task: models.Task): with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( + dataset: models.Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) location = task.payload.get('location') @@ -434,7 +435,7 @@ def get_glue_crawler(data): @Worker.handler(path='glue.crawler.start') def start_crawler(engine, task: models.Task): with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( + dataset: models.Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) location = task.payload.get('location') diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 1fe6c738c..7558302a7 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -12,6 +12,7 @@ # TODO should be migrated in the redshift module from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -371,7 +372,7 @@ def get_cluster_catalog_databases(session, task): Redshift.set_cluster_secrets(secretsmanager, cluster) catalog_databases = [] for d in cluster_datasets: - dataset = 
db.api.Dataset.get_dataset_by_uri(session, d.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, d.datasetUri) if dataset.environmentUri != cluster.environmentUri: catalog_databases.append(f'{dataset.GlueDatabaseName}shared') else: @@ -445,7 +446,7 @@ def copy_data(engine, task: models.Task): task.targetUri ) - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( + dataset: models.Dataset = DatasetService.get_dataset_by_uri( session, task.payload['datasetUri'] ) diff --git a/backend/dataall/aws/handlers/sns.py b/backend/dataall/aws/handlers/sns.py index 3f7c87ba9..0dcd72414 100644 --- a/backend/dataall/aws/handlers/sns.py +++ b/backend/dataall/aws/handlers/sns.py @@ -7,6 +7,7 @@ from .sts import SessionHelper from ... import db from ...db import models +from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -19,7 +20,7 @@ def __init__(self): @Worker.handler(path='sns.dataset.publish_update') def publish_update(engine, task: models.Task): with engine.scoped_session() as session: - dataset = db.api.Dataset.get_dataset_by_uri(session, task.targetUri) + dataset = DatasetService.get_dataset_by_uri(session, task.targetUri) environment = db.api.Environment.get_environment_by_uri( session, dataset.environmentUri ) diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py index 616151a4e..eac36500a 100644 --- a/backend/dataall/cdkproxy/stacks/pipeline.py +++ b/backend/dataall/cdkproxy/stacks/pipeline.py @@ -20,9 +20,10 @@ from ...aws.handlers.sts import SessionHelper from ... 
import db from ...db import models -from ...db.api import Environment, Pipeline, Dataset +from ...db.api import Environment, Pipeline from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil +from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -83,7 +84,7 @@ def get_env_team(self, pipeline: models.DataPipeline) -> models.EnvironmentGroup def get_dataset(self, dataset_uri) -> models.Dataset: engine = self.get_engine() with engine.scoped_session() as session: - ds = Dataset.get_dataset_by_uri( + ds = DatasetService.get_dataset_by_uri( session, dataset_uri ) return ds diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index 7bf8e0a4b..ed19787aa 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -11,7 +11,6 @@ from .glossary import Glossary from .vote import Vote from .share_object import ShareObject, ShareObjectSM, ShareItemSM -from .dataset import Dataset from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index de5799180..1a20929fe 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -3,13 +3,14 @@ from sqlalchemy import and_, or_, literal from .. import models, api, exceptions, paginate, permissions -from . import has_resource_perm, ResourcePolicy, Environment, Dataset +from . 
import has_resource_perm, ResourcePolicy, Environment from dataall.modules.datasets.db.models import DatasetTable from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) from dataall.utils.slugify import slugify +from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -394,7 +395,7 @@ def add_dataset(session, username, groups, uri, data=None, check_perm=True): message=f'Cluster {cluster.name} is not on available state ({cluster.status})', ) - dataset = Dataset.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) + dataset = DatasetService.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) exists = session.query(models.RedshiftClusterDataset).get( (uri, data['datasetUri']) diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index 4fddda5e9..79ace3c15 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -11,6 +11,7 @@ from .. 
import models, exceptions, permissions, paginate from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -346,7 +347,7 @@ def create_share_object( itemType = data.get('itemType') dataset: models.Dataset = data.get( - 'dataset', api.Dataset.get_dataset_by_uri(session, datasetUri) + 'dataset', DatasetService.get_dataset_by_uri(session, datasetUri) ) environment: models.Environment = data.get( 'environment', @@ -539,7 +540,7 @@ def submit_share_object( check_perm: bool = False, ) -> models.ShareObject: share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObject.get_share_items_states(session, uri) valid_states = [ShareItemStatus.PendingApproval.value] @@ -577,7 +578,7 @@ def approve_share_object( check_perm: bool = False, ) -> models.ShareObject: share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObject.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) @@ -623,7 +624,7 @@ def reject_share_object( ) -> models.ShareObject: share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObject.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) @@ -656,7 +657,7 @@ def revoke_items_share_object( ) -> models.ShareObject: share = ShareObject.get_share_by_uri(session, uri) 
- dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) revoked_items_states = ShareObject.get_share_items_states(session, uri, data.get("revokedItemUris")) revoked_items = [ShareObject.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index d41163a1b..1dabe9c57 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -14,9 +14,10 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import paginate, exceptions, permissions, models -from dataall.db.api import Dataset, Environment, ShareObject, ResourcePolicy +from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -25,7 +26,7 @@ def create_dataset(context: Context, source, input=None): with context.engine.scoped_session() as session: - dataset = Dataset.create_dataset( + dataset = DatasetService.create_dataset( session=session, username=context.username, groups=context.groups, @@ -33,7 +34,7 @@ def create_dataset(context: Context, source, input=None): data=input, check_perm=True, ) - Dataset.create_dataset_stack(session, dataset) + DatasetService.create_dataset_stack(session, dataset) DatasetIndexer.upsert( session=session, dataset_uri=dataset.datasetUri @@ -57,7 +58,7 @@ def import_dataset(context: Context, source, input=None): raise 
exceptions.RequiredParameter('group') with context.engine.scoped_session() as session: - dataset = Dataset.create_dataset( + dataset = DatasetService.create_dataset( session=session, username=context.username, groups=context.groups, @@ -71,7 +72,7 @@ def import_dataset(context: Context, source, input=None): dataset.importedKmsKey = True if input.get('KmsKeyId') else False dataset.importedAdminRole = True if input.get('adminRoleName') else False - Dataset.create_dataset_stack(session, dataset) + DatasetService.create_dataset_stack(session, dataset) DatasetIndexer.upsert( session=session, dataset_uri=dataset.datasetUri @@ -86,7 +87,7 @@ def import_dataset(context: Context, source, input=None): def get_dataset(context, source, datasetUri=None): with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset( + dataset = DatasetService.get_dataset( session=session, username=context.username, groups=context.groups, @@ -124,7 +125,7 @@ def get_file_upload_presigned_url( context, source, datasetUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) s3_client = SessionHelper.remote_session(dataset.AwsAccountId).client( 's3', @@ -150,7 +151,7 @@ def list_datasets(context: Context, source, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return Dataset.paginated_user_datasets( + return DatasetService.paginated_user_datasets( session, context.username, context.groups, uri=None, data=filter ) @@ -176,7 +177,7 @@ def list_tables(context, source: models.Dataset, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_tables( + return DatasetService.paginated_dataset_tables( session=session, username=context.username, 
groups=context.groups, @@ -213,7 +214,7 @@ def get_dataset_stewards_group(context, source: models.Dataset, **kwargs): def update_dataset(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - updated_dataset = Dataset.update_dataset( + updated_dataset = DatasetService.update_dataset( session=session, username=context.username, groups=context.groups, @@ -232,7 +233,7 @@ def get_dataset_statistics(context: Context, source: models.Dataset, **kwargs): if not source: return None with context.engine.scoped_session() as session: - count_tables = db.api.Dataset.count_dataset_tables(session, source.datasetUri) + count_tables = DatasetService.count_dataset_tables(session, source.datasetUri) count_locations = DatasetLocationService.count_dataset_locations( session, source.datasetUri ) @@ -272,7 +273,7 @@ def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None resource_uri=datasetUri, permission_name=permissions.CREDENTIALS_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) if dataset.SamlAdminGroupName not in context.groups: share = ShareObject.get_share_by_dataset_attributes( session=session, @@ -315,7 +316,7 @@ def sync_tables(context: Context, source, datasetUri: str = None): resource_uri=datasetUri, permission_name=permissions.SYNC_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) task = models.Task( action='glue.dataset.database.tables', @@ -328,7 +329,7 @@ def sync_tables(context: Context, source, datasetUri: str = None): session=session, dataset_uri=dataset.datasetUri ) DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) - return Dataset.paginated_dataset_tables( + return DatasetService.paginated_dataset_tables( session=session, username=context.username, groups=context.groups, @@ 
-348,7 +349,7 @@ def start_crawler(context: Context, source, datasetUri: str, input: dict = None) permission_name=permissions.CRAWL_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) location = ( f's3://{dataset.S3BucketName}/{input.get("prefix")}' @@ -393,7 +394,7 @@ def list_dataset_share_objects(context, source, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_shares( + return DatasetService.paginated_dataset_shares( session=session, username=context.username, groups=context.groups, @@ -412,7 +413,7 @@ def generate_dataset_access_token(context, source, datasetUri: str = None): resource_uri=datasetUri, permission_name=permissions.CREDENTIALS_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) aws_session = SessionHelper.get_session( @@ -430,7 +431,7 @@ def generate_dataset_access_token(context, source, datasetUri: str = None): def get_dataset_summary(context, source, datasetUri: str = None): with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) environment = Environment.get_environment_by_uri( session, dataset.environmentUri ) @@ -468,7 +469,7 @@ def save_dataset_summary( resource_uri=datasetUri, permission_name=permissions.SUMMARY_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) environment = Environment.get_environment_by_uri( session, dataset.environmentUri ) @@ -506,7 +507,7 @@ def get_crawler(context, source, datasetUri: str = None, name: str = None): resource_uri=datasetUri, 
permission_name=permissions.CRAWL_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) aws_session = SessionHelper.remote_session(dataset.AwsAccountId) client = aws_session.client('glue', region_name=dataset.region) @@ -531,18 +532,18 @@ def delete_dataset( resource_uri=datasetUri, permission_name=permissions.DELETE_DATASET, ) - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, datasetUri) - env: models.Environment = db.api.Environment.get_environment_by_uri( + dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) + env: models.Environment = Environment.get_environment_by_uri( session, dataset.environmentUri ) - shares = db.api.Dataset.list_dataset_shares_with_existing_shared_items(session, datasetUri) + shares = DatasetService.list_dataset_shares_with_existing_shared_items(session, datasetUri) if shares: raise exceptions.UnauthorizedOperation( action=permissions.DELETE_DATASET, message=f'Dataset {dataset.name} is shared with other teams. 
' 'Revoke all dataset shares before deletion.', ) - redshift_datasets = db.api.Dataset.list_dataset_redshift_clusters( + redshift_datasets = DatasetService.list_dataset_redshift_clusters( session, datasetUri ) if redshift_datasets: @@ -552,7 +553,7 @@ def delete_dataset( 'Remove clusters associations first.', ) - tables = [t.tableUri for t in Dataset.get_dataset_tables(session, datasetUri)] + tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, datasetUri)] for uri in tables: DatasetIndexer.delete_doc(doc_id=uri) @@ -562,7 +563,7 @@ def delete_dataset( DatasetIndexer.delete_doc(doc_id=datasetUri) - Dataset.delete_dataset( + DatasetService.delete_dataset( session=session, username=context.username, groups=context.groups, @@ -608,7 +609,7 @@ def publish_dataset_update( resource_uri=datasetUri, permission_name=permissions.SUBSCRIPTIONS_DATASET, ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, datasetUri) env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: raise Exception( diff --git a/backend/dataall/modules/datasets/api/dataset/schema.py b/backend/dataall/modules/datasets/api/dataset/schema.py index 766537e46..4786df52a 100644 --- a/backend/dataall/modules/datasets/api/dataset/schema.py +++ b/backend/dataall/modules/datasets/api/dataset/schema.py @@ -10,7 +10,8 @@ get_dataset_statistics, list_dataset_share_objects, get_dataset_glossary_terms, - resolve_redshift_copy_enabled + resolve_redshift_copy_enabled, + get_dataset_stack ) from dataall.api.constants import DatasetRole, EnvironmentPermission diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 62ff64942..d1eeaf3c9 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -6,6 +6,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import api, permissions, models from dataall.db.api import ResourcePolicy +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.db.models import DatasetProfilingRun @@ -17,7 +18,7 @@ def resolve_dataset(context, source: DatasetProfilingRun): if not source: return None with context.engine.scoped_session() as session: - return api.Dataset.get_dataset_by_uri( + return DatasetService.get_dataset_by_uri( session=session, dataset_uri=source.datasetUri ) @@ -32,7 +33,7 @@ def start_profiling_run(context: Context, source, input: dict = None): resource_uri=input['datasetUri'], permission_name=permissions.PROFILE_DATASET_TABLE, ) - dataset = api.Dataset.get_dataset_by_uri(session, input['datasetUri']) + dataset = DatasetService.get_dataset_by_uri(session, input['datasetUri']) run = DatasetProfilingService.start_profiling( session=session, @@ -101,7 +102,7 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): if run: if not run.results: table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) - dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) environment = api.Environment.get_environment_by_uri( session, dataset.environmentUri ) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 6f8d82e43..ef06bbba6 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -4,14 +4,13 @@ from 
dataall.db.api import ( ResourcePolicy, Glossary, - Dataset, Environment, ) from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer -from dataall.searchproxy import indexers from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_service import DatasetService def create_storage_location( @@ -110,7 +109,7 @@ def publish_location_update(context: Context, source, locationUri: str = None): resource_uri=location.datasetUri, permission_name=permissions.UPDATE_DATASET_FOLDER, ) - dataset = Dataset.get_dataset_by_uri(session, location.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, location.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: raise Exception( diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 50878ec44..329828b0d 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -12,6 +12,7 @@ from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -68,7 +69,7 @@ def update_table(context, source, tableUri: str = None, input: dict = None): with context.engine.scoped_session() as session: table = DatasetTableService.get_dataset_table_by_uri(session, 
tableUri) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) input['table'] = table input['tableUri'] = table.tableUri @@ -107,7 +108,7 @@ def preview(context, source, tableUri: str = None): table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( session, tableUri ) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) if ( dataset.confidentiality != models.ConfidentialityClassification.Unclassified.value @@ -198,7 +199,7 @@ def publish_table_update(context: Context, source, tableUri: str = None): resource_uri=table.datasetUri, permission_name=permissions.UPDATE_DATASET_TABLE, ) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: raise Exception( diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index 9bb50c501..d2218884b 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.db import models -from dataall.db.api import Dataset +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table import DatasetTableService log = logging.getLogger(__name__) @@ -18,7 +18,7 @@ class DatasetColumnGlueHandler: @Worker.handler(path='glue.dataset.database.tables') def list_tables(engine, task: models.Task): with engine.scoped_session() as session: - dataset: 
models.Dataset = Dataset.get_dataset_by_uri( + dataset: models.Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) account_id = dataset.AwsAccountId diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 8cb0b7873..665e2a9c4 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -3,6 +3,7 @@ from dataall import db from dataall.db import models from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.upsert import BaseIndexer @@ -43,7 +44,7 @@ def upsert(cls, session, dataset_uri: str): .filter(models.Dataset.datasetUri == dataset_uri) .first() ) - count_tables = db.api.Dataset.count_dataset_tables(session, dataset_uri) + count_tables = DatasetService.count_dataset_tables(session, dataset_uri) count_folders = DatasetLocationService.count_dataset_locations(session, dataset_uri) count_upvotes = db.api.Vote.count_upvotes( session, None, None, dataset_uri, {'targetType': 'dataset'} diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/modules/datasets/services/dataset_service.py similarity index 93% rename from backend/dataall/db/api/dataset.py rename to backend/dataall/modules/datasets/services/dataset_service.py index d328dddb5..c513c574b 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -4,7 +4,7 @@ from sqlalchemy import and_, or_ from sqlalchemy.orm import Query -from . import ( +from dataall.db.api import ( Environment, has_tenant_perm, has_resource_perm, @@ -13,9 +13,9 @@ Vote, Stack, ) -from . import Organization -from .. 
import models, api, exceptions, permissions, paginate -from ..models.Enums import Language, ConfidentialityClassification +from dataall.db.api import Organization +from dataall.db import models, api, exceptions, permissions, paginate +from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable from dataall.modules.datasets.services.dataset_location import DatasetLocationService @@ -27,7 +27,7 @@ logger = logging.getLogger(__name__) -class Dataset: +class DatasetService: @staticmethod @has_tenant_perm(permissions.MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET) @@ -96,7 +96,7 @@ def create_dataset( session.add(dataset) session.commit() - Dataset._set_dataset_aws_resources(dataset, data, environment) + DatasetService._set_dataset_aws_resources(dataset, data, environment) activity = models.Activity( action='dataset:create', @@ -208,7 +208,7 @@ def get_dataset( data: dict = None, check_perm: bool = False, ) -> models.Dataset: - return Dataset.get_dataset_by_uri(session, uri) + return DatasetService.get_dataset_by_uri(session, uri) @staticmethod def get_dataset_by_uri(session, dataset_uri) -> models.Dataset: @@ -257,7 +257,7 @@ def paginated_user_datasets( session, username, groups, uri, data=None, check_perm=None ) -> dict: return paginate( - query=Dataset.query_user_datasets(session, username, groups, data), + query=DatasetService.query_user_datasets(session, username, groups, data), page=data.get('page', 1), page_size=data.get('pageSize', 10), ).to_dict() @@ -297,19 +297,19 @@ def paginated_dataset_tables( def update_dataset( session, username, groups, uri, data=None, check_perm=None ) -> models.Dataset: - dataset: models.Dataset = Dataset.get_dataset_by_uri(session, uri) + dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, uri) if data and isinstance(data, dict): for k in 
data.keys(): if k != 'stewards': setattr(dataset, k, data.get(k)) if data.get('stewards') and data.get('stewards') != dataset.stewards: if data.get('stewards') != dataset.SamlAdminGroupName: - Dataset.transfer_stewardship_to_new_stewards( + DatasetService.transfer_stewardship_to_new_stewards( session, dataset, data['stewards'] ) dataset.stewards = data['stewards'] else: - Dataset.transfer_stewardship_to_owners(session, dataset) + DatasetService.transfer_stewardship_to_owners(session, dataset) dataset.stewards = dataset.SamlAdminGroupName ResourcePolicy.attach_resource_policy( @@ -319,7 +319,7 @@ def update_dataset( resource_uri=dataset.datasetUri, resource_type=models.Dataset.__name__, ) - Dataset.update_dataset_glossary_terms(session, username, uri, data) + DatasetService.update_dataset_glossary_terms(session, username, uri, data) activity = models.Activity( action='dataset:update', label='dataset:update', @@ -367,7 +367,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): resource_type=models.Dataset.__name__, ) - dataset_tables = [t.tableUri for t in Dataset.get_dataset_tables(session, dataset.datasetUri)] + dataset_tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, dataset.datasetUri)] for tableUri in dataset_tables: if dataset.stewards != env.SamlGroupName: ResourcePolicy.delete_resource_policy( @@ -440,7 +440,7 @@ def update_bucket_status(session, dataset_uri): """ helper method to update the dataset bucket status """ - dataset = Dataset.get_dataset_by_uri(session, dataset_uri) + dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) dataset.bucketCreated = True return dataset @@ -449,7 +449,7 @@ def update_glue_database_status(session, dataset_uri): """ helper method to update the dataset db status """ - dataset = Dataset.get_dataset_by_uri(session, dataset_uri) + dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) dataset.glueDatabaseCreated = True @staticmethod @@ -474,7 +474,7 @@ def 
query_dataset_shares(session, dataset_uri) -> Query: def paginated_dataset_shares( session, username, groups, uri, data=None, check_perm=None ) -> [models.ShareObject]: - query = Dataset.query_dataset_shares(session, uri) + query = DatasetService.query_dataset_shares(session, uri) return paginate( query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) ).to_dict() @@ -482,7 +482,7 @@ def paginated_dataset_shares( @staticmethod def list_dataset_shares(session, dataset_uri) -> [models.ShareObject]: """return the dataset shares""" - query = Dataset.query_dataset_shares(session, dataset_uri) + query = DatasetService.query_dataset_shares(session, dataset_uri) return query.all() @staticmethod @@ -511,10 +511,10 @@ def list_dataset_redshift_clusters( def delete_dataset( session, username, groups, uri, data=None, check_perm=None ) -> bool: - dataset = Dataset.get_dataset_by_uri(session, uri) - Dataset._delete_dataset_shares_with_no_shared_items(session, uri) - Dataset._delete_dataset_term_links(session, uri) - Dataset._delete_dataset_tables(session, dataset.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, uri) + DatasetService._delete_dataset_shares_with_no_shared_items(session, uri) + DatasetService._delete_dataset_term_links(session, uri) + DatasetService._delete_dataset_tables(session, dataset.datasetUri) DatasetLocationService.delete_dataset_locations(session, dataset.datasetUri) KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') Vote.delete_votes(session, dataset.datasetUri, 'dataset') @@ -555,7 +555,7 @@ def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): @staticmethod def _delete_dataset_term_links(session, uri): - tables = [t.tableUri for t in Dataset.get_dataset_tables(session, uri)] + tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, uri)] for tableUri in tables: term_links = ( session.query(models.TermLink) diff --git 
a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 7776aa2ef..0c4a72172 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -5,6 +5,7 @@ from dataall.db import models, api, permissions, exceptions, paginate from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment from dataall.db.models import Dataset +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable @@ -23,7 +24,7 @@ def create_dataset_table( data: dict = None, check_perm: bool = False, ) -> DatasetTable: - dataset = api.Dataset.get_dataset_by_uri(session, uri) + dataset = DatasetService.get_dataset_by_uri(session, uri) exists = ( session.query(DatasetTable) .filter( diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 4ed22425e..a503ac8f5 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -10,6 +10,7 @@ from dataall.db import models from dataall.modules.datasets.db.models import DatasetTable from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -23,7 +24,7 @@ def sync_tables(engine): with engine.scoped_session() as session: processed_tables = [] - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( + all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( session ) log.info(f'Found {len(all_datasets)} datasets for tables sync') diff 
--git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 945bdd214..7bea9d965 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -4,7 +4,7 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from .. import db +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.db import get_engine, models from dataall.searchproxy.indexers import DashboardIndexer from dataall.utils.alarm_service import AlarmService @@ -21,7 +21,7 @@ def index_objects(engine): indexed_objects_counter = 0 with engine.scoped_session() as session: - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( + all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( session ) log.info(f'Found {len(all_datasets)} datasets') diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 03bfd1005..f4908b8a1 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -2,6 +2,7 @@ import os import sys +from dataall.modules.datasets.services.dataset_service import DatasetService from .. 
import db from ..db import models from ..aws.handlers.ecs import Ecs @@ -18,7 +19,7 @@ def update_stacks(engine, envname): with engine.scoped_session() as session: - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( + all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( session ) all_environments: [ diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 32ca6abe0..6845c2484 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -13,7 +13,7 @@ from dataall.db import api, models, permissions, utils, Resource from datetime import datetime from dataall.db.models.Enums import ShareObjectStatus, ShareableType - +from dataall.modules.datasets.services.dataset_service import DatasetService # revision identifiers, used by Alembic. 
revision = 'd05f9a5b215e' @@ -84,7 +84,7 @@ def upgrade(): print('Back-filling dataset table permissions for owners/stewards...') dataset_tables: [DatasetTable] = session.query(DatasetTable).filter(DatasetTable.deleted.is_(None)).all() for table in dataset_tables: - dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) env = api.Environment.get_environment_by_uri(session, dataset.environmentUri) groups = set([dataset.SamlAdminGroupName, env.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) diff --git a/tests/api/client.py b/tests/api/client.py index f54c45d66..36c6bd8e8 100644 --- a/tests/api/client.py +++ b/tests/api/client.py @@ -33,7 +33,7 @@ def query( @pytest.fixture(scope='module', autouse=True) -def app(db, es): +def app(db): app = Flask('tests') schema = dataall.api.get_executable_schema() @@ -63,7 +63,7 @@ def graphql_server(): username = request.headers.get('Username', 'anonym') groups = json.loads(request.headers.get('Groups', '[]')) - set_context(RequestContext(db, username, groups, es)) + set_context(RequestContext(db, username, groups)) success, result = graphql_sync( schema, @@ -73,7 +73,6 @@ def graphql_server(): 'engine': db, 'username': username, 'groups': groups, - 'es': es, }, debug=app.debug, ) diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 9dc5d37f5..8aac00702 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -4,6 +4,7 @@ import dataall from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.services.dataset_service import DatasetService @pytest.fixture(scope='module', autouse=True) @@ -490,7 +491,7 @@ def test_get_dataset_by_prefix(db, env1, org1): ) session.add(dataset) session.commit() - dataset_found: dataall.db.models.Dataset = dataall.db.api.Dataset.get_dataset_by_bucket_name( + dataset_found: 
dataall.db.models.Dataset = DatasetService.get_dataset_by_bucket_name( session, bucket='s3a://insite-data-lake-raw-alpha-eu-west-1/booker/volume_constraints/insite_version=1/volume_constraints.delta'.split( '//' diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index c9a8fac73..9fc1d9d06 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -4,6 +4,7 @@ import pytest import dataall from dataall.api.constants import RedshiftClusterRole +from dataall.modules.datasets.services.dataset_service import DatasetService @pytest.fixture(scope='module', autouse=True) @@ -41,7 +42,7 @@ def dataset1(db, user, env1, org1, dataset, group, group3) -> dataall.db.models. IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', stewards=group3.name, ) - dataset = dataall.db.api.Dataset.create_dataset( + dataset = DatasetService.create_dataset( session=session, username=user.userName, groups=[group.name], diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 148cd3051..289f59f0b 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -4,6 +4,7 @@ from dataall.api.constants import OrganisationUserRole from dataall.db import exceptions from dataall.db.models.Permission import PermissionType +from dataall.modules.datasets.services.dataset_service import DatasetService @pytest.fixture(scope='module') @@ -257,7 +258,7 @@ def test_create_dataset(db, env, user, group, group_user, dataset, permissions, IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', ) - dataset = dataall.db.api.Dataset.create_dataset( + dataset = DatasetService.create_dataset( session=session, username=user.userName, groups=[group.name], diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 8da53e3d2..3944aed8b 100644 --- a/tests/tasks/test_catalog_indexer.py +++ 
b/tests/tasks/test_catalog_indexer.py @@ -91,6 +91,6 @@ def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): 'dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value=sync_dataset ) indexed_objects_counter = dataall.tasks.catalog_indexer.index_objects( - engine=db, es=True + engine=db ) assert indexed_objects_counter == 2 From ad845e74ca343ff7fa07e854b9ee629747ffb10d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 24 Apr 2023 10:58:00 +0200 Subject: [PATCH 076/346] Removed unused dataset method --- backend/dataall/cdkproxy/stacks/pipeline.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py index eac36500a..f40b80576 100644 --- a/backend/dataall/cdkproxy/stacks/pipeline.py +++ b/backend/dataall/cdkproxy/stacks/pipeline.py @@ -23,7 +23,6 @@ from ...db.api import Environment, Pipeline from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil -from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -81,14 +80,6 @@ def get_env_team(self, pipeline: models.DataPipeline) -> models.EnvironmentGroup ) return env - def get_dataset(self, dataset_uri) -> models.Dataset: - engine = self.get_engine() - with engine.scoped_session() as session: - ds = DatasetService.get_dataset_by_uri( - session, dataset_uri - ) - return ds - def __init__(self, scope, id, target_uri: str = None, **kwargs): kwargs.setdefault("tags", {}).update({"utility": "dataall-data-pipeline"}) super().__init__( From 32be3ee2032c3df9a47ab788ef223ef3f9a94440 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 24 Apr 2023 16:30:07 +0200 Subject: [PATCH 077/346] DatasetQualityRule is not used --- backend/dataall/db/models/DatasetQualityRule.py | 13 ------------- backend/dataall/db/models/__init__.py | 1 - 2 files changed, 14 deletions(-) delete mode 
100644 backend/dataall/db/models/DatasetQualityRule.py diff --git a/backend/dataall/db/models/DatasetQualityRule.py b/backend/dataall/db/models/DatasetQualityRule.py deleted file mode 100644 index d5befa805..000000000 --- a/backend/dataall/db/models/DatasetQualityRule.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.dialects.postgresql import JSON - -from .. import Base, Resource, utils - - -class DatasetQualityRule(Resource, Base): - __tablename__ = 'dataset_quality_rule' - datasetUri = Column(String, nullable=False) - ruleUri = Column(String, primary_key=True, default=utils.uuid('dqlrule')) - query = Column(String, nullable=False) - status = Column(String, nullable=False, default='Created') - logs = Column(JSON, default={}) diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 123547f8c..068e5e495 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -5,7 +5,6 @@ from .DashboardShare import DashboardShare from .DashboardShare import DashboardShareStatus from .Dataset import Dataset -from .DatasetQualityRule import DatasetQualityRule from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup from .FeedMessage import FeedMessage From f95566c305f9ab903c35372cc6352abb3aff049f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 24 Apr 2023 16:57:32 +0200 Subject: [PATCH 078/346] Moved the Dataset models to modules --- .../api/Objects/ShareObject/resolvers.py | 12 ++-- .../dataall/api/Objects/Stack/stack_helper.py | 3 +- backend/dataall/aws/handlers/glue.py | 7 +- backend/dataall/aws/handlers/redshift.py | 4 +- .../dataall/cdkproxy/stacks/environment.py | 9 +-- .../cdkproxy/stacks/policies/data_policy.py | 5 +- backend/dataall/db/api/environment.py | 47 +++++++------- backend/dataall/db/api/notification.py | 9 +-- backend/dataall/db/api/redshift_cluster.py | 53 ++++++++------- 
backend/dataall/db/api/share_object.py | 18 +++--- backend/dataall/db/models/Dataset.py | 64 ------------------- backend/dataall/db/models/__init__.py | 1 - backend/dataall/modules/datasets/__init__.py | 5 +- .../modules/datasets/api/dataset/resolvers.py | 25 ++++---- .../api/storage_location/resolvers.py | 4 +- .../modules/datasets/api/table/resolvers.py | 4 +- .../modules/datasets/cdk/dataset_stack.py | 12 ++-- backend/dataall/modules/datasets/db/models.py | 61 +++++++++++++++++- .../handlers/glue_profiling_handler.py | 6 +- .../datasets/handlers/glue_table_handler.py | 5 +- .../datasets/indexers/dataset_indexer.py | 37 +++++------ .../datasets/indexers/location_indexer.py | 18 +++--- .../datasets/indexers/table_indexer.py | 22 +++---- .../services/dataset_profiling_service.py | 4 +- .../datasets/services/dataset_service.py | 56 ++++++++-------- .../services/dataset_share_service.py | 26 ++++---- .../datasets/tasks/subscription_service.py | 8 +-- .../modules/datasets/tasks/tables_syncer.py | 6 +- .../dataall/tasks/bucket_policy_updater.py | 8 +-- backend/dataall/tasks/catalog_indexer.py | 5 +- .../share_managers/lf_share_manager.py | 8 +-- .../share_managers/s3_share_manager.py | 10 +-- .../lf_process_cross_account_share.py | 4 +- .../lf_process_same_account_share.py | 4 +- .../share_processors/s3_process_share.py | 11 ++-- backend/dataall/tasks/stacks_updater.py | 5 +- backend/dataall/utils/alarm_service.py | 4 +- tests/api/conftest.py | 16 ++--- tests/api/test_dashboards.py | 3 +- tests/api/test_dataset.py | 12 ++-- tests/api/test_dataset_location.py | 3 +- tests/api/test_dataset_profiling.py | 4 +- tests/api/test_dataset_table.py | 4 +- tests/api/test_environment.py | 3 +- tests/api/test_glossary.py | 4 +- tests/api/test_keyvaluetag.py | 3 +- tests/api/test_redshift_cluster.py | 5 +- tests/api/test_share.py | 20 +++--- tests/api/test_vote.py | 3 +- tests/cdkproxy/conftest.py | 10 +-- tests/db/test_permission.py | 5 +- tests/searchproxy/test_indexers.py | 4 +- 
tests/tasks/conftest.py | 12 ++-- tests/tasks/test_catalog_indexer.py | 2 +- tests/tasks/test_lf_share_manager.py | 30 ++++----- tests/tasks/test_policies.py | 4 +- tests/tasks/test_s3_share_manager.py | 35 +++++----- tests/tasks/test_stacks_updater.py | 3 +- tests/tasks/test_subscriptions.py | 6 +- tests/tasks/test_tables_sync.py | 4 +- 60 files changed, 395 insertions(+), 390 deletions(-) delete mode 100644 backend/dataall/db/models/Dataset.py diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/api/Objects/ShareObject/resolvers.py index 4455b0019..f2e58fa14 100644 --- a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ b/backend/dataall/api/Objects/ShareObject/resolvers.py @@ -7,7 +7,7 @@ from ....api.context import Context from ....aws.handlers.service_handlers import Worker from ....db import models -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -20,7 +20,7 @@ def get_share_object_dataset(context, source, **kwargs): share: models.ShareObject = session.query(models.ShareObject).get( source.shareUri ) - return session.query(models.Dataset).get(share.datasetUri) + return session.query(Dataset).get(share.datasetUri) def create_share_object( @@ -33,7 +33,7 @@ def create_share_object( ): with context.engine.scoped_session() as session: - dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) + dataset: Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) environment: models.Environment = db.api.Environment.get_environment_by_uri( session, input['environmentUri'] ) @@ -223,7 +223,7 @@ def resolve_user_role(context: Context, source: models.ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - dataset: 
models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + dataset: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if dataset and dataset.stewards in context.groups: return ShareObjectPermission.Approvers.value if ( @@ -251,7 +251,7 @@ def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - ds: models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + ds: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if ds: env: models.Environment = db.api.Environment.get_environment_by_uri(session, ds.environmentUri) return { @@ -293,7 +293,7 @@ def resolve_consumption_data(context: Context, source: models.ShareObject, **kwa if not source: return None with context.engine.scoped_session() as session: - ds: models.Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + ds: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) if ds: S3AccessPointName = utils.slugify( source.datasetUri + '-' + source.principalId, diff --git a/backend/dataall/api/Objects/Stack/stack_helper.py b/backend/dataall/api/Objects/Stack/stack_helper.py index 659dd2ab0..5ffffb790 100644 --- a/backend/dataall/api/Objects/Stack/stack_helper.py +++ b/backend/dataall/api/Objects/Stack/stack_helper.py @@ -11,6 +11,7 @@ from dataall.core.config import config from dataall.core.context import get_context +from dataall.modules.datasets.db.models import Dataset def get_stack_with_cfn_resources(targetUri: str, environmentUri: str): @@ -84,7 +85,7 @@ def deploy_stack(targetUri): return stack -def deploy_dataset_stack(dataset: models.Dataset): +def deploy_dataset_stack(dataset: Dataset): """ Each dataset stack deployment triggers environment stack update to rebuild teams IAM roles data access policies diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 
c965db520..ba4d2a37c 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -4,9 +4,8 @@ from .service_handlers import Worker from .sts import SessionHelper -from ... import db from ...db import models -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger('aws:glue') @@ -375,7 +374,7 @@ def batch_delete_tables(**data): @Worker.handler(path='glue.dataset.crawler.create') def create_crawler(engine, task: models.Task): with engine.scoped_session() as session: - dataset: models.Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) location = task.payload.get('location') @@ -435,7 +434,7 @@ def get_glue_crawler(data): @Worker.handler(path='glue.crawler.start') def start_crawler(engine, task: models.Task): with engine.scoped_session() as session: - dataset: models.Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) location = task.payload.get('location') diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 7558302a7..f606b9a79 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -11,7 +11,7 @@ from ...db import models # TODO should be migrated in the redshift module from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -446,7 +446,7 @@ def copy_data(engine, task: models.Task): task.targetUri ) - dataset: models.Dataset = 
DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetService.get_dataset_by_uri( session, task.payload['datasetUri'] ) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index 55bccca0e..8c9440933 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -40,6 +40,7 @@ from ...db import models from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil +from dataall.modules.datasets.db.models import Dataset logger = logging.getLogger(__name__) @@ -130,7 +131,7 @@ def get_environment_admins_group( @staticmethod def get_environment_group_datasets( engine, environment: models.Environment, group: str - ) -> [models.Dataset]: + ) -> [Dataset]: with engine.scoped_session() as session: return db.api.Environment.list_group_datasets( session, @@ -144,12 +145,12 @@ def get_environment_group_datasets( @staticmethod def get_all_environment_datasets( engine, environment: models.Environment - ) -> [models.Dataset]: + ) -> [Dataset]: with engine.scoped_session() as session: return ( - session.query(models.Dataset) + session.query(Dataset) .filter( - models.Dataset.environmentUri == environment.environmentUri, + Dataset.environmentUri == environment.environmentUri, ) .all() ) diff --git a/backend/dataall/cdkproxy/stacks/policies/data_policy.py b/backend/dataall/cdkproxy/stacks/policies/data_policy.py index be926b4ce..b5842afea 100644 --- a/backend/dataall/cdkproxy/stacks/policies/data_policy.py +++ b/backend/dataall/cdkproxy/stacks/policies/data_policy.py @@ -4,6 +4,7 @@ from aws_cdk import aws_iam as iam from ....db import models +from dataall.modules.datasets.db.models import Dataset logger = logging.getLogger() @@ -21,7 +22,7 @@ def __init__( resource_prefix, environment: models.Environment, team: models.EnvironmentGroup, - datasets: [models.Dataset], + datasets: [Dataset], ): self.stack = stack self.id = id @@ 
-147,7 +148,7 @@ def set_allowed_s3_buckets_statements(self, statements): f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}/*', ] if self.datasets: - dataset: models.Dataset + dataset: Dataset for dataset in self.datasets: allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}/*') allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}') diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index d1c7a67fa..a26650368 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -22,8 +22,9 @@ from ..models.Permission import PermissionType from ..paginator import paginate from dataall.core.environment.models import EnvironmentParameter -from ...core.environment.db.repositories import EnvironmentParameterRepository -from ...utils.naming_convention import ( +from dataall.core.environment.db.repositories import EnvironmentParameterRepository +from dataall.modules.datasets.db.models import Dataset +from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) @@ -356,8 +357,8 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): group_env_objects_count = ( session.query(models.Environment) .outerjoin( - models.Dataset, - models.Dataset.environmentUri == models.Environment.environmentUri, + Dataset, + Dataset.environmentUri == models.Environment.environmentUri, ) .outerjoin( models.SagemakerStudioUserProfile, @@ -387,7 +388,7 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): models.Environment.environmentUri == environment.environmentUri, or_( models.RedshiftCluster.SamlGroupName == group, - models.Dataset.SamlAdminGroupName == group, + Dataset.SamlAdminGroupName == group, models.SagemakerStudioUserProfile.SamlAdminGroupName == group, models.DataPipeline.SamlGroupName == group, models.Dashboard.SamlGroupName == group, @@ -804,41 +805,41 @@ def find_consumption_roles_by_IAMArn( 
@staticmethod def query_environment_datasets(session, username, groups, uri, filter) -> Query: - query = session.query(models.Dataset).filter( + query = session.query(Dataset).filter( and_( - models.Dataset.environmentUri == uri, - models.Dataset.deleted.is_(None), + Dataset.environmentUri == uri, + Dataset.deleted.is_(None), ) ) if filter and filter.get('term'): term = filter['term'] query = query.filter( or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.description.ilike('%' + term + '%'), - models.Dataset.tags.contains(f'{{{term}}}'), - models.Dataset.region.ilike('%' + term + '%'), + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), ) ) return query @staticmethod def query_environment_group_datasets(session, username, groups, envUri, groupUri, filter) -> Query: - query = session.query(models.Dataset).filter( + query = session.query(Dataset).filter( and_( - models.Dataset.environmentUri == envUri, - models.Dataset.SamlAdminGroupName == groupUri, - models.Dataset.deleted.is_(None), + Dataset.environmentUri == envUri, + Dataset.SamlAdminGroupName == groupUri, + Dataset.deleted.is_(None), ) ) if filter and filter.get('term'): term = filter['term'] query = query.filter( or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.description.ilike('%' + term + '%'), - models.Dataset.tags.contains(f'{{{term}}}'), - models.Dataset.region.ilike('%' + term + '%'), + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), ) ) return query @@ -1026,11 +1027,11 @@ def list_group_datasets(session, username, groups, uri, data=None, check_perm=No raise exceptions.RequiredParameter('groupUri') return ( - session.query(models.Dataset) + session.query(Dataset) .filter( and_( - models.Dataset.environmentUri == uri, - 
models.Dataset.SamlAdminGroupName == data['groupUri'], + Dataset.environmentUri == uri, + Dataset.SamlAdminGroupName == data['groupUri'], ) ) .all() diff --git a/backend/dataall/db/api/notification.py b/backend/dataall/db/api/notification.py index 9f6a72158..1447892d3 100644 --- a/backend/dataall/db/api/notification.py +++ b/backend/dataall/db/api/notification.py @@ -4,6 +4,7 @@ from .. import models from ...db import paginate +from dataall.modules.datasets.db.models import Dataset class Notification: @@ -12,7 +13,7 @@ def __init__(self): @staticmethod def notify_share_object_submission( - session, username: str, dataset: models.Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: models.ShareObject ): notifications = [] # stewards = Notification.get_dataset_stewards(session, dataset) @@ -38,7 +39,7 @@ def get_dataset_stewards(session, dataset): @staticmethod def notify_share_object_approval( - session, username: str, dataset: models.Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: models.ShareObject ): notifications = [] targeted_users = Notification.get_share_object_targeted_users( @@ -59,7 +60,7 @@ def notify_share_object_approval( @staticmethod def notify_share_object_rejection( - session, username: str, dataset: models.Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: models.ShareObject ): notifications = [] targeted_users = Notification.get_share_object_targeted_users( @@ -80,7 +81,7 @@ def notify_share_object_rejection( @staticmethod def notify_new_data_available_from_owners( - session, dataset: models.Dataset, share: models.ShareObject, s3_prefix + session, dataset: Dataset, share: models.ShareObject, s3_prefix ): notifications = [] targeted_users = Notification.get_share_object_targeted_users( diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 1a20929fe..ca7a69515 100644 --- 
a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -4,7 +4,7 @@ from .. import models, api, exceptions, paginate, permissions from . import has_resource_perm, ResourcePolicy, Environment -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -123,7 +123,7 @@ def create_redshift_cluster( group=environment.SamlGroupName, permissions=permissions.REDSHIFT_CLUSTER_ALL, resource_uri=redshift_cluster.clusterUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) return redshift_cluster @@ -211,36 +211,35 @@ def list_available_datasets( ) created = ( session.query( - models.Dataset.datasetUri.label('datasetUri'), + Dataset.datasetUri.label('datasetUri'), models.RedshiftCluster.clusterUri.label('clusterUri'), ) .filter( and_( or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), ), - models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.Dataset.environmentUri + RedshiftCluster.clusterUri == cluster.clusterUri, + Dataset.environmentUri == models.RedshiftCluster.environmentUri, ) ) - .group_by(models.Dataset.datasetUri, models.RedshiftCluster.clusterUri) + .group_by(Dataset.datasetUri, models.RedshiftCluster.clusterUri) ) all_group_datasets_sub_query = shared.union(created).subquery( 'all_group_datasets_sub_query' ) query = ( - session.query(models.Dataset) + session.query(Dataset) .join( all_group_datasets_sub_query, - models.Dataset.datasetUri == all_group_datasets_sub_query.c.datasetUri, + Dataset.datasetUri == all_group_datasets_sub_query.c.datasetUri, ) .outerjoin( models.RedshiftClusterDataset, and_( - models.RedshiftClusterDataset.datasetUri - == models.Dataset.datasetUri, + 
models.RedshiftClusterDataset.datasetUri == Dataset.datasetUri, models.RedshiftClusterDataset.clusterUri == cluster.clusterUri, ), ) @@ -248,7 +247,7 @@ def list_available_datasets( and_( all_group_datasets_sub_query.c.clusterUri == cluster.clusterUri, models.RedshiftClusterDataset.datasetUri.is_(None), - models.Dataset.deleted.is_(None), + Dataset.deleted.is_(None), ) ) ) @@ -256,9 +255,9 @@ def list_available_datasets( term = data.get('term') query = query.filter( or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.tags.any(term), - models.Dataset.topics.any(term), + Dataset.label.ilike('%' + term + '%'), + Dataset.tags.any(term), + Dataset.topics.any(term), ) ) return paginate( @@ -271,10 +270,10 @@ def list_cluster_datasets( session, username, groups, uri: str, data: dict = None, check_perm=None ): query = ( - session.query(models.Dataset) + session.query(Dataset) .join( models.RedshiftClusterDataset, - models.Dataset.datasetUri == models.RedshiftClusterDataset.datasetUri, + Dataset.datasetUri == models.RedshiftClusterDataset.datasetUri, ) .filter( models.RedshiftClusterDataset.clusterUri == uri, @@ -284,9 +283,9 @@ def list_cluster_datasets( term = data.get('term') query = query.filter( or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.tags.any(term), - models.Dataset.topics.any(term), + Dataset.label.ilike('%' + term + '%'), + Dataset.tags.any(term), + Dataset.topics.any(term), ) ) return paginate( @@ -341,17 +340,17 @@ def list_available_cluster_tables( models.RedshiftCluster.clusterUri.label('clusterUri'), ) .join( - models.Dataset, - DatasetTable.datasetUri == models.Dataset.datasetUri, + Dataset, + DatasetTable.datasetUri == Dataset.datasetUri, ) .filter( and_( or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), ), models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.Dataset.environmentUri + 
Dataset.environmentUri == models.RedshiftCluster.environmentUri, ) ) @@ -433,7 +432,7 @@ def remove_dataset_from_cluster( ) if exists: session.delete(exists) - dataset = session.query(models.Dataset).get(data['datasetUri']) + dataset = session.query(Dataset).get(data['datasetUri']) if not dataset: raise exceptions.ObjectNotFound('Dataset', data['datasetUri']) diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index 79ace3c15..d353c7825 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -10,7 +10,7 @@ from .. import api, utils from .. import models, exceptions, permissions, paginate from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -346,7 +346,7 @@ def create_share_object( itemUri = data.get('itemUri') itemType = data.get('itemType') - dataset: models.Dataset = data.get( + dataset: Dataset = data.get( 'dataset', DatasetService.get_dataset_by_uri(session, datasetUri) ) environment: models.Environment = data.get( @@ -753,7 +753,7 @@ def add_share_object_item( itemUri = data.get('itemUri') item = None share: models.ShareObject = session.query(models.ShareObject).get(uri) - dataset: models.Dataset = session.query(models.Dataset).get(share.datasetUri) + dataset: Dataset = session.query(Dataset).get(share.datasetUri) target_environment: models.Environment = session.query(models.Environment).get( share.environmentUri ) @@ -1023,16 +1023,16 @@ def list_user_received_share_requests( query = ( session.query(models.ShareObject) .join( - models.Dataset, - models.Dataset.datasetUri == models.ShareObject.datasetUri, + 
Dataset, + Dataset.datasetUri == models.ShareObject.datasetUri, ) .filter( or_( - models.Dataset.businessOwnerEmail == username, - models.Dataset.businessOwnerDelegationEmails.contains( + Dataset.businessOwnerEmail == username, + Dataset.businessOwnerDelegationEmails.contains( f'{{{username}}}' ), - models.Dataset.stewards.in_(groups), + Dataset.stewards.in_(groups), ) ) ) @@ -1184,7 +1184,7 @@ def get_share_data(session, share_uri): if not share: raise exceptions.ObjectNotFound('Share', share_uri) - dataset: models.Dataset = session.query(models.Dataset).get(share.datasetUri) + dataset: Dataset = session.query(Dataset).get(share.datasetUri) if not dataset: raise exceptions.ObjectNotFound('Dataset', share.datasetUri) diff --git a/backend/dataall/db/models/Dataset.py b/backend/dataall/db/models/Dataset.py deleted file mode 100644 index fd65387b7..000000000 --- a/backend/dataall/db/models/Dataset.py +++ /dev/null @@ -1,64 +0,0 @@ -from sqlalchemy import Boolean, Column, String, ForeignKey -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. 
import Base, Resource, utils - - -class Dataset(Resource, Base): - __tablename__ = 'dataset' - environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) - organizationUri = Column(String, nullable=False) - datasetUri = Column(String, primary_key=True, default=utils.uuid('dataset')) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=False) - GlueCrawlerName = Column(String) - GlueCrawlerSchedule = Column(String) - GlueProfilingJobName = Column(String) - GlueProfilingTriggerSchedule = Column(String) - GlueProfilingTriggerName = Column(String) - GlueDataQualityJobName = Column(String) - GlueDataQualitySchedule = Column(String) - GlueDataQualityTriggerName = Column(String) - IAMDatasetAdminRoleArn = Column(String, nullable=False) - IAMDatasetAdminUserArn = Column(String, nullable=False) - KmsAlias = Column(String, nullable=False) - userRoleForDataset = query_expression() - userRoleInEnvironment = query_expression() - isPublishedInEnvironment = query_expression() - projectPermission = query_expression() - language = Column(String, nullable=False, default='English') - topics = Column(postgresql.ARRAY(String), nullable=True) - confidentiality = Column(String, nullable=False, default='Unclassified') - tags = Column(postgresql.ARRAY(String)) - inProject = query_expression() - - bucketCreated = Column(Boolean, default=False) - glueDatabaseCreated = Column(Boolean, default=False) - iamAdminRoleCreated = Column(Boolean, default=False) - iamAdminUserCreated = Column(Boolean, default=False) - kmsAliasCreated = Column(Boolean, default=False) - lakeformationLocationCreated = Column(Boolean, default=False) - bucketPolicyCreated = Column(Boolean, default=False) - - # bookmarked = Column(Integer, default=0) - # upvotes=Column(Integer, default=0) - - businessOwnerEmail = Column(String, nullable=True) - 
businessOwnerDelegationEmails = Column(postgresql.ARRAY(String), nullable=True) - stewards = Column(String, nullable=True) - - SamlAdminGroupName = Column(String, nullable=True) - - redshiftClusterPermission = query_expression() - - importedS3Bucket = Column(Boolean, default=False) - importedGlueDatabase = Column(Boolean, default=False) - importedKmsKey = Column(Boolean, default=False) - importedAdminRole = Column(Boolean, default=False) - imported = Column(Boolean, default=False) - - def uri(self): - return self.datasetUri diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 068e5e495..fff02245e 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -4,7 +4,6 @@ from .Dashboard import Dashboard from .DashboardShare import DashboardShare from .DashboardShare import DashboardShareStatus -from .Dataset import Dataset from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup from .FeedMessage import FeedMessage diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 4f8964016..1ac36b783 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,8 +2,7 @@ import logging from typing import List -from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -40,7 +39,7 @@ def __init__(self): GlossaryRegistry.register(GlossaryDefinition( target_type="Dataset", object_type="Dataset", - model=models.Dataset, + model=Dataset, 
reindexer=DatasetIndexer )) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 1dabe9c57..29b5fdc43 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -16,6 +16,7 @@ from dataall.db import paginate, exceptions, permissions, models from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization +from dataall.modules.datasets import Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -98,7 +99,7 @@ def get_dataset(context, source, datasetUri=None): return dataset -def resolve_user_role(context: Context, source: models.Dataset, **kwargs): +def resolve_user_role(context: Context, source: Dataset, **kwargs): if not source: return None if source.owner == context.username: @@ -156,7 +157,7 @@ def list_datasets(context: Context, source, filter: dict = None): ) -def list_locations(context, source: models.Dataset, filter: dict = None): +def list_locations(context, source: Dataset, filter: dict = None): if not source: return None if not filter: @@ -171,7 +172,7 @@ def list_locations(context, source: models.Dataset, filter: dict = None): ) -def list_tables(context, source: models.Dataset, filter: dict = None): +def list_tables(context, source: Dataset, filter: dict = None): if not source: return None if not filter: @@ -186,27 +187,27 @@ def list_tables(context, source: models.Dataset, filter: dict = None): ) -def get_dataset_organization(context, source: models.Dataset, **kwargs): +def get_dataset_organization(context, source: Dataset, **kwargs): if not source: return None with context.engine.scoped_session() as session: return 
Organization.get_organization_by_uri(session, source.organizationUri) -def get_dataset_environment(context, source: models.Dataset, **kwargs): +def get_dataset_environment(context, source: Dataset, **kwargs): if not source: return None with context.engine.scoped_session() as session: return Environment.get_environment_by_uri(session, source.environmentUri) -def get_dataset_owners_group(context, source: models.Dataset, **kwargs): +def get_dataset_owners_group(context, source: Dataset, **kwargs): if not source: return None return source.SamlAdminGroupName -def get_dataset_stewards_group(context, source: models.Dataset, **kwargs): +def get_dataset_stewards_group(context, source: Dataset, **kwargs): if not source: return None return source.stewards @@ -229,7 +230,7 @@ def update_dataset(context, source, datasetUri: str = None, input: dict = None): return updated_dataset -def get_dataset_statistics(context: Context, source: models.Dataset, **kwargs): +def get_dataset_statistics(context: Context, source: Dataset, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -489,7 +490,7 @@ def save_dataset_summary( return True -def get_dataset_stack(context: Context, source: models.Dataset, **kwargs): +def get_dataset_stack(context: Context, source: Dataset, **kwargs): if not source: return None return stack_helper.get_stack_with_cfn_resources( @@ -532,7 +533,7 @@ def delete_dataset( resource_uri=datasetUri, permission_name=permissions.DELETE_DATASET, ) - dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) + dataset: Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) env: models.Environment = Environment.get_environment_by_uri( session, dataset.environmentUri ) @@ -583,7 +584,7 @@ def delete_dataset( return True -def get_dataset_glossary_terms(context: Context, source: models.Dataset, **kwargs): +def get_dataset_glossary_terms(context: Context, source: Dataset, **kwargs): if not source: return None 
with context.engine.scoped_session() as session: @@ -631,7 +632,7 @@ def publish_dataset_update( return True -def resolve_redshift_copy_enabled(context, source: models.Dataset, clusterUri: str): +def resolve_redshift_copy_enabled(context, source: Dataset, clusterUri: str): if not source: return None with context.engine.scoped_session() as session: diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index ef06bbba6..634e1239a 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -8,7 +8,7 @@ ) from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService @@ -95,7 +95,7 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): if not source: return None with context.engine.scoped_session() as session: - d = session.query(models.Dataset).get(source.datasetUri) + d = session.query(Dataset).get(source.datasetUri) return d diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 329828b0d..a45cee61e 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -11,7 +11,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary -from dataall.modules.datasets.db.models import DatasetTable +from 
dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -50,7 +50,7 @@ def list_dataset_tables(context, source, filter: dict = None): ) -def get_table(context, source: models.Dataset, tableUri: str = None): +def get_table(context, source: Dataset, tableUri: str = None): with context.engine.scoped_session() as session: table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) return DatasetTableService.get_dataset_table( diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 517b32893..86204097c 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -28,7 +28,7 @@ from dataall.db.api import Environment from dataall.utils.cdk_nag_utils import CDKNagUtil from dataall.utils.runtime_stacks_tagging import TagsUtil -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset logger = logging.getLogger(__name__) @@ -56,18 +56,18 @@ def get_env_group(self, dataset) -> models.EnvironmentGroup: ) return env - def get_target_with_uri(self, target_uri) -> models.Dataset: + def get_target_with_uri(self, target_uri) -> Dataset: engine = self.get_engine() with engine.scoped_session() as session: - dataset = session.query(models.Dataset).get(target_uri) + dataset = session.query(Dataset).get(target_uri) if not dataset: raise Exception('ObjectNotFound') return dataset - def get_target(self) -> models.Dataset: + def get_target(self) -> Dataset: engine = self.get_engine() with engine.scoped_session() as session: - dataset = session.query(models.Dataset).get(self.target_uri) + dataset = 
session.query(Dataset).get(self.target_uri) if not dataset: raise Exception('ObjectNotFound') return dataset @@ -539,6 +539,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): Tags.of(self).add('Classification', dataset.confidentiality) - TagsUtil.add_tags(stack=self, model=models.Dataset, target_type="dataset") + TagsUtil.add_tags(stack=self, model=Dataset, target_type="dataset") CDKNagUtil.check_rules(self) diff --git a/backend/dataall/modules/datasets/db/models.py b/backend/dataall/modules/datasets/db/models.py index a25978bef..df15c19a4 100644 --- a/backend/dataall/modules/datasets/db/models.py +++ b/backend/dataall/modules/datasets/db/models.py @@ -1,4 +1,4 @@ -from sqlalchemy import Boolean, Column, String, Text +from sqlalchemy import Boolean, Column, String, Text, ForeignKey from sqlalchemy.dialects.postgresql import JSON, ARRAY from sqlalchemy.orm import query_expression from dataall.db import Base, Resource, utils @@ -83,3 +83,62 @@ def uri(self): return self.tableUri +class Dataset(Resource, Base): + __tablename__ = 'dataset' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + organizationUri = Column(String, nullable=False) + datasetUri = Column(String, primary_key=True, default=utils.uuid('dataset')) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + S3BucketName = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=False) + GlueCrawlerName = Column(String) + GlueCrawlerSchedule = Column(String) + GlueProfilingJobName = Column(String) + GlueProfilingTriggerSchedule = Column(String) + GlueProfilingTriggerName = Column(String) + GlueDataQualityJobName = Column(String) + GlueDataQualitySchedule = Column(String) + GlueDataQualityTriggerName = Column(String) + IAMDatasetAdminRoleArn = Column(String, nullable=False) + IAMDatasetAdminUserArn = Column(String, nullable=False) + KmsAlias = Column(String, nullable=False) + 
userRoleForDataset = query_expression() + userRoleInEnvironment = query_expression() + isPublishedInEnvironment = query_expression() + projectPermission = query_expression() + language = Column(String, nullable=False, default='English') + topics = Column(ARRAY(String), nullable=True) + confidentiality = Column(String, nullable=False, default='Unclassified') + tags = Column(ARRAY(String)) + inProject = query_expression() + + bucketCreated = Column(Boolean, default=False) + glueDatabaseCreated = Column(Boolean, default=False) + iamAdminRoleCreated = Column(Boolean, default=False) + iamAdminUserCreated = Column(Boolean, default=False) + kmsAliasCreated = Column(Boolean, default=False) + lakeformationLocationCreated = Column(Boolean, default=False) + bucketPolicyCreated = Column(Boolean, default=False) + + # bookmarked = Column(Integer, default=0) + # upvotes=Column(Integer, default=0) + + businessOwnerEmail = Column(String, nullable=True) + businessOwnerDelegationEmails = Column(ARRAY(String), nullable=True) + stewards = Column(String, nullable=True) + + SamlAdminGroupName = Column(String, nullable=True) + + redshiftClusterPermission = query_expression() + + importedS3Bucket = Column(Boolean, default=False) + importedGlueDatabase = Column(Boolean, default=False) + importedKmsKey = Column(Boolean, default=False) + importedAdminRole = Column(Boolean, default=False) + imported = Column(Boolean, default=False) + + def uri(self): + return self.datasetUri + + diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index d15607733..1809313a8 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -4,7 +4,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from 
dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.db.models import DatasetProfilingRun, Dataset from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService log = logging.getLogger(__name__) @@ -22,7 +22,7 @@ def get_profiling_run(engine, task: models.Task): session, profilingRunUri=task.targetUri ) ) - dataset: models.Dataset = session.query(models.Dataset).get( + dataset: Dataset = session.query(Dataset).get( profiling.datasetUri ) glue_run = DatasetProfilingGlueHandler.get_job_run( @@ -46,7 +46,7 @@ def start_profiling_run(engine, task: models.Task): session, profilingRunUri=task.targetUri ) ) - dataset: models.Dataset = session.query(models.Dataset).get( + dataset: Dataset = session.query(Dataset).get( profiling.datasetUri ) run = DatasetProfilingGlueHandler.run_job( diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index d2218884b..029d5cf49 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -1,10 +1,9 @@ import logging -from botocore.exceptions import ClientError - from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -18,7 +17,7 @@ class DatasetColumnGlueHandler: @Worker.handler(path='glue.dataset.database.tables') def list_tables(engine, task: models.Task): with engine.scoped_session() as session: - dataset: models.Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri ) account_id = dataset.AwsAccountId diff --git 
a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 665e2a9c4..ba3754ee2 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -2,6 +2,7 @@ from dataall import db from dataall.db import models +from dataall.modules.datasets import Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.upsert import BaseIndexer @@ -13,35 +14,35 @@ class DatasetIndexer(BaseIndexer): def upsert(cls, session, dataset_uri: str): dataset = ( session.query( - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('name'), - models.Dataset.owner.label('owner'), - models.Dataset.label.label('label'), - models.Dataset.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.Dataset.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), + Dataset.datasetUri.label('datasetUri'), + Dataset.name.label('name'), + Dataset.owner.label('owner'), + Dataset.label.label('label'), + Dataset.description.label('description'), + Dataset.confidentiality.label('classification'), + Dataset.tags.label('tags'), + Dataset.topics.label('topics'), + Dataset.region.label('region'), models.Organization.organizationUri.label('orgUri'), models.Organization.name.label('orgName'), models.Environment.environmentUri.label('envUri'), models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.created, - models.Dataset.updated, - models.Dataset.deleted, + Dataset.SamlAdminGroupName.label('admins'), + Dataset.GlueDatabaseName.label('database'), + 
Dataset.S3BucketName.label('source'), + Dataset.created, + Dataset.updated, + Dataset.deleted, ) .join( models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, + Dataset.organizationUri == models.Organization.organizationUri, ) .join( models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, + Dataset.environmentUri == models.Environment.environmentUri, ) - .filter(models.Dataset.datasetUri == dataset_uri) + .filter(Dataset.datasetUri == dataset_uri) .first() ) count_tables = DatasetService.count_dataset_tables(session, dataset_uri) diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 72495b51c..fdeb83c90 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -1,5 +1,5 @@ """Indexes DatasetStorageLocation in OpenSearch""" -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset from dataall.db import models from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -24,25 +24,25 @@ def upsert(cls, session, folder_uri: str): models.Organization.name.label('orgName'), models.Environment.environmentUri.label('envUri'), models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.topics.label('topics'), - models.Dataset.confidentiality.label('classification'), + Dataset.SamlAdminGroupName.label('admins'), + Dataset.S3BucketName.label('source'), + Dataset.topics.label('topics'), + Dataset.confidentiality.label('classification'), DatasetStorageLocation.created, DatasetStorageLocation.updated, DatasetStorageLocation.deleted, ) .join( - models.Dataset, - models.Dataset.datasetUri == 
DatasetStorageLocation.datasetUri, + Dataset, + Dataset.datasetUri == DatasetStorageLocation.datasetUri, ) .join( models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, + Dataset.organizationUri == models.Organization.organizationUri, ) .join( models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, + Dataset.environmentUri == models.Environment.environmentUri, ) .filter(DatasetStorageLocation.locationUri == folder_uri) .first() diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index 2fe9451e1..dcaa1b4e9 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -2,7 +2,7 @@ from operator import and_ from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.upsert import BaseIndexer @@ -19,32 +19,32 @@ def upsert(cls, session, table_uri: str): DatasetTable.owner.label('owner'), DatasetTable.label.label('label'), DatasetTable.description.label('description'), - models.Dataset.confidentiality.label('classification'), + Dataset.confidentiality.label('classification'), DatasetTable.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), + Dataset.topics.label('topics'), + Dataset.region.label('region'), models.Organization.organizationUri.label('orgUri'), models.Organization.name.label('orgName'), models.Environment.environmentUri.label('envUri'), models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), + Dataset.SamlAdminGroupName.label('admins'), + 
Dataset.GlueDatabaseName.label('database'), + Dataset.S3BucketName.label('source'), DatasetTable.created, DatasetTable.updated, DatasetTable.deleted, ) .join( - models.Dataset, - models.Dataset.datasetUri == DatasetTable.datasetUri, + Dataset, + Dataset.datasetUri == DatasetTable.datasetUri, ) .join( models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, + Dataset.organizationUri == models.Organization.organizationUri, ) .join( models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, + Dataset.environmentUri == models.Environment.environmentUri, ) .filter(DatasetTable.tableUri == table_uri) .first() diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 01bc3dc57..ce679ccd4 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -2,7 +2,7 @@ from dataall.db import paginate, models from dataall.db.exceptions import ObjectNotFound -from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable +from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable, Dataset class DatasetProfilingService: @@ -13,7 +13,7 @@ def __init__(self): def start_profiling( session, datasetUri, tableUri=None, GlueTableName=None, GlueJobRunId=None ): - dataset: models.Dataset = session.query(models.Dataset).get(datasetUri) + dataset: Dataset = session.query(Dataset).get(datasetUri) if not dataset: raise ObjectNotFound('Dataset', datasetUri) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index c513c574b..f28d1b637 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -17,7 +17,7 @@ from 
dataall.db import models, api, exceptions, permissions, paginate from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.datasets.db.dataset_repository import DatasetRepository -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.utils.naming_convention import ( NamingConventionService, @@ -38,7 +38,7 @@ def create_dataset( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.Dataset: + ) -> Dataset: if not uri: raise exceptions.RequiredParameter('environmentUri') if not data: @@ -67,7 +67,7 @@ def create_dataset( session, environment.organizationUri ) - dataset = models.Dataset( + dataset = Dataset( label=data.get('label'), owner=username, description=data.get('description', 'No description provided'), @@ -113,7 +113,7 @@ def create_dataset( group=data['SamlAdminGroupName'], permissions=permissions.DATASET_ALL, resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: ResourcePolicy.attach_resource_policy( @@ -121,7 +121,7 @@ def create_dataset( group=dataset.stewards, permissions=permissions.DATASET_READ, resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) if environment.SamlGroupName != dataset.SamlAdminGroupName: ResourcePolicy.attach_resource_policy( @@ -129,12 +129,12 @@ def create_dataset( group=environment.SamlGroupName, permissions=permissions.DATASET_ALL, resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) return dataset @staticmethod - def _set_dataset_aws_resources(dataset: models.Dataset, data, environment): + def _set_dataset_aws_resources(dataset: Dataset, data, environment): 
bucket_name = NamingConventionService( target_uri=dataset.datasetUri, @@ -183,7 +183,7 @@ def _set_dataset_aws_resources(dataset: models.Dataset, data, environment): return dataset @staticmethod - def create_dataset_stack(session, dataset: models.Dataset) -> models.Stack: + def create_dataset_stack(session, dataset: Dataset) -> models.Stack: return Stack.create_stack( session=session, environment_uri=dataset.environmentUri, @@ -207,21 +207,21 @@ def get_dataset( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.Dataset: + ) -> Dataset: return DatasetService.get_dataset_by_uri(session, uri) @staticmethod - def get_dataset_by_uri(session, dataset_uri) -> models.Dataset: + def get_dataset_by_uri(session, dataset_uri) -> Dataset: return DatasetRepository.get_dataset_by_uri(session, dataset_uri) @staticmethod def query_user_datasets(session, username, groups, filter) -> Query: share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() query = ( - session.query(models.Dataset) + session.query(Dataset) .outerjoin( models.ShareObject, - models.ShareObject.datasetUri == models.Dataset.datasetUri, + models.ShareObject.datasetUri == Dataset.datasetUri, ) .outerjoin( models.ShareObjectItem, @@ -229,9 +229,9 @@ def query_user_datasets(session, username, groups, filter) -> Query: ) .filter( or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), - models.Dataset.stewards.in_(groups), + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), + Dataset.stewards.in_(groups), and_( models.ShareObject.principalId.in_(groups), models.ShareObjectItem.status.in_(share_item_shared_states), @@ -246,8 +246,8 @@ def query_user_datasets(session, username, groups, filter) -> Query: if filter and filter.get('term'): query = query.filter( or_( - models.Dataset.description.ilike(filter.get('term') + '%%'), - models.Dataset.label.ilike(filter.get('term') + '%%'), + Dataset.description.ilike(filter.get('term') + 
'%%'), + Dataset.label.ilike(filter.get('term') + '%%'), ) ) return query @@ -296,8 +296,8 @@ def paginated_dataset_tables( @has_resource_perm(permissions.UPDATE_DATASET) def update_dataset( session, username, groups, uri, data=None, check_perm=None - ) -> models.Dataset: - dataset: models.Dataset = DatasetService.get_dataset_by_uri(session, uri) + ) -> Dataset: + dataset: Dataset = DatasetService.get_dataset_by_uri(session, uri) if data and isinstance(data, dict): for k in data.keys(): if k != 'stewards': @@ -317,7 +317,7 @@ def update_dataset( group=dataset.SamlAdminGroupName, permissions=permissions.DATASET_ALL, resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) DatasetService.update_dataset_glossary_terms(session, username, uri, data) activity = models.Activity( @@ -364,7 +364,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): group=new_stewards, permissions=permissions.DATASET_READ, resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, + resource_type=Dataset.__name__, ) dataset_tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, dataset.datasetUri)] @@ -599,20 +599,20 @@ def _delete_dataset_tables(session, dataset_uri) -> bool: return tables @staticmethod - def list_all_datasets(session) -> [models.Dataset]: - return session.query(models.Dataset).all() + def list_all_datasets(session) -> [Dataset]: + return session.query(Dataset).all() @staticmethod - def list_all_active_datasets(session) -> [models.Dataset]: + def list_all_active_datasets(session) -> [Dataset]: return ( - session.query(models.Dataset).filter(models.Dataset.deleted.is_(None)).all() + session.query(Dataset).filter(Dataset.deleted.is_(None)).all() ) @staticmethod - def get_dataset_by_bucket_name(session, bucket) -> [models.Dataset]: + def get_dataset_by_bucket_name(session, bucket) -> [Dataset]: return ( - session.query(models.Dataset) - 
.filter(models.Dataset.S3BucketName == bucket) + session.query(Dataset) + .filter(Dataset.S3BucketName == bucket) .first() ) diff --git a/backend/dataall/modules/datasets/services/dataset_share_service.py b/backend/dataall/modules/datasets/services/dataset_share_service.py index 3503e86fe..74e64c951 100644 --- a/backend/dataall/modules/datasets/services/dataset_share_service.py +++ b/backend/dataall/modules/datasets/services/dataset_share_service.py @@ -8,7 +8,7 @@ from dataall.db import models, permissions from dataall.db.api import has_resource_perm, ShareItemSM from dataall.db.paginator import paginate -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset class DatasetShareService: @@ -22,9 +22,9 @@ def paginated_shared_with_environment_datasets( q = ( session.query( models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), + Dataset.datasetUri.label('datasetUri'), + Dataset.name.label('datasetName'), + Dataset.description.label('datasetDescription'), models.Environment.environmentUri.label('environmentUri'), models.Environment.name.label('environmentName'), models.ShareObject.created.label('created'), @@ -60,12 +60,12 @@ def paginated_shared_with_environment_datasets( models.ShareObject.shareUri == models.ShareObjectItem.shareUri, ) .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, + Dataset, + models.ShareObject.datasetUri == Dataset.datasetUri, ) .join( models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, + models.Environment.environmentUri == Dataset.environmentUri, ) .join( models.Organization, @@ -118,9 +118,9 @@ def paginated_shared_with_environment_group_datasets( q = ( session.query( 
models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), + Dataset.datasetUri.label('datasetUri'), + Dataset.name.label('datasetName'), + Dataset.description.label('datasetDescription'), models.Environment.environmentUri.label('environmentUri'), models.Environment.name.label('environmentName'), models.ShareObject.created.label('created'), @@ -156,12 +156,12 @@ def paginated_shared_with_environment_group_datasets( models.ShareObject.shareUri == models.ShareObjectItem.shareUri, ) .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, + Dataset, + models.ShareObject.datasetUri == Dataset.datasetUri, ) .join( models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, + models.Environment.environmentUri == Dataset.environmentUri, ) .join( models.Organization, diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index ae1c522e0..ec2eec459 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -17,7 +17,7 @@ from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() root.setLevel(logging.INFO) @@ -81,7 +81,7 @@ def publish_table_update_message(engine, message): f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' ) - dataset: models.Dataset = session.query(models.Dataset).get( + dataset: Dataset = session.query(Dataset).get( 
table.datasetUri ) log.info( @@ -119,7 +119,7 @@ def publish_location_update_message(session, message): else: log.info(f'Found location {location.locationUri}|{location.S3Prefix}') - dataset: models.Dataset = session.query(models.Dataset).get( + dataset: Dataset = session.query(Dataset).get( location.datasetUri ) log.info( @@ -288,7 +288,7 @@ def sns_call(message, environment): def redshift_copy( engine, message, - dataset: models.Dataset, + dataset: Dataset, environment: models.Environment, table: DatasetTable, ): diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index a503ac8f5..f662de632 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -8,7 +8,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils.alarm_service import AlarmService @@ -24,11 +24,11 @@ def sync_tables(engine): with engine.scoped_session() as session: processed_tables = [] - all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( + all_datasets: [Dataset] = DatasetService.list_all_active_datasets( session ) log.info(f'Found {len(all_datasets)} datasets for tables sync') - dataset: models.Dataset + dataset: Dataset for dataset in all_datasets: log.info( f'Synchronizing dataset {dataset.name}|{dataset.datasetUri} tables' diff --git a/backend/dataall/tasks/bucket_policy_updater.py b/backend/dataall/tasks/bucket_policy_updater.py index 6cb4c51ea..12844aae8 100644 --- a/backend/dataall/tasks/bucket_policy_updater.py +++ 
b/backend/dataall/tasks/bucket_policy_updater.py @@ -10,7 +10,7 @@ from ..aws.handlers.sts import SessionHelper from ..db import get_engine from ..db import models -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() root.setLevel(logging.INFO) @@ -28,11 +28,11 @@ def __init__(self, engine, event=None): def sync_imported_datasets_bucket_policies(self): with self.engine.scoped_session() as session: imported_datasets = ( - session.query(models.Dataset) + session.query(Dataset) .filter( and_( - models.Dataset.imported == True, - models.Dataset.deleted.is_(None), + Dataset.imported == True, + Dataset.deleted.is_(None), ) ) .all() diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 7bea9d965..60d9df792 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -2,6 +2,7 @@ import os import sys +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_service import DatasetService @@ -21,11 +22,11 @@ def index_objects(engine): indexed_objects_counter = 0 with engine.scoped_session() as session: - all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( + all_datasets: [Dataset] = DatasetService.list_all_active_datasets( session ) log.info(f'Found {len(all_datasets)} datasets') - dataset: models.Dataset + dataset: Dataset for dataset in all_datasets: tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py 
b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index 2b7eaf20a..997dc830f 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -11,7 +11,7 @@ from ....aws.handlers.sts import SessionHelper from ....aws.handlers.ram import Ram from ....db import api, exceptions, models -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.utils.alarm_service import AlarmService logger = logging.getLogger(__name__) @@ -21,7 +21,7 @@ class LFShareManager: def __init__( self, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], @@ -74,7 +74,7 @@ def build_shared_db_name(self) -> str: Unique per share Uri. Parameters ---------- - dataset : models.Dataset + dataset : Dataset share : models.ShareObject Returns @@ -155,7 +155,7 @@ def grant_pivot_role_all_database_permissions(self) -> bool: def create_shared_database( cls, target_environment: models.Environment, - dataset: models.Dataset, + dataset: Dataset, shared_db_name: str, principals: [str], ) -> dict: diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 30c72a60e..c7e614dbe 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -10,7 +10,7 @@ from ....aws.handlers.iam import IAM from ....utils.alarm_service import AlarmService -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset logger = logging.getLogger(__name__) ACCESS_POINT_CREATION_TIME = 30 @@ -21,7 +21,7 @@ class S3ShareManager: def __init__( self, session, 
- dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, target_folder: DatasetStorageLocation, source_environment: models.Environment, @@ -325,7 +325,7 @@ def delete_access_point_policy(self): @staticmethod def delete_access_point( share: models.ShareObject, - dataset: models.Dataset, + dataset: Dataset, ): access_point_name = S3ShareManager.build_access_point_name(share) logger.info( @@ -342,7 +342,7 @@ def delete_access_point( @staticmethod def delete_target_role_access_policy( share: models.ShareObject, - dataset: models.Dataset, + dataset: Dataset, target_environment: models.Environment, ): logger.info( @@ -377,7 +377,7 @@ def delete_target_role_access_policy( @staticmethod def delete_dataset_bucket_key_policy( share: models.ShareObject, - dataset: models.Dataset, + dataset: Dataset, target_environment: models.Environment, ): logger.info( diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py index dfceec978..94eb786ec 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py @@ -4,7 +4,7 @@ from ..share_managers import LFShareManager from ....aws.handlers.ram import Ram from ....db import models, api -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset log = logging.getLogger(__name__) @@ -13,7 +13,7 @@ class ProcessLFCrossAccountShare(LFShareManager): def __init__( self, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py index 
3ea939b4f..c2afa4b23 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py @@ -2,7 +2,7 @@ from ..share_managers import LFShareManager from dataall.db import models, api -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset log = logging.getLogger(__name__) @@ -11,7 +11,7 @@ class ProcessLFSameAccountShare(LFShareManager): def __init__( self, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 96b608338..13175c2d1 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -2,8 +2,7 @@ from ....db import models, api from ..share_managers import S3ShareManager -from dataall.modules.datasets.db.models import DatasetStorageLocation - +from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset log = logging.getLogger(__name__) @@ -12,7 +11,7 @@ class ProcessS3Share(S3ShareManager): def __init__( self, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, share_folder: DatasetStorageLocation, source_environment: models.Environment, @@ -36,7 +35,7 @@ def __init__( def process_approved_shares( cls, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, share_folders: [DatasetStorageLocation], source_environment: models.Environment, @@ -103,7 +102,7 @@ def process_approved_shares( def process_revoked_shares( cls, session, - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, revoke_folders: 
[DatasetStorageLocation], source_environment: models.Environment, @@ -164,7 +163,7 @@ def process_revoked_shares( @staticmethod def clean_up_share( - dataset: models.Dataset, + dataset: Dataset, share: models.ShareObject, target_environment: models.Environment ): diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index f4908b8a1..fba4060ea 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -2,6 +2,7 @@ import os import sys +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from .. import db from ..db import models @@ -19,7 +20,7 @@ def update_stacks(engine, envname): with engine.scoped_session() as session: - all_datasets: [models.Dataset] = DatasetService.list_all_active_datasets( + all_datasets: [Dataset] = DatasetService.list_all_active_datasets( session ) all_environments: [ @@ -31,7 +32,7 @@ def update_stacks(engine, envname): update_stack(session, envname, environment.environmentUri) log.info(f'Found {len(all_datasets)} datasets') - dataset: models.Dataset + dataset: Dataset for dataset in all_datasets: update_stack(session, envname, dataset.datasetUri) diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py index a1d0a6d5b..661eb852b 100644 --- a/backend/dataall/utils/alarm_service.py +++ b/backend/dataall/utils/alarm_service.py @@ -11,7 +11,7 @@ from ..aws.handlers.sts import SessionHelper from ..db import models -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset logger = logging.getLogger(__name__) @@ -116,7 +116,7 @@ def trigger_catalog_indexing_failure_alarm(self, error: str): """ return self.publish_message_to_alarms_topic(subject, message) - def trigger_dataset_sync_failure_alarm(self, dataset: models.Dataset, error: str): + def 
trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): logger.info(f'Triggering dataset {dataset.name} tables sync failure alarm...') subject = ( f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification' diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 37fef4f10..598a3a763 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -2,7 +2,7 @@ from .client import * from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -190,7 +190,7 @@ def factory( name: str, owner: str, group: str, - ) -> models.Dataset: + ) -> Dataset: key = f'{org.organizationUri}-{env.environmentUri}-{name}-{group}' if cache.get(key): print('found in cache ', cache[key]) @@ -390,9 +390,9 @@ def factory( organization: models.Organization, environment: models.Environment, label: str, - ) -> models.Dataset: + ) -> Dataset: with db.scoped_session() as session: - dataset = models.Dataset( + dataset = Dataset( organizationUri=organization.organizationUri, environmentUri=environment.environmentUri, label=label, @@ -448,7 +448,7 @@ def factory( @pytest.fixture(scope="module") def share(db): def factory( - dataset: models.Dataset, + dataset: Dataset, environment: models.Environment, env_group: models.EnvironmentGroup, owner: str, @@ -529,7 +529,7 @@ def factory( def location(db): cache = {} - def factory(dataset: models.Dataset, name, username) -> DatasetStorageLocation: + def factory(dataset: Dataset, name, username) -> DatasetStorageLocation: key = f'{dataset.datasetUri}-{name}' if cache.get(key): return cache.get(key) @@ -554,7 +554,7 @@ def factory(dataset: models.Dataset, name, username) -> DatasetStorageLocation: def table(db): cache = {} - def factory(dataset: models.Dataset, name, username) -> DatasetTable: 
+ def factory(dataset: Dataset, name, username) -> DatasetTable: key = f'{dataset.datasetUri}-{name}' if cache.get(key): return cache.get(key) @@ -626,7 +626,7 @@ def env_fixture(env, org_fixture, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset_fixture(env_fixture, org_fixture, dataset, group) -> dataall.db.models.Dataset: +def dataset_fixture(env_fixture, org_fixture, dataset, group) -> Dataset: yield dataset( org=org_fixture, env=env_fixture, diff --git a/tests/api/test_dashboards.py b/tests/api/test_dashboards.py index 8e83e3d54..b82d3f314 100644 --- a/tests/api/test_dashboards.py +++ b/tests/api/test_dashboards.py @@ -2,6 +2,7 @@ import pytest import dataall +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module', autouse=True) @@ -25,7 +26,7 @@ def dataset1( org1: dataall.db.models.Organization, env1: dataall.db.models.Environment, dataset: typing.Callable, -) -> dataall.db.models.Dataset: +) -> Dataset: yield dataset( org=org1, env=env1, name='dataset1', owner=env1.owner, group='dataset1admins' ) diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 8aac00702..73b5dd161 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -24,7 +24,7 @@ def env1(env, org1, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: +def dataset1(env1, org1, dataset, group) -> Dataset: yield dataset( org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name ) @@ -52,7 +52,7 @@ def dataset1( env1: dataall.db.models.Environment, dataset: typing.Callable, group, -) -> 
dataall.db.models.Dataset: +) -> Dataset: d = dataset(org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name) print(d) yield d @@ -364,7 +364,7 @@ def test_dataset_in_environment(client, env1, dataset1, group): def test_delete_dataset(client, dataset, env1, org1, db, module_mocker, group, user): with db.scoped_session() as session: - session.query(dataall.db.models.Dataset).delete() + session.query(Dataset).delete() session.commit() deleted_dataset = dataset( org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name @@ -470,7 +470,7 @@ def test_import_dataset(org1, env1, dataset1, client, group): def test_get_dataset_by_prefix(db, env1, org1): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( label='thisdataset', environmentUri=env1.environmentUri, organizationUri=org1.organizationUri, @@ -491,7 +491,7 @@ def test_get_dataset_by_prefix(db, env1, org1): ) session.add(dataset) session.commit() - dataset_found: dataall.db.models.Dataset = DatasetService.get_dataset_by_bucket_name( + dataset_found: Dataset = DatasetService.get_dataset_by_bucket_name( session, bucket='s3a://insite-data-lake-raw-alpha-eu-west-1/booker/volume_constraints/insite_version=1/volume_constraints.delta'.split( '//' diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index 2977a8baf..50aafe9a3 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -3,6 +3,7 @@ import pytest import dataall +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module', autouse=True) @@ -22,7 +23,7 @@ def env1(env, org1, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: +def dataset1(env1, org1, dataset, group) -> Dataset: yield dataset( org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name ) diff --git 
a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index 8f7b1bc84..124c90386 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -2,7 +2,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable +from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -22,7 +22,7 @@ def env1(env, org1, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group, user) -> dataall.db.models.Dataset: +def dataset1(env1, org1, dataset, group, user) -> Dataset: yield dataset( org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name ) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 7ed3732a4..e226e4546 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -4,7 +4,7 @@ import dataall from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -24,7 +24,7 @@ def env1(env, org1, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: +def dataset1(env1, org1, dataset, group) -> Dataset: yield dataset( org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name ) diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index a84535d41..774797108 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -2,6 +2,7 @@ import dataall from dataall.db import permissions +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module', autouse=True) @@ -599,7 +600,7 
@@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d assert 'EnvironmentResourcesFound' in response.errors[0].message with db.scoped_session() as session: - dataset = session.query(dataall.db.models.Dataset).get(dataset.datasetUri) + dataset = session.query(Dataset).get(dataset.datasetUri) session.delete(dataset) session.commit() diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 1aa15ce73..13bfbcb58 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import List from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset import pytest @@ -24,7 +24,7 @@ def _env( @pytest.fixture(scope='module', autouse=True) -def _dataset(db, _env, _org, group, user, dataset) -> models.Dataset: +def _dataset(db, _env, _org, group, user, dataset) -> Dataset: with db.scoped_session() as session: yield dataset( org=_org, env=_env, name='dataset1', owner=user.userName, group=group.name diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py index 8d546cb3f..fd96d9bc1 100644 --- a/tests/api/test_keyvaluetag.py +++ b/tests/api/test_keyvaluetag.py @@ -5,6 +5,7 @@ import pytest from dataall.db import exceptions +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module') @@ -26,7 +27,7 @@ def env1( @pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> models.Dataset: +def dataset1(db, env1, org1, group, user, dataset) -> Dataset: with db.scoped_session() as session: yield dataset( org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 9fc1d9d06..fe235ecd0 100644 --- a/tests/api/test_redshift_cluster.py +++ 
b/tests/api/test_redshift_cluster.py @@ -4,6 +4,7 @@ import pytest import dataall from dataall.api.constants import RedshiftClusterRole +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -24,7 +25,7 @@ def env1(env, org1, user, group, tenant, module_mocker): @pytest.fixture(scope='module') -def dataset1(db, user, env1, org1, dataset, group, group3) -> dataall.db.models.Dataset: +def dataset1(db, user, env1, org1, dataset, group, group3) -> Dataset: with db.scoped_session() as session: data = dict( label='label', @@ -71,7 +72,7 @@ def env2( @pytest.fixture(scope='module') -def dataset2(env2, org2, dataset, group2, user2) -> dataall.db.models.Dataset: +def dataset2(env2, org2, dataset, group2, user2) -> Dataset: yield dataset( org=org2, env=env2, diff --git a/tests/api/test_share.py b/tests/api/test_share.py index d951a15f8..c87e8255a 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset def random_table_name(): @@ -49,7 +49,7 @@ def env1group(environment_group: typing.Callable, env1, user, group @pytest.fixture(scope='module') def dataset1(dataset_model: typing.Callable, org1: dataall.db.models.Organization, env1: dataall.db.models.Environment - ) -> dataall.db.models.Dataset: + ) -> Dataset: yield dataset_model( organization=org1, environment=env1, @@ -58,13 +58,13 @@ def dataset1(dataset_model: typing.Callable, org1: dataall.db.models.Organizatio @pytest.fixture(scope='module') -def tables1(table: typing.Callable, dataset1: dataall.db.models.Dataset): +def tables1(table: typing.Callable, dataset1: Dataset): for i in range(1, 100): table(dataset1, name=random_table_name(), username=dataset1.owner) @pytest.fixture(scope="module", autouse=True) -def table1(table: typing.Callable, dataset1: 
dataall.db.models.Dataset, +def table1(table: typing.Callable, dataset1: Dataset, user: dataall.db.models.User) -> DatasetTable: yield table( dataset=dataset1, @@ -97,7 +97,7 @@ def env2( @pytest.fixture(scope='module') def dataset2( dataset_model: typing.Callable, org2: dataall.db.models.Organization, env2: dataall.db.models.Environment -) -> dataall.db.models.Dataset: +) -> Dataset: yield dataset_model( organization=org2, environment=env2, @@ -112,7 +112,7 @@ def tables2(table, dataset2): @pytest.fixture(scope="module", autouse=True) -def table2(table: typing.Callable, dataset2: dataall.db.models.Dataset, +def table2(table: typing.Callable, dataset2: Dataset, user2: dataall.db.models.User) -> DatasetTable: yield table( dataset=dataset2, @@ -136,7 +136,7 @@ def share1_draft( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, + dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, ) -> dataall.db.models.ShareObject: @@ -213,7 +213,7 @@ def share2_submitted( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, + dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, ) -> dataall.db.models.ShareObject: @@ -288,7 +288,7 @@ def share3_processed( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, + dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, ) -> dataall.db.models.ShareObject: @@ -360,7 +360,7 @@ def share3_item_shared( def share4_draft( user2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, + dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, ) -> dataall.db.models.ShareObject: diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index fd05557cc..4701c5609 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -1,6 +1,7 @@ import pytest from dataall.db 
import models +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module') @@ -22,7 +23,7 @@ def env1( @pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> models.Dataset: +def dataset1(db, env1, org1, group, user, dataset) -> Dataset: with db.scoped_session() as session: yield dataset( org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name diff --git a/tests/cdkproxy/conftest.py b/tests/cdkproxy/conftest.py index c223f4a37..d1810bfef 100644 --- a/tests/cdkproxy/conftest.py +++ b/tests/cdkproxy/conftest.py @@ -1,7 +1,7 @@ import pytest from dataall.db import models, api -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -70,7 +70,7 @@ def another_group(db, env): environmentAthenaWorkGroup='workgroup', ) session.add(env_group) - dataset = models.Dataset( + dataset = Dataset( label='thisdataset', environmentUri=env.environmentUri, organizationUri=env.organizationUri, @@ -95,9 +95,9 @@ def another_group(db, env): @pytest.fixture(scope='module', autouse=True) -def dataset(db, env: models.Environment) -> models.Dataset: +def dataset(db, env: models.Environment) -> Dataset: with db.scoped_session() as session: - dataset = models.Dataset( + dataset = Dataset( label='thisdataset', environmentUri=env.environmentUri, organizationUri=env.organizationUri, @@ -122,7 +122,7 @@ def dataset(db, env: models.Environment) -> models.Dataset: @pytest.fixture(scope='module', autouse=True) -def table(db, dataset: models.Dataset) -> DatasetTable: +def table(db, dataset: Dataset) -> DatasetTable: with db.scoped_session() as session: table = DatasetTable( label='thistable', diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 289f59f0b..d40402836 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -4,6 +4,7 @@ from 
dataall.api.constants import OrganisationUserRole from dataall.db import exceptions from dataall.db.models.Permission import PermissionType +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -116,7 +117,7 @@ def env(org, db, group): @pytest.fixture(scope='module', autouse=True) def dataset(org, env, db, group): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', @@ -145,7 +146,7 @@ def test_attach_resource_policy(db, user, group, group_user, dataset, permission group=group.name, permissions=dataall.db.permissions.DATASET_WRITE, resource_uri=dataset.datasetUri, - resource_type=dataall.db.models.Dataset.__name__, + resource_type=Dataset.__name__, ) assert dataall.db.api.ResourcePolicy.check_user_resource_permission( session=session, diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index 4fad9e6d2..9d91bcd9d 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -6,7 +6,7 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.searchproxy import indexers -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -49,7 +49,7 @@ def env(org, db): @pytest.fixture(scope='module', autouse=True) def dataset(org, env, db): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index 
267d3ef73..0ff919894 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -2,7 +2,7 @@ from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable +from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset @pytest.fixture(scope="module") @@ -100,9 +100,9 @@ def factory( organization: models.Organization, environment: models.Environment, label: str, - ) -> models.Dataset: + ) -> Dataset: with db.scoped_session() as session: - dataset = models.Dataset( + dataset = Dataset( organizationUri=organization.organizationUri, environmentUri=environment.environmentUri, label=label, @@ -127,7 +127,7 @@ def factory( @pytest.fixture(scope="module") def location(db): - def factory(dataset: models.Dataset, label: str) -> DatasetStorageLocation: + def factory(dataset: Dataset, label: str) -> DatasetStorageLocation: with db.scoped_session() as session: ds_location = DatasetStorageLocation( @@ -148,7 +148,7 @@ def factory(dataset: models.Dataset, label: str) -> DatasetStorageLocation: @pytest.fixture(scope='module') def table(db): - def factory(dataset: models.Dataset, label: str) -> DatasetTable: + def factory(dataset: Dataset, label: str) -> DatasetTable: with db.scoped_session() as session: table = DatasetTable( @@ -172,7 +172,7 @@ def factory(dataset: models.Dataset, label: str) -> DatasetTable: @pytest.fixture(scope="module") def share(db): def factory( - dataset: models.Dataset, + dataset: Dataset, environment: models.Environment, env_group: models.EnvironmentGroup ) -> models.ShareObject: diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 3944aed8b..32c8873e3 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -42,7 +42,7 @@ def env(org, db): @pytest.fixture(scope='module', autouse=True) def sync_dataset(org, env, db): with db.scoped_session() as session: 
- dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 1ff99ba43..89373d96e 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -10,7 +10,7 @@ from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare @@ -86,7 +86,7 @@ def target_environment_group(environment_group: Callable, target_environment: mo @pytest.fixture(scope="module") -def dataset1(dataset: Callable, org1: models.Organization, source_environment: models.Environment) -> models.Dataset: +def dataset1(dataset: Callable, org1: models.Organization, source_environment: models.Environment) -> Dataset: yield dataset( organization=org1, environment=source_environment, @@ -95,7 +95,7 @@ def dataset1(dataset: Callable, org1: models.Organization, source_environment: m @pytest.fixture(scope="module") -def table1(table: Callable, dataset1: models.Dataset) -> DatasetTable: +def table1(table: Callable, dataset1: Dataset) -> DatasetTable: yield table( dataset=dataset1, label="table1" @@ -103,7 +103,7 @@ def table1(table: Callable, dataset1: models.Dataset) -> DatasetTable: @pytest.fixture(scope="module") -def table2(table: Callable, dataset1: models.Dataset) -> DatasetTable: +def table2(table: Callable, dataset1: Dataset) -> DatasetTable: yield table( dataset=dataset1, label="table2" @@ -112,7 +112,7 @@ def table2(table: Callable, dataset1: models.Dataset) -> DatasetTable: @pytest.fixture(scope="module") def share_same_account( - share: 
Callable, dataset1: models.Dataset, source_environment: models.Environment, + share: Callable, dataset1: Dataset, source_environment: models.Environment, source_environment_group_requesters: models.EnvironmentGroup) -> models.ShareObject: yield share( dataset=dataset1, @@ -123,7 +123,7 @@ def share_same_account( @pytest.fixture(scope="module") def share_cross_account( - share: Callable, dataset1: models.Dataset, target_environment: models.Environment, + share: Callable, dataset1: Dataset, target_environment: models.Environment, target_environment_group: models.EnvironmentGroup) -> models.ShareObject: yield share( dataset=dataset1, @@ -209,7 +209,7 @@ def test_init(processor_same_account, processor_cross_account): def test_build_shared_db_name( processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - dataset1: models.Dataset, + dataset1: Dataset, share_same_account: models.ShareObject, share_cross_account: models.ShareObject, ): @@ -241,7 +241,7 @@ def test_create_shared_database( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, mocker, ): create_db_mock = mocker.patch( @@ -332,7 +332,7 @@ def test_build_share_data( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table1: DatasetTable, ): data_same_account = { @@ -380,7 +380,7 @@ def test_create_resource_link( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table1: DatasetTable, mocker, ): @@ -463,7 +463,7 @@ def test_revoke_table_resource_link_access( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: 
Dataset, table2: DatasetTable, mocker, ): @@ -511,7 +511,7 @@ def test_revoke_source_table_access( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table2: DatasetTable, mocker, ): @@ -554,7 +554,7 @@ def test_delete_resource_link_table( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table2: DatasetTable, mocker, ): @@ -596,7 +596,7 @@ def test_delete_shared_database( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table1: DatasetTable, mocker, ): @@ -625,7 +625,7 @@ def test_revoke_external_account_access_on_source_account( share_cross_account: models.ShareObject, source_environment: models.Environment, target_environment: models.Environment, - dataset1: models.Dataset, + dataset1: Dataset, table1: DatasetTable, table2: DatasetTable, mocker, diff --git a/tests/tasks/test_policies.py b/tests/tasks/test_policies.py index ca8c259c6..c1018b35f 100644 --- a/tests/tasks/test_policies.py +++ b/tests/tasks/test_policies.py @@ -1,5 +1,5 @@ from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.tasks.bucket_policy_updater import BucketPoliciesUpdater import pytest import dataall @@ -44,7 +44,7 @@ def env(org, db): @pytest.fixture(scope='module', autouse=True) def sync_dataset(org, env, db): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index 
14f61fefd..549ed8afb 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -7,8 +7,7 @@ from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager from dataall.utils.alarm_service import AlarmService -from dataall.modules.datasets.db.models import DatasetStorageLocation - +from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset SOURCE_ENV_ACCOUNT = "111111111111" SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" @@ -69,12 +68,12 @@ def dataset1(dataset: Callable, org1: models.Organization, source_environment: m @pytest.fixture(scope="module") -def location1(location: Callable, dataset1: models.Dataset) -> DatasetStorageLocation: +def location1(location: Callable, dataset1: Dataset) -> DatasetStorageLocation: yield location(dataset1, "location1") @pytest.fixture(scope="module") -def share1(share: Callable, dataset1: models.Dataset, +def share1(share: Callable, dataset1: Dataset, target_environment: models.Environment, target_environment_group: models.EnvironmentGroup) -> models.ShareObject: share1 = share(dataset1, target_environment, target_environment_group) @@ -380,7 +379,7 @@ def test_grant_target_role_access_policy_test_no_policy( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -442,7 +441,7 @@ def test_update_dataset_bucket_key_policy_with_env_admin( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -559,7 +558,7 @@ def test_update_dataset_bucket_key_policy_without_env_admin( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - 
dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -639,7 +638,7 @@ def test_manage_access_point_and_policy_1( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -730,7 +729,7 @@ def test_manage_access_point_and_policy_2( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -804,7 +803,7 @@ def test_manage_access_point_and_policy_3( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -875,7 +874,7 @@ def test_delete_access_point_policy_with_env_admin_one_prefix( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -947,7 +946,7 @@ def test_delete_access_point_policy_with_env_admin_multiple_prefix( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1014,7 +1013,7 @@ def test_dont_delete_access_point_with_policy( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1060,7 +1059,7 @@ def 
test_delete_access_point_without_policy( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1106,7 +1105,7 @@ def test_delete_target_role_access_policy_no_remaining_statement( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1171,7 +1170,7 @@ def test_delete_target_role_access_policy_with_remaining_statement( mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1257,7 +1256,7 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_additional_target mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, @@ -1348,7 +1347,7 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_tar mocker, source_environment_group: models.EnvironmentGroup, target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, + dataset1: Dataset, db, share1: models.ShareObject, share_item_folder1: models.ShareObjectItem, diff --git a/tests/tasks/test_stacks_updater.py b/tests/tasks/test_stacks_updater.py index 1bc63c3c3..5bd095a91 100644 --- a/tests/tasks/test_stacks_updater.py +++ b/tests/tasks/test_stacks_updater.py @@ -1,6 +1,7 @@ import pytest import dataall from dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets.db.models import Dataset @pytest.fixture(scope='module', 
autouse=True) @@ -42,7 +43,7 @@ def env(org, db): @pytest.fixture(scope='module', autouse=True) def sync_dataset(org, env, db): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 61c70d174..11c255db2 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -2,7 +2,7 @@ import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset @pytest.fixture(scope='module') @@ -65,7 +65,7 @@ def otherenv(org, db): @pytest.fixture(scope='module') def dataset(org, env, db): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, environmentUri=env.environmentUri, label='label', @@ -88,7 +88,7 @@ def dataset(org, env, db): @pytest.fixture(scope='module') def share( - dataset: dataall.db.models.Dataset, + dataset: Dataset, db: dataall.db.Engine, otherenv: dataall.db.models.Environment, ): diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index ff6f8271e..5d0322f94 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -1,7 +1,7 @@ import pytest import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -44,7 +44,7 @@ def env(org, db): @pytest.fixture(scope='module', autouse=True) def sync_dataset(org, env, db): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( organizationUri=org.organizationUri, 
environmentUri=env.environmentUri, label='label', From d05196f057bdc3ab2fad5edd6e47dd6a6ade5198 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 24 Apr 2023 17:27:53 +0200 Subject: [PATCH 079/346] Moved a part of the code from deploying dataset stack --- backend/dataall/api/Objects/Stack/stack_helper.py | 11 ----------- .../modules/datasets/api/dataset/resolvers.py | 15 ++++++++++++--- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/backend/dataall/api/Objects/Stack/stack_helper.py b/backend/dataall/api/Objects/Stack/stack_helper.py index 5ffffb790..780dc30cf 100644 --- a/backend/dataall/api/Objects/Stack/stack_helper.py +++ b/backend/dataall/api/Objects/Stack/stack_helper.py @@ -3,7 +3,6 @@ import requests from .... import db -from ....api.context import Context from ....aws.handlers.service_handlers import Worker from ....aws.handlers.ecs import Ecs from ....db import models @@ -11,7 +10,6 @@ from dataall.core.config import config from dataall.core.context import get_context -from dataall.modules.datasets.db.models import Dataset def get_stack_with_cfn_resources(targetUri: str, environmentUri: str): @@ -85,15 +83,6 @@ def deploy_stack(targetUri): return stack -def deploy_dataset_stack(dataset: Dataset): - """ - Each dataset stack deployment triggers environment stack update - to rebuild teams IAM roles data access policies - """ - deploy_stack(dataset.datasetUri) - deploy_stack(dataset.environmentUri) - - def delete_stack( target_uri, accountid, cdk_role_arn, region ): diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 29b5fdc43..87523deab 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -41,7 +41,7 @@ def create_dataset(context: Context, source, input=None): session=session, dataset_uri=dataset.datasetUri ) - stack_helper.deploy_dataset_stack(dataset) + 
_deploy_dataset_stack(dataset) dataset.userRoleForDataset = DatasetRole.Creator.value @@ -79,7 +79,7 @@ def import_dataset(context: Context, source, input=None): session=session, dataset_uri=dataset.datasetUri ) - stack_helper.deploy_dataset_stack(dataset) + _deploy_dataset_stack(dataset) dataset.userRoleForDataset = DatasetRole.Creator.value @@ -225,7 +225,7 @@ def update_dataset(context, source, datasetUri: str = None, input: dict = None): ) DatasetIndexer.upsert(session, dataset_uri=datasetUri) - stack_helper.deploy_dataset_stack(updated_dataset) + _deploy_dataset_stack(updated_dataset) return updated_dataset @@ -639,3 +639,12 @@ def resolve_redshift_copy_enabled(context, source: Dataset, clusterUri: str): return db.api.RedshiftCluster.get_cluster_dataset( session, clusterUri, source.datasetUri ).datasetCopyEnabled + + +def _deploy_dataset_stack(dataset: Dataset): + """ + Each dataset stack deployment triggers environment stack update + to rebuild teams IAM roles data access policies + """ + stack_helper.deploy_stack(dataset.datasetUri) + stack_helper.deploy_stack(dataset.environmentUri) From 8b2accb622e4dae7aa82b0b3f25ed86c5a3406ee Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 11:10:02 +0200 Subject: [PATCH 080/346] Moved a SNS dataset handler --- .../datasets/handlers/sns_dataset_handler.py} | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) rename backend/dataall/{aws/handlers/sns.py => modules/datasets/handlers/sns_dataset_handler.py} (90%) diff --git a/backend/dataall/aws/handlers/sns.py b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py similarity index 90% rename from backend/dataall/aws/handlers/sns.py rename to backend/dataall/modules/datasets/handlers/sns_dataset_handler.py index 0dcd72414..006c4023d 100644 --- a/backend/dataall/aws/handlers/sns.py +++ b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py @@ -3,16 +3,16 @@ from botocore.exceptions import ClientError -from 
.service_handlers import Worker -from .sts import SessionHelper -from ... import db -from ...db import models +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall import db +from dataall.db import models from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) -class Sns: +class SnsDatasetHandler: def __init__(self): pass From 2cd19d2377e3c3041ccc0a50a31cf0ff831a6042 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 11:57:41 +0200 Subject: [PATCH 081/346] Moved a method from Glue client to LF client --- .../modules/datasets/aws/glue_table_client.py | 2 +- .../modules/datasets/aws/lf_table_client.py | 41 +++++++++++++++++-- .../datasets/handlers/glue_column_handler.py | 6 +-- .../modules/datasets/tasks/tables_syncer.py | 4 +- 4 files changed, 44 insertions(+), 9 deletions(-) diff --git a/backend/dataall/modules/datasets/aws/glue_table_client.py b/backend/dataall/modules/datasets/aws/glue_table_client.py index 78f3e4485..2b0f1c5c4 100644 --- a/backend/dataall/modules/datasets/aws/glue_table_client.py +++ b/backend/dataall/modules/datasets/aws/glue_table_client.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError -from dataall.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/aws/lf_table_client.py b/backend/dataall/modules/datasets/aws/lf_table_client.py index f978e0e73..239d4b194 100644 --- a/backend/dataall/modules/datasets/aws/lf_table_client.py +++ b/backend/dataall/modules/datasets/aws/lf_table_client.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError from dataall.aws.handlers.sts import SessionHelper -from dataall.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable log = logging.getLogger(__name__) @@ -10,8 +10,10 @@ class 
LakeFormationTableClient: """Requests to AWS LakeFormation""" - def __init__(self, aws_session, table: DatasetTable): - self._client = aws_session.client('lakeformation', region_name=table.reg) + def __init__(self, table: DatasetTable, aws_session=None): + if not aws_session: + aws_session = SessionHelper.remote_session(table.AWSAccountId) + self._client = aws_session.client('lakeformation', region_name=table.region) self._table = table def grant_pivot_role_all_table_permissions(self): @@ -51,3 +53,36 @@ def grant_pivot_role_all_table_permissions(self): f'access: {e}' ) raise e + + def grant_principals_all_table_permissions(self, principals: [str]): + """ + Update the table permissions on Lake Formation + for tables managed by data.all + :param principals: + :return: + """ + table = self._table + for principal in principals: + try: + grant_dict = dict( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={ + 'Table': { + 'DatabaseName': table.GlueDatabaseName, + 'Name': table.name, + } + }, + Permissions=['ALL'], + ) + response = self._table.grant_permissions(**grant_dict) + log.error( + f'Successfully granted principals {principals} all permissions on table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {response}' + ) + except ClientError as e: + log.error( + f'Failed to grant admin roles {principals} all permissions on table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {e}' + ) diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index c8a5cb848..59dca4528 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -18,7 +18,7 @@ class DatasetColumnGlueHandler: @Worker.handler('glue.table.columns') def get_table_columns(engine, task: models.Task): with engine.scoped_session() as session: - 
dataset_table: models.DatasetTable = session.query(models.DatasetTable).get( + dataset_table: DatasetTable = session.query(DatasetTable).get( task.targetUri ) aws = SessionHelper.remote_session(dataset_table.AWSAccountId) @@ -34,11 +34,11 @@ def get_table_columns(engine, task: models.Task): def update_table_columns(engine, task: models.Task): with engine.scoped_session() as session: column: DatasetTableColumn = session.query(DatasetTableColumn).get(task.targetUri) - table: DatasetTable = session.query(models.DatasetTable).get(column.tableUri) + table: DatasetTable = session.query(DatasetTable).get(column.tableUri) aws_session = SessionHelper.remote_session(table.AWSAccountId) - LakeFormationTableClient(aws_session, table).grant_pivot_role_all_table_permissions() + LakeFormationTableClient(table, aws_session).grant_pivot_role_all_table_permissions() glue_client = GlueTableClient(aws_session, table) original_table = glue_client.get_table() updated_table = { diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index f382d65a4..c17951916 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -8,6 +8,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models +from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_service import DatasetService @@ -76,8 +77,7 @@ def sync_tables(engine): log.info('Updating tables permissions on Lake Formation...') for table in tables: - Glue.grant_principals_all_table_permissions( - table, + LakeFormationTableClient(table).grant_principals_all_table_permissions( principals=[ 
SessionHelper.get_delegation_role_arn(env.AwsAccountId), env.EnvironmentDefaultIAMRoleArn, From 741238d8f68ea8a09440661c3fd3379b4208bd96 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 12:06:20 +0200 Subject: [PATCH 082/346] Extracted the common part of the code --- .../modules/datasets/aws/lf_table_client.py | 60 +++++++------------ 1 file changed, 21 insertions(+), 39 deletions(-) diff --git a/backend/dataall/modules/datasets/aws/lf_table_client.py b/backend/dataall/modules/datasets/aws/lf_table_client.py index 239d4b194..8caff5073 100644 --- a/backend/dataall/modules/datasets/aws/lf_table_client.py +++ b/backend/dataall/modules/datasets/aws/lf_table_client.py @@ -24,21 +24,36 @@ def grant_pivot_role_all_table_permissions(self): :param table: :return: """ + table = self._table + principal = SessionHelper.get_delegation_role_arn(table.AWSAccountId) + self._grant_permissions_to_table(principal, ['SELECT', 'ALTER', 'DROP', 'INSERT']) + + def grant_principals_all_table_permissions(self, principals: [str]): + """ + Update the table permissions on Lake Formation + for tables managed by data.all + :param principals: + :return: + """ + + for principal in principals: + try: + self._grant_permissions_to_table(principal, ['ALL']) + except ClientError: + pass # ignore the error to continue with other requests + + def _grant_permissions_to_table(self, principal, permissions): table = self._table try: grant_dict = dict( - Principal={ - 'DataLakePrincipalIdentifier': SessionHelper.get_delegation_role_arn( - table.AWSAccountId - ) - }, + Principal={'DataLakePrincipalIdentifier': principal}, Resource={ 'Table': { 'DatabaseName': table.GlueDatabaseName, 'Name': table.name, } }, - Permissions=['SELECT', 'ALTER', 'DROP', 'INSERT'], + Permissions=permissions, ) response = self._client.grant_permissions(**grant_dict) log.error( @@ -53,36 +68,3 @@ def grant_pivot_role_all_table_permissions(self): f'access: {e}' ) raise e - - def 
grant_principals_all_table_permissions(self, principals: [str]): - """ - Update the table permissions on Lake Formation - for tables managed by data.all - :param principals: - :return: - """ - table = self._table - for principal in principals: - try: - grant_dict = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'Table': { - 'DatabaseName': table.GlueDatabaseName, - 'Name': table.name, - } - }, - Permissions=['ALL'], - ) - response = self._table.grant_permissions(**grant_dict) - log.error( - f'Successfully granted principals {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {response}' - ) - except ClientError as e: - log.error( - f'Failed to grant admin roles {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {e}' - ) From 81e764693111cd1389e115c854dd60223946079c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 12:25:53 +0200 Subject: [PATCH 083/346] Extracted dataset part from Glue --- backend/dataall/aws/handlers/glue.py | 179 ------------------ .../datasets/handlers/glue_dataset_handler.py | 149 +++++++++++++++ 2 files changed, 149 insertions(+), 179 deletions(-) create mode 100644 backend/dataall/modules/datasets/handlers/glue_dataset_handler.py diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index ba4d2a37c..bbfffff8b 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -5,8 +5,6 @@ from .service_handlers import Worker from .sts import SessionHelper from ...db import models -from dataall.modules.datasets.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger('aws:glue') @@ -370,142 +368,6 @@ def batch_delete_tables(**data): ) raise e - @staticmethod - 
@Worker.handler(path='glue.dataset.crawler.create') - def create_crawler(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri( - session, task.targetUri - ) - location = task.payload.get('location') - Glue.create_glue_crawler( - **{ - 'crawler_name': f'{dataset.GlueDatabaseName}-{location}'[:52], - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'location': location or f's3://{dataset.S3BucketName}', - } - ) - - @staticmethod - def create_glue_crawler(**data): - try: - accountid = data['accountid'] - database = data.get('database') - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - targets = {'S3Targets': [{'Path': data.get('location')}]} - crawler = Glue._get_crawler(glue, crawler_name) - if crawler: - Glue._update_existing_crawler( - glue, accountid, crawler_name, targets, database - ) - else: - crawler = glue.create_crawler( - Name=crawler_name, - Role=SessionHelper.get_delegation_role_arn(accountid=accountid), - DatabaseName=database, - Targets=targets, - Tags=data.get('tags', {'Application': 'dataall'}), - ) - - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to create Crawler due to %s', e) - - @staticmethod - def get_glue_crawler(data): - try: - accountid = data['accountid'] - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - crawler = Glue._get_crawler(glue, crawler_name) - return crawler - except ClientError as e: - log.error('Failed to find Crawler due to %s', e) - raise e - - @staticmethod - @Worker.handler(path='glue.crawler.start') - def start_crawler(engine, task: 
models.Task): - with engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri( - session, task.targetUri - ) - location = task.payload.get('location') - return Glue.start_glue_crawler( - { - 'crawler_name': dataset.GlueCrawlerName, - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'location': location, - } - ) - - @staticmethod - def start_glue_crawler(data): - try: - accountid = data['accountid'] - crawler_name = data['crawler_name'] - database = data['database'] - targets = {'S3Targets': [{'Path': data.get('location')}]} - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - if data.get('location'): - Glue._update_existing_crawler( - glue, accountid, crawler_name, targets, database - ) - crawler = Glue._get_crawler(glue, crawler_name) - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to start Crawler due to %s', e) - raise e - - @staticmethod - def _get_crawler(glue, crawler_name): - crawler = None - try: - crawler = glue.get_crawler(Name=crawler_name) - except ClientError as e: - if e.response['Error']['Code'] == 'EntityNotFoundException': - log.debug(f'Crawler does not exists {crawler_name} %s', e) - else: - raise e - return crawler.get('Crawler') if crawler else None - - @staticmethod - def _update_existing_crawler(glue, accountid, crawler_name, targets, database): - try: - glue.stop_crawler(Name=crawler_name) - except ClientError as e: - if ( - e.response['Error']['Code'] == 'CrawlerStoppingException' - or e.response['Error']['Code'] == 'CrawlerNotRunningException' - ): - log.error('Failed to stop crawler %s', e) - try: - glue.update_crawler( - Name=crawler_name, - Role=SessionHelper.get_delegation_role_arn(accountid=accountid), - DatabaseName=database, - Targets=targets, - ) - 
log.info('Crawler %s updated ', crawler_name) - except ClientError as e: - log.debug('Failed to stop and update crawler %s', e) - if e.response['Error']['Code'] != 'CrawlerRunningException': - log.error('Failed to update crawler %s', e) - else: - raise e - @staticmethod @Worker.handler(path='glue.job.runs') def get_job_runs(engine, task: models.Task): @@ -522,44 +384,3 @@ def get_job_runs(engine, task: models.Task): log.warning(f'Could not retrieve pipeline runs , {str(e)}') return [] return response['JobRuns'] - - @staticmethod - def grant_principals_all_table_permissions( - table: DatasetTable, principals: [str], client=None - ): - """ - Update the table permissions on Lake Formation - for tables managed by data.all - :param principals: - :param table: - :param client: - :return: - """ - if not client: - client = SessionHelper.remote_session(table.AWSAccountId).client( - 'lakeformation', region_name=table.region - ) - for principal in principals: - try: - grant_dict = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'Table': { - 'DatabaseName': table.GlueDatabaseName, - 'Name': table.name, - } - }, - Permissions=['ALL'], - ) - response = client.grant_permissions(**grant_dict) - log.error( - f'Successfully granted principals {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {response}' - ) - except ClientError as e: - log.error( - f'Failed to grant admin roles {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {e}' - ) diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py new file mode 100644 index 000000000..9eb27afaa --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -0,0 +1,149 @@ +import logging + +from botocore.exceptions import ClientError + +from 
dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models +from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets.services.dataset_service import DatasetService + +log = logging.getLogger(__name__) + + +class GlueDatasetHandler: + @staticmethod + @Worker.handler(path='glue.dataset.crawler.create') + def create_crawler(engine, task: models.Task): + with engine.scoped_session() as session: + dataset: Dataset = DatasetService.get_dataset_by_uri( + session, task.targetUri + ) + location = task.payload.get('location') + GlueDatasetHandler.create_glue_crawler( + **{ + 'crawler_name': f'{dataset.GlueDatabaseName}-{location}'[:52], + 'region': dataset.region, + 'accountid': dataset.AwsAccountId, + 'database': dataset.GlueDatabaseName, + 'location': location or f's3://{dataset.S3BucketName}', + } + ) + + @staticmethod + @Worker.handler(path='glue.crawler.start') + def start_crawler(engine, task: models.Task): + with engine.scoped_session() as session: + dataset: Dataset = DatasetService.get_dataset_by_uri( + session, task.targetUri + ) + location = task.payload.get('location') + return GlueDatasetHandler.start_glue_crawler( + { + 'crawler_name': dataset.GlueCrawlerName, + 'region': dataset.region, + 'accountid': dataset.AwsAccountId, + 'database': dataset.GlueDatabaseName, + 'location': location, + } + ) + + @staticmethod + def create_glue_crawler(**data): + try: + accountid = data['accountid'] + database = data.get('database') + session = SessionHelper.remote_session(accountid=accountid) + glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) + crawler_name = data.get('crawler_name') + targets = {'S3Targets': [{'Path': data.get('location')}]} + crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) + if crawler: + GlueDatasetHandler._update_existing_crawler( + glue, accountid, crawler_name, targets, database + ) + else: + crawler = 
glue.create_crawler( + Name=crawler_name, + Role=SessionHelper.get_delegation_role_arn(accountid=accountid), + DatabaseName=database, + Targets=targets, + Tags=data.get('tags', {'Application': 'dataall'}), + ) + + glue.start_crawler(Name=crawler_name) + log.info('Crawler %s started ', crawler_name) + return crawler + except ClientError as e: + log.error('Failed to create Crawler due to %s', e) + + @staticmethod + def start_glue_crawler(data): + try: + accountid = data['accountid'] + crawler_name = data['crawler_name'] + database = data['database'] + targets = {'S3Targets': [{'Path': data.get('location')}]} + session = SessionHelper.remote_session(accountid=accountid) + glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) + if data.get('location'): + GlueDatasetHandler._update_existing_crawler( + glue, accountid, crawler_name, targets, database + ) + crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) + glue.start_crawler(Name=crawler_name) + log.info('Crawler %s started ', crawler_name) + return crawler + except ClientError as e: + log.error('Failed to start Crawler due to %s', e) + raise e + + @staticmethod + def _update_existing_crawler(glue, accountid, crawler_name, targets, database): + try: + glue.stop_crawler(Name=crawler_name) + except ClientError as e: + if ( + e.response['Error']['Code'] == 'CrawlerStoppingException' + or e.response['Error']['Code'] == 'CrawlerNotRunningException' + ): + log.error('Failed to stop crawler %s', e) + try: + glue.update_crawler( + Name=crawler_name, + Role=SessionHelper.get_delegation_role_arn(accountid=accountid), + DatabaseName=database, + Targets=targets, + ) + log.info('Crawler %s updated ', crawler_name) + except ClientError as e: + log.debug('Failed to stop and update crawler %s', e) + if e.response['Error']['Code'] != 'CrawlerRunningException': + log.error('Failed to update crawler %s', e) + else: + raise e + + @staticmethod + def get_glue_crawler(data): + try: + accountid = 
data['accountid'] + session = SessionHelper.remote_session(accountid=accountid) + glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) + crawler_name = data.get('crawler_name') + crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) + return crawler + except ClientError as e: + log.error('Failed to find Crawler due to %s', e) + raise e + + @staticmethod + def _get_crawler(glue, crawler_name): + crawler = None + try: + crawler = glue.get_crawler(Name=crawler_name) + except ClientError as e: + if e.response['Error']['Code'] == 'EntityNotFoundException': + log.debug(f'Crawler does not exists {crawler_name} %s', e) + else: + raise e + return crawler.get('Crawler') if crawler else None \ No newline at end of file From f5752e991fc880f42a396ee5f493994d625d47bf Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 12:50:50 +0200 Subject: [PATCH 084/346] glue.dataset.crawler.create action doesn't exist --- .../datasets/handlers/glue_dataset_handler.py | 46 ------------------- 1 file changed, 46 deletions(-) diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index 9eb27afaa..e243aab39 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -12,23 +12,6 @@ class GlueDatasetHandler: - @staticmethod - @Worker.handler(path='glue.dataset.crawler.create') - def create_crawler(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri( - session, task.targetUri - ) - location = task.payload.get('location') - GlueDatasetHandler.create_glue_crawler( - **{ - 'crawler_name': f'{dataset.GlueDatabaseName}-{location}'[:52], - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'location': location or f's3://{dataset.S3BucketName}', 
- } - ) @staticmethod @Worker.handler(path='glue.crawler.start') @@ -48,35 +31,6 @@ def start_crawler(engine, task: models.Task): } ) - @staticmethod - def create_glue_crawler(**data): - try: - accountid = data['accountid'] - database = data.get('database') - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - targets = {'S3Targets': [{'Path': data.get('location')}]} - crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) - if crawler: - GlueDatasetHandler._update_existing_crawler( - glue, accountid, crawler_name, targets, database - ) - else: - crawler = glue.create_crawler( - Name=crawler_name, - Role=SessionHelper.get_delegation_role_arn(accountid=accountid), - DatabaseName=database, - Targets=targets, - Tags=data.get('tags', {'Application': 'dataall'}), - ) - - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to create Crawler due to %s', e) - @staticmethod def start_glue_crawler(data): try: From ce0c4e0e0e9d5f4db2c2907dbdf2b11ea65a097e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 13:04:41 +0200 Subject: [PATCH 085/346] Refactored dataset handlers --- .../modules/datasets/api/dataset/resolvers.py | 9 +- .../datasets/aws/glue_dataset_client.py | 67 +++++++++++++++ .../datasets/handlers/glue_dataset_handler.py | 86 ++----------------- tests/api/test_dataset.py | 2 +- 4 files changed, 76 insertions(+), 88 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/glue_dataset_client.py diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 87523deab..2938cd17f 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -17,6 +17,7 @@ 
from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization from dataall.modules.datasets import Dataset +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer @@ -358,13 +359,7 @@ def start_crawler(context: Context, source, datasetUri: str, input: dict = None) else f's3://{dataset.S3BucketName}' ) - crawler = Glue.get_glue_crawler( - { - 'crawler_name': dataset.GlueCrawlerName, - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - } - ) + crawler = DatasetCrawler(dataset).get_crawler() if not crawler: raise exceptions.AWSResourceNotFound( action=permissions.CRAWL_DATASET, diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py new file mode 100644 index 000000000..2f29abc41 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -0,0 +1,67 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.modules.datasets.db.models import Dataset + +log = logging.getLogger(__name__) + + +class DatasetCrawler: + def __init__(self, dataset: Dataset): + session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) + region = dataset.region if dataset.region else 'eu-west-1' + self._client = session.client('glue', region_name=region) + self._dataset = dataset + + def get_crawler(self): + crawler = None + crawler_name = self._dataset.GlueCrawlerName + + try: + crawler = self._client.get_crawler(Name=crawler_name) + except ClientError as e: + if e.response['Error']['Code'] == 'EntityNotFoundException': + log.debug(f'Crawler does not exists {crawler_name} %s', e) + 
else: + raise e + return crawler.get('Crawler') if crawler else None + + def start_crawler(self): + crawler_name = self._dataset.GlueCrawlerName + try: + crawler = self.get_crawler() + self._client.start_crawler(Name=crawler_name) + log.info('Crawler %s started ', crawler_name) + return crawler + except ClientError as e: + log.error('Failed to start Crawler due to %s', e) + raise e + + def update_crawler(self, targets): + dataset = self._dataset + crawler_name = dataset.GlueCrawlerName + try: + self._client.stop_crawler(Name=crawler_name) + except ClientError as e: + if ( + e.response['Error']['Code'] == 'CrawlerStoppingException' + or e.response['Error']['Code'] == 'CrawlerNotRunningException' + ): + log.error('Failed to stop crawler %s', e) + try: + self._client.update_crawler( + Name=crawler_name, + Role=SessionHelper.get_delegation_role_arn(accountid=dataset.AwsAccountId), + DatabaseName=dataset.GlueDatabaseName, + Targets=targets, + ) + log.info('Crawler %s updated ', crawler_name) + except ClientError as e: + log.debug('Failed to stop and update crawler %s', e) + if e.response['Error']['Code'] != 'CrawlerRunningException': + log.error('Failed to update crawler %s', e) + else: + raise e + + diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index e243aab39..ff360a2f1 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -5,6 +5,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -21,83 +22,8 @@ def start_crawler(engine, task: models.Task): session, task.targetUri ) 
location = task.payload.get('location') - return GlueDatasetHandler.start_glue_crawler( - { - 'crawler_name': dataset.GlueCrawlerName, - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'location': location, - } - ) - - @staticmethod - def start_glue_crawler(data): - try: - accountid = data['accountid'] - crawler_name = data['crawler_name'] - database = data['database'] - targets = {'S3Targets': [{'Path': data.get('location')}]} - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - if data.get('location'): - GlueDatasetHandler._update_existing_crawler( - glue, accountid, crawler_name, targets, database - ) - crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to start Crawler due to %s', e) - raise e - - @staticmethod - def _update_existing_crawler(glue, accountid, crawler_name, targets, database): - try: - glue.stop_crawler(Name=crawler_name) - except ClientError as e: - if ( - e.response['Error']['Code'] == 'CrawlerStoppingException' - or e.response['Error']['Code'] == 'CrawlerNotRunningException' - ): - log.error('Failed to stop crawler %s', e) - try: - glue.update_crawler( - Name=crawler_name, - Role=SessionHelper.get_delegation_role_arn(accountid=accountid), - DatabaseName=database, - Targets=targets, - ) - log.info('Crawler %s updated ', crawler_name) - except ClientError as e: - log.debug('Failed to stop and update crawler %s', e) - if e.response['Error']['Code'] != 'CrawlerRunningException': - log.error('Failed to update crawler %s', e) - else: - raise e - - @staticmethod - def get_glue_crawler(data): - try: - accountid = data['accountid'] - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', 
region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - crawler = GlueDatasetHandler._get_crawler(glue, crawler_name) - return crawler - except ClientError as e: - log.error('Failed to find Crawler due to %s', e) - raise e - - @staticmethod - def _get_crawler(glue, crawler_name): - crawler = None - try: - crawler = glue.get_crawler(Name=crawler_name) - except ClientError as e: - if e.response['Error']['Code'] == 'EntityNotFoundException': - log.debug(f'Crawler does not exists {crawler_name} %s', e) - else: - raise e - return crawler.get('Crawler') if crawler else None \ No newline at end of file + targets = {'S3Targets': [{'Path': location}]} + crawler = DatasetCrawler(dataset) + if location: + crawler.update_crawler(targets) + return crawler.start_crawler() \ No newline at end of file diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 73b5dd161..ee2d6047e 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -180,7 +180,7 @@ def test_update_dataset(dataset1, client, patch_es, group, group2): def test_start_crawler(org1, env1, dataset1, client, group, module_mocker): module_mocker.patch( - 'dataall.aws.handlers.glue.Glue.get_glue_crawler', + 'dataall.modules.datasets.aws.glue_dataset_client.DatasetCrawler.get_crawler', return_value={'crawler_name': dataset1.GlueCrawlerName}, ) mutation = """ From 219553f5b9e5b23ad4d5c1971e1787fea50db9cd Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Tue, 25 Apr 2023 13:37:22 +0200 Subject: [PATCH 086/346] V1.5.0 Features (#409) ### Feature or Bugfix - V1.5.0 Features. Check each PR for a complete description of the feature. ### Detail - #292 - #355 - #337 - #427 - #431 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--------- Co-authored-by: kukushking Co-authored-by: Dariusz Osiennik Co-authored-by: Noah Paige <69586985+noah-paige@users.noreply.github.com> Co-authored-by: Dennis Goldner <107395339+degoldner@users.noreply.github.com> --- UserGuide.pdf | Bin 10707270 -> 10692972 bytes .../dataall/api/Objects/Dataset/resolvers.py | 19 + .../api/Objects/Environment/resolvers.py | 46 +- .../dataall/aws/handlers/cloudformation.py | 20 +- backend/dataall/aws/handlers/ec2.py | 26 + backend/dataall/aws/handlers/ecs.py | 34 +- backend/dataall/aws/handlers/glue.py | 1 - backend/dataall/aws/handlers/iam.py | 12 +- backend/dataall/aws/handlers/lakeformation.py | 17 +- .../dataall/aws/handlers/parameter_store.py | 6 +- backend/dataall/aws/handlers/quicksight.py | 43 +- backend/dataall/aws/handlers/sagemaker.py | 4 +- .../dataall/aws/handlers/sagemaker_studio.py | 9 +- .../dataall/aws/handlers/service_handlers.py | 1 - backend/dataall/aws/handlers/stepfunction.py | 2 +- backend/dataall/aws/handlers/sts.py | 104 +- .../__init__.py | 1 + .../datalakelocationcustomresource/index.py | 89 ++ .../gluedatabasecustomresource/index.py | 65 +- .../__init__.py | 1 + .../index.py | 118 +++ .../lakeformationdefaultsettings/index.py | 86 +- backend/dataall/cdkproxy/cdk_cli_wrapper.py | 61 +- backend/dataall/cdkproxy/stacks/dataset.py | 205 ++-- .../dataall/cdkproxy/stacks/environment.py | 459 ++++----- backend/dataall/cdkproxy/stacks/pivot_role.py | 890 ++++++++++++++++++ .../cdkproxy/stacks/sagemakerstudio.py | 179 +++- backend/dataall/db/api/__init__.py | 2 +- backend/dataall/db/api/dataset.py | 2 +- backend/dataall/db/api/redshift_cluster.py | 1 - backend/dataall/searchproxy/connect.py | 12 +- .../share_managers/lf_share_manager.py | 11 +- backend/dataall/tasks/stacks_updater.py | 35 +- deploy/configs/frontend_config.py | 8 +- deploy/pivot_role/pivotRole.yaml | 29 +- deploy/pivot_role/pivotRoleCDK/README.md | 8 - deploy/pivot_role/pivotRoleCDK/app.py | 25 - .../pivotRoleCDK/dataall_base_infra.py | 846 
----------------- deploy/requirements.txt | 4 +- deploy/stacks/backend_stack.py | 104 +- deploy/stacks/backend_stage.py | 8 +- deploy/stacks/container.py | 95 +- deploy/stacks/lambda_api.py | 85 +- deploy/stacks/monitoring.py | 225 +++-- deploy/stacks/opensearch.py | 10 +- deploy/stacks/opensearch_serverless.py | 198 ++++ deploy/stacks/param_store_stack.py | 8 + deploy/stacks/pipeline.py | 122 ++- deploy/stacks/secrets_stack.py | 7 +- deploy/stacks/vpc.py | 59 +- documentation/userguide/docs/environments.md | 81 +- documentation/userguide/docs/mlstudio.md | 23 +- .../Environments/EnvironmentCreateForm.js | 114 +-- template_cdk.json | 7 +- tests/api/conftest.py | 17 +- tests/api/test_dashboards.py | 3 + tests/api/test_datapipelines.py | 12 +- tests/api/test_dataset.py | 8 +- tests/api/test_dataset_location.py | 6 +- tests/api/test_dataset_profiling.py | 6 +- tests/api/test_dataset_table.py | 6 +- tests/api/test_environment.py | 6 +- tests/api/test_glossary.py | 6 +- tests/api/test_group.py | 6 +- tests/api/test_keyvaluetag.py | 6 +- tests/api/test_organization.py | 16 +- tests/api/test_redshift_cluster.py | 9 +- tests/api/test_sagemaker_notebook.py | 4 - tests/api/test_sagemaker_studio.py | 4 - tests/api/test_vote.py | 6 +- tests/api/test_vpc.py | 6 +- tests/cdkproxy/test_dataset_stack.py | 3 +- tests/cdkproxy/test_environment_stack.py | 16 +- 73 files changed, 2834 insertions(+), 1939 deletions(-) create mode 100644 backend/dataall/aws/handlers/ec2.py create mode 100644 backend/dataall/cdkproxy/assets/datalakelocationcustomresource/__init__.py create mode 100644 backend/dataall/cdkproxy/assets/datalakelocationcustomresource/index.py create mode 100644 backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/__init__.py create mode 100644 backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/index.py create mode 100644 backend/dataall/cdkproxy/stacks/pivot_role.py delete mode 100644 deploy/pivot_role/pivotRoleCDK/README.md delete mode 100644 
deploy/pivot_role/pivotRoleCDK/app.py delete mode 100644 deploy/pivot_role/pivotRoleCDK/dataall_base_infra.py create mode 100644 deploy/stacks/opensearch_serverless.py diff --git a/UserGuide.pdf b/UserGuide.pdf index 1839ba944f505cede04903d96cd16d663a9c9ca9..2992ef52e216c628d3c2d22f7d3a8e1fd067dc63 100644 GIT binary patch delta 659008 zcmc$H30%$D`~RS1FH4(BslkYH_a!ZsWG_oew5dcT?b`c{ZEPvrp#^1ar6x-$EgDoP z4RwV!CY6d7SJM7}&iUM%n9X;V-~apizI^6$&U2pgEbsH2bI)_mr@WsJi@$jsEPmuL z!BC;7tWlvdS;8lWe6lFQCtdht3ZKGywy>T}Bg@%BK5QW$wvdh^q~i$bI6^*LHf~2F zC?o-mLK4s@BqJJyWJIHINkS@B_!QPth4oZnJxy3oBL$|>$c|_p{ zu!bX4fg@CbBPtiR;0o)x!g{W-N0BYLBpsbX($Og-9i2kb(J3T9y3mw#s<2**DkKov zk}k9*U1&?X(3W%>$-rXNoP{o4+dWk{bY9+23vrevRg=y8ZrH{lEZ&}BAz~~|Hj5`C z#t>&QG^re(kr-Q?OVy+?c!$L1iPNZ>3>NPfvAM!pDRC>_&MDHv*(C95aXAUCnJODq zsBBcsQ<)>hdni73D2>V+KU`WF6VX{TWLpcB@0P7uZMukIHV3x=v4Msd8dZ!hNbk0L03$cCXK>tAI{}HqDc>@vDlhy3U8^zoFOz`uDq-mU6VrL{V1_? z2tDj*fta*7m!`>K@;*wa;O1eJHZieLEGC_fA!`w)_w5i(agdls{fzyA5t^7ilS|j6 zGkA|jsFBq)8dsCT#4GcsL_Mbo75;zwx@1JQV| zN6i!CA`xbf-X$Tl0FOF$67SyVcyTIClgr_Gk7-3_r}Lb~h74hcz044kfp<`H(zxlL zy_<~F9KzF@SaMkB7ct}S#lUO8m~f1ZiE%}6BdTExNJ=(muEbF zCUl2{40Egy8_PR8UY&gZmN$0dC|>t?W%RmwluTG=rPvsr?gaU;M}=b2BN(7Dl>=3t z6Tag~4WEXoA52gJ0cChO;*#(_Z3rhUw^K|yY*sliHkl~PYn>r8ipF37Kqwp#d-X&a z-noerNYNQ=nkIw7E1noe+L}5^PLir&Yv-h4^8Z2YIttl3}yD4yta92>~OW68TL!Dm;x{WkFt2ya z79BJJv+-JFBzSQ$lZ3vcvNfqx3WqmURt9RMMMIUv(PSW9=tJnd+{u%MvuJEhI-BP# zt4$X0exgG>k)1!_Fy1UV%@K4K0za3^+ax!e_mUzFU4scZY!-`yyS^o-G?Yz2$dHj> zX>wUCDwpRfE6uwlCo$S{^Co8x3{qCuSph^BI(QpKlCC_17biQ0r!agnbU2O|BTrGH zGw3kD87$GD+P2AtIyiPoi<1Lt>|`z(8zkryt|ko(U_03YD54RgIFAD#u!cwuUU{LoR2YDT0xnYe4hqnSmr-VG(l~6kNG0cOBz8!(AhZsOMcGOy zxU3!}Edg2qK-oOWDU&B}*Ki&HoU6Nwy~DQ6NC;@ka}$&1a48Ty0UAPKKk*lWbJO7V zUN9CuHJ&tr%3wlJg!mXWW$I^P@c9%L4tq=vggFjxma-xad*AZ5DXYMLLC4}?MLLrg ztBm61cx7-6!YHp*dFF{qG1)N;O%9d8rlT3uB1~%T5Y^$xa1*kLa+#AL}~CO@4DdII$D=yRElEQ|&xmBWU}!AqD<6>7xM~!dk8md z>JV`?p7ZpvyoobahO%L$pe!v0=wR{;XMPKA`6O+~%XSVk5XG;4Z=a} 
z&uGpTE`0#T94@b3^IQB5cKRHjR&zKoG0P~yT7<@?(?kKvbCb=s%^q_wt|$~7P?XT( z=F>V*NP(AuG$9FzJz2`^Ves1mqi+L+H57mg(-SOrV4h|2E`xS>2?#4f4x86QzK&wT zijU3Wa2Tv()JZ&B>I7l4QFIOrG#Z=1rJ;%hstRsRrNcUx%B!VLA3_D-$cUqN4sQl+ zI$kG)In|1dM$MU#r?C}D2t=Oz+!9P~|>ELgY_{d8fHKg33f%BXA>6q3Kn1`27g zG=)O^6YEnUoy~?q{Cf)uHK<%Bj3)*Fi9(~Xs7wl%0@e6u9I9rq=u9>TVm5}A76b(X8ZyaW;84w=(ip%SxG}-XTowm3 z0!4(4HKIEhyY6=WMvkZk^vBaT9FeqpoYq3LvxUh z2FgQhh3Ze{qOie?918G-Uy5v`Et4ZMmj?0=i^*UEKIl{?L8CB{!2q8mLYQ1G1G?mb zwsb0s%VptOkzlZw0p5UL<$zy0n*wjpGoX`ftHFRJCY1$~D-(tepo;}UF}WhaKyKI= z0Pk_=fB-I_lgT2WUL+cDq@vNnrof7r1`dPa2>*(hgT4S6P@TpB<1jeLWaN4lAzOn= z=Kvt-PzQa}K{%Kw!MMWK95xjO9fbvur2tS^EL_V{AxOyqm}s;yICNN$v%qvT1~e84 z1|S0TGPq0@ppOYC<}jdZ>VRa(yIZBFNI+Jh!w)ki_zVO?%|(&{x{>JMX83hUhnN99!=NO2 zb176Rl>@&$p$jV56Mkqztw=5nnCqET0u)2*VPL?dA;~~-p)+ZqD##6(hAldvpdx#N zYQk!O4-6RwlLBLo2^gef?jpfh0B<&EONZbBIA#E?3fN!R7`($`vEioiI4K73@ z*qdR4UIU>WmR4*I6Hp9-(KrO46#04}$ZJ3=8n~0e#=jw9_(CjU4TO6@E(_L3947oa zfLO|g;RUrK$-o#)828`?(3!;m$Adox!afrM5F384aF{T%KrBi_Oej>q3q&~xcMxhA5OI)vq_bh;2)qfw3=9WQfKd*j4FrCGH6Q@| zzyO-yO$I#=#C{eGIu1k} z7#}d`IUpazgn`%(7NLP7U_t>fp$Q8?^i%8y1i_F6?htHf$e<7o!0E!SVFIJk2`~gq zXG{RZ=xF4F0iarBFPN7h=tFpg zDmLsDz>h%{h$?_yYKW2SkF=K`0a_zzKASa6k;TBFR7}7-}#o z0HqvI76Lfbiog%kAO%7d2OthsVSx0gRwS1OSOWeJ9tMAc2o#wAL9Kz{50GSnp_vf( zVc>wl!N&k9pmXb>3&09XW8 z5#TLU%^`%mA-KR23V47yVQ@hYTooV^Sp)_Y#4><5NB~g>Pzbdmxd6OiN-!rtjm7|b z(E$4pRE3Q-V6_7Q7A7@V{XjJh*#!K9t6}|zmJbj?*l4EZpr`;4gFz~64FejsRnYVg z3c)%6G*kf{MYVuT;0pi1oJE1@7)%YZR#*$u5B!p#;Ds0$1|RIZgOtKb4T$t~rY(So z0qZkZ*PvM(W+_oM2i94zB?%)IO_mf`WuT5lVrjrE1Epx(Fd$gLAc8G*Sf~hFlWSTw zl7BbVSCE8EPmG%5N>hPJi)`o$Ohkdy6~qyRMOUzxD9X8l?L|S$ z6=V{{AXn&2w32iM?}_4rD_D!oBC|cv8>9eWL<+kDQ-Rk-FVMMYgt>y9VC0EvpnK5( za7DfsA=(ugMFdb+unw#QMQyk$91(zAX|T|QSwmO_$mEC|>>!-5x)4gd&uIth2!gQi7rlT816G8M z9nOFmVa|gUpok1a5Cu+G=mh2oQ4NSNU`CkfV3NW$=tt7pt()u|wr>eL9D5G!kZxJX zAviSjqX>41VFPiL&#Ap@mM<5hj>+yYeLq`Hu=V65{Z;SJ{5oIGVwLu##GA6FtNQiA z9|qIaMi_3TcShE=F(*B$JrZzL-+JBk3pbwa3;q6#d~cIZkbH03<>XM6Q5)z^2bl9| 
zYKg1weLuoXanjCH`m1W{o!0N|_@&uNdX+wtIILfIBwpr0(QX@Fu?1(tMC*#cy_aW9 zUQ={#k4iY*?fqDH{*T`_?;csOs&Ht~#-HOKw$koTuu}4g5S+GqlhbSVa;VqVOW#pG z?D66EYnAP#w@>PGY`Zo0j=X`ZjiO%Df-5fr&DU<4v2ride(4MSLtEc`c=`H$^ShUR zwV7V!tv*C>cvPUg%@k(sqZ6CX&RMqm#9><}J(rAeayebZRkz^lT;GG0Caqg)8Dn!_ zZadiZ>ZhCjW#KzVZ%JLZA#r++Pio4j=fNBD_m44_89(D{>8RnrJ+2e8^jiTj`XgdgHwMq+g$Figh{W;u#*3 z6}G94K9RR%Q7iZb|5_bz%$=;Fq-hSvEZD`d1=OA6GP>(?=BY~T5*uF{cTZ-?vR}?< zoh({<{F^l`k_D-Y_q=T}sC$0&V$!-DUMBXZZvJX{sMVvQ@a;z}|6aHF{+`MW{T?k% zvE4yQ9{p`$IX$h_#Clze{5Aa@6O@D6f;wB8h@Af3eQtpnQMNZ9Do6L01XVZn_V`@x zi1o5aT`d{X*6=iceUfs2KGFM9ueX)xKT}QU)RbJGT^87TBdPpD!}@4}uKp#xM!o); zoZeQK{%b)Sf_hx~izWNb^=ra|t88NS_4mAd zSwu(@A2myA`Wvc^Bl>jO>$SoiAN$`+>Xla4q;^zyRd#yo`q$<}F8OiMzd;5HQ-h1>m*Xygr7L~qBMT=Ppt5`d3(qWlV;!Mm)}&Vr#-y(@*8dSj>)E+ zZ$j^9C5?}oqt)iz@ota1@^Sw%51NX4n%@%6H$>Apzv%W+i_dHb4Y!rtXKi8JCYb$F z?$Yw(=0?@@A2T)6!n?Xh7G~`CZFZGQkd(O^qNcddtMab76Gy5kAu_FY)M6V86G3Wd zYoQsNpJ`!~;JdcSR_AfbQ)2-q)pItBZ*H|T-)3!*|MlcsWB8fT%e7<{GF$j#CZ3NB zjodJSfJbBefz?Iz?iXxR66-vyN>ZMh2z)IX*1t(eulwFukQC_@vYgqH(3w%}IeQ#G zz-6PB%s$8L(78&77BZ`gqeAYUjf_r-sV|0yU>P&9f#1q_oU*E{Jgm#`z_YvNi?w7L z{rQJMmeP+Oy2=gn7o2uUen#bAdXnZ+evAm&>sOtvv_(tCF{pc;nvIRIAnvM*b!lH? zAqaG~FRkT4q~p3_6&-FDBl$xTFNOk}p&jR+8(5bP>#)@;Nm(_qA~Z4@v?*4fMNR0` z{$PBy>qU_6a*oVNcsiSsPkuZTD*d8dXS3RKBM*h(XK(fsV*;rAW_u@?1-cR+7^k*3 z?(X)8ZSY*HtbKxcI2Kr}zUqTnq*OS}t1OjpD%D+6)VMp}FJ`>gTIEYGX+?2}Xrc#b zx!Y^+(U{Vefvs`5)09TH=bn$;4t!hVemEMkx668^t-~5%5Zb%sd}EC6`s7oLMCgt1 zayaz^AKGw27?f^J84=`BJ|<**1;0BZ(1gR_C6I(k*4hS z+?uMA%#3)+DeZf~FcM9)8Hm=pml=zjM&Z{N>DzGA;){KHR1<-jsZ_)IILRloxIjyE zy)Zk?M$%?XFhY#in_k4kO+Pzah~!H&#YfhGct8E{oRJLTF}Gbrj4y3c#r-hTajDn4 zm4e1q%9kdtiZtEVWt~NNoT&xEL>#T-pVeKz2xN(qjH*A4-h3=U?tNV^e3jJrm*9=? 
zCSP3~ER`B}*9U^iK-LC7KwpHUWp{QAD4N_$@(UsP zX??)_G8Qbyy<8i^QX#!ej-MNfp4!f}ql(kh3Tum|CWOC)Z*K;YM=1L2PAlrQB2mY{_<*c+S zSq+pC$^jp+EYY@01Z{ zR^aBkMz|xdrGPo;$v26S+{~A7G7jzLXLVUqfyPw1RN*)XN+e2JJ^=Z>u75YZ*|bSq zy$-}kiK}zIxu7WjTx8k}D;*hr=H1@Ll!UtR9f|n~k&7#mrzAR7)daP#H}N_aZR6_b&8Cu-Bd?je<@mj>J6C(K8w!cK zxy`1bXCqsn)ee4um4hWb%vWmiGZX8;$tL z3sPbt6D4K9RrzPJu!cxj$CYPoehq5pmCHnh0)`Sj@@47})_`0ss77 z$H3Jq2+$_XQ5UFhkP2LmEY`X@-GCVlJyaxgg8ggO6`2B6nqxLl`4|hNI}%}#_o^4c z z#NtZg2}r}2NCUJOL1g}^B-0OQ%6oC_*%pplDgbjRRKq0B;6vb64)zBFJou4;xRrzH zW4m7L0Q|@6-Z)Lt1MpE503TI>3*&Xy$F`zSa>Ih80H6~<;uRl;`|$_;K;9@)qI!r2 z@w%K7_$_||=CU4BTw;-OUBX;K+5s^@n0mB%Czmgg@4emBL|L&q-O41;^_9Xpq?HQR zDnth>;CI~$#ipY!8$j>HM79(3so0r{lzP>T$hG(vk!cj$$1H{M8xqijxWCIv4C{JP zz)rSo+^trfGKmo;y`S6=pWG7BuY@bdc3KP~F_w$6p@kTHKL*zaBvwNCTBR zHQ%mWQGtm3vx)XK$`t~l7x9g1vC1ly=m0Cii*5)5Qv_mD)pw(&I@N=C>VXwDj(?%8 zdpTM>Treg2iF4`QyP+F<%PnIIn*?GVN-R&g0$ho;7q@-zxp_pKd0T>WJ_^PU=ipVXH}lR}i?+2$b-bM0x3B+WPOmc2-_om;W_4l_Tl0xkQt7eo z*yw1Zra+}D29A!^?X8!uKi^{4Un{7|?|NC>eR%Da%kA{02^XS@iFnx$vBcZ#;@Sx1 zKEb|yX$>zsZ>|W!FZqpe7soyPWujK8yYgP`2NN9A6Dyp9b@*#)pXhh(*$}1fy)DBx;;6d!shUyO zwN09{x9%-ia{Q+9^9|HBwarF4F7kv~SVD)Km&xgH^&3$+-f{L83BP90CZ9c(kuj$( zGikjMKj3mxWTRPPUDMN0W23BP2G@U_#=p9*sKe4U1o%n>ysU=qim|!KSanoQCm(P-5DR*q?mru&w!tqxRHGKho=*xZiBIn%E{2~^>X<;+jpxPk1RJti;;!aJHpbU^myTF9%*i^T zZe@`Mh2Q7s{@h7!>iinEosGKO_2R6z%K{xf{}4CTGpLG4kbLr>m$N{} z>)!i@8~LXn7H@<_Rd$*VjK`le+d@5qo?P=Om|+A1Qaj9Rd3}9XnoWqu;dbr2+D2M> zE_{h%ujFSmY7MXS6mUzBj4EF@R+m~ft1w335jgrlWc!;DE86GZ+u6`#zPo1D1*Le1 zH(M+ix*KhhOf%(EqKL(+`?im;j_Eohwp#=GY}HPyT>+hy9xeqPy3TB`s&R z`NBLUsy?tr{d9C%<8HsJua-bRMCmTw)6q^B{KJ0shyGqL?V`V}^9{4wh&s=(*4C@e zXv~UcB5UMDYeYrEbm?`Du+cFQr0cfzmxnB5^ztdA9Di8Mv=7W^iik{VX`;o1tIt3-n$m6qC{X_Ke#FD#_7#1`wjU#t z+7`Fx-qiGY-02I`LSn@%iyC0)JvOqTM;lyxV6#!#$Man;EXIPiP4_NpydqmnCZ2|P z22JU@a6xP41*e|Xfj>pPbJEg-rd~5Wa6xNQ&d)#rE1=kzGseYjLBoGJiaU#)ehdp^ z7NTKMQ&y4~UVp<@r#LoN^YUY6ki~{c9;V$(SMW1U@{v0)DZf`lt}kAn=6P5zK~lGd 
zvlwYgC!%IxNsF@qYS2rx#azK1Sj+njlXH@64*H((34*m3uP6%3R=+N6`3ind>G9R56TvBeExqNp);CVjv#42q z>lftlY1XAfDoaMKBZlT(37L!DU%!pIw)^<^s}H+_sU}cPWE@;Rcl}7Y<;+6Q!*hn5 z*PIuEs#AjN4RLi=9~(uS_T#8_VI%Q8ciFA=<5pfrG3lateAH@YbUrVsZJPD$vMUz_ zSH*%eRJ9&6v&DihYHi2TG^bp23K&VqJ+QD{kgz2=QaRAa$|j+%Ua_HP_7Gc(6LchN z+|E}^>$i-QYkC)q`mwdh2bWKvJh`s<2*saFXEh}HC9j>wQCF92EgpB84b9t7#C>Z$ z&DwZw7Vh!-xNTTEr)x`*=fYecmaR_ z#kXf*tG#{t5SxE;E)Oa0*r?2SI=Z^Fn9G$Yc%3IB6j>Vfs)WL8+X2= zX~!kNQL!IZidi_vL1NaB^D*^0h`HSI(<~fE9BvR;7_Z|uW08+iEKQMO)W$Vf)?mMb z7oB?iUYE!{fWSC`(wO0g75zc+HcU+B3m0SJ4;cX{JYJgd5(Wm~YT9#5{6kMJA_{we z{CPfUWDC~Py!!;=l;@?4#XLeN$id9xQh7`XVD9zSlN2Y$|1sFvz=E{o58GXrAt-uX zeuu05y*e-ylglDdhi}TeE+}0R3$`${g1KPI18j%JyV;nQzs3Ne?U(u&q#^V*usHVB zq^#zIKM76_46JOiF@bp&i2P}BM@il=Flu)swt8QA34*TU15=V&8D@4<`zC1Eqv(yy z;P(3A9W*2(cZOhx)t$tnO40qbjgB|940orvapiS1jG41ALJW$AEXLy|Zc0C?_o8bT zktXLY_h9R!ra{{vof)U2tsqwElrZ~dS(mPbKxZ|?wgL>+KifL^=)>ab3Z~v(t=B8e)`pixNe<2P zCSK|-yXE7eQxcrgGi!t#Z++`&Wx=9bzv$)6+LU>8xYwjmmz<`w33(q&uDt1A-(RV> zQRcx%n+>O|`g-4$eV8NtI=Wkr=xa$`q_B`}e|SyQReY-7%dco(e$I7j z&vlpRCTCj}8--!x%jNbz%~P?vLX}E6ymPHiJw{{D-#v3Q_EcKljj`LM-1WH>=Q}4Z;MJ?<}+%WJ2U-8gCFL z5QYj?{4o;(koC{GkWA(m4uDl*8F5`GZder1YeJ0Mq zEj{7Mth~0eXL8Cpv!2Q2&*25OJ*X!d%l2gl5uMN5rYYBi6;C%PSLmnNE@Th0NcI!t zK5f0N96a`-L;G!kcf^$b59#&Ef!%&vZHJ9lP}u+U>AX96!>966cJ4nsR)5 zXGPv{LY2jmN)c5^(G(O4pFXwD%Nq_SJi?*8&~6*W)^@l2Ksk+!=C ziG}4VbosAmLI}D49WKMgdJg?d4$T9$`w zdp@-IzYT2~9h)CkEU#3Z!hQck`GRI+IYx|;HIK1b6cvZzVUV)tHqFOt{11OLYCPpa1Ugb z*By4*v-MqEe(|cL;h|b*ozI#VDKVX@Uw)HmyLjv_c&yEDuiCq0{|_OTlSbYi;$vl4 z#{5~ZNG*RyPR&?V^^U4g*KZiEr480&C9L_k&c3zg3{4y!^452exIJW=qs)kiqoHZr zwtVNyzqOkF>V%_Zb#8O|?M>^MuEyP3KfA#*xRLsD-le7{DNR$(1ZbO*wR<{kGRMHe zI5rY$+p1h|2Rly5KM`^_CNh84VaoZ)4ril$o47iyeX+LGsv%t#OQ?`!ran&bu_&s^ zqJ~9uXN~;0B;Urg$m?<#5pp*vvbbp(dSxDqiS*_NWU=+2L2~zs{9jiUb);Ujb&IR( zzEP6GF&6Z`X;`nA(CJ%kZEwKTt3U3zt|al{T%2ugdQMjweLX8MJp>Y=9SI7~kojwS z+n#nhWUp3rb~gJQ@Jw@Fn!g|`*}e}*nK^;}veOEQxVjV_AnoNx)GoApJ-&eWb{{o>o58z!o54a 
zaIX2%gWEWvn}40mA1BQ!N_@WlRDKqjiEh?GS>H2?hyNrBV_>f-w2X%XCM5}G)t``ixo@1tRgNv|i7iC#aHGnTY$B6Hc!n|7FUygngR{(koMa|Y*81zS%r$=1Hx76Xf^yJEu{; zIO>KmPAsP-Ksws;*9{6-7^pcR3kN889k`NIuYH^71MKAR{l+}f_AP57w zDzdX_ai_dy@{nX*PlFk#uugX^oWm&I|EX)>2JO8dwJmDNMM-6{A5k~8D2aUh%sKzK z$C>K$7bz1@jv%hgr0outNc-+hCIxYlx{&Z(wu&1KDYj{GlOUZr!T4c4R)92&4J4G8 zt@7>Nhr53vcr3De??y+c2lz$yKc!xlwT zk0XJ~-esyvKv=@!cg>4HP1B!DLv5FgT|o#Ob-u-nN-uV|#i+0_=-uww1q`T6eK%!P;wWP?cX~qJ`DSvXxqx>zTqj5umgcU!T zbjP(Hk;tTp9K}AdJ{Rh=413Zb67lwMZdgZY&vhOIJna4Z*PREqk?=uhb-Y+Bz68PZ z4e$^x0f{6XC}&P!eFPW4qz84cP%DR>pBsPZ zURq*;WJpUzasniN#{v(=d}OlwOj7K^AUF#pu{hfSwNd?U5Zv&nO*Hf(u}tD|79401 zVQ&!@Wo84e$AgQQS!alp$~a>wN+F9z-0OuCK8dU<4*7mi2IY{Q4@`vQvgxX4g5JdB zGglx(Y&Cx~B$~5#-A4xrq>bM{8-aqqg>q+<9RE~{RiOBBNFaBOQb$?mt7qH+%W10L zPgH&AA3$tKYYW;|H#;ssw(mArWcc&*M$bB(JzjN%jZiO%?wYG-aK9z-aKK_iUya>u zk(ZToYLb7|KQZUZy*a!i?m^?%ohZI?ue+sbONxH~k9O{-ma0E0l6cVme36HK!TVOl z@#l&kzrS7L9iiNBup=@%roLjfb>QI8j5o$`~ ze_pb9kIJv8N_EfCkO4~lnEgj?dFn5*)>8eJqBMR&I8^10pSrW*v*MqF)fNmb`K)-4 z{Ng=gF`pHWpofeYxq>Vvgyd58ikPXW92FOG2Ko{ehr4|y0xG7|d?vylNVcnR((m#o zgv$Km4*qx5vE~GL;p7!I79qR; zY395;qKf$i+bk)zf?e__WecBXTIbb?(|EqQ!9}C(2g(b83!OD5JVjKlLXQv;ljX2+ z_*G1Zf6C=f;Xe|EAQk=h6#7$rV2Az-s}TR8ZU0mhp@WD&baVJK?E6n{wwpc}6HtC6 zqHqu<04uQPHwY7ep`j7?{(=eduW*^OKVmRE{|8Zs0g(So3Vnq>y9Sw-AcxR@V%x6} zWhn8N9{T&X{TadzuvMZ2g*S~5CqJJ)xHD6&y9H%-^PM-S*pxtQliC0D znu;AwLn_5#=l+!t+fezgBloV&yAuO3PgK6Zz63BYQ~30nbzY1(L1$?kiC6(R-axSx zRRE4P!c!b_gI5a60mqmch=t{L(?!IH%im{$#-B)*-{S&%@uDtC5&vaT2od*xMIoEd zZ2Noqd}&&uucx=kmM?LapRil;m)LBuC6$l;m)rUQ_;9Pt4)UzD2=;C%pF>~N@m=xxkJobly>gf z`pwhEuGxy;cfzMs-eJ>8N zH*PzB)!f=}?Ba6yWtRLNr>OEZMNM<(2%$a4(fa^23G#u2yTB%1TP>xP3| zH`0Ji+SA769(&Po+udLs(q_6A5v6p^*{_|->q zW29+Xlf}o=ThI-@OSAUljK}d)i2tgM75HtLHogOAahyz@jE+5u?oh=#;)KOyM|>~O zvE(GW`!{wX=AL+uygkQrA#bvM|2UHuynF=Frn&oiwozf{GJ8lJw9r0g!UeUe%a5*; zm*L8&$bj`;_!^y~tzyXCRm$2ebbReCGa4sC1oAlNP%k)GT6k(gJLun7v%=8%ILfwU zo`7vgNC#{vV%%wGLgq-coDV>gw6rDD95pNOwZ2sH zHs0HAxQVHp`GI7o^#u3a^t2>1Av6l@BBI2?U*D6142J#}84Ti!6b64mH(Zf3!I_LQ 
zGLvBmnT#hNJz`Pu{kk0`IX0Rnp+v@=w+VJfZQ$zOG8vSjn0DwC zx*2zD5lTMXQ$^d#c1H!UM`?0p2_!W#a$F8hDL(B;^!3DSh27kNHgLn~vo_;L{t7#$ zv+@46%%k#L{PGsA^GrL0k^>@IKZ06>UbxXH)dJVykT9ao)XiR9T8=i~WgabF58J&U zK?lCSW>7&4bWL(g@Q1y3Vbg+6MY#6(c|Y|Yi1ql&V%RLdr`q*m58@i{<^{Xc(5gx@ zWPw3oYdY$Dh~Fgf%AkD zh%;#~n<&IS{k@swS^I%9%-kdvvV~7}lPX$FAcg**!_Q2p!?rPh02}JqIQ*I{?rU@x z4IaVR6{kKKG@fW4<->=&;6InE=*7xEldt(8%_4}8QpaX3#FGt!OjS2fC zY6e5uDIp!-kprcF$Us_?P9xD^ZUSk@X@_2?wqk_T8G2_ekx73-{Kk`Iei$lPvjUTe zZ%;U8n+V_{naI;!-#x$_FvduyC3Tr#-U~w^lOeSDn!_reF!KJj4t#62Cfs82qgMxs*u=$R~`JOyy-MHtT{IJVU#aA zv~?|{6hs(Ed=6JM!I-bUy*GA_um5QgG|s>xAn(opBqWT zu1q1N{xA#j;GB0WQ5(Wd=Mx%)p7Kf{(Tp-(GLJ0c2N9?uQI<#=VKb7Jf?zi7P^oyk z9|{nQvk7v1gHFop!v`cD->?Mh^0ukK+hMs4T$aneQUEvK0*TE>QF`LODoRfrU-SX2 zmOT4h@l2H5ZSMNmekZOjpucg}p*JY&A%n6W(VLG#k}F4g$f+l;RW&6UV*~fpN|?R6 z*{VGKkh%M~0{9^{cMP`?*VE-BsMZ)g{zUA{fi+? zpqTXcLdT#EBxuz!?wrh*-WS|^SUrmm$oMs#k79YHel(TRuuoCx51(`HZbtvOXFta) zRtJ<>U353sYsoj#E1UO@IMw0R^S(=(Nb7rlH}|Pt|Jtg%G`N@YAUXfT-J*{r@0q>| z{fy4ZUF{!gyH7o;j0<~^l^Ikd;eA_@aAl`%@$>1@6x38_DD*u~OAg+_mP)y~bARPn z{WsgCXz6hJxpLl}=HaTie15^J?J73j!^Ynfl~28T6ZSwV$J)QyPOZiOP2tz7i1&%V|6j@!w!Vh`9eX7b*Hp5iu#ENR){4A{F4U zE-C8b0a6j)kO8girejqL72kp)#Aj-ApNlj-5>YY8ETHK;d-p+R0Zj+``VHd&bM221 z35IO@TPA{QHwlpvB!P9E!>rwz_&*zIB2hj!E%6`P7MoV3&h&tU zQdDc8_pldwg_Uv9m+m=O1Z_GZ91eej;C<6W=9shrMcM55Oxd@QicDpPz9)`{|5xU9a$rbJ(}AZTZC!=PTq9=S1NV zX9AsZo(E@~tyh|$4{`14c5{JkpwZ21q$a?3wQP-;IkJ_^wteJ&;LV?G4478+Bft{M z70LA@@zZO$1AMTmHz5`dPc|33qn}|NTa0cuz%N`q??`mk`Mjq5&uDuww04OAihX8# zPhJJZldp&3Z*ZkVIq1W-p3{vLbhND(kbPisM4P7FL&ilo+zhwj!_h7w-xF;+E@r|e zERarK&O@Yc7Ed;Tqs5(4Xy@w5-7gP07r-HB@{Dtv@Qkz5{V%o$3AB&*#r|NN_#*s8 zuL#)tg3n*|MUtP{D#ov^=BdD)g(w&t`SQsn5q3owk~leSXTBFJW@JW5U-RbFQr$CQJblTY+pLXWfX2M3w@l$B?sJZ3jPx-L(sFyPm?W(NB zdxui(uIS0w^M;0&sWxlk_n>XnK6!RBbN!K+Dus}`bmr8hEQ;|_XhkuaHh z9zKEvn>F0pfM$`j%M7{E=n!fw-j!N=5F88jUZdgmh3D;i@D-zDYu_7e#uCU}gS{0) zyx$Vb`z2h0xLAB*1arBtglwOrr35EW-4AA#Bii`o=WsuT*|CwP=nGO{0Vgh)!2SNk zxwg=Si-8s#07VZ{Z#ZW!k|g0KCPxNyzkeNVOM>!<+qvQ}q~MQEgs`lvW 
zW6hh;9;48Cq5Jv0WU(Bs$G5q$cfcUM@4J&DQT{Kx`n^Z-M<(kw=8~htFM! zL+#XqKS6MB<8lH zSZ_=L?8sGN!9d5W3)Lt^Df%#J$3Hp8aPfYbov^7)YZ`txNIOAhBnO@ zVxt?|(P!&I0{Vr+%l#mstAaMxg(5wU#*ltqcC_X33C2;_zpMQbZ{N+t+jna}puM`- zH((Vj?xN^JM_~7~qTQ_p;tkrqGsK6MPoYE0dJks8WrmTM?x6C?NB8;nEEhZ8VAOY7`Qpl_839&?zAN#OyQ7S`z--qO}(egzu42-V-b^|;}zb+)H!XRoWRqQ za(gGCYS=gL&OPNlEAA=p=X4GmKZ8*D^_8Ulsh;@-Klbe0FC-#_RE77H3pt&;H2VAm9CpZ<-<>Ka~2+Qon~4F;Tw9Ew}YpB6fo zz?@;zq|e$OdJ=jXdKnfm6MA~|na982Hox%HXAr4t7IrwO*c)I+pC)oL22}c7k~zj2 zfx(trSot5p>o3gDA^v}0evnYGKl8tjv44p9!H_5Zi!nV&zc1ALjpW2{VEexYnsAqv zNN?-gElc`7I)9Yp%}P>V8(qTbJO1jRnP!1r;~{f6V!ZID@W*<(L9jDNm|H#OWcLYr zY&MuzFR$kiC)ym!tw!6YYLFkTb{3w$({_o0j|`aiIc}spP8C;k#t+q04LOJMT}KBV zQYp^4OUh+}uZbQz3+y(E#W)|jbYWq8C(0G5R)o+{uXA}o{<8Q0+*IK~$G?4QLTv%b{jGRLZMvDh={Yz^U_VRy& zg$~otG4xNQCVtoWg&+8X5rJiYmi>H?0vK2ZRz3q@6hZxJ;B^H1!HE2V+h;%~uowRx z1b*Yp-}(~HIsP^Nf7|T;Zafmd6CnQO@wl@vnKnwR`GxmWnVhlxn^N~SPd5Ga<TcHXh?pbpLj~Uem^w{+>l?J_?Uy z#m@)52@33M{}3?lmQQ+<(}+B8e(>AJO4X62x^ePBA9qKtPM)u)O~9A%UHD^&x8*Z} z66N==FPeA9ak%O_`iO|qG5VM6Rcx5U#^Z8A#cmBpaKd^mX*` zl*rs*{usOI%-!;d@`dX~sR= z#J@0x1{r;@ba;UNl_<#ngVZL6`ER)V4sicIZmBt=7_YbOX-Sm-2F~a@8f0Zum#=o3 z-L=x{oMBm$AZ;~0yW@Ozf_%NzMf(7O1tGnsjrq~0cS4X`Kq}Qz@M~CCyIx?Q-%0uU z{1;=~lH=QAt6ALlA59HX^V`}xl%sR{w~YRFgHry6+rKn52K=Pe(<+g;&~~*jC`L!4 zOm0M~mQ)IV=YIG`g8nuyDVmwm{-=%e?yQqg#TD}l?7UQL5{Hf7C@Pt~M8v{!8c#$#z=RM{3%UP2E?+Q_ zqHp|KQHXzAp)W-FD}4ws7xh1~?bnC`-w4C*{9_Ld=3-+%m>ArDB;wK_OaK%3NZTMx z0CPxV55ffUvfcDSOb7&`BisQme4QKs!@n}@SK5{M8=zGj4Ew)U5o`EIc7ku@iVPV3 z?9UAKr(6b@{EoHX@0~N)gX#*X?V5{Jg&{(V$+FdQW9Lgf!pEoMwsJPy*X@4aSyX*KHlpfdr&dT~Nb1>{ z6OLLGbQ=(As*P5*kDR2&Ur)|(+#r{3-0X8x1#Wj(wW}`DuMMiIQ9bA0u`!Ga30zzL zkBjyX-*ed@V^vb!ZmpJWXUz{wFeji7R+Y>za)QV0-1e&3OZMCEzN}gK^AMltHvGXV zUha;ZwIf6ytdcFAwF7;y%KXeUOU_Utv2NE}-@u{hs>eiYYr~?I=&A=(wlps7Vq}?j zpC9~bq;*B&{LSd9N0IpX4f7$5Tepk6>H*IzfeI~hw1Gu;Oe9klK3F9?=z~>FJ3f7| zs@ME+id@U7?&iag-p!Y5SU-jzkl9g8BRq0C8mBa%KZvAx5B{~Add?YVA-L|`V5yi<*L34?7RR%wEGyRE?R8{Jef=$> 
z5S|_}PEXFJjr;Xz^IpTOz%2b09SicS`XU!+$ZB;wakzHT|LkrTtNzrItY7Y>&eu$yzP5K_T^+MFiZ3m0A`I}WW=U{H1EHv`8|7_auYHq8Ul|YG z+GavNxIt0-$=Q>Lb;`J4kb7JbzAy4Hw|$rT;e?Y9=f{t3uzam(u55I>F~)=Z`%1vM z=2^Pxc@LZs-m(~Fgh7Zx$XBvY?`|@MBrmY7gKH1a;zku*+VC7VrEF0YhGkifwyy;QBMT^YL*Dq(zIPK4wA&Hfr&mJXSq;e2V9;DNqR1xB+)ncMmo6&>Cg_NRAyM3a?maGm29E+-T|-2|0> zdm-t3*2C3tY^0pURl^4e2sc(BCc%)p^>9@}e+CKOq&wnmb@i+cj)N~$S%sH@8Gqs4 z=zGPpzHUfNxrgp$+*dP$wB3C*ZZe#?uRS^&4$zDKy<`R7a`OGQNf!Q85D{eQS7Dk_ z7aAP~R~@vEp?sY7F+n6`4MXkF=ct7L>Ni7@UBuw^jJuF$T6`r0{=f}9P;ycD?>S{1 z0uqQI!o#^1S0CnfD|IBkTY~;-j_BWq?Q5nN!Q1<08O$TRX+_y3k5)*uiMkSYd)o3- z(W}f*0FJ18QkPR5a7_UC#H~d&uXUs$Sruw`IFW!0ML=2SYc$i&b6getDWKY;r#j%G z$$$jiAxM4n^(FW>0!i_1>o*OS6@IK&nC$&-IJa1{;&}|;LqlO zm*7tp4M@}E3E4i8XaXF(49Rp5H|c%p|0C|J1FG7Zz6BHz z1Su(zkQV7iI;26mLqHG^X^`B6lpsilgwje$2}lV@Nl8mcNDGQ|9{Bb?fS2dSbG`Su z_x=9ai?i3PS+izl?XwT_n_0miFDNt*ehLK>n+T5VHoFIRmm_`I%a`c9nUx)M|HWHy zs@uRY_YdO$a8GOBaE6qZx+MmNoKN)Shz0j9xY}^4bCltup!h8~;cN`)xnAD#>Z9q8 zGzhQV2ql2N! zA9BLQocE}QyA5j>dkd&mxJBYej=UfWOkn{v{uS;muFi{a>$bxu=7syF_*1x_9PP9C zMM9_xry4Ljc0e%x3w|s%az(y*k)N4HDF3Cs&Hl8DT9PlN&?`{!P=FWY52a~&^1+{G>wy;7YHfnsy&z_2e~)^_jD-fh_uyJ z1Xcq*-bi+Kez=t@GZ_0mc{+r@05h9hsO*8T{ixUb{`>o1roxwYcKVwsht}3|!X%64 z<}}E{Ovr3&cOOu>qWeN$8C#B1y&%ug(*#{z1)dM6pdc>+fIwo$Q?N4PcG=arjhVDn zaZ;#*vU?=ty_yHCaMU)hHuNw^vge=`m@IRx_5AjPKeO%uRUdQ8O83pCujIu!O$5YPRiVPibq4^n%}1)uH*smXg&|AY-d zhw_i`p+jL6_&eeN|IV_0YE@XhX(Yh^iI)FZ9Pl^X52OB>wIF%_X>kF#(OO7;2yyIS zeJC$LAjs=eKNt-|nlw=g5E4kAa`I=+Y%HD@F;^lw?LYQ-U|nHp(}? 
z`p52mw|?|kDAtJbC6a`Ck>c@>=+~)WlE1l0a>5HZVRkeM8vMi@J`4Hmp2`{~$G&LP zRM@1Dd}evG^`O8qSOCh*C(w0 z$K1i}jFUX)nN$6^O%c4yzp(WDL2h3*5D>7N~D0 zn5ri`Fw$Kr_9zEpHdmIL{D7}}gZ4yV1-x?bj0~_$6cegByum881y9QWt4Qf$^@KnT zxzQ9N_GO5WJpD3Q?5-R^fM-v=#ChrY7p5U0Xa3;~8Na7QBA1(>-izxqJBmsU;DZi1# z=*_>S6X3s_=Qoo4iFH^%{Y7g6|BN|*>sWs-`8x;DuYez(_zyYOUz7xZ`}Uu^I9Bh; z9{On)|0zj;f5EXtXETDSD95TW$@k z2no^y@BtttV*Ko>0T5@lj*|(FIh!P$OmNJ(h2vy`KavEi_k{nk$iI=~7g0_q{~byG zKqtVdHHye>9VzAiP!a&Li?HeNDKQ`;{)wttKm9fOKXww}l+k}3{Y}|K%O=i8=7=S2=d8 zU%eszIa~M_`QQ8=|1tkhysQ3JlFYySrF`<P1d&%#8)&H0GpMUSA z^VB;h@H@W={)R#V7k=$WKlzj3^!v|Gy)gZr691a-k!PNwhrj>)E#LcRoCNy*JN3%` z8%cicum6j?2JYx(J&Qo8HPqc_-OnN-Anq#g06faJjWIsg9z1SpRP@YIocg>w7y>=w zDs7){wA8faY98*C#rhXytb)GxHur^?sxfn2#Td4 zI`>38W@a+JxOhX|YyC>_u6t|O1|ui=n^9R~RFJ8qhdi=)tlPrv3*uwM7fKq@ohS0@ z3@fSe&fe5;U%UZAMKBabh?sW3n?Q#v7$MwZ_xhw!j$w^~ycow@@A1Hrg>dmYD)(oQ z;9ddIw|6?ebsssf*wX3@a|vn}5;zG%o}(Berdg6&M#+ZcyGVUQ1+U|q4$S`fEIk7?hUD+)7dsm|%;!Zqr@K8@u{J2c2EiSL zWwo;qaHHCh7=(+=PmB!Keg$C(t9h(8ii5dn*5j`=z##wLz&+1Qx91JkvE~pGLv!2% zKuVjk_Auk^ss6B2Cd&-1X3%%WyPI>_GG8GiUyE4Y&;j{dfaiB?klq$zjW?QsPIYq- zJmc)m?A*CkV&;bGk05?Sr7h#_e8SxB00;$Gt6H{U=1TIdBt4V^<&B_uAJSO|^VEYt z2j8}m{UM--r-3;ih@!wCEpR4Q1VI)7q;uy(B58^#Q0BbNOq+`NBR{NB0|S=n5V+#K zp467;+{7-F#GmBbY@2Ql!87jW5fXv}X^P(Q%f1BbUP1_v8VGFxLN(yfj0sjS(HI)A z0m2p_Bo<}CQ*}T&;CcMLyxEL45VT^D1q610Yv!AV zOQ$>TeuS~G7KY;^2Fbvqt3u6`a{E1vIwgg{T>l67jS$3R1-KVE3#Gr$P4I?=Ly#8U zBQ(b_O@JFKg!q86>_Nl_m?8^eJmv&ItO$6wjB4?1V#KySoO!9O0;D?k4mimg>1V)GsL40AU}4+x8g{n`9g#Q)re_DiK@um|h-MHk5DrIYgo_JZ1H;?m5snbN0h|wv>mOfnU`t>M zF^)8Tac0oFd6ZrVsc5Oa);Lf~XePAN_J` z&I82loJ@Zt#B@~`#5~n33F;BIehD5h7DQ#K`{{_#RfXR42mk^u2LgbALV-XO9BH5v zLV*033qpWMtv-MwKzJYo2#z!y0U|@yJp~~^en=1c4UTl?7z&60`Hv+>93eoi29baW z5KzZ(0LazT01)y20Du6#M+gvK2mu1pLkN%)Y2XNulMiqN$cbL72|xr0bwqn?G7I<^ z*@3UXbt)E`F@703B8w`#9u5G3@E_qOr|}=)91#BF*!)z$kuMwpK*GhtAOHv`E(8Dp z1%&`0;3p7!adLGB!~Z`tpC9A|qCXbk=Zqa|Cbg^rgg*RAB?nB9w$}ZjN{}ZS^kjux zD;F?;`a0Cd*A3}hD0_4U%3+m>a3s9$yn9p-RarT!e$pr94&hDy%IsTw) 
z@Iu>ATrmR#9i3R{Dx{#GKJc)|6JfO4!6WIwEp%>UPXeNO=w8P=TI%txw36@Q2egs1P}?35`} zPs4UA#8=~xjopwdKmdsIQ0SUuzAK0wt-48B8uN4)Yjsl#Ldp3vH zzfO%$r)^s94u28tGb{29mvLfIZpFNLZ3xWczY?PQCwHV|0jIN>5m&rlm`EC48^VqQ zhfn?Z^SB30{XQb%6^uNg1e14<{tWHDEn)q98@Y8lM(f;*fj#do( zhpYb;I|xGvl6>xc{vm|#DW=-<1|z%v#Z9S)A?Dp6pl($M#=~j2F+_EoCzkcUY56zWF{%kZlP|i@ ze+YmNPnwL}DJkp5HwO$d^nO)k(V`xtrA30e+u+TIryTVCB-m847AEIsn+5FNE zQL{0gj6de~`)ojj?rFNe#RseRw77pw9N?7LKfA2dj|!1F(ap))|CERo)N^|_ zaKwt80j`D9%P)rO__WskF42+Qe%~O#UotFQ)BLsH6~b~Zi+?Y3#o zZ&Y$G8trz|3oO$_cw?e{?O}Z0X}5u|tQb%`;RCUIV*X>msc}d1oSyC%z95-MW(Fk3 zasThLF>*cKeZYPx?zJHQX?`~%DSLoNTY zl7atX^uV2izAD*^wNKD?jJFkw+43t6a$@2RDHzgVk%#rx8{<-b?y%ficJ|k?$c2M^ zZ-nf~cbB)qN}4aOeP-nrd>m3si)38=u76b(NsF6hIN)$+rors^M|(}oaDm6+?g797 z0+5k>!`Yw)3Hve#K=!VaB06aTKfX0gzi{E^`;$gEoe0EKB2lv30pYZ0f``T2T^X|bY z+%bNb`D^cK!@_-GB-<9l78VfRQhXs~5$35b2-Zzeyi8cTs?++G=YSe% zYQzk-T*4Z<81S5eVXD@|LLA-Lpmq{~(QzCWY$zN(9hrgcHYl}iK+C)~v!HU!s+aP< zm@zeY@Z%h9SRlV1_PJcbZkNsV94sY$m&;v+fq_iUz@j4K)ig2iCo1=cQ=RN+glyq`pvUBh-nO-K5E{)oKp0fm*QmGi>riZVHZrc zF5Pbc3HY2sZOF!m@tWLf8XiBeyhW(^>KYhrrPXW6;dHja@y{^Z3{B1PA?DnU(0N$oV)!kok@5xfZJE$VPMA=uu?lnT(ff& zI~@m4cI0yaF!P0=Wc#kqnmGsW62qQArQ5HEc7X>dP7N`2K*icS>+DG>q1c7}aTHe% zbISGR>f4fm66>=u9N_8vN8#GU}~Xq*0CG82G5Wlv(17c%r}_Sz?Cmv z0_(TWO>8`}gksySjnz=KfTk1%NF9q;xS{ozHuirhPa#`s`~(G`t!yixs59k#4eFTd zrr`N+@8BWccM)sDV06$39vwWnx?HiX1+EoBA=x)VLc#*y81&YfFWx#u=h6X2-8A9h zw&Uy7craNRM;0EplU+}n0^?TKwVI(g^>uh`^7z`_brlR$fvo3}pin&wig{AQW5dVS zIam5fFw8{_xK;+kd9*viYeIlo-~TybkwU>Qx9rB6LB}|m{z!<A{62Q?1f`+^YVz?_T{6k2YeEdaKiM=J#V;ACk73t%bslMl0tr^I%( z&W5CkFWAFo0qd8HLkz{O&%%|t27?nS?}8!7LX*gsWwbzdWeFHa@y?cqjCmXZHu14}?=!g(2Qn_&R`%NG1wr={zM z0lS}Gh6@eAT{EZ;iq~I{-*cPJDxu~FBg$ljCSc;fpiygbih;+w2mvT|Obu3x=$E|S z?M9pqLRHEa^M-NLEIWTiMG->+Rus@XYcn)i)nvVFHUE{VHjrA#?~tR=WFB_7We*z^ zDiJt%eL#GOTbK?5;6FV*VY=5` zwYNS(3A<%9)MhfLd=rLkYAp!Ai^de!g*T$w8A#X~d|0+8jqeln)BPYBdAbH)oNb0L z(T@iFhj}!nsqTVd(~rhP`!oSajufwcx*wz#&Q$vqpTEln_)|VcuK%Vuz>j7BbIT&n 
z{Gr}sW&g2ufFIWQH?{o7N(R7X{}p-vo4BUVKP(kreC6sbXck-S1plg9_-q!?kR+Qm3(gOG3(*k$76DjKO*hE2hE1!c8JN)H#^Ln34lgpbe_oklm%o;Qw#MXYP;HswaTpn3F46U8#JKU`u z2X?m0_V?{|g${UMhdI4p>+;y`bU#@0IM@=hG4_O|pw~WGpXx1pokO>GsLia#BtT4V z@bzM?umKsti;#A=y_Iu&Q~tFEwUy;>&K;7O2x2nE*6pX_mQEp*N&xIE99(FEf@t;* zc6V-Dp*={P(UA=!DZ{Q=E{H2eP`X}o{`%}Qx>u=hQHJcM#anK346`CiouA$Z$R)VQ zBru0Q6>DoE;T>##_u23QWq@HMy;ASn?mUxApp-04!w|* zkT1ol9~>zmaINi?vFS`y_aep>@iWF0m{ZH?pT480e>3ma${qjdi;H|R-ee*VEk0eZ|CFz0x>%`1 z@O^9K#a^xh)Q_lmkrO2OWHbDP4#p2mO<^xwfI0K0y_7)uR4oY4USf z&g)&#@?9RT$Zd}1fQ-k>43B$ayJ25@i@y^X?&L7FG1o3FN}3Jnxtf02t0@~*=uM33FUG>1^S~YL)qZwuyofB7x-hE{(U_>U3^7a0ck-nO#QX0G5 z)+CthG}BZ?AFaVXM4J$H>I9hZw zKSh;4L1y+W?)w|9Zms#>=O?q9)3V#-*f{%XjO%XAY5|u8dsR~IFU!kQKM3htK z3!%(PP)I?IoSop>J^z?rLdVQ7&{On^U$v0YD1Jgn+8W|STYZa{hL;y21wzpUZClnE zzpYo-Ns{nu*cQ?G5dE}=m2PziD|^XjoKt9iU$pL(_mT9zB#EVjUsjY98<*KR?qkAr?c`p-8WhECA zb1XukpUeDJ;1|A%&e##*LcAsCT$07@hfB8^% zz|FvZ$YGMRAw3oaGgqvJ?$(zlqY8dA0lP%wZU+T-ZnaK+UlkHy&7Zz|LtOfm-0=2F zt7wz~!aR)PW#kgQXw;-45#KC@#h5Aegn%41kC5n>7Z=&$Yw4#u#rh~YU`Qy&FKi|$ zi+DIOo6>;0e9xY-ztl8%n8ayuR)hj!#sSjU4zXA*x?jZ1D&<;}o(<8k zN+hDWe3X7IUfiC&iGltWl_vV(9fYb{vX6nULgs;3V2K0kQPaWdXoMi{;Kyb;y0G*y zy3y$NeW!JdjdMNML;Bdxmfk#^6FOKQ1gc!*&lpC3Hnkz&x&FMosI}UqHH#qZeqdk) z`C5}qf8{({n#^ux!%!zXLX)~#n`gKRaZMU);EaX)Uaa7(PZq)P7$opp@oRA-)M z20WE}h6_jw_w@CW?eJ49OuC7`vO;HVOfwr(r+=3!vB1z^mn@xLAew{nJhQz#vBgAl zY3P-FTTjVyw9Li6j7sf|=RBeg2C~=7`1{i_GNYfmdN{Xu^UdNEIVl2-oTg(k0}DVj zkwFBjy=P>is!>9|N;T?x=_NkeY2v6Ee@q(IN**95jIrfnBaHPPJ1^B|v~dJ)zV)EV z?7hU%r{0XbD@qCrE&I>jYQIe8mE&U*vd<=Iq#KJblWmd(JXno|S=Ghnl#B?K-aZiK zwy7t4o`KFnj(eeH^v*RC(-_ql6#`4UeR{8(XfMyT)6gtu!7ioAW-9kx5r|BUcge5N zZJN>tyo;$mI7sh)%{nV%_KIwN9 zZYG)9B_bC^w0?N)7EwX$0Ze9KDFm-Y?dJKtAo{`~vAu+h_9;HeOqD&I42Lh+t(zGOA+bgM=-v(z-|YEOt%a;9Jv8O}Lt1B1Bi-#;5H@A{H6E zI)8JO3OldrdvFV<{IkiIiYU7Yune}kCtritd`2j4UkEZF6fwN~g1h)YlTPXb>1v^S z($j$ZfD%%Wo>TxaYDNjJuCyFlY+#W%MvuG%EccW8oAYl=>>i0(w+VhqR2%9*x~D!w zO^2-fPAO4LIQr&DbJP&)piV)twIUHC2Rpu4zh;L3iRBySfm-gN_HWIEYRLn$pGak* 
z(eeFW(_&(o%1)Nxy3*nv(wVVBGzE59qelM8%qkfU=Tx0jiGW2=}R);;NS>FN2x zb@@j~{R8omj&8<{0cM{IIPQ1l;B5qxg@`;*iJa|ez|9cl%WiWQinTC&7 zd(#;1;D7U1%X_l;kS(5}u?`boQrady`3-x6d6N0X7cb41?{Fg%jNRJqX=48tq8JRD z39jA$4(wV#d@*w_5#f`nS>f%`0PC9EQ z!S9NG8zIa7fsP@HGV|O{{sr*M;$yB@NaS^MRGDzfo3@_%77v&781{yXXm1LrF^nRA zuh%k7CqR77__+4KoRO(vMe~l|cb&AUYbmqdX~o0t0`N+kKK9seiWz;{pRNhplw21^uE#PiE{{kNT4Ijx<~4T{&XC=PUZN z)Kro6mocjKw%C}L%euIwOl}ZLeWp`jH_x*RGtw9J;9O%9Wtj_~3hO8NR-v5R;%D-v zR{R=H10b3@*%7Lw&xrN*#{Nv(-b9S0;Kw(bjN4{JJm<~U>Au`8$Yrj<9df;rAW))Y zfQ0#~QaO$rc^-4b%LT)cYQu(yd2=d5pVZ|7&+KEH_vfv{WuL7z$kgNEbSok?;444R z=SuWY#omt?zHH*6YMG8+tQ3B)k$^=&qiEnMyQ(YjK8nS7!~9A$byua<`6l|zbiX?k zOG2}%=Ry&0(TtyWL6TsjA$dQ`mG|CSI-F0*llD;w>lb{#)?3|#6rC@ck#)a6B6^(D zWUVv3@FHX)hS#{^Nm!wr{+%ei`)8iX3FC=S<0=iuD12A;JaFa1tvTb{R5ZIVACiaD z^l$)&qzzDCIU9K|NsOpr^*oY1`DD~NW_!?lf569*unyc_A4@OQztqWaq296 z8ncl;FIP$=KpczCdnYmU^`()DVSmL(4|1FczdtTYW|#j|p;ud|<{}F#9Ip@i%KmU& zi~k9i$rm*jCw62840dEOvkYeT!%fL|xMXQ&H#5ii+0<^i(o7>;7(WK#6;?F@r3>Oa zN!cAu*#HGU$N1nGeTALwGc}H`;jZud`s2Dj%zWDOXVrUDZ{5=s8@QX`%RR}E4MW+z zq^dfe_GWTC)uW>R_7iRNtPYVhiK=%W1q1~oi>PgrQHk>Hv$NvOR9`Aha7|LU2VFiK z`dsNif5r4>+;dsK+iiRylJ7q+5nX#LeJvQVE+C;8!w|C{rTJX#fDE8ooh$sfv(DDaZ{r%&RC){rsA*0 zrE^OnB=EQ&Lc#2V>C629r-4mOaYMrk0ws>b=LHX5= zCn_8@*dLtsNBO7wR>7QyNxd9o0)?LhKhVt4o7|U5~4q zwZQ8cd-c-qj&5^}m3jd8xR+X-x67q_4cE*)EfOwqai>iKoBnE)jB8ArR>gc(JIJ|w zi}7kK<++;F9g1qj3?2P+>D9OUy$RNOUZeMO_tbTCu`3f8-b%ygkD1NN)VgiYE_2`7 z#o-wd#ikYW-1phVm`DPRb_zN9uLd>Mk>rm;|hr63O+~ZHw&IYD0)4I|M z(wcV(l?DJ#5i(zpW>}2Ag$){gL%OdYs9|55oZoBTyXL~5*pOwp{oIj=Hq^YA#Kv4T zo;lRle)2)DE``9{Y@ysPy*Tln?E%5E&?QnztE~ksy?M!Dj@8ROFs9b}{wl@VVDT`) ze!Dmm!w7&xqH2p!{|XYW$XkoIQcZ3)^h&$to2KVpjyuM)hLT38+T2Xafdq#ro+(4S@sqx>yV(5kd2FhCu-vsqaS+v`{~VpFD7o0GU= zQzxf=%lH8h`jB!m?8?hz z%XZ^=fCk*L74@* z!hoo$>&ZQ0Z1(ykOOt7&4Eb?$o*&s0Y0bRcvYB`gm5l2W$BolHi#+$r=34~*zI^jus4kIo3&{tNL)DbGW@FEN?dv)Lw6qSD+L?Adz& zTdM|&?uI_elf$Vk|H8q}Jv6kgLE6R@K8ITob`?>_Bq^7^O)VwHp=?-{y0A^ESe8i( zy{s28kDPI9prhP1+|#3o@M)0=b7yMc?mb`l1&lK4p=Fw 
zUfBG=pbbYSg91S%gB9mg-MmQ1wTXGB3O{yWZT9Qz^xmuP{vo8Znk+jnv@1jut#R_RkzOYHgchw-e5@8`dqkw|mDv|vtjqY=zu9C+klr6hERwOplG zd&&CctBPzn52{Zy6oTcoZ8*{Pb50u0jW%6bT7_SD$8Jg4p3S>KP`vYz=$14GQ<+Ow zJfQX{mdfkCC(~wk0$o6mH$V4T%~Thvuy@*Se%G>dUkzN560#o1Vr+dK9`&JyJIzA2 zBe=u3PcYPqTeG+So%WgD@QePM1T$<1}OKAsI3h64kH~RN-1ypu@aNmrq zs81InMf=7fC}`dgo+&~8FiE}}!HIc4sJ#-{)0`J5$R>Dv%PwCWE9%~8MD6=A$)^!r z(c>?)Y&9&WsV~3em}}71@5;0n?6kdAwke;!k@kfv?xkvjgjp?Ne4?RmYT8A0o4F zq^K6X?O5Gp);U4pk!t1|1dnVQ$J}dQ(p_EDQtvHFL>)Y~TIlFmQyLD(zeKN|^+byC z>XgwVbvN2PV)5cFn~v0`!ZjXUKTHN~u6CgNL0w6)C?lSot$W>-QiSQ5GcJv|chTn2 z+1f*m1}-VBfA!(ZKQ~8x_Q?X*Zm(SxJbN`r zrk?*kWuH>Gs)GjQ6peU3*B~*GfX1fUj$QLkc%uSZ<}{

{|JXARfI|Gxq!tg7eoB zfbwND>80nNSf?L8^Hb}3_)^U~3SVK6XIOCIbFNA6u!VDFBvS+Xgu@&HcfNXgY>7Kkm?zVF7h(4u@iL5iagjK*p z#o9|%$XVg465sTKqKGs@L-)AP*kwF(yLwJfo9kOtLZNs}FjdmFUUU;*fU&DF0W2pn zKz8ywVkm7(a_~*f;j}w22K55BP9wj$Ag7gYpOG3#-*Ux;#mUoZbUz?o*JF%ZFn9L= zBtCnJyrfsJU+N>%V5no*!@F}}rqdPbJ1~Z9nr^Gg6#F#WJ?^$66D-ZWQ+CIsT^_%m zs!L>M2PGPJnp9cjzH?GyXK{^u5olDN+7I@(@9Ju9EZ!@={_>?Nvo&?EmHiM!7t8a@ zcN(Hs`B&_2TD^^JTElh{t7&jQq!jP$1nzfy7dI`Qd-Gaxhu$icy!crr!BgJ)@J6mi zne(2q^aM)5F{4lAUfWjjrsSL(y>!17Ho)a=j;t7s&Onr25;}tKGBt<4)+@%#v1>#@ znI!%$8tEY~kZ*kO-jAp1m0W$QWmpQv(0Sp2f`igCeU)eq0LQffgWg?L)!bUyz6|jH z14&6=9PInWfL(S*vXJSkWnbQPd)}zB>wa6XA#d)+Ty4vI zDw5iX4<6Hpn(@22PEXB^m1ugTqJsj1uBr5gyzj#`FI(BqVg&Sck}Y_+Y*-pL?bX(3 zRS!x+yDYBEbH~!zkTBxeYK>0yb#$h!@&;Rn!%$CrQv;xkN6H?|T#=(f^SQd97a1&vlBPLJR| z=|RmYRT;3Pf8c?LvqqtO)~(7br!zog^+l%2&gMJa5ShW3f?d(}oNQa7@6-B+Df%r! z>TV8SA0iUJul;z+-}`Ipki~#yHvuEiL~j;SFPtyslvP=!?DC{HTuHV|;$SC>c0t*! z(yHRN%xtD{AR9jHbE|{_^Q}^ad50LnUJh^dn-9wzdvdN#J6*4ST6=N5^_kz|FzIZh zMU{e(i?oO=%x69+=T4O6C5JqwJo?Gk*F4fJ0?fXAIT+B6THkCqN2rhBU)H-82uv0{ zFuAhHynph(y%F26QwCJV9?k@ech)WupDX<)zq`k3LAt^8e(q&epBsM4 z<3|>f6ZT3rBRbx9CO;vg;&^)fX5-i?y;C77=@8T&OQ1)1g0-9Xreo zrC`3hW>SJzFttfetK^lK%WNIV+`eQ-Bfft#uav2cP45~rVeUhfUGldbzU!qVzN0y9 zQa-C5&B4UN%>(_# z#>U3P$;-jQ8!KXhKpabY6#*@F)dYbX;9+6;kC0mj7;?K@fnBqxF<<3X;V$ImjoHLB z!1}}&m$uTo5aluda2t;o_aT{7_$LYcArog=Y?g4AH1%+K^-GH@g&%PwF_)bY4_p}I zw(7Jzs@!k53)#$G2+;VVcAYD(TcEM~;ZE~pbFtz6b~Avf5qCG?WmS^@UMZ_6 zso%8RBFEP}_lodqOy_c1MB#aRB!Y;3Xq-9)3QRbj_BCm_jY7gD9BF@DZc2utR$N5|qo{_0< zURf(^2&;A9-#%!dJ##IkAb?pB{4w66NZW8WH|V@5J%zU(u1@=_nde(Jj=Qk!oheQH z<+t03xOvyZJzNA|9K0G1qv7aEi-~sYyGv>uKvI=B79`El36NV5bgybid7An>L&Wmw z_F}lBof&qxJEE8(7rsan9)x zy|6#iy^f?3yi&k$&Qidfp*L+-G)+Q_!<{#IQeBPcA-VX=X;@@t5s3}KvC4WnDmw6vHkGZhBKaT zH~UOH43g2r`hBqc?m8e#giMK!I;x9XUT?y^twUK@#kF*+iR1C@3@4S#_tm4W8&gT> znhL_~SP6pdKDii}P2ywP-#3vI*nRPsSjM@|e5B%RB0sa%)1`?#Bs+&xSkr_t8vboe z38Cu4HUpu9V&LIbN5OMZse0YXZXpe`Jr0+1R9=_9(I<>=Fv2`fh%9gI$Ia#>LWXlD ztW2T&RvZR@*Ik|BKjdcl>;-{h8(Enma>QYXzmZ9DMYu 
zLR{4-=ssSwd4nHzhALEyE{1f#A~>bvi48Z?WQ+^y8P7-{fRsr(Wl)rxDp8HM1UJ(z zJJGHP;H%5$N1eQ0Ubv#KKryNN{Wcm3y1V9wGc)fK=iHduKKN2Fn5$aRGEB+RS&Xo! zez{{UB+OzPGIz;}0*IwKfB#u|NHVfiGkNNSebG~F{pfaA1_^cEtJqNtPAx@~U!Jn* zS7iph63z1@1p?N)P(!bF;?u=>@}<7vwgX7KFj(HnD1Er~emeUMuVN~)mK!>6>C=>3 zA)H}%%o;6vtnHs~p%Y%WAFCAcCqu#4H|A?xl&h{dD~=ReVw=h;QqnDgTIMrf{nYEi zw$6oMkITvUQfC_w&docyjMETniwNJz{X~ZxlPogfVEq^{^7jkAd`ph8?Sh2lg9%rH zZ&)meA6RY0zV#AgMfQ&hp&8KS#N%nhYA1p*Jfo;*6Dc>VuZoo(Bw8kpCzo!4fwvS0+uLHEq8dj`BrVqrWf?_?)sDF8cxLC zqH~?xv`dM10OW4LQtTo8{pa~Q+Ye-;>jqv&-Vt-z@I={Kc}MEpX-syto)Z;$FB1<> zMo8Of{(Q=le*3AjiS7xx7Z9FlBeprx1$cIr~z;Lzl=uX@=v;B7=gRfNxo zJF}ADJY(stNX*1YWwaQHe|RuSRtwvdJ(nUUevV3K(GbsDG%02+``kS;MZKGeE=0w( z8dWp7=bR=O0bi|GHO2QtOiJz*x^qwTkqRf+$DmyB4$E`R8)ZN1#BJD4z4v7%UHCQ{ zOR%#D@#Wm}+Bq9OAIl$7TQjHTc@0rq*F(q^yY7h;#d-BhLsJ`*!&yU2TV3>~j8~XW zJ@`K1tN2@8gnjyfdzAW?VZtv4Tdl+`DhoaI$ zPL^YW3G1Rnep17YV($w9DAEr+C|)ef;k>O#W=ENzc<-JQeT#v({*jmFwyOxrWHq z`zr5S0OybxC4e`+;IlYSK_`GQHX3SpMUxZLWimK&R(IlRR)2-mLriySRMfd|ctdv2 z__!$&$QiZw0eSZeks>4? 
z^%W7HMSZ`nxd4>L)=59ljZ$okjx1Dh=eS%me7l@cdkJ4`KB!asaY^1XXJ`q-(i7pN zrniC}&zC=&6U)6cy6kzttYoY9wbIyY3i%##wlxV7sqF)mHc?qT?wz6U$s4PkhO<@GiXlN=M%-K;UOoUDBKUI3j?F?xC#y7z&w{ zEU3lRmQ9jdb-wv6HUj(9I67(`7-a%q^=7R$n;pO5bS8f^QkE?iC7S(9|Jz;BQklHyvN;>gDPcJ|?pxlm0ip9W z?9^E>=|&XOiPgNw_bj1f5vtd;l;^AF2ynLW^p~^sMt7#`DBqXr^EA~3Z&D@BUcWZT z|Eh-NfpYEeQ*^|R&UC9YWI2q_=&{PKZRySKy)to+XC?FiOz-DN$tIKO+*sw@mWoN5 z8=wk*-uAh0*RP$^ZZU~3-A(-Cu4lsdc<*vO#&+gop|e)Z^UV2%mLzcrK*Xgy3LyB* zg#iOJdk#Yi5!=?vCakaoqs(V1aTU8=coSo0raN3tb-GsW75A#wG-l2PUv|j|^SN=c zz1g5_Lu=N46>m+&=+ZOe+xDXO;>D^xcbTOD3wSiegqPBCLR`6E;crDR(2L9 z7It zgNh&)(++`;gqxd*lY@%`q~&H|Vq@WC^~!Bcw6$bhIc(a85O3CH=Wx{3&2_O z((v%7yq@*07dfs&odJTWRhua|;>RV^-$Xj9DPpYb!K8qB3~Z6E>fx zO*U!gcCJ>gjC%@Zh4-pq=)hCfJB_jblX`yGT~3n8hHwf5l-VVJO6P|D(YxZ4Hk~${ zrQ^s0p7xi;<(U{C5nu5A!dhTajLqreoNS00oS?&y6xkBaU`P2lTjM6nsxhxJ5N#Mp zghmr3);bqgT~*<7@I}5+Zp^B*a*UW<-Zp_DEu9@v&7}DZ0?x7_RWU4)SBpI7A?8h{ zAxXVgVH^z}qxj!l(LfSQuTj-PUgk*o4EG(|_U!}9fV+Bp8#Yy98ivTQGp zAoyn;=n+SrSC7=v%PQT(ZaVaH#vbfkyyIS9DLd6c{QOWbZ?R`=%T};TsEMeDo`_eB zm&iF(vh%E4sgCq&ybo|R-{A9qeGP|sVFgw(TTxUbhnLS!V5d<@j7lfo_^L(80!7*9*|c=o zP>bDJGJEiAw4vk{W^QZuTOS~%7WV(xdgmZdf~H+`$2;D!ZQHhO+cP^he#0Hxwr$(C zZQH){zTY`F&WU^f=#K7)?Ci>}?yRcLC!c(|l-usS^iSKR5||t)AzgZ&?y}~{vtwq= z67qRc{w9-%9f@jDMtkRTPS(f#PH-lBDNK!qHd1#kf4CmvWqP=r(|s?A{^Gf4*1Nj& zwlhnIjNW5fukHBCs(w?fjrNLR6xA`F%?hmdm-Q0VW@F#J-Qt^eQ_`nzoaY7j6$$H| zv+?i-8%nR{r}Eu>leiqTIBwrK>=I*j>zB)&d%b@FF;EXzvYJjloGEuj8|yr+sbzrB zVZI19UpE(P!D5nfq?oht>5(07r~w5>Mx(=HM+%z=fyGS~Yc9Z=EER~p<8AKMEsf&R zS{>?*tzl{=M4h7%=noL}99aX%wJ?;^@==K=m&F2!D2U}9gw-bt?k~;(U0?6v)#XSi z4F&SI({y?;pk*(>TFxY7n8A2l{{I+}fli1aJ)(v-l7qdq_zEVRaWNzIWXwhq8#>t1nEfm~)0PrA03~2{Ojx{JH~7K^o`b5>0ap zc)+G5FsN55?<>N+eB{b?&EPfj@^wQ{{UjbuRP9kMcZ#OV@rHeGvAwOiwf-R!S;&oW zUCPwoG_a2xB|Q#E)+oH@#`Xf@UIG|sW};D^XG2RqVE z7+JE<(CL)MQKM3s8}S7XP$XDj(?~EVXEYeT&yMIZ*@1AcT=AIgi^}Dv+?bI^yTKhhK^YAcQ1}50+Nr2=y#Bb4D+LG-S zLuq8`&)56<(~SX+S@!w_nm`qTtWu@u3)T1ya6*?S-anpXknY!qVNrD}UDm2plAbWj zF=>~FeD1R~#fgxj;S&bm8*KHn|+S6em}t2%ZuP%9Ot|F6qq_*ixs+| 
z-7hOoprwATkN{V*e2#t3nA;II7Du3MmqVZ3mr`-)VRESWc6AxIK}`k2gp)DeS&C8R z_#IpxM1JyC4w^&kMlv46-*{BSY<;-<@++=cUpx=`OB3)J(}!RsalgGap#DXV6JZRo zwz3o?FiZn>Sx{&_qu?^{A5GwkwH=8^tqKt(j%G26T`IlBXE`1CM!YMpVlAskg8!be zy>=WmR*&~Ts}v6YWi@@ftrGlY@%>bBBi~rOB9Iy@iYLPvmaTe8VnV)#Ui^q1DMYrb zO425@E3KF*Syf{0x<7B%nh_GPmShBx`BWDtuS^D@nnNWE*r8MSL<3oRHI8?Dc=gFw zK&7mi=wYq|&UQRuhYo*QsO4nHAWT~o;B|)Fq87`U_Gt%>LxCazD{SnWp&l+q1L($K zsK8;Y4RDIBzL%YtoSzLhMYE~Sn7;p@z zXxjlhX`4n*>^8Fn3t-Wy6stvocCwQukDu_{pASbx(c6x6vtmIL{%6QmPU;LScd!vt z9ZqXjiP)X~v!qN)OtLCFmX>)yT@wqkJ%*bLX`$sss>wT(4zmjpvBr6ET6gnM6!*z* zy+!}D8H~gUYqES$VsW@4e$}7AWVV@7b-4k)F3-~eIBLJ$>1Ei{8Yo0LO8oOdD}y46 z?|~fC4?ZNy0)DZ4VsT7iQ#q2bsU2x@By>7*nz$z$Hmy$eF=X$ckq95lTH_)AJ=U!` zoB*EJ9SCL-$Ljqx#QlbeNxxpOMN$=N2AtQeJ`i>A_*NHJjOIMC1w zyfdpixvCfI$MTJl#lAI+0={?v7EERog8fWl)L`}*^2f7dIGSr{9hB!PIW2Uebzr+?>~Q)`BN~u zAVGmK3r?!gc-+oePiQ@AGTjt%)_)*e@J`~l3~_}kkp62mB(eRwZ2j&~TshY$bJ@$} z9q?0US2s3~4ZyoFGh_-8xN{HxeB`JO1Al;9>tHku&nH61Gtj~@Vc>~~08uKis`sy! zHoeczuyY;xJ|OmMPGO3mUj5EcFPvU$s}2Z+8O~EKEc;;M6y`oR3+a%OX+$lc#FXF4 zA5O(=M6KJgD?ez&H5QR}z*0Vc{L9%dV2RWx6=Z)|b>b6++`vTd^U;Ei(DVvu{ttLJUaV)t>>_hP!@ z*j#+)(17Z$6FZ810lY>ePVyBqO}vjlUU6WFAKRl_7pEHa;euK8%;xqU)$q>8c^YpBT-@2*3lDP$UeFGTv z?E8APq|}qj!`g9!TnLSRWOW}m=fTr>*Xh(_PnJILNFGjXJm9&7@tR70V9H#UD6miV zkf~4M9}{~p)RXGkY3`=kpNLI|-Vr+5^${Mf_5<*YzX3UMqd}`V1zXJGJ;YW^^v%<= zE^HehCB+&I0n*B#rK@0KTGP0J19(}5Qi;r2OLoY=UJ(PL=RJ*)Q zHxo>6%%ICq1v|;{of<5vNT#LCf_SL1BpH(Hmeaa%$KsNbvydvk#{z=QnH8VI*cELd zkR3}|;sHyN`Ci7QcLG#g;JWYuHVmBR!w#@g`9h=jFbNW~H>9ZQNR~hj6R1jO55OJ2 zkr;XSa3+cw5nmi^^O4O=W&)bMcYDyn4uGMaR;78ycLueerRRf8t7>=}4DSIB7ki*c zVmg$aNYxh5yWzLl3iXPL?0DAJ^I{%dUk~HM(|~m6aLcvcf^&RzvlJz$q9??{bVk0? 
z{UQO%;tr0M>t;x-r2rbh^=vt=9^9!(vJa<1{&VQ%3QwhbTFLYZZ>A^DX>6q(B zZFJ)csO~(BNcA#XnuYa0{Qad)l91nrxR!(juEyyxiu$5@`{5ghj^X9%V~T2Ug$sr zF@}50+@s0DjuUalYjhksIqP|6m)^4FwG>At+*JJNUoub2XNBqnk6P&;-jdaZGS>e} zVmVrOKM@`=1xh{?88mIZ^L#|uVS3Z1ViZImzz|LQvDXMvbEecB)6`1{;Dm7;=YEN^9(CAs`RNsu4I*6r4u%dhgD;XM1HydTlumlmAJHD) zlp@hC;rr!7p@Alu6y-hTmM+Cs&(vfRMo1AxNHS)C6qF5*@lhLxV3j%Rkd`fLE0=&S zvfKSjfhTZTujhI_o_>IRFfuEHcuv!F6JVS|5}`nwBmX$fSG_E3<3*d*E2f&{V z?o9T~f^K>`r|ql^_UwY$Kv&D~5o9UD@zR#Z1fW$^e>6dtxoiE${`gU@lyOq6 zkneJC@Z>6P=bZC4ET)UalKUw(c>oTC#~;+s9;7f3P*rSjnHIfubMnn;69eZ)3@9F@ z1@Q8}$mPI@68dIPk0tx_!G-3$2b`6!3jbQ<2&g`c_6gmK>+{>w6WARWFwDU2;+-X} zG6i}8DBydBTe)~(OZzm~Q(`U=1* z{l$z*>QJ!}_1Ls<@Bmb(ce90zu4dGI43+k;!hX!wM~HtuM9@Bv|n*c12A+WyOs zK0*=N{U>bNOv_`p<9t3(k7#tU*dY5^g9x6nj}v>M^T61k6s<^J<;%j)tqiA4?A4&F^xG-qoA<(rlp>U)94iq>}_o{RXM4|xT(%SP9Ro7xr;T4$gu z!Rp_%F3?(S@cks6&j6d66_ajn=Q(P4I+M_XBzqPMXxdDuF1BY`;6!}VYrmghi?kmq zm?r*-)W^>FNl%ttv+0es!sCq2Z-~L)Kre%Q*er@vDWefbP@E$Ip!c`kRjFp-G5EMZ z@KR7k5UAx>bIdb~JdD6?C4Op=1yG{}B>99Y$`GN_+I_L2WdNFMFOF<9bb`MaFyJ@c zsPUtjeo+eC)0mo30Qg zLpNER(4pmcOsmu0+-65Hpm3Lt3XCA<(}`f~mE7JYjiE`IN3kk6yD|E#{J@oo0z!iZ z$>@qnVvtfrWVAv5S+H(Q@a}}?=07ti5wVIy*cllTzysDW{LuxG27SPcdaFWp*ZM`A zS}9ZrJ`hSx&;^S~%|eZp2%JlY^Oc4Bf>&fknBpS9w~2pThCjX=)LZYMzTGUv5UkuU zeY8D!Ij0z&FKI#f{@h|@H;J=W0`mY16#uCHQ7hn7gG{6TQ3e|`;7NjF{Sn%~$KExc z032(q6hJS!nd)uVxd&-dC!xbqz=#GjLnMX0tWWYD!KUjt>#)02PiPq-o?IYLHY*ny~J=gfM z{D1y7nHM{1q^`_k$s&<}(sb6Efz1ptaG-7KY6Mm?U^}l>S9BP?ySngLS6Z5>ePfdc zBSo$(Ss5kz`Yyl@B>9P%Wir$3@to^g5ah<)n0Hc50%bl{WL>B?p!uWFz_r9l&*y~| zJp$TpI%XE?c{UXY(WKNPi5rGC=G)}OW;>>mE!ybrwQ?p0sWcnhhwE*~5{8(pUJfri zYCI=2!E>W>F*RoVpkB-xq&0@ORiW}Cl>BXf4(?z)jXyeNg!sNhAF~hu?SClSYuIS_ z^>F&EBaQz4)-Xs@Sd-yBQB$PHqrv=!00_z;35liq?#y1pA4pPv+^1)a^YX|f*Ep8V ziM(`P+${g{jotyl)&|CS%>&7R4eTE`ndi{8@j7nK0{M+}n>n`p@pOzh;O{D3L9fW; ziPzZM0A*PHyLEy60CU+LZ|(CuGqEsYc1SQKW?#v5W9E znBR7Ibd;pkjcpNOC#!B*3gysx*!FFyhRug|*5(sc@>zzxTL4ReEkpDE@j_CRrGd7MAr|OBl=%aQQT}HF_X|(OnW*n)$ 
zP!44ftq1SUPdzPCmu?grG#X?17t=$}>tnV!yZgft0}irqT5ZISKfy)4EQKlnMeL+7 zKOQSY9|eT2h@hB@(^0x9$!4uUV-v{Ub>LKilru;gn5Yc>q!C zUX_r@$(D+_ zP*ftg1{t*91QO!um}bAi1TdkM)Th+#yd(36W(q|O(dQE=X0h@PTD6F`eA7@{JpmHM z&_vL%Z!9n@obge3jV5C|`Y_vt`W@8l1>Vp)uORzotsexBI{)Sq|CItlf-(D?e-mMU zbA{dZnC$obJR~G^pl^K9AblkV$<$ z+RdzFE0V%05M{!<6SdTBm)vfp62OCaBIR|<$To~r9Dz=H!I;%0kl9fjUpI2J2z8yrN79o$*A;mn zr=xJ#0nM5LO^(rGEFF-lyv8$5A|yW5YK)0J=0;kzcTPxfk#R*^*EsdZ5*7GEvU!$! zZEZY$e3{4tgP)O5?^a6~O1?cJg^!<{rKn?b~!7 z{@xYMKmsamDaooaNfqIoX&3AY*VJpMCBeH>)R|uc@{m`~eJ}2}ZoH3uVq~r_3cp0f zXO=#!L9KBMg>2CFFQ0dZrfF|MG|XAa4cnACF{=rO2s0XBIM-xS38P{t{9@#5*Ew;PP&WUazuiv3j}Q!DCtm?H!Za_rIG2 zs9SG#_nc5hd&6f6TD=T6m{9#OpxO5RDW#P5fx%P~T9z86h(W33rIymPu%*v`Op8ec zw7Fl}*)N(w3FZHDyCG?n|E>D})r`)hQoH`Y?EJfZbjhHkb7C~1{9h9R|FzLOy?I#L zITxN$X$-LjK=ohq3Z*HkYI5#|qzQT>RpnUyP=LF(qzzQx34he>TBQEiGHW=&`tW$M zXCLx^He58EYkg+9K75$Kw3+T7gBD6w(q`8r77eP6klr*cD_zQF7y6L3ikqg(JF9I2 z;H3f9o2K37Z78);BDoX0J29`vwB~Ecc!PEfL+`lR9DQZ;Ku$RXq-xPGxWy}~IW^6; z4Q35N_Au0IF-lxZhjFj*L|^aM9Xw#RqRqSYkQDtL0len)IF9Tumw|)9ygGtYL9|$5xwCAFa6k5xXo~{Zo8~J z-8{FsAs9G_o!rL;%kN)G68du`k6T1 zDVhIyj8-%EyTK~c0osA|)}9oCYJ;mF_kekXHM;jBBEgl$&yLor*`ewjuRMba@Nq5A zVc*WngLcGWJU18U7)+WgRZ&p%Pd!v2?;b&P2A_Bzo?lcV@zkiA*EOwW$oGlYpJ@zu z^!4VlweQW*jLZDkUwnu`gy8ylYDis(v$aa^+~u`GVajHuGJOsYdbp3T zIDHm}s#UxF-EC3&%PUdTrFKyc(ArM3k#3blTW6j57OdIC;&VFYc1MQWw-N8~hBt%$ z@WauDZ41wwZ8rwNv9wA>qPK`CfUPuh?L0(5S>P zab8u%GRIeosKnN6PW#ZwzQac>a@Jer{zR>SJ;^RuLmA)DbnH zGpJe%3VSQ}Eoo_cMU}j&qk6qV)o_$;ntOw-92n|U!O~3BZO#HxXALTgh_gRy4U2T} z87HP*;+Cu2`jr}FpF=v#RTi730{aWLxG@Xn8sGG`&DKXUmar4+f_u1Y&50${R*|crWU;yeZ zx4ReeSN*}9hmAfi{RV$@fx&0AX>ChP|J82tuobgJL{-Q*&#j&ksabL^1qo8KQxXt0 z5FQgNEh{TK2OGl=8iJjc?LW8(W_%__W?Dv87B)79q8WM9a*|$jZQh|AXA1{SjqlW5fSBr=|Y^S~GC`AIkrv0#N{1 zIT)D!@82doR?!DdvD@!z6UV=bUVVOr1MLme)C>4J?hUkpJ{uI7kp&A?+q(d7Yt{<)h~ULLVqSf^4(0x<-AZ&uQ&D(R?G`g24C z-_UjOK#mKWv`nAL89`OXJ{Qn0ZF|3eJ-lXjeckJQzoFPoH%}LLckgO*Z=|z5|Fb_< z{ypY6?F^uYx=!)rX+<)1ZE5?K{#f2@wmnX9hm^wW*yAzP>-2sfT;5imIB;hTV|cc7 
zIJWQzmb(u01ZviAMGjtM8lZFVBbb6r#-_(1ht$+ynE@nM$NnA`A5KuGsi_|b0M%ixUS0ye zZkxBgWL9K^Y=OywQmB_n@w;N#vmvo%NZHA3v^~JrSDh+}?3)-sQ2L4Wjl81Q@V+13 zyS?w5@=S5(X6UPYAinqhOpnvAiS}K>fI*3e;R~(g0~<#7pAIwkGdp%vF=?4b(tqF_ zY63`<$x&e4zdCwY9wxJu@X87=7!!lSV&kTrCtfOai)?DD$^7G`Y_tcfx%Yq5`t%aT zP1$gtb9M;nT+THUj@ywsx}K&^Q2atWi zz56bgfZHq*=O0k$PVe?3Hi?^i{f-4!-Q$;w)y7tbi7a$NvyVk#l}LrAkhp6U$f|tt6r*EA%7w-bN^>U@PtyPh zDZ8o1-YBw%*$>eEgnF-TkCmtbKHVqtiCI~fJY zNr>H$|D~J+aP9sx`?!!r9i;1yj4FJF!BG8mv@`vZ!{4|8=|wMFq3ajDuVZ5+sin5Y zIfsg!{wrm@{ZZr?UMCoYbAd2UZv!?vJnId^!NEfo2=lCF1~rVE-Ie z38)d0DgABCr-eCD53Tl^c@c!XKLYtW7y-p@GN`Qui$L)B;y#e>=P!;Nv$Wq2#E8hK z#-|HRHs>`|DH97UHI#)qXW#(=<{Niau%kOLMT}4+D0h~6R#;N#G6tx6Zt`(O z%oEO-K}F6;HNQN=Qe$sf+Vd3fxL1GRQxFpr7TJt_-Pv8>i3CxIue;Q4#NS0^h|`H? z_TI^UQzP-E`7yT98pjTA*O-BST)!dsw)(ceIZoU825yi-<%tDw8pi+t(%XpCI%Mpo zKH}!}wYn78`6_+c4SN#h?ZxWvkDsLWFRnerH&27@jew{*K@cP@y1*H5mOPJUw^}P5 z$o2aJIDOVukoWnEsfOlP9Qr;okByAt>6FvV;Yb|=Q4qFEnhoeyP_4k{E%Qr(a?2|T zWBnSY)6M{Md^1fV(2xqMu8ulUF=aKA>=E+dCy`$|LB;0)O)&l_x$7;1RY1#%kZ?lNIN*7 zp-N7hIdVYwIN*z_f{(!28u%UvKTe|zvvVAbmtDGt)Hu_THXZ59ao8%OTfP~-NgnI- z+38BqX@~ij-)820sk$EYSFpXZx91;RObxLcxD;ChEpMdmnHJrzx|xP)tzIlmx9pkv zk_xrs)$@OXH3H~oj8~TbY399C^+PuhS(`P`pe}jm6OlOI+foB zarm7ctS^iA+{O@u5)Yi;w|&ax+z*_WE};ZburSjx9A`<26q2xbGO?V75q{JT71nv! 
zl5ds#&C@>hUyw>J7Mix5d$11cx^SDhe>DQa`pHBY=PMq ztNd~Rtp<6UO&Nx%;bFFWxJ5B%UGGIW6S24qJj=x-%`;lRFZI(x2cKBmaP4xHAyDznSaHy>iaIp&vlh-^(plH}}B0RhN1{L!Zg0Dzs(HV!@IcFRhhkesZy+ zGBc(McCTl3)G4^xmp8US}m# zq$)~8Abbd^NO&4pI70&r%!rj^{H>E89hy-#f?@q5dpoiZUw5pOC7!Mnz=z}7=yw7B z2OETyUiRu%U{MZIhEOR;%mD!^2uC)c%NGz%Ee#S+c5u5v6h>S$t8Pmt1#N{L*{O~byQHk zq6?;#0$REqu|U74BNh4>C^&QXB$D`# zl^yV`&s9#2ld`oZuR_bfLeov*HL%*w5J$GhD3N5vI0|&bo?5OM=T872Bf|s}y$*~^ z`gd`Y2|E;Zi$FJucCmv+kI9yC4L~xFAhVuIs-1$Z(RfFh(hiAFRc^_>h*5>2zY<$x z0s4iFt9r4uW!2BdJtTt1N#CABijEMJc`d*IVLSAk`?foV%_8+QQ%G`%!=eK+#0+(K~m(UTY^3O5B*wJe<##L{x%$h{U$N-0hsJfxah2z3Y zxCL07rZ4D9H^(4lCaXpY zrLEDj%avV~B0{h2o(Yx01u%)##0|BW>VUM^610rAAv(4Nj z)wu(1AiIm`ageaO5VMkn984XLFPvzrD<&*1D-(QfK`YDx&k1XB99wN(GhCzxYEQ%{ zUKl`DT6iur+1WI0^l&G)YFkh}n2c~Uq^lBHrJt-??W{RboKieyZlZO0!zt>2rL0g9 zbkAVp<$K%}!+blL0?=f^Eibx})cPQMCE4r9xJ|ZUesNcpySL>cnrh}U4)>g3m1!0+ z(G31ueqbGxxjCl_k<2~vz~=3|e@JT-!D`qdzuZ8#Sj*-;WO}V?oyvDa`MV*M+%a(pqWf0TP2XYNm>`8XG&-Cv*ol zR2Qf=&JeGPG;rxO)}BON3=0lS^6K&rV0p>-=KkhQq)nZpMUQg2*3MQwuEF?rGyz!G zDCSc*808T)&F-W#&UbS{A-ZKXEzm|oRF%bi;|K^z2TLrQ&30nxSqK2V!h3fdF+Lg02zYhsf#|==tR^Y)jS-jqKAGIgo+-Levh*@^e{rBP}Y<4pJW#ghaR&! 
zS&J(9Ty2YAbQU+-p{7JZ7Gc zTvMnoK!tE~KxRc#uP^Q>w~OY>1upJtXT^g=+n6+ANLe00StIb@p$o#Ra;mV4O4|V( z$sSOf%t^?f`S4$Nacs(q0_QE&8*ziP(O0Ne=!_f7Pvx`1=9m1ct?8D@gAprlDSnNe zVd+8X+6aP!*dzFAZlt6N%yFk(>Y#;IIVVm5b%9h32D^8Jj&xXJ`jLG6j%i>ZNKJ{2 z4QzgtS0O$j09{^Ca3d8MFm%0xh@e~wU@74$_bdUHJ@DFX1&4$BlS2C_vy+4&pZL?T zJO}-mHJHg4m6KH2Vt>C(up9o99^>b-eg*wr*f)|L#zbhaW%u}A%FusKw2Xpn6l`GZ zP1?}05UlDi@r9BVtF) zFr8%a&2qoOL|Pjz!Z^lXY*3*Jx1yv1_Wp6r`XUXCkkBJA<|n9C{AyB}nrK@E@|5wg zKU|j}bb^9n6?ek7`HT|BLP3h)AeNIZb{Ndi1dUaizJj6JtagMc$g<3c7_sKn1fBKP ztUQZm)bZf*H+YrSV>>CM3!BIQS!xU5uS@*VMB}ko`&G_e@S10o4R56N)Akn-K ztN?mxRWMl7X%37#zwv5^9_fUnk@jOzAlylTJ+TU0w=g_HydZo>tT22kSTeZ|2;TT| zXUveU)EOsMF!aANa9PgK7_le0KV9L(^3xTdOmpUdnpHm4T}3!}IeWv&5UO1$XGfil zAg$Ey{XCo_fP}-2CfkX4pKnqD`ZQJmh!n&@x+mCG@$hnm0gQgC8}Gpl%oI0^CGe4# z=u}xmei+?&CvLL@c1>eJX&byt!kK8G%k%*Dk@XbO*w{@QXT$rRlveqZZh>Xe@+EJUq4)tG6Ji*{guL7zk14o+F-?0d%X(`5gb zNT;qeUy3F9ar@mHqE0noRR|MXXnsfeVJj>kPIL0T6oT7U(`_Yr#yT^>Tlvxiskzc_ zg$fQi$zp|C75mn&Rf5qoN1I1LFuUZI(KsF1{rRssAVO5-x|9wWMO_VNs0wzHi2iC; z7)zg|JxPZoaOVt1W*V_k^F|Fb)<+RZ5&;33dP7e%+vPX>1{bIdGz#PMD=3httDHXe ze5Ggy{FC@xi=4_71|lgIt?t$==5!v;$JOO@+3^S$Y)VUK55cP8#aInMbBu`&p{kUm zRqBdjvHD1{IIkai)#^3rG0_5d03oid=B~N55>Yz#7n6bM&0fl2YfDzTShh)?#ig>SKB?G)N%z*NnE6!Je77)=kiAk-!Q z*vZs_?D@WLa48LL@Bx{3&8PfJt`mUlnly{9SjAK87V!gAZa66ILtnD z;JJ|mk&o$7Qu+^orI;pqkpL4kf6O^lR;@pIV4An(GfbJXV0yCu0``96wD^Q1!if2S zH8Z`MQ>Bg-0ww>i0cR=~dQPTOf)^6HY>eEZ!RIkzlMMXjH~NnG8uAm8*Ehjwz}A>tr9kU;h)5i&ahY_S4Rx47_zPCg}GZCi~kPK-SCqw_cd^civbp;PCzqO$v4I zGuR|xtb4y)%w;u%UN-U+HBpjauThVY;boR!Ly@6m98&f*Mn5&PVfPj6-~~~Ghn~AK zUpb$oL)znEIzolCmjP;*DZ zzlTd{fUJ>=Elj1ZD#K}NanwJ#4(w{gc|*B<)_nVbZT~l?qz$qYRrshGX2Tq zF=vSNf3}@7;Fw+VnjKM)G-JwSxQrtm2qI$4Irq$r964n(;om_2vJa}0Gx3ex_;9n{ zG?DCt_NKtzpLdFGm4o%6DmH$cLHh4hayFKo2^pd|IKN}LzK?q-$57avVyc>=eOYUO z8QB+)>M0s2G2MtPd(d_xjPAc&A(~OMsTU9i261X`Ez>s~NNzS2qD?=y$yO+vL2*cj z<7AL5b7ycKCI_yk5rb>_rIAI@WwS-Kh*lPZ`81vak6LN{XPjS zbM&+M&ecwZy)e8oG{)9}$v88w@lbofZaf{shtIC_MyUXZ70ssR#}U~H5}mIbSipS~ 
z8Nx(B@sX{m-_0b<6$zV|8|M(Y3e>31J(~pt^M@m(g$CJnky$e+H0F zUSlTBY&cxR_JF|76KHy!0}o<4X!G*t<$NYdzz6?bWWyL@tlS1N&NT$1L2?G*3lkB} z$ftsgJNEu-P&&ihEuX6#z?31hW;*A%g&aFCw_e>9bff)lJ2YAK-5$1oMz4Iy;&Z@7 zi`%6X>{9`rK4J&072kt)@f#b&C5qylvt>66C=25(Kvw|Ak+O*=n;aXR6#ZIh8#GEl z_0ovWSvWt{#m7(egYscxqPP!0hcH$6Gn)VMvGMdvLclWbg@3QQ;mnqTrmHU|QvJ`W z2zbgy?zC!Q0-K?WR(8y zU{BIF*t$n^5lYDk6mF=*z9u9$Kx;u#Y`Ac5|u|O$kf> zyO0N1t%RV4p6B==Ox=g}Da?9SOKpSV7A%z}a}$ckAO812HO%6mY@9?vks@jZTlqmY zi@Gk9YIEA9v8gBr2@7$;JBf9c((4lip`GO~;nZAe79UExdd+xE$R(?{E*(U94AU`k zvgMMADD1nvnJO&wianv5Awp%Kif|aj0uV073GW)?D?>hf6c&I8f6~0R)R7O_Dba?> zq6*etw8xGRib`!(nX}5(lyn2fL}CK&mUc`1kmMxBBGbsQh_k4}LA&yfBwShK-a8*y zzzR>GBB=(*^iw+fj zrz;b5Wk5krX}0F0gq|hblX_cO^f$R>VQaSu4I z2nx+ebbER4SjWy8!|_$|u}l^c1^0!C%(@OoS8wC!e(p_o|I~BoDmc~U!~6Q`K?iV` z2FL5R)Fj0Ma5Ipjv@oJh;<51z96WvkS7~ls?V{g+ zY6+?dgiCW|g}E=KS^eEg2B%HPdla_lREwr>)8B8ya@{X7nOURea(MUV-4<*2Z#wBt z+B19rTs5JU;CQ?6nTh`t+L);Q!wAX|1YlR-HTB1mEdEag0H*TO!CezfS#C1O>?I4r z$zlscDgsBe%8b~cL{y*Q>(`Eo#$is=51YAeVp#;p;gS7zPC!ArlkU-IrIAS5`GV8TrZB+CuCuHx#KAWD1*k@*WsiL6Xu=o~ws`r^w zdNs-^H$)$)Bi%BuYJId-_Uo(bK7>p#>_0Mc-=ci^?ePwAXO{k;UHB=n9MPP zZ~0@(OD!f|NP5~Gh+Ysp=XZc%R~!IiE`yxS6Y3~KOGtyCi_BPh@`+eNxKyZB7=xq} zL6+n7mqeYEWo#0Yi)3KR24Q9|2db}>tg3{$p|xIQ-x$fs#=CBG_Q&pX_uE5!nOJOY zhQ!+fg&KI8uA|;;DM;sOS~MLn^jr3df~xfj2GAo#l7!}nMTdUgaf27eDJvkQIDpvK zAwS6-9>gUFGsnM~<}98P=s4nUmg)*BfUhcT5tVw9M8=Fgd3M3wx)dv6l_;aZ@;2K9 zOCHZXNh@(;hxmB;y%b-pS+x=xQ>N?!xXDT#zqNW|SYq%Iic)@4)#RWNk0vmSS{S2v zT@il(W*ND?ttI{K%Yaw7sSXe|>Nx&}C2wO!20UaL(`v1<5y23e6@;dgTzPZ5Qi?9` zDmvG&$=0EcP%7$H2*Zatj8T~oOB!}Uy@G{oelWnQeAOO+?*-$ zcLn=&g(x2&3|!Lw^%^wdK=3jb2NL_ij2JK-mSv7Qw3m7e5(8$!VM`u<><7I`ZM?|} z8&`xhH|OP3h`QzL0WRVDfru)gz8#y}qg*UxxO)lnq(I^>V2RKstJ26ccg<1t=!u=5 zr3V`gaf?Cr-u-C`wFQtAXbl73f;A&+B|eWhuJWfjpmEOjqES1egt5W^ePNCvaw4>U||KbEaVg+a50nn9IorB(vl4;!Q%5l z0kWqm)&i$jq9Q=P;?D-%7S%cNoF1HDjE2UeSiiQ`%#5;12&*vx|?>o#G!#twYTHs?8%4w!isdkq=i@NaIj6L0kXQuGM5GYot zC<@Nux`vdvTGH$!o!gE6Zg`hdg6UBYoU?V@WC{=O4kXCWGq0 
zbpex-)kVOul`#|mT{oTP5kEmH=ky_UUSRdFP;MOjkfY9wLY($M3`|=jXIRI8)^&wV zeiWgQqqeJ)$z+}v2ba5^P*6#LQv@f@PeM^fttiX^d_LS*fPt7qEqX3zTb$P|Wu}gq zbt%amK8}!_Q1x7YL{4dLm*c~!=-6_J$b5>nPYqx;nAhQpR)eC_ z(&zsn?Jc7sTb8X++}+(9cXx-z-QC^Y3wL*ScZbH^9U6COpmBG2?0vrPjeG9<`F_+G zYh=}`%$PZ|R%K+w4E=J8^FL1Iluf^*bLXW{+0ATGjbeeK-cq;$-CuaKJod;scDC^u zWwNgDx@kz|nu-?h=OTmHJtCM=2-3L0R%$W&;=={ErYYZwu$YaT6A(;HI5ll88)F(U z?wEonxE8xF@ihaNXj$He_TQDQa5@esXE%uED4 z?jxzFr+d}VS0F7P%d@}~TuVeyaf7$YE`g@#DyLUl8uI6!9{`Mw+Dhwc>Pcg3v_Ag~ zy1j{%h}bNARCo2lktJWOK_ALL7RRxhitPDSsL=&Jv=%HQ2|dE_)*3(6(T!2D4^~|SnEsT3sV4K;Q?oqp5l$_#0P6*`Xxr_5Mvr4z)rG$jQA+9_TQI^dN@uA zxa3vTIDb90LjrJ;9oZJu~4d4k4ikwR3Y-_l%?#R^?Zb;fBRt(_#9nq->U!jU$iD>raZS))%LV z0PAzK#|geb9egKS(97M_99yRIk$%>;=HdaQpD{s}n7arR^jeUMu}Fx+6R8B{dSy*f z5o{_B^`Qs|%X3k`4x)dv#S6GFCPU&!v!ZI_YjMlATBH@%v5=tSRTl&WvE5ViO+jVi z{N>@%U{T_F-LvOnE308v*EIJ2^#^j@3E=jv>~vbY?G!83q(LB)(Y(gbueL(dhF>|#A6;gC^NWnFRxZex_;vJf`7wRYx-IRRbS@x1+51L6k) zZN~ssbbybmFUi;Tvm6i(bdwJ-j)cioHo(7PT!JCJHtfaxh!z8qDm{X#`xAnwD}Z%RXSz$^3Csoski>p z1+%v7E?dyPs&Hxc7QxEtv8mYpAr01{b1@P7ND-ODq;`;rna-yM3^7qez1nTAUhTl z%2WLGWhi5;7-mAIW(-1tgIW$9Q$cKQj8owv!TWVSDNxH0=8oI&j%Bc(dCre``_&*N zz(nzaff!n@P=Orj4Oj^x2G1a4RnU7naH>=GY)xzKo8*@}gdB912dIi;X0}`~z0~F! 
z%JLZLb3*f(kr3y+cN8rAHrY!**CHvXqs4H}r`Pg0`%d%ZDI%wfAw48_pG#gnCB%E& zifDCWvA&t|HTQN-(~Iznh40wVUcBvNUm<^F-gRF0UVPez28w=&QBzfM1kXp);8Up@ zx7B}jHI=1f<+4702mGZ5j8b#+&hU8=t5G@d!&h6oomF{L{d}|FacKgBo%xYcYuu7d z+v#NDNk+vtkQ~K3F?)$ic@owi+qtlpCe8dt3YL-RbIL!Q`}apulrvg^XxW?X(O;j1 z*FCiLl^Z6=72e}HFBX1s^e4)SpZT>Soe&>UauZny*NUJ0u7KgJ&dWSzHGpj_K~P$| zZc~_*R;b-3W-YQoU459N;YDZ7!k=8t84C1+8*zg+6Q83WX^#k~NGvtj((*4kZeKfA z48TF>7eDCJV(JtIQaCSWIvQNem@+3bo^&Os2i0FU)%GrS@)UOm?9jEYmOM*RqD&Mk zIF>wR)sMS*r2zWJ)7d&-7>d1P&lC*uucnm1EXN)`TyrGdCV}uwc=G6|jad&YVjr2K zzg*Xye(ZmtEO2UfognD?T}Y*6DJc@{pd{%VgtOLYnW*WNCGPATT2+REcb}S8lW%ED zUL8sbh}qcn(M!{FNW1+qc%B_y5H2FyBH&-9)1WNh@|&j3L>Cp2LCuWTo@0=Yb0L#Leze> z>b%HUvm2A^P}EM9t}OGKEwRGt_O0vlBMFFNs+jY5=Jp}%Qht1-!l2+dx~XZo0brPxYra!<562em?Rtd*6o@%C`_Y=);hNC7k#8s2QqsQUL(r`b>xGUqW6Z zC~^m>-(BDl7o6~x4l!R&k51>=*V+R(hTh)xK4#tox0P@Cd5SI6b8GuX__V$Z=ro!f z0p=?zwiDk$#AG$Y?BurH@8>f-qt=LJ%Eb4C_Q1^GcQZ)&{anIW@W9xET(Lu&x~v-1%Py4~4a7 zK)7=AbFj#Rt)oOI6_Ey7y4c#q`JP*6qhtN;bJonOyxx$dH~GBg6`C=Ls=UOq)&nX%;Y(x z)!B(fjxZfrHrR7xFlZADsrxR%JSAhDXlilSwadWmV`beKHEh;Z#|HeVQaI-iLES~W zS<^<*6)OSGx)T=$oXca4Mh}UMI3bAwUM9_3h2n24@su}Ej|>y6f-rk~3a=f+`Oy0_ znbPH(JE$W*c7EkI^x}R&t+2a~`j=d%asOGJfR2AwCnNKJhvDN|$8EOWynVv>A{b;2 zas&*5z$>wK=L_bvGiDYNKAPyS54d@|`G0;OQ1Oc=x6{--;WUR|&CnPFcl%?T2fU_V|SWA0Zc&jWNuR9 z&54N{dyAceS%ae@>Gs_wsm^ETKty1RyD^J~U~qfKi}GP<6b3%er+0hEn3<`S--p*e zC2ZmSSTkKMTEF%Ng*^YbiiW!M+IOg4+T|yIa_AFhn#8J1Ib3Gr$n!3pXT zu4X`Bmr3mpPlx*oJsOO@1k}abfXYx@{+-f2@DIq{)#{H%ZPx*cW<60u8yV-;Na)Yz znoYPEye;V8wtA(izANDV&B$2mc5&6{c2ROl7Mw>v;*;CY527u>R~mj z=+*hz?-J;$`*e}C1FM{VxdZlMDw}5c)Yw-WUeT`C=heCSVt+ZrEp&k&BJQokArw-5 z?58W;Gn{+vT&49hfPBZTX4}nn&*lB>`nBgu zXGi&L*}BL~!U)`h=c4PN#yaQPwV}aJ1sx}qfE_?hX=-!Q25W10ej1IK2fVV<~0tX4{I>@ z_qPcUWp2Nxa(|9riam!mhMug@qbZzS`<%d`Kh2c#dP#ry!dnh-GknzkmQwWYn)i%b z99}1{wvz({moNXyjxih|m1Sg9RmxmIxA5vr)(Zga(X_J2-?Y=84%qYDWdF4~bYwce zfC$h;ZxlkR69%he;TbK>Qy&pEX++5wM@_kKAPYt&S3q1uS))$4q@EW&wAVK#p-$!q zkRk8aM2^GpKPXNP*PA#E1|yGYBEk)&1xc)K6+48dh^#PC2SK0o^(?&x(M?gD&>OJ) 
zlSKn?XQ%9$(z|zb;yH$MUhiP1NyP|+rq@-aRSfovq4-T>p*EI@`#<60xYybk|ZM{8H1YNk!SK!YzzU8i=J`sINvM3RD;1QE~7A zx~3+hxO_iX2~LY99J*(>&@FFKP>(b!J<>)NWmRn3Fl#dEo2J6*i{aCy#FvVL+Yqy! z&O_xG=UA_hX0sV>-Knx9e0hYXU$K2ZCkU3!z`UQZWtaC6hdvDk-YdJUb@u(H*QN$$ zoK>#D+21#hIc6o@cl=wkLfof3GJRR*uHnPtn zFxwp|{sYd0PV>p!kU>p!fyKYO;aTkK=aHX75qZadU*mukbJbKgDY}qmR-aG=jvmnA zjhcxt`fF#`+j+J>eVy3aHGRyeOzG6=t^7(S)9UvzkixLFG#X2+yA!!Ls%E7JkYDw7 z_I?)_P(Mt4PQFw(J5A_`Y8&e2_)K*k6Oy8gc8IlvYXM?Xhll_w`VC}gL{`o3>*emy z8vEO{n$DOGyR%Ho01=^C!@yOq8pDIySm_4(JC%-+U9~iRM5R3tkq{vek=h3+3R~I& znyK91-D0l^dj!-^kn$BOk3X&&;4J!y|NVj8J`f9DC(G%0$=G~a-E+$rsv>J7bfr0xa>P$1JAuSw8AuWz_JpNd>s5a$yv8bFKGCfC!BdL8KIvg_%V%zNytD zgL(e!&o(c1n8Dr#$2h>3p!JIg?G_1S*|=nN%!ELW@p5hvgk8EM|3O#Tj9()nOiQ3P zy-;iZ`T`M^_e^_o73~h-@rOQpliRkqqh z!G_xAFiQY^7eB(^QZzoD@F%NNjq(cRZE5fyH}mRCF)r5SGfRj$9ja_w9Ojh@9jd~m z+U~BU;$_-5&Y`*V5Mu4#Rs*%WGi>N)9ooR+(0KbNg0;7=sas&Rt1bJk&s_$sr0_|Pj}_o#Ph=ue7t?sP}7qQq4#8;ohR0^FZexi1$e$3ReuttnIC!BQq&v)ha}dOIPJPl z%Bo1dp?hJ0_{q7d4H(CO5jq_~_}gMb56=-8(rD03vpK-z2OR#es|GzaWgNnBnn&Hs zReEvfr-Vr0RkIYO2N!6A$FuO)@yr1mZ(A*8L&0adTTbB?5J|rjGFj2KukQU*O76ms z@Na#{gRHwNl?m6nrUR~I2q*E;M|J0%EJhWr*3li-VtW~cOs*pA9KMt$v?@#Qg+6$` zK3F*Kew?!HH(MzgACOEHtR6#S@~dqA8Xm-PJPIw7M5~ zfR-cu5-< z?Tvfpb%7WK25_x^)S+(8@QRKwq$OHPZ+|VTN0h`fWSVEKAmQD0dakW>g}QbqWYwt;iAznbca^7KkPPS2oY3Xyg~7(* z%Wd`n6m1=q+KS?fP2_Qzfg>{fp*0R=b|a~dlfvxKf>M7=k{6pGG=DXk6oPQ2$zaKZ zEMOL!yIdFzj0oDQC+{*fwV{FtGY5p1Q5Nt)BTX`g#p2|sD*T#uxibVAe~xH7%q_r0 ztcJn7d7&m1K!^8!8C{$e=zY6)#1bIX` z&@W>4P?VOX_WR2Ozsd1i%oL~TT>qYfTj0VxdBv888T_R_Yu>DXxUQ4cXz=1D9Ke#~ z!h-)=T-qE<1*sO&$>rmwrU^H!9wAH5zsvyk{ zwmb%mmsub3i;aVr9GXUy^E=ly}h+FouQox zou!?Nsgs?l3!R~{v8i*KP#_2fFk9N5PBL*?A0HSLfRXLLqX$~lrQJn$ z_U@3L-UKH=JswF5mnV!IY)HXlZ(d&=+xdzDZw}h_r~@0zy|`3b!<$u$ja4j?B=EZ; ztxZToAXyqLhnB)ce!@k9hSMJ`_6wKf=~aob;w7CqY_6x{tzXD7ooWcq%7@#3wT_(- zF@j(Rh$P>mUjTeno{ty(^TT!EcV)BKA+hOhvAard$Ve{z0AGKe<Zbt#WAQUyMRr(=Wj&>MYzy%2z+^jBnY94#PxYTJd=yJk|gtx7IIf=M=fxA4AchK4{p_&b$#B6iZt60zF&UQVOju8D+HS(w?c-;V(ugT 
zT~HgK3ZGp+NXq4L%hN83UdUOl!&>DHI>Z3HKOh;u@~=t?Q@018Zl?&;gf#ZUrYbpPVE>sRu7}nN za>L@O-`(xxfqVq0o%W%uOiRp}GAl9LSTT%2k#|2XXM#C;2)n^WB@3tL8P-q-nYd5G$%f>2|w_4DRuE+?6-J0l4H=HSdZ zdD5BP4j(zi((BfPnIt&8bJ*$=iD1cjk0OGZ*|lgtPRfA* z-8&P>KQ08cw15gwbxL*eSks|w<2k+H%)(zxi{uB1!I7XZV zEZ(QAt1V+la?Uv#?Q=G#C#Ag6*g)v{wcWR|P%#A~LIpSyjfFVT5hmndz~^aC=cS*E zrBApCh+zC2sg^aKI0L<=LEdFq*riVl#A}%TmRf*#xtBLQ%dI+5m281<`9ktZ34Y#%M zt=3~zW1SI!MJ9lOWh1oUDCxj;nD~p;v21(U<44Hbtqfw9wh`(wV?yOE)W${EL&Ml2 zux(IC=us#};-}tJ==%OzFdw}+_4ZRIj65oThC%g1XlmZ_g5e0ud^IK&!q)dK4XJaI zm?iFwwvXzyJ(c%(0=zvdd>Hj_uwo7Ck67Jv-R?X)6}dHH750ah)3_@*imAsJ4|OPl z!LI9Ch6Pc!!h2JVoryoV8ul4y)hpU6H>!P1Fj=cN;HZ8ngZWJeeFAof_Qp}ZHpUO< zu#GAw55nTqIVLA9nb%M=+D6xMBOY8n***oDL|VnGpZmh@0SL#hcDKp*%lGFI?u?4< zrS3fZqx;>}`FFx|a<9tJbkDV#M2QStxTueADIA?%oO84#zFKV)MFJPgHMoW<7Yl4s zzpOn+N5sSQ+(lD3P^O?NF!IZA=W4v37;ql*d^K84xZgCcl%XTEE>sj=rFuhWDOR)qe~mjIh*H7PFP|*TBDjnQ8;^lC2ohW%L^?!ujlP_kISXmHKn)OfZWy6 zDQy{+d}_T!B@^0qsjx_R(%n!uSQWWdFrB6M+F-Kc13crtQ@bX-Z}=~_V!+%w5=;YN{|;qnM?v?z8FD3b*L@u0c-}2H@pV$e}cOn)F%hMT7US z%PiOW`1h&q$w5}{`^}a84p`IG?g*B!U!RlmQZvc}QX4`mXWfG~Z;o+OJvyKY3@Nz5j}2)lBO|+TOJPj+MSGkjrx>?87*$3ov$l z1!U>XNK+&#onKg-B?veC8Z}Lb(es9|&`DJ7V{Q;5yKV0s+^|df@c>je6s~>Y#fv!F zc~5Khj&a2_^u#DrfQ25(+I-u{Wp?qL2-Fupw;C~aeb{W%K84T(9lvI>(t3^-hQD>M zxu*5>D>OnTR*{HS_ek(WK@+|Z?EY3K5J0-oQ0BYP^tZkwuH3pTiozi5vY0QWbd6mV zd=JECN+?75wgN`N;A|qRpNjd#=fiFIvKvwNJgkSoYi6OFe-w*(ldFaHTWn-{BH0{W ziD3$;9gRJ^ujE??gV0%?&n}Fn&#r15!zE#GZ0H&{_E>CGoLGui`diLdGN%jj0FeD& z%^XrvymL_V2*9C9O4QnRgc-ZZ4XABBYT%#?xhRe|B99%k=-RZ}@uQaTjI_Xbww_)L zxnwzB%_K#-D^c)>J^Da9nj`{z*yZJ=7^D zOeQVKjJs}oei5&jXIr$Iux2~K0BADQ7H}Xt6PH;S28XN*syjc@JEs?=f<;>iyh&u7 zW0+^+m$!VzwnnhAmlzhPgS--Lpes)Tx?kcuW10*D_N!e$;1i+nY4t*n;IQ2|2w6xk zT@0p&gDnLFd=W_qSpojjK2*~Rur^(1}ETK?D z3zqIIK!iQ5*jw;%mr*RfCW{O-u%x&bBIaY$$h?ZBibTs5RireR{I^MVInjqhP_g-* zEE49^S&4y7EX7PzF^Y0p0@k9As*=;rzffXN5)Hgyf&K&#BRpYHt-Y#P8Ha7-c}K-Os@L1KAI%VV$cA_M=O#s8oKL0L)47xrJK8Ald|94q`|8sPuh z_6=+E_l^H$8*t=biP-|mI!yxsb!8PxakL}V|JS%tSUxR}8DT`B65Tn-kozSA%N|^f 
zllcW_3@<49i%@>r;<8ZbyR94HIJDfRIg$T4TbN$O*V!Qi|Jnrjp67pzf~UQ=IOT%n zo5VD;5mC&40kwMkmlkYd=%escg%*2MO_X3M-!(DO7LFraLv$oW5+!5mdsQZ`jsy>J$^%Le$D8&WA|#o*2AXyAC? zFyQZT{f8U>XBOZURDOCPhzb3Vu13+sV84NZN_7+dKLff<{~jY3T8LAs%RfzvCUoQO zplH7nazzyBIoZ;6{sW)O{|7$6|4;n>7km`7O=cB@TVT_K3Qbi1#rN;A<$v@2f6_gy z5J?B{-+ccj1<3#6J6`0#|K>Y|VzT3ug|0vcbSAPGwsvL$#+;ran!_fO*!(~eC5-U@ zphP~UmKVzZ#tufl@c&{5_-8i1!TV=H{x9&vq<7#kmw_a*RxtnU|7mYda`-w-^)-&v z

OO^CIg!yW_wix3@3DAQ zv~(|+_pI_=Kk}6HeHFno^m+O~phG}(1JIRjb$cGPJ*q#fYdjEN&=NglQq+^CVD1QR z>Gz(gP;(O4*snI;Lw2r-uWbl>qMMw`hb1LI%h6)pAsonVInWY!t}uyvd3CZ)A{ceV z_v>C8oVU2?&UktHvIjQ+5E0IK`q zrEynb5neh@pP_BW*xE$n5zKurR29pP8=%7L6`Q!I=NZjF8M1nvhh9jYzM|AXjCu89 z#Yr9Cmn>`^o{Ac9Ai*_QViw`~1KC+`mN9a%;B|3ZgjE~YzVZfk$>km*Lz;T~vOB8w zP2Ibh2Iu9|JJQ&YMx;bkSxzui0b}6kQi6k848DlPCc7U88!*Iqyp}jCQzah4Uye!x z{As$=6@>k?ccsqqRnA0jQ@&e) zW90cVF*d?qvI(;7!=NK$88wWc*JU+6vzK`~a%Ae^9nrDPN`@a19lbVa00M8#s)|D? z{#gSAua)=I6B^Y$Ds)?aI}N1BbN5G|^efc_BEO((#4z2UhgQuo-O<5%tJs|?uJL7g zWMjS=E1zf~&Ww=p5E2CCJXqO`LpXW%=*6k5-9ly>1;L!-lJ|9GdQ=Xiw3_+KA|_0R zN%SH(&OL!U6-BGMLX!^?K(ibit-W#A+2?)j+t<3^2#P1~`%K&#T&XTkvh&)f@=2KG#tFj%7^E{@90l-!g=Yvc+X6QE|hgcaaEq^K}M$YNNAO6f0J`mD- z(=8<6!b!LSs)Uy*?6I1D`ng~$0;i? zLjzPRdtHBby6UqAb{HL&os)s4ude6y77^fex8igq+!>7r&<&d|J5bV?W{>U^W@_ORg*gYU!32V29ilehaf6A@s5nGo|!B3Tlm{P4LrGA0cRQASQhxP++IJP z_JiRVG6Hk-8whVt_KW^Xw&{9Loy_A%^YrOXUXePvj!OJ48ZiFT z4#U1T=Ms*Y;K{tf?Y?VIdA9yD z=nk>!R>Im&B$zq|i`lD#Si+~;n_;h78bhr)AWu_^z1R56LZ@%);-D{B2%Nm}rQ)H1 zOWb+vflEB?obzmotLu)I7W%IjT-!94f1IT_$%61np?syTM$LIoa-p^PKdK=t~yxVKYRYT`GwGNwlF(%?xsA&D-Xly|+v^ z;Pvj{+g1N{6SjC%_8l1dP?kq8uvz>Aa$NiR!O_?J;5ksKBnB%hYmvg5H7N?2owjda zl`c|R&Z$*`0SC48UFYg$)Ejr!Ut|Z=uH((uCuVXq0A|MK{g}P@s0aqTpxc3a>P4h5 z6l#e+yGXL$hec~B42z@!jC6SMPjE{P3F|N6Q({f`H!Q+GH~q zyR!=tq5V~!!34@g5~{>RHCnt+UexQjl|WCz?BA1nTbCcP|52}WVYs=Yd4BUN!*lMJ zFi*?Rc+`8Y=iCE)@6D-!sE4D~B0G=D+B)ss1TO1)VI#R_a@?bkl$c2aA0V{D!N1<` z2QOdr7<=BY?=BG_q(oW-|%sGA_Gv)w-@%#H?M4lqb=ALktHj&X#Upw=Q2W8T9>bk7(l0&N{cA7&tNCa6k4TG9BtkfRpX5rlBfQ~kq>wFjug$O)Ms`&zdx~*+xZO^QXF;SYUnlWap-XoBkLanU>_A0ZI0+=LmE!kV9{g*+#!6l8*h zV;4^)1LiT(rT7qefVnB61f3?|biM6%fVJL;Yvg zr|67&VB_lmz4rq)jt3F8S%DG|Ck_JY@#2l)m7)OBE_)_F-1cngv!@u;jwpL@^HkcFg0 zz2gMd1zVEjC`tJ(V(y3MUH=YF#zDRRyaP3QVd&}#?)?gE1w?*}VMbKXXxB{S!Ro?P zHctcAeK*U{qH>Es*!E*-3dB_iza9{Gd>>>*O75p@EdWKWy;v-7%vVJ_a-&>RuRvKz z45{*#O-+tdE_kT)Wr8yevL(jbGL~6)sCL0Vj-ag-|LXPiCs%YJa@N=`4WPPP{+kFlbi@I-Xn$vv^8PuCNj1g={GNQv~?s_LD!6!x!^S2kIbA@$tfro 
zCk{oRP|%yPkVPOJ*)Ag|5?bW*sP~@F&w3gG?%hI8hBx1;ESGeS6RM9 z^_;}~umO+rmLOHTF1f*ljP{Mp0Ob%S&w(rE;WtF$XTya%j+;M2!5)Lv(y05ydQI_j zOVIr%wkGXXN%Fx_9I}E5in)*!!qqrbNvQ(gp0E<6>AC}WtGdXlPrRo~B@w6(mZq~_t&uR|muoYS3(#|H}or6LI z#`RD-s$=G&|Rt)Z+~v8gyCV#CI}>sMl+~8OZ$Zil z?c$>RjsuN>=a_xxshCT3`JOKUt|{5ZJm09H)2d@4sE72B4!`MI$oJwlc7QgFEs`b! zYRbN%9AieW8fZwc3t$p-RK zO$u7Yh>E>955=PyJUT>tSY;rFfm{`To?e-BVUs};&Yzpn6u=A{J=(zlGtOe2jAYa> zB>`R;ifWRLJQ0K^d~I<85|z<~gq<$YI0xGi_zXo}awkGCoF;_OX^v9mH$esD1zDg2 zm>ikFKu1s#P3odhuS%a&@iDoDvWiGEq8(J_f&R2$T)B z6i4l(n(GiM0?4%=Z=V%BFUW8>Qe{y#?^T;^Fn|#t>~-OBwwGhhyBO=Q2LAPHiY-;! z`p7L3+Dz5z`VVQTfbY$4+s)`gqxmHI zjg`rnYz0S0xJW&LHOfyp)j>*vGto;S-Lrz$J3^bXTYVri%Bz|RuBdG%GFMdU_mNiD z z{KX2K3K6a+%qWM$n@gHD`{^qy@jLjC3oB+pPR

juyKpQxwbI%82*$=LZQLewc{QSIDCEe1=HNz9n^pU_^v z#FCp$Bg=mMZOI(_YHyC#nQiQQlm!@yl>qo; z&9gj4Wj}@Mq_kVrI|2l&=9+ODjF(0`CcI>2y9KnpOkW1!B%GexyV-O3k?^BKldPbY2*a^zXcM(QX7(y zV}_cnq1R8W=R`*3gLXi5Sg-*8uux&y6E$bypsb5w?YLbx&O3uNPcB2`6}Fy)M#=X* zqbj{t7%aQd}5;SNraawfQSwh+Ne6eQQ~ zp5Z|_u@04$yl=JL(eDgtDyDTyJf)P|0i@c+OuO?GHbO0eU$^t4`*jr8MO$Xtr$2*2 z7UF8}of@dvV$F_9&Gt?<=SE6m( z#vyc3nf+#RpxE=!wQZWL)5vJm+yYZJP$mcYj^r8=O{qxIQ!EA0CD?%=_HNuRt}}y` zoW5cbEqL5JbcwtwHV*8>98$#;x6eM{!1n9XK9v4w?cvgl192o0#hnYgkl9mks%@T- zyY}p_X44u+-qE0~97dXl5k<`oBvmTw`^qD7;^D%pWo}Q6cm3IXw!I{NbR3J+c?BEQ zA_&L|Ib^JmQx64z5FEwFhl(T?lhpq6k&RNp5}IG0!(-!%ys&CGJ@ z@#}nPpl7{w&G~So>GBOl-(N`nJ`0o$(h|EP4Bsy3dsFaHb*Idp5G?&19p$Th9}+Av z)B@6VIvYVElT)*PWr0S?wh{n#y2<9L#pr$ovc7()tDE`R6pMJ!b^E$cu8= zUb73Ebt+c)RL(m$(hz;9UhNHIP;dT7<-(!y(}^8AC=J+2Y-Ef)u!K>Ey=2pBmi~PE|c1u!tGNRF@gPjrDkBR6*;I)QPeI9V&Se_L8yK!PM zCuds;5_xhmb_%C^iG_A+Nt76-j7{z%X1(cda>(*9*3l@HA5^?<_3Lc!OpT@kdg|F zt<#-jaq@Kf@C=tCykJSA$vE1%Fnv+&1;yt0tLM$stIIt2($7<1OIW0V>Rnwjo`Q9< zoJ_~EvY?P88iM!?|5~Lm6}_aI$`qbjI>HsLcx~Dqr=W*Z5n{&~!)4TM&`YSqUOJ=L zREpJICW{s9ggExQ5a_173`(Vl$c#Wm4yjz|ai=8mXc6m-KuXk)rBaf1ZeHxdgQV*z zxCB~0Pd+hJq>`x2c9nDoOgn_6t2U@7he2mj3>liu!5B$V1ab6VxWY|C*`}A7C#wYH z^B&4o)P-VpYbb_Vj>40)s1=i>xy>ZZ0!tS%+(gqB14h3AZ&Z|lRYSV|4i58cJOij` z^@^OVD4=HUi@jwh3I^IXPXZnuFalp0hnJf5Z?}zD+vO^sQBpzkIQs9Cn5adfP&s^0 zioUGbt0tjv*~!r6YnQ_x_s{a*@qcCZ`hmzj?Kyrufrb08`kk2a>YV*p4yUYFTzqe@ zXM})v?V}+8;>zvc^{fykG&3BUc2n^_+!w3Un8GM~j5Gfs>5EnChCbOG`VA)@tA?_P z?CqJzrAb$eA08SP)ehRr{UjU@UW$RHJPuLz_op9WHXFY3f8?EI6nh&W%e<^b-1orM{`5=gzG}5!tx>&b&`ka zc0k?DGS=j?xgUu@3F_>Iir$KwFg|TnSQ!Y`(gLE~U1ML+kkY zUQ|^#u6_Ko^(t#fXEtLb6l{gYknj=bH3~cC-?uIL?uq1=w3aG|%dCkg);@-0s?>z6 zmddjLnGO!~*hkynN(2QCfFQ5g*z_mE;b1mdo)WKvkIyiBfY&D)cAZ!xJJB|T@c~qD zT=G0!-D=~e%@4mJ?2Bq*?bRxxrFp+fB9DC?(TFZ27H4D0z5j=;cZ%*L>bkudr6dk)X&O-dW*@J93+icsN+Iu9<+OlELG>! 
zbcV+pt`HpYAeUn8`7RzH7)XsE3l$$spRUH zQ*?*atc?wa4mJ{@vCCP-{>uiJjc6ZW6zkX%n7(f zoB%Keu@-TyK9KJn43K-2xP%uuwH7K_B+3#KS6OomTM-*K)jVURu=5GCu1OBPTG4hifNb}zp*N;s5_n;#K%4Qck2Pu>5o?$F19BpQtc!XR6F3&Jy8H> z6MV4GBoKyJ?1L8pNMF6D>9ddz!@DG0er9>$Xx~M%AAu5 z-h2PZ2UvKZTEZ=$pae=^Zs1B^SDvu;>i}exgDa>5Ea$ z=Toe`=3q+lpe3B(vV$pdRn0j+Y`8UXP|nzOxsMLdDt2?26&qu^`8a;I|2)D!lfs-19DBDG<>KuF+%15qQjxD-Lt4 zl^|pJ!x!S{h(s8ofm{{fyo_<&tE%)ipGF_UpDHvy0yl6-TQx`HAmZ zZsy$i6F+v+5?hUl*|3jI+?#u(NZ;wyqa8z!<2=@@q({7#7?u49v9Q?QS zQI2fVaQzOEOA>zdS9G2+H>frB(@{l_$lAqff!{fZM4_d=Zx%ea+vVY^7$C?Qq_HQA2f3VJ7UC z@RE^_1#6VLRe_*R#>PVm?n$F3vhBl?2rzr~A@QBA z40SJ~E$XMOh+jr3$s{~%%BUELrY(*2y&s{nx0B+#vOIv{G=c)dcx!s2`}p`gQyX~r zI6Ya+f`;pW2(lC1--NW4MmdwlWs?XI8h+|b?V+sOOy0#>7K2gKfVbA2{5u$>g}c?hmsj3`}G3Yhh@KJ_Xg@DAhA5z&%s}1#EX*KA83O9g;N}&WFEc z#pmCm_$xqKQn2B^A-`7Vl7bId;2sC0Y9g>dgShkZ&%GJM=heeY$7gNNcBgj(PiG$f zciP{r_kpaWRg;N{jVwAuAZ4Oy$8R+823lgTpNm^-HW;{)#Y-$@mnh$+fmRA=s@Fxq z1B08juuMirDJs`0IC_zopkRL&EWzF1+569!tV zoPgwoz)ysM5KF*3A1vQ3)(hCNZccILg)+r-aFezS+Q@{5$3T@s;h+$!{};yL^K^u8 zG2v&OeSEJ}vlpicL7X>C>1!hA1caD?h*chs!w637#5HOQ{NW;m!jjH28y_$Ukd?Lv zw#tr8k|+Ix>#fUSj`0S(7oaamn0>~xKrrNCDa!qJ8yFUfsFcVI6*M>E)1#e!m7S$f zLYOhwg0JjKKybJaw$k{TScL1jrySFcp}ih{c0fkfHJaw^?T{r5n(^#2=TA#45M)Bd z<)PbnnvkP~$7)IHI1XjpOj{=a5SSBO_8Ba+$XaZIT(KC7uml~s`3Ip0j1_2K=!4i- zZU8iWR2#-A7q_X76P^c3D3KOtPm8=ns_V)BDlrW=9y^F1ZOluevBcx#ljl1f9Am|R zr8yG#{uNIf2Y%7cv1${}-^GL-HVxJu6O>37HSs_Z&9#KA8KAT#I@cKjm}@DJmC|`O z9aK~3h#ZWosz-xhpdF2p(9y^vFTN6WWHPEp$FL{4BT05JnRFya#sJr1%Pc)B5#I-c zf?4-TcU61qosS&aTRt%e*US7-_@2@z^!E+}KKMwq1m}HaNPB6P#hCjNS)uJ>Quv>} zbU7?-VRrPKhFl`R%VHw{?25ze6E5YA!=B|dJW#-cAiBlhy?duQpxalZHVv9Pk_zm8 zm!~#3E5u$XE5zK95-DJz>(gcY6ZFZp`&x8phRi;FE-MSZ$nyJ%T8m03)t*J@uZU=c z2qMF$APJQd1;<)OiExDoizywUI&Zo-!v6zm>Q3^v{sqLiT4H%UD&>97B9U^dk%D0;H=kO41eq1JbeR|A+S+MCHspl^xs5T>TZD3g;DO-Z; zG32z<3YEz$69?PMxQg7gnC-CiC6~aGGhHOMO~$ezR8O#Rt^4vjf@`w|)Hc!x96@s` zEF<>`&&UHo6aYXVHf+2c;zhYk$r1PDmrcFq`4ez>y zP|Oc>?ti23JXKH!&1LwTc77qOLM 
z;*i!0227KN+L?<8n9|z%0kXmYc6y9*K|F{fk|}}0z^qywoXQEZb^hl>c+KA37_!xW zCj9;RRW!HabG4lt#M;uDlOU3SDyl@<6JL%1ArIaPwemKN&@6`a z1N}P>p0p6udJrx^{7LVfG5o1;c^r{DLTo}rDpY@)U>gtcM#qT!RZISYr!P`bllEEZQ)DjV4R_V9G~NA`^E$3MALF-bvp z!9Gd}W*zaEa+@`V>?^4Oiu5+{0sM?_z<%go_Ls78D#iZk<(7Sb!r&!y0O|_O6buqb zSfd2Q+5|^PJDC0ZN7>vtb~$${QS}TJ+)4ENR$_9wSaY3+N#nlEM=*JZxFG@!T;D*V z6vBXQh#Zc*8b4aDXm?wfC~~{i2LYDIQ^$N>=ufVKRiRivjt|e8rYbv&gY z!sZvuAY>0}*6!jD9vH>6mhudO8s_@}nPNA}P0WKh`y_010i|&nMyI6NOmy#uLHxRR zWm_oVWa8~?(u-5<_y@^HkdcT!;cxs)CxoMWz#laS=i50>NQSEN8Zi7ITe|^(xx=c~ z$wM@AxLjl*@8zIccG8T6iY%|o;m&EaLxQ(PYyn($IgLo=FMY5|E(G@xS z1p1&G5rN@+y36ei3k_klsu{~+G5`J+$0~IosctU?_K1VChy{M5SS)3_Kjwh!2CL#! zU{%Od8qlvC-?oNrqLYlU&$&yO(##%ntmsMY`cBjCN)~Ib8mArYe#U z^6}j0)ZELQ2AL31yts`miL?)CI-IZ~+&{Lf80XW5JvB_e(;kB`94N%lqUr&czU#cKxZ%m6Kb>BVeZ1Teti^J)4cgmP1>wlfe4C z>rNa>-@@WaJRdn=*V;mWlOrQU3|C*$D`VJFvAIZ9C_#o-Oo&QIE!v9^)hZtuE{8;| z_s6f==Z7{w`FJcOdyVtRQmgxt!xWT?^~UDq6@JchHwx)SWvjp0V#nY@Be-*aNwaC&c+LJ-kaHB=1&zkAqS;ShXnt(>>b z7fTxPJgBQB6t;(`gO*1{oMCSGRlyN!V#EY13@z%gUz=if{Nvf9gBu|bP$Fjyt9yAX z>UW>~B6S2iO4JUSmfK<;t5K*ZSu_VE;tQPzG21(zrjgqN>B8%N2MppIF0z9fvNY1i zA(}9;MOVmGllGqi{w}_AwF3dBcP^w=svpDtpJ&6bwAIo94;57R)~Yj_A}#`rG6(nyAmq{%%UP zvB~vFN*c`!SRIL}M}JmDRj+N)Q(F`wi>?NMRhW7PdCWvDFS@Y>CV82g0OVxDR!B;_ zxHOKZA^C7~+PV74z~K(eM^Li`y_oh`)N8}t#jo@5VIZ(V5>B$NR6em3clCC8qJX7o zRqB%%1>roP$Qw`2Fq-c0-CWq4~INZPKe3$n)Swtrk;U4$Yt)GjHw ztpLu^9Ke>{P)4j=#`OwH)C(-<6rvi?X3aqA_LoV3DUn_Km=;6kBnvDx_e}3`#qR>A z`ktV)b2;#UP|5P|?0>2$^gBo7lC|`TO`P&b0fgBa3ZDR#4)&=lCKw)yuoKzOekVzJ z!KC}NU>mhm-Odd%WSav#p-xnBN=OO#70>C(*5&VzxU0%fyW`_6zcArP$b~Kb>>~GDOrm(TeabHfV*M-q3_G_T zKLyANTCS7k8KG^u<;4OGuGIEL{#4obb6Y^;qxhD})KuM(#D)VY8<2ZWYBI@PsVG$8 zT7RvQx!1I0r()1Y)xzawUme-LY4BWB_aYqtT7H6Bp5cji+VIBYkTGq#!iT}rI105x zqnLZT2Du|TWid!gRZp5Ovf;rTEu)^LnEHLS+9aTGGHa>*1DrtqH?weOD}1=5E*CQg z)K6>KcpHz*O1)vg)Y8h*m}7gh$qQ-L_{nCK$P28oMo6@quNb}Ml949{Kf~wLcD5#9 z?JuQQ2^$S0M!NIRv(BjQTV=|e0#9KQEe82qjv3j>N3tL5%OXq`qte%_wo2E!)1A8l zC(N`M3_1(#CKYQ&24OV-Z}ZPseYfDZlx0W`pC=D(Ii 
zB?9pOZ%MHK$C3bMXHVP1|M4Wyz<{gMBuIc^(`+k6(b5D~pdkTltp9sn|4Q5Te>Rt2 z-91M95Os2k9sUDAvNnybihtzb;WGaaJe73+>0PV7`T!))pN7m^xS834*a5#A?7*uR zNSyx|P$ub|U%Vh6BUA>{p4N-3JzQNpj%I8lza2&H5qo~fT{}5NrpwlbQ-^5E(*gH@ zB(RZRt??4ciI!w!Eer1}r3CW!@`l}yiJH^Z4JiJIGC>@2VMvT2dCxuBcGJBt4-;Vv zB=n86DF9f;oYX~6$sgQStTZu83GYfQL}+MQUj=-sY27d8be0pYapvmZti-;~)u@7w z;Uz(qtW4O+ld;}H8V=(K5IZ058LsmH51LjMB`Odt*_m$?*(MvsWQ^h#!Cm_s`QD6V za{JweHw`$GWCOW_e}3&FWO`o*?v6g63EFh+!i{@5Jze*(yuXGqqQ<#FwFFXR zu@Ph6=eItc+X0@C3+pUro%)ObJOf-A9n7PvbnS)|x+F{KQT9VyyiDSOBq8cPoh)=U z)2L|^9MzDkIkqtRm8s$lr0UG$uxhA_5jw9+LYDNFj^&J5f{G4P4%p~Hu+>l|CH34n zGXzV3&rxAL56^z);Z7NPwr@3i1sDGBupjLY`GGj zg^DgCx3F8F;3gL;%`uqB>6Fsewj^GxyJ_-+80XO;T_3|=(3PZ*Ub_#{#6HZ;eHWc3UmQFh$7D0xV(-m ziTMqj1E&L(s;m)aT;ryIJ0SSNaqf}Uet9s}dDVPu_YJ3Q_&>_mF7@vAB*qgkMhk2<=z_Kl9*$)ywj9L3h6?n2LsV&?{q4~6 zV6PNp)NlZ1;mgmGW8#`=scQ-qGE@h~(Al$$%oMTKIJ{lI%}AGmR%seWG17$TSM`$b zshLMTXztl}7f}qrxjbQ}KVVIhe(hx>Xv0aB=4}9?&~3pPISA@q$Du7p5utBU;p>$0 zYfk%RU}_I)@U_77#oHl4BGAmqm3mfF1>M45_}k&*GUPbC#N`rfmUTo~irRVlZoj+H zIa*+Vwv8rp!nySBAS+2V5GtPyQVJl{Zn7!xLzu3dw{QXAF{r6bB!?THpyX@Q!ZtA} zc$jk=2{{5qNNdeu8*FBTXdRL)W|Rq!%IC|HR^5z}_Ek(E8Ku~g!_gLuD+|+mtqg@n zgu3M#OP}iwluVa>G8JMO)K9)XUh%B|aHcV6I*cZ6&r3YrMec{KZP(-8 zW+61M!k+bknq8x%3W!1E3Ih*^Ts zN!{r-9$D(8k!ioS1tLjf>3icT{Y35*sqL2LNH)ra|3-n<5bGfyZf)!~G`$;Pq^2qM4}u)hdeJ`jCm^RC_LJBBml&^-(~G zbR>>i3x*YDI)Hjf9!!@5!1Mifaq(1c zlro$3bB$$SmMur1sWO5%+BnyH1)!Dw0<7g#@uTLfk3X5fsT?x=JgH#0FFeY6M`ZHP ztcza!M+wb*K~&AfcAD#;7n5Oayl)TBXQ?>(GmzhZevig92yH)HxzJiCXh*vl614-7 zK3oAmM#mHsfk>4eoPOH4c))Q^ zlrJ;Q78h)M!fA@mzDe%sS}4oFkP1~2f*}zcQL`B5!y6mWr_l1_%G0mP-q2~8@pbLS zj7LFQ>B6F6gBdgL17hjdL`ZTb&&1;Gmva~i?h_VMcte@@t<(Ie@{4&3nRZ4+E#(0- z5?6jjZhW;(qsSajly>ntGI~#dXzeV@-WYlsw?)z& zpMr;q8D)9-^#!!b;^C)h2)}`Ht(C9`oEC3r?v4RhVDs%kT>H;_d0@_gVi8=V?XwYt zM4S~^QGH5a5q(PeT?gGEj3~D0+mV~E+x;Q;K`e&^hdnvs)*u2&-Chqs$}^ir+o|7U zu?>VT5$gDjU>W`aQ8sCd5o zWBx3L&~REToo=`SdeNNgP^#7+`7_ivJU(lAHPTexS0JzIGS8GYFUgkgz#11f8E;@Z4hieD@~cK# zEjvmQB);K$hrD*>lG>$5*ZLrX#%HpsCw&r!1dLCS8#yJ|~+ 
z*JxbOQJhGwDA2xx$BOp>J?b94kPtAHaW+`(3I}b7sG)`uJSCM+C$dRY>Fc5)+ zGM#KcK(433dd?dCHE=wm-83>DamSz-@Q6%Q9Npp^Q@ulpMUgm*tFu(azSnx(%Y--Z zyIChnGH#x`1XlwLcU zLH;O?|D91`=SUN*1#(DRrvm;Vh>j7U{Rhgz%E`p^zfrIM6A(4#=y#((Hv?0aj^zn5 z_)DwmFLE)MNXFAR@<yk~I{IwX?eacQ;8rb8LHiX&SeI04{9DTO}{?!FYIC)t!0R z@89%BI z14!jaqE@G`y@9ihg=dO$eXaCJ=i=0c=U>YI=vGANJ~I6LUalX`=j5+Hw&F_TIOlj% zzLLW~jivCX+j@u#xtYNIwE7bEmM8-O?_i0RZ-czWxDj+2r*^)E@g-VL zVu|r*)?SB?HpY}ND#J@LM`!n8n9oOmGx}TGX)3ISO`kz#0w{&2mQzwTk!A@W2 zJ)$&9B2$;nMahbQgrhPc%YQYMm3l5-tU~?Yb zcI%!Q{qI0rfV1;fYj#)*7KwVaL@*=2`~I>Ehx$Q(#&y#(?$wM8V@R+ZSK2JIoD=b+ zC>gxT=on==np3C9dDUKn^_Zqms_;5#9NmG#Y>VDawCLQ{)Y5>>2;>d+9AV!90R= z@_4_%5YGpSp}}*O0Ji8!k~*PPuMa znWCNaX+un32m%-8T8CPD5S(%3MF`MTr^CO(seVGjyK}-Dgq{%!zbP-&`uXw&5bfXk zb|)A|02EPZC8&eoYqY~Ynvc|V;(~ilm32xmIt0?v4+OGRa5k(6HmzmAo}=KcF|+t% z&%$LVYx^)wRu=JGh{(?2{_TkAP6w&Hw0|d&Cp#5Xfb{f@P#7*nnKvx=hA_~s1*qdh zKEX@8=N7iXA4RmN;rXUP_zTtz|&CmXI9ogR3Nbq+77wH8l0x~YzfZxzcYt?DCgF<-BT8n-7XoblTrroRDqRE z#`3-<9L>CJ;#bk%|6^ zazH!Ip_J z*;|({<2i#oA@GSV1PA4bb|zAZYi#K}5fF1=<<2M}wAfP0d{(FGf@~Aia6g?UIcf9mV&&tPG2SB!RCf>}x3EIK8iclmJ)15CToU zZ%o8Q3Uy!`8IST!#6qj(v2Swv)vRt!VV()(IFsr*Bv~u`NkC`n*A}8x#4gXVn$0OR z&EMtpx{ZQ$#d2`f*s&CS9?2;>0ES7y4FZ^Pc7`L>D|R$YPYD#Gg?(q9(Pa&Sa0OzyX*rtv-&jL?O1z{h1-x)kK@-cl1k+hwfDlhDMV7Ct zK@JXm?QCfhRkgk~n(P~Wx;MRoKZCRAV5iRGWvppP6xjW)KhiaFSG_*#l{8eqn#Be)7 z)qEQSh9^$52}|XBidac_wP3Sm7gk$iTfRB46QD96>F0eT6Gr zFYIl+7Av?ljg5l*lXz^$41TEF$$Z94Yn!fPeJ^XCVuZspK3-EMC8WC+qZ(`7Ze;Dt zP&r7NgIP%?=`ajkAfJMlnu^7zOnr^SitcDr4?_!nRdrhSg>;N%%}fl_$3R*`l{e?P z#!zq2UW;02C0IzQNLwmsY8XR>eTT`OxhjSmk+zL}V?$gB&{Q8uN51}@B%~7I9Lm#o z`y+iW;=``wM#S3u>JeS5C3|VUt4ogRyrs5UOHadGekK5W@(b)ANd=_^0#;I#$6pB2X3aEAqunk(YrC_f{!nq7&P-yxU^bZm|b5 z6l|@}HECa`4pyCZY|{WvZahKCgu}>&r9%6qcHx8cJ?uvA>!au82=43a6?JCk0b<%o zIrsjmX`f&rQwNdl`yXC0asEio0aS+%UHW@msb9tu;3&9b<8|x)Y-#5EDy`>xsl^=N z_k8ydwzA5~`*HViJ(ZGB_JyS#`Y5sE^Z9t=_iT*^bq*4|f-p<)Sfs)TCo`9Zey8_x z>)$dP{tHZti3wK07s}=2j_7(va2)=6(2G@z7x!vBXJRhr!GjfH`!(an=RM7xVxDfN 
z{%1DV`Yo#cB^Eo=(44aPMu;Tp8ooern(%-77|z>7K3-sKMZJ z3*sWEPl}^soA0@l6ge_nymaLtrFLL;+0hWS@@d&IBH&So#h)S>4t1&be6Hw!eqG@_ zGUoFR2ET<-L2x4ED;X9EW^Hv_x#>>$Tw)x(7sYk`&{C6Fzb28)+bxK_ZweyO=a~>#0l&7SDEL?9i&^rGj$dLVgrT~ zTtQoyRzt*DS)ch;yAHScJKZP|pK}6T@#k*SA5~pm3sz`81@QhjB|npqu$8MpdN#lM z2dGITj|}00h-I-Y*ID0d8=DGxyY2Nt4OR*MV*$r;y0Ms1h%Y~4|89aKXv8;v=88;8FO)$sh6WFM~aUt=VZ!L6%`Hunk+VmZGV6N z+UH|k1qtu?!yjK&9`La z-+JeiFe^(1>oNH|r^X#i-b}04tKOh%gaeibxnFIvjET2}a?B7!M`fv9Z+1|^La;G5M zf2@Yf;-}Yq@+olTDIouy0VZCeOHkw62wid^uInG?Ak}B$p{3Ax67D^aP3oATeGjaU zXt*QhY648}WaHoiGQ0)1cSiZ?9F-x^-|xcXUNd2?;BRpiapT3gNoYrgxsUHz0B#U&S_kI*`n_Ub z(bwhEPaGPJ$B4xBapRg<^U;kWTe5U|moj5IGD@T-8{mx?NXWIe4#tBY?yz^eIJq>6 zpSlhAz&V0$cu<?5|XtW>O zv)KFLp{cUCe`W2AqJ`XD5X;WCwHcULL%HCI>7yfKL>0MN1DplXsaNc|)E`6TgGO%4 z+VUuWI!?%B)OFDBAB?WG59(|odKL=zoYx$auKl*R7=M7+tbG$~xReFf2CUg_Z6}v8 z+N+$kW}AIw_D_S)Mnx*gl_OX$0cQ9X_j<*GtftXKS*X_R>d^exGc6sjf0a4rw_MG2 z_S%!SdsEO407{<|7hPnAF1q;@goee#&&iw19a(4^z;zdrT_!F4+u0MjfLL{Z_qbukjT|JPGv-$_e_ z?i0;sr!^i(PQWNmGJMoyGse4_Ti|O^R`isIgEH+}jN))K)r%i$exXpFAcGkfxL<*L zHc(qv@Km9G&a}VO#!#oPZR)>9f731Q48uA4JccfvXG^B&r`fa)9JHEKley2@wT-mP ztH2N#z|4EY!Fqr40GmhO-j~*=UE24^cw^6^?rLC#3^aZ6Slt+PQh;j4lSVs?-k0UX zw}y5J2gsd%zA$A1@y6p$`-@R?KLC5|_q@caubp+J%WBWK9^DS~ZJuK|oBUi;;xmD% zG2@7!R?XCih7jNp#0K%NRL-%)*dRS_lO`<#fP}+a?r2#sR+QAduvZq=o6c{mlwyt0 z0<>~d?#(gr5Z0H+a*x4Uc{ zeAYc-<*ASCL3QD;R6lr51Q$*SubH71^cAVjh`+C|Eb?D_Z@m%QQ*gWUbQh~Ig)I<# z01r`h3e_l?SUtBWstM2F4zzc&@n)muMTL4rkJ)Zrs)_D>x_Jg`6(70YWk39t6EL+C zm={KEz3H7h)nmWO^Y@foW&RpRx^w$3bkm7Xn{4g(H^lHE~cz}GJDZsj z!&pvvI5aQCE>p0kIIE5O>)s{Gb!#XXBlMs9&<|cxDGN&CVUf87y039=C@uqpvM|>xvAy&MEUz7l);z9|7&*W!~M<7GR7Y3^adKV z=$9_}uv}kE8Us5%%Zy%#J09>{1KN1-*2V+`- zACNjQJJ)|DGHlKti7bZ_6c514!TP_|G0m+gTn^a(cI%xq&;t7F5y(OkyC3F*W|R4C zLOT>B=JgW4^TzA>mVGYWZW589*G%orSnEF;Xg_J+4Q3y@fZtH)$}p90-7t4<@ZGe) zO3wzc8zB6Ag?qyAz|j&}`VhvbaYzPPLBpX1063&X1y1OSVS5K#*Gty}^eqTbsWf~oBOy+ z3|8kkjNRyVQlIO4-b1#|H=1&U*9j5>K?R_2Ax(-Lh-fh3!{0&LvmKXD1eoT;Hcvu} 
zN!Y6}#BAGsoZy8uqOZX}zDt$dYm#dv*6qdsfK|N0&-mm4A!cPaJ-3hRWB1xEz&x9m zo^j;WqeB@10NcVqKb|OKf2uQJ5OBl0t794%~< z`ItY|Vp7EN3+Y!_FGkZb(`Bbf04Bzh?vKZl6%Gc_6S=*4=aj@xR0MPUz0>lK!G@b} zvc^JXepE)Ha({kR#!9f>$KXb~3?(p3>wdF&#Hz_Vupe;N#gO?xZ~dw>Ts|V?o!+LI zh}{~coJqpg!`ATnEVIw%&k&)XPU0Qi0)0Ax6s#$osJ+EvP}JhieyjZ$;LPdm6{9(4 z+^ufml-8%Sm!Zr-G=>g#<1mCJ>KnkSHhg;0T3nmf_jhpb{O6z3(TeY^{q2?DA|Ncy zt7y|mrIi@aXYtH}Zg1Mip_1*)3TBjVfULpOxdUdM%rqU4@Xwu8yqJ&nP!ro`sU#g# zM{+FNbjy+Aa84s60eaaC;GM{Gz8!2;{FGiS`A(d33w1^rN-}u!Ps^|@J|_z7;_14` zp`Sg{R0j5FONKY5!<}5G@$4iatvD=gxJ`aOn5} z*WKhmZ7x#P6#4go!8-nVi3hz+A89aidT^ErBa|aT30;~U^vF#c;KSBABTlrS*v%zq zcuuIHN{b7ohhnTnCQ&T!;$L>8E=p;jwHLG)nEEXcP=ua%gD!%8dz$wGhJ#fxH&}>*k4P8QaMC+=p>ucCOq$eMaHTqWK95dG>1zeuX(9Y3vAQTdZL>RbL-$pD$7 z1}ijAL2g#$yK1$@$iSmf*G?7IV>#W^02muk;_QiAHMxgS(!J@RB>%#>h$a$pu}S~W zFEfPEsix{yvBmgktrh<$7>4Ry(I+8r-V}fiX#~ug!l0G~@SN`R{Hm7Y&|Pc6hVYYO zy~5Cj^Q>Qt8&_-cGq2wba>~^O5gE*&Y;4UUK7UI)IGJ%L_zindy7jn&7%G1dCZ1u* zY-uz9-`)~4a-#Bu#t;1n>MVx6TT0gKr76#0Qe~(zA5(g^^*Ksrt z-SKe@2U}D`+uG0}m^Po?3kCG0o?4MQ*}7POpK9)tHi+tuvsC9^BvZI0dP!AB@A{Wh z=S&{sl(6q0VoD6+D42kb7>hZG4a0&$ny8zQUI`R#C)KoJn;wu&gX=xQA9nxG|GFa_ zX^!5&8o(Ss^S(dk2oT4A!!Q1W+Gb<^znu~3I9v|+?&sQ5=U>(je!qgi*;hq7r%iJ> z9oUEQz1A1nR)^Q8zekj)`6yFVRJU>`ZICzWO0!fGB_fOXmFDaCLV9K}Kg`KUmbnX$ zz8?IZ9mOLQQNMyYDj4?eq+gyLDPWo891EZ2kS*yN0Cx82WYLQB{*<7KN|07hdDrf> z?gVb#U5sg=>0- z!!pVmCz-(zZXfLRuQ*SKZx6+0#?^}>BZNv}0Y2l``dvF)A1@!JKbQ)5pZz7z=i^i* zH~07!2vJOq(7*<*bbYg6K!KQi87Hst_gaq^6aJ12%B2lUH$fson92hoxO{9O#XrtV zS%kb!JidfdLmYQV;_n01bYMuf1yn{^jR=#U_pySdzWy10_=l=s9PHe$Bu&=Jkr_A* zK;bzv{wk80j%S&GN}GmWcY4~2!r0*lXmTD1PZ#0wDXuhf!Ea>K6F_)n9frk^Cw#nG zXJAc(=0gjWOAcpz259Buq69iR=!bk7ehu=Q;H3AOJ`q9F#Ah~L?tEO80>1uzURb@U z0AR7E%)dn7j`S^@ahthx`Q>u?rGoVV2ZrFos6`j)i;XWkw+taA=H#l4Sq2i{lDi@Evy zvX@~P{GaA$`{pnCmxKC#>LR(eb*OfB2z8@QA)CE42@^pMEf4|*$t-ZcBCV$YDRtPn z5eI{*vv|#}CL_e{^fUeMxhWmQLJECb4{`>of876`{_XjYk-r-72oKwKA!GAq8)Ge@ z^9?RnQDk$p;+041DYp+Pc~F`7!MIi+_-?1UwGr?>^6|s%6P+zAahJU``MxH?2E;xn 
zw`^V0LeJuudO4+X^eqqT8SR-mOp%)vy!18uEt#5b_&fd8cIhX6T>fWYChvA9OB zpJ^z~Iu+&*(z?wy-z=A#r|uUB4-+Q5#52{69=PH!PFoyLp+g|}W*6{+p9MU6%GQ%c zCAOdb`G<*KeHZ+I`3|QR7pSswQP5|iIW0l(?9mAUCD*t5mxSnPb%SjK;CxH9n)|CD z#HD@}KW%7YX+x`}n=K0V*RwL>vM9d|1D6r4NJEBhj7>IQ`1Z1I{VCW#x#FThuQh(I z5B?9zq?8Be%nOP&v?oGP(j|5|BYYGoj**o%TLcEbt z$I_NWzGS(Tts`V#?A~21z!}Y>XEf7%>`zwoE7>PIAK+{pklZ04@#S4X9%s(23Zx z82We<*EL_l=S0f5Y%(0Ws)1kI=xYR-anPCMr{M;76$IK~(>zmuQU|$W>k=3tfR;&< zYb(r0n))(L4qZvVp&vX~bw zu%YUmmzGud;=ODLh>4I~8ERy@BOuE_dIrWuJKOlUhOtbAf=uID^!xq%P{;f741-&l zpDcpCJK0KfNxJVNqCs$`g>*wfR-$`R!1z%?VfgY%?OrT#0Odf1j!==QW)uZLQ*ye|H+IR}ef&x>N6G~NOzaUC^ zE=8P_hH|py1FE%0j2C-Uts0Gs?tC&J(8iL-9ONKpV(|e9RNU}`aoM4zurF4#s*w}`I4S_ zu9|grSg{ZI!f|ybn?XLW{P_@KoNjVu(q}Es>gmxslh`GG6ROd9s!%Sp8gF~rQQ1J? z_Yz{>W9HJ^#OWajwNM}V$=K*%6P3#6QKM$Mg<%p{0NAbSfl_e~_o`sYF_}u{(Fx3z z>1N(HJH;Baf%~WDseDQute1aM#>O@?(#u3OE{HgAk7Eu_CJO6lT<17KUzh%hZ`x{! z9o!E6b@)a}3bg5#p$x@h*ZZ1gvfCj0i?z$slh^f4+@hdmgY+&j#x1peLpRag0AqG^ zaee<33_wJg?!BoZSwqMqqXG5Xn!AD!H1Qjl#pB!K5ey+JXZCwTm zL}&Y&))9+~H^y0~CR!ql+e@Y_fCX&z(-{Yqol4)i<9|CI^C>1=in9{ITu7TiX} zoqsQg0nrcCm{kk0k&zhb@PF8P=kQ*8t!uQlZQC|iZQHiZUsugl+qSuC+jduNyIpNh zpS|DrJ9~fU{F^JuOlC%|JGnFG7^QPZqvpDGD9Js+)ONnVzr(N2s@-Msc!BfKOEnUX zK>Sf?AfqTW2INCRCMo=z(~lNb79L|xmqDZ6lmPOpuCYZt)CRw-47|lH5gc5Go{%-m ze;rP0DaDRTyWN>hNBWDZSBQ|hs80qY$MQ}=o2U>Xyv1pU>5zy!_kT-G{ybUmL#ogc zk!TeKqZ`nk#a=@4TvAbZk&giDPIqtWvLT6~*w!;)bP`oqT9#X!BrM2@;|l+4UyA*Q zV+9Zg>n=24WPAkwHf2gS-VvE~$Ph$~?)K;l^V}q$zLkWb{$d0DX&+sjht&h`_PWI2 zTVIL6WxeYMaE+4-5Ry5N9@zb%*g8hIn)X)UKE}?BjY?u{^254+ICyj@&70;DMd3S+ z%*u~@bHoL0v#v{%du#Ol3}4|4pW1#JQxhNo*XrdrwF9T%)sysT91m2sqNlInFe~bb zdB?1p;jojQXW8hG+Ylb-fNsf^opE>5h*y*=FIW5*_0ef%@wXB~+TCO)cvd5Zs^@se z&AzW=jv-N*of5v+@)wvCgbPtGS+PgU8d>xV^-pY>=>Sstw1zGdY47!4`_B_5Ix>JH z)!DHO;7Lvp4Biy93mm4ZL_|7Od0~@ZeRG3{sB~5dA`JDTlx(-aQ_kIDz~q7@W-1lc zC7a0=l5Tw#k33Q>DXwSGB(R`0!7X{1{yO+R#?mfJb4tIZP}YQ6rCK&`Mvrk__buh} zDU!h`B=i=UVOZ8B3}`9P!xq)@h)n^vPE}gEePEhjPM{fR0Tj&+nX?Dzaw)(kzT`!w z!J@wzH->4ee|QK8oFm0|`8 
z!H}?tLSi|fX3(HHt*PIVVh)cCMt1x=z8;JKZ=D^ljE(OzrHGhriB*5h{(eWx8^hN+ zQPfU_OGtQF08bQ9@heOc{)##j@rfNA8V`LVb*gWWSU&_=p^Q%O=PseQ6n}tWBjpL~ zFf5v#ohl2EZZ4p=YylClW_{Pg%9Gwh++WIDgE`s2(F!sb9pZ*3+0TNApW7D4MnGkC z;3^d^=&T&ox?;gnR??Wce***9-|{X~#$jwfON{ISglgC}t6%^KCTCIW_*{@KURfk*^_W<>O5+46Zf60E4s&0V+8=&=dT$#0IZZb zU?cu0h{!CHuz8mXzLMowx%-p=)Rwc#iJL-?$uhPbx=AJ69ks(Ozj>?wp}Xil#@dz6 znFk``2;(-1(P;1vDW|q7GqhEeWWC_LUe0bZjoxAqvMGW27)+3`CWgk$<@Q8*=@Lc* zJI9`vM1^l=DxcGzqaNTwO+%%t+FLSD@ME|pP@$^`M3H5S)Oo69uhqG`K%lQd&tpJZ zj8KC?orZ%$t6VjDdC;h@Ur3yk>cG_iL{5oPX4bXKuwED%H}!+|9Aj9Lh=-$0Cmd}j z$b=4!5RM4jD2ON(jZjy`SM&OaITG>FE4x&>?HrUxVOY$K#KIaDbBwE3-!FaYt9#-J zwB&(m(asQ#BG zntYyRzZ#upZb&l^zn`_UBo2duaX@Cp}(7S~K=oRh z4q?E|xG_*^?~2!J=$}tGLxn4XA&@hfyP5C&51fJw-hH*MCU<+t2XR$#FF*h1-G{eU zhi%`_C7qj#j<5xT>)^O+#-l4^Nb0~AC2M+gwv`gz79UE!eA$aFTUOTa{Cv4c9QXQ63-K? zI`()ZgcjL+)&2D^bMS&`a~lk?6%ak8Vj_L> zo3WmI)CJnKe^D91eRB#<8hktwptXx|GYv2+?4sT055L zF0*v1BJkuDOdW1o6TrGAr4^eak&sr+gTcIEJxX*{UkXt*aIfAtRG zzPwUKr|iVKAQWuNqGYMRZc8mHlNF?K?Q^JL=3Hb3mLcg|mg}hWyxw1Tf4pT{OK%y_ zDGYPy7qxc;R5rm8Ynp%{B_ab~rJsx47t5Y()o+~^JHQ8A)f|hb`@`nUpZNpQ-#;Yt zbIN6g@|r94cno-;7O6{>8G}#f$vu{9)>^{gE zrR*uPT^$x(#(0TTUPqFBXsZ(-LUX~13hb5%zaAc29d`1B?tg0oyvVQb-gV{TwOZBn z0-#615uLjBz;EN`-k{esV?+U#i54}lAJ1BtUI?i=kAV$Bh0;vfjW-}CIzG@Im}%C9 zNpTuJMj`>hjxgnCnR}&m>fA**e~>Mf{SxXxH`bVTaRDsm=}whtA9qY2bbi3KE*dRc zY1bpRD@XR|W1m^;+}sikp(R1!qjh=#bVI))>Nx`7At1?^<5ct8!ps0eRn?U(Kh}Me zJNzzt$MAKWWtSCi>oeovf5HdeodYakE8!3MtKtHlW29KuF~2fOW0Pojz_GO&6W)NL z(7M=doR@E4;(*uiY-rV^_tOYJVOl7+qVR z_4px2rPg6BUj!i99sfINjJwYlpyIFo-WfQCzqMb_6rAR#XURS@(8s}{2dU>P_! 
zoYxLKCBkhqp#(B&@**x5{ciTNepqAM^T$WYHIzgP)ny1LIiLd=$=e>$BL0^3w>Qf} zhnV%@s8*sqm6R47cWd7%VKfEBGQ6)Yu_-~djea- z?=n7-OB+WRuXq7uK{sZ!F&niLjW^nWSjU2LBrMbrqwrVVQR3Cfdg!0>iLRT4rEshI z=_v}erqZFpvlByBixZ*nspJ$TdAO|fd=7jt_V|7+#2%l|l&*ZK@BO`h9MLnEc1SSw zaHs5qn}z0~`o0{7QvINWP{J;MFf=x9Y-9;iZ@TP=su~2)UoqP_mW>-iAR7lP0ar*v z$NkXsoNBfQq*HP#gguBN9;xp|K<%3Sp>?j_&^s85VFgzAsbhFbSwzdb?rg0(cgOT{ z-nVsR+740uIT-iIG+K=iYP0ppA~BfFY98)X1>-`7B8XC7V{8mQMwwA%W{OvSJWQmm z)*|UOQ`--a6hbNMgni2k&&Hr*Pz8P%K4^bh*eyV!boAnUFhb^~*VQcpIf1)VJ1h+n zBMl8W{1#ObyJ#|WB3|^*VN*}eMZl#_76mCK*~9VN zP+Uesfd&q4!CW7BKrf~?LcBbO{N|iKemgEsbXW>dK?@!4$U}@=wd%X`4U-JZ(_8X~ z(B%_n(%t@6;QH<|^|<3ZG3_$rjII!>TTNR7MUAvBNM{Swda3v`h7()dXV&Yqy z>;LM4O|sLuE-A4-pNh<%%vfh=5GJ7&O%1>GXMyIS300)f;kRCu*ae`;(5rjnhf>uO zGn&|+JNuJ?@6Ox!cI@(~cp@>%M3nzwL*NOUXcg=X{P(>~Y zgcno=w#r0S82?aGSN~_0@M6Rx)r=23<6(3UP^#PC8fM?qNIC9+hwb|HL=4LhlIkHs zh*7`LfB0$-@XH^eC2W`_{SQOSnBJXt|I|zI_|}lBUkLoJH}Ipj&~NkyqOnEz^lM5% zMVg{&lA3H4k(Z3plGC{sTCJ}lMo9kMx3JT{ontpz;CUI;Kn3?7w)mrrlM%-YnErRM zzK8S(po>%!po;4M5Az(RN}W&4TB>nIibldpl7A%!yCWme$sDLFcB#E}0F zWWIgSK&Ag4M;F<9xHTL0jSR5Kr!KwS|DEyn%-0{xS&qR@A=kqRW)<6AE6e_+&D z_6@(N5#8*kr*{|Su$pECHF0-kk+kc}&ouKJ&}%L;xnnl>f)RDU6A z8jkr6;N|_-^XKa;gKKhYheMC242d1uoxS!|*O`j%&FOM$NBpii&zbvI6gXNREi=!V z*X-BY0aF0CP&GXDwl||a@L!$3ZJIPV9H($+?bqGJuZ$0g3R%{v zpuf|_U!mePV6uJci52zx1rX9g{E}l~>Q{99JQe@~k?@j~1te1*&A8*{(ovl31gt4e zWu1n&qFYebjdZGAm~b8BB~BwdM|IjMf*e&CPv~oWK1!cg1t7BW#L_06?!eZnaxYfd z?-cO+4ZzF0iFOUO7N6A?os%DE9=D|m3uQ~qTcidRINw5jCMQ0`kcP%viuIFn?dq0= zvVQ;{Jvt)q+-|ZAXxELKL>^~2H4q_A4XoFxDK=Z4b4c}59HxG}kxYE2 zknmWvolZr`@evX_KV7$Y0#j;uQ`)fK&q>k5NEVvmZB8!25~N~K%XMPh(elON8H}G( z$VxA;93mLRCRV{jolpJS?J|Obk4}DXk+%UlFfV^&i*Yk)WOQbL;IUFUnbL20+nUl} zUPo0GXM O6jd7T0KSt@xFR*5zj^E&c}TlFAUo$7j!pFRpTBjSs(#uJTFhlvYv@ zp%O82iMF@<*4%ldAX)bEx9nj$Q9wGB5gieKo&W5nGDxC~uuB)bv4+|rR1k+%;VTAc zd>`2QP(`tR4EC|QKXj$V?qte?uim(1u%W`lVN3uTMRXGmxmF&oBg;VZ=86e<)N8hB-nMKOr8#TJk}@45VDr9bviT%R zwIi;VIS@&1G5Aqej|2?|;A^9Y2;hL5BQZlJqQ^N>WkytN>j}v#B|2)iFRWou!_zmt 
zDE5-S`BBZqA9n%v{v1Id0{DbkC_njkU3{j3GaF7ybmXKibo*^goxec#=Qe~z=L=Xo zB$Kd{ZmG}z%KmbY{@KB;V|X^HHPV!U{y0jFR&+FW8F+BgF%~*}q0YjW3V8`YU!%vK zz04U(E5OQPO_7go0{s5^DLNHBSKA^&7pW+-SPDh*8fN1>@V(SyIY;P_C% z&-+}r7sj<~g~fkgw3$Z>(EpJ)76svC|KDfVe<>XQi^G76mGi$sZnm^#V!toi-_NeI zYrU4Q{y;GhwySll;t1!mb>T61-c1`VL0e$wcz4e#zL?Yv#aHP>^6u{9C~9;K1a$u2 z32{(=`4nGFz_MfNwIi%vU0-~O8dJbNy&}*0di-H~auIcB)}>39Oo!;A^M~a#02%4Z z{akF$0y2Hva|;fH2-gSz!M4x!J1x|Lm>eoI+muBbQhf02%^3$l!tJb6*^=BZueM$x zqq4g~@CQs~Wez2A^TObG-#1qz_m+7D2zG#NhL^Q{+rg8EK`)z3E=j8MLW@2Fg+*a9 zh+1D!tL4%pD}A~!`Hh42j;b;Mup1(-qB=aOtWwHKM`iuF%9gme={R;7lP6uDuAR5X zONNiMLu-9Qy8{=xQ1ek)3ToD9wsuZ1UYN92pgOGzQ~dhyaC&pJF*5Q^RtL-Q;L82i zl`L2wckmXBY|zn)`Z%f2wb+L=%wZDWZ@Q2(IQNr>pP!)H>#pW}Qmm;2z~oY3J`j`a z?#zP3sQDWd97oU6tYj%&om6`^gS;-f7b+peaAqVfI$2#wv-}8A zE!p1&2-Ya}$zIR>$h=TrKHFbNYO}<53w|%($7lok?7jYX*iUDas0{zk30NqnoJc~&?zE)bl zP+%!KjbUhrkg2#zd7blQ5T=O2;X7c6MZ?;=cS21dHT482#1dmGheOBbV_|pV(($dqT3^%py z!-?fBk2DiT-}tXU0AD}()to%BMUigk1cHtJ&R%^K3yNt1Y3asWPCLQzE5=67C_U6@ zT38HoO}p4eJ!|R7#L3)g^IPc}MV)owO=+^7^`^6&lBSP+mE5LkTa4jtajywok93J9 zRvK3vu&i~mo&CYnGZ*Hu-W-Vyaa~;Rk4dy$Uh+9~e#Z=K0yNeo1c%%#)GGzRKp>4d z?X^n=Ho>g#J4y-bbFa?4t`n`kHry3BYbjS?*D`ZLR`I|2Y{wOu^U}J;l0&i zJo-Raf%im;&^(V4QA)?6@okr!S|VR@6>XKrP*zD3CmTx<$D`DlbTeM(vp>6Z+UA?Z zol6PT$Xg9_h0|GR9`uLQt>Ks#iC9^+w-cF3?(L3z0O)H|LHW1?F7k;JoP)TT#thJOi#-^bxm=ii0_gje+X-LGNd?!I7pxau66< zLav2DcoF-tr$ldh&IUoCEa=FvJo7fCz*1!7C7fZ95)%?CeE)X!zQ&|E!)7^XR{!bb z8_NVf2fPzoQi?Er?5g!WAYpyY^uK1RWcVCWT+BKjZ*fVB6e@!l{(dea{CxO`bs&tF z$c9xMm%NV-h@vZF3CeW=(L#Q0c>DdM1p8&s1Kk+T4%;#tnI)Ohg-`gRE~&q}7FZ5D z0Di>0C|GIC-_LQC{CA5IVI`Q1C{g)2-7XD}J^-hyu4E@UI{z|L=m29}UJGPeyTVp- z@RlpXhL%p#tfY%J&zp9ulvL=R4LJ@z5JV1{!I&UBU@5W?l#t!+oX*_~5N*-+X zV8C`W{l^9Ey}|6uaRF&O%IGO2bOKCQ4r`?cfB_BSovqJ7yGG9jg>JMwAX=p=X^o=t{#o68&O|_48U&_j1M|gw+r+15OCoO-(OPPb6J6VP03d# z8H={0L!0_+5DAQZZDwyZ_$y)(sR5492@6{qh&K8OvrO?6%FwsX6m4pgP(5Kj+nHi|bUyhpwf>UsIbG@R%^r z`(a36#Kw9TGDtb3AL#7|a0m0LwSfhm9SXnD&X>0AHl3d@!<86rASxzptK$QgHrfuG 
zkW%fki1yxFZt)vW9QMmhJa8H5po}r4he9=ri)OW%<%RRFB*iyJ7N{{FtN_9tKUk5} zlASeXiH9qcNAMoz35E|0yKk%D|0OT0wmA z;V&(P#-o(9O7ApSuxhtfAw%a4Pyx$kPEgQ_dx~AI ztz`y84Ej-_LTQB+@G8Z>VN}*r%qPKHIgMbb?RjM%BMhizjs7)?kk_i<%aKleqY8x& zX(O1Ze+ED|X+dr;cytM+;&-M2qoTRljWcX)BtYx*&g>wOcBay!oK~@HrvYzL&y@Q( zemUHU5BP5G*V^`C>0iatuj(g7Nha_LEjmgyLd+sLq}e0g!h?u0KLl(5p=`?c6Wd3> z{1bTsSb6v%iPNCociqKB%5+>aECHhUuzcK055Z8=eL5MhUOjQ))fH}H zrMBiyEg^}%Vnjor@Qj+ey|0s4?K(AyGiGWNpoPfJ@gxUHzdz_J46{U6LyIv5E=!eG zC-&)z$B@MI^rl6h*a{f3P(3TRaeW`MnNTTf@MCD^j5v!>i-28Y^Yz|P#lCy|RW%{C zx-X^tyamG6PgNHJg*pSu$MleHzp?V(o@86AQ2QQh#=x%dL~}*9Wkr7>FPqf{Pi{>E zKu`RPlKL}QSUdqdQDW@#7}A3xI$>O<5;Z1Q|-V+c7qENn?@|lAV3Wdg9*fZ0=96F zg1Rg?@WbteL4IHA6^;1*cUZDf2I@s7_s$3numkEep~M{|%7R7A%V=}DFb6#lJaQYS zw&nWH?O!e7tLYVSqjX*+9k60k_CDX~>m41|7Vv)`uEVUppRB4+{`HFWZ^no3pvJi| zLgEy7P$&Qs2iJcct}prYV#$8nU4PKNt@SRQzJp@H84ieNvEMmP<3MiQ4A@@(x-{hI z<3@aaCnmowRMk`2>M+^q8=IbX#f(aziY$_hRC}`hbBqfMkgk)VW-oQ@_7(VWWRdv= z@yQ;&B-ZhP|M%1J&TEXlJ!_hs4v>$gbH=^VplOo@nfELMx1H(xxly|CT>8al^sf7n z5Hm)eTkU2a(NcnfJdksH>;Mq(dpE^&`Md|@<{0G(B>g=Ax)h0C&aP&ak;)eZeEPz; zmDeK|_A;WbqI#Z%?CyNQjV5uh)CQZba4=$~Ca8d@1J6mcS=~n4U>HLG1@JobRGEJ? 
za#+b3Q};I7+G#1M{?iuupSBC4cFwR>sz4NYGp>-YawTUC2C`DkZA+@?FJ|Bnz$7;n znBci3XFs$X*a=o0`IhhxIMc0b5YT8-jMY`t{-i=*K=D2H-&j=$ks@hl$8gHJLB7hA zu2+kJ2Jx5&f9-vYwF8QITF7@s&7$e_I2brWlRjQQju%?L?yj~N{Xg)rLZ4?!1qHB7 z1o=Y4B!rlUMy$J95k_M?DX+Gv4Q7uTJVstd1haQ~ct6hkSS9n+<_>IsIwz*=$;3QH zyD-@q-@#*PHvADEo{G*yXK<{>VfuA^nHsV7JQonU%JH;uzXWif>i_Efrp(GD|81>8 zz#G_pf;)^|yc#-G#GyD;=?QTPbNP0NW>s!CMEgmVw+br88CKa@Xl=xeI}%@YiGbfnbmr|*Q_ZKgtmIR?(vcUiS!h(=`#~W9GP9by^!$ciDUYPX@!$| zV08du-AGXyV+(+`q^W-S6IuGX*aYpWlqD5)DJPhVL#m6{iF5(0#EBk5akJin0aN~AKxwUaUo-t&~wCaC# zcHHy6BKY^n=X6|;;KL0Zv4eslql?3-6+W($Q-1A|ejA{$@yX@nQNn!v*djPht9sqK zR{Te%?Or;wwb;97YT<)5#Hsxxu;Vl_oWX#iaqvoh1LzLV1*%-?s$=KtDRt*VTZ#cI z9?9|XtrWNt6<-;=2jkQpipF=5D*u}97Y%%*DW%+Y!GGpiS-Qn|Qi`Qt%Te~+f=vSpy@9ymq z;ceD2@6nFGmB7r3S9|)`KT1n+s@J8nDh82dmkDM(@yyXLOa=qOxRKQW_25AHm zVyF3IDj?uWe;_DU90E`js)4iXh{{9(FFOaR~dq37T~I9v%{S-(IPiDyS&v>tx$S`%rGvzYgT z#nFNeey*eN2yj$yvf=bkz)9mOHq5IA@PWl*J=1ln}HhziLpJcJ56 z1&DVm39aVRaa|sxj)$g`Vp8B3C50T$lCq+k8Ux^_cAbq4 z$r*$#z_2y_p~32ZhT%;Zq!&nU5@(9N_F1JRDm9yLIv0bl&T>twkmA?vbtI9sKu4k7OBgg zV8w8Wr8W##yWIREv)c&q)2v{z9^tM}f+Bpso9~tjB_($d>#EE)yaSjgt znvTzhJ{ZjZ!Xy-eE{7 zNtRtjrqA^ZcPnGb1hrHkMr+YGa04X_iyRm0>W^Xh1vkhV8bTiW==oE%BGL`+U`_Rm z=qAN*)bl%uU{g6DJ#(C9F721uK%^HA%Fn<*5c+$1l!^;6ntA}H0;h(0k2?X#(DA7l5w%pl3y*ulmu*6`{u1M2s5B$hZB>39}rv+sLW5r|{=4GT0h6u?Zw>^I;)sU2_{6_{SY@{Bo) zxaKj5*W$}Y2l1Fi?rHL3Kj&$HOEU2_OXLeKOxRSDVq_(iJ0P4Wl_^>JBr`LP5O67{ zvWvi++2l#CgMi?Tm6m&n10WCtMtcxcg@2k4BTp7XLsYNvB2iJiQu``jcHw02$<-F-Q~7y^-+n!` z7?35JZ+Sjo!{WxNbG>CK!JzfJOCU6PPN_PAKGF>|GE5+l`)$UQqfX(jy53X5e6W}i zm(=+8n|RyYU{fFg_YPLBw23bws)!X+n}$k|6wpNx`(`XL+X5s*GGchT%ul_lzP#oM z_+h1dEF^fh&J-M;0yw-dtI}7p=2`^J{ZJ{?rsOj~oTT;;;_U^T%jzwiT)Y72Icdm# zdX{<-Ke8-|{R|#Mg~HvL=@9*;Do|qHMq`^sibh{mJ3G4nTt#?r>vHlt-o5(;Xu-ic zee{Y2IKF+}xqorK@JI>yFEd+ z(pWsYN)o#@_J+3ioLkRpa#9V+F4UMs&ymkbBt7Kv!FB`kE2gUOWY5xDz8Gs$8OI`>xR}kOXoZn0O}uDK>&T`oBa}H} z<$aMwXgUJH8DqeC8URp@->KQ8^nrTSZNa|MM_xUWaEX58v`Y7TV)cgDPBg4ivKX-v z^-8ZCwvu$M7yyfXY&R`@zlM&T@nl%QJNH0Vz`OJ@L^1b3u~-2qPC#YvJ3H^rI^mIP 
zyqP|@6z&2Vzg4M;(_H}p9wc#nR?lIbu&z1hK>G}#`Q&7l!sPWhC5cieK~p9kGMbV< z#Ynx~`VDHa%n(AZS{H{9y$+O7lcn+>Ve)z&Wr?uo4iy@{N-ZoohG}t4fNbWS;s-MDRD$u6l zN%j!Tu!8YF%7X@$ePD`9YB)la6*@wcXP#3Xnvwm42p zbwYatr@MnYoh<<9?i%)A(*jF?8ou?1o7IAJAaxpa8@q7N3BbC|qP2;AO=S73SPk;L5Y?V}NR=M6-+wyiy(ysd;k+A8ZApT4EW%Cd}RDX9*8g z#pB$)y(>A5kR#K$ zb8>u)fF?J8L(3x?1fxWNb~edoI_=M`#QB$OHJeS9ea8)%(pIlZX|hbQMZ_*y^8o3V%&f_7BYJ zW(j|a!I9g2Z|yM+a=nGZpQ|nE2#$nu_6e5OuilAUtewPCo>)^ zdB?g$(G|@_`#29-toz+&@NeJeMVnc*j@mNeXA9pcc(6*%A@^UG^avjgVFyoHCiXMz z>*{CXgiy5A&CT%G0>(i@PNjl;R>%X77SyDEO_|98>-TBD2wR(~AL5+TVg#fINHHk0 z=g{_^h*p7N_5S@r+&q5ulBb4Y)Slt&f(qc4e%VvBJXPhd*f4Vr zAd*1zbB=Y`?w4zBs_4mT*w?%nFkwNAg+5Pc!sOYMUV#XqC|T6)q~$Q#rdkx9K0=7P zYnd{;o;EU8)}{2#H*lYhPTjEuNn^_5z?a~!?36wIMSiGFh1uq9-$pay^*9dm7=2tr z+dYj72=5qIi1F7{LD~`+d?W&a( zG(yQ|NW`L^1fs0O!@rAeD1qvTNSRthl!3YE0GA0I2}UN?8}$4gGMeXMWlxtQ4(rv; z+#?w*wuQfi!CSP%Dq08FvB44GX$5=*c2H=+51f?>z?lbK=V$j4B&rtYk5Lv$P1|Y8 zv*&CRquV?e(>C&KrJvb|cmlEwrjVzEQylqhv0h^0uW`HR*eoLTo737hcs&KrmN7TC zfWS|bd3Bl0$f`LTFuR=KPfM| zHEUbU;)qmIP7%DtV3YL`^bR85`kZ-k>0$#i$7MP(ib$!!`D2k|PKbGlgTxPeeodae zm6g);DVnjR6B3zwVNX#XyYqMB<=W|SKqTiB-DZt4Lgg*d9a>{5I`V;J3>__D9E4Kw zl+##s%ctdT64C9sK+f{VruWU$*>)*@R8UvTni^$`ufWR|?|B}7G#ItKOPlv_0pe-H z1LubMWn0a5_EGXISfkV1FR+BHW1EMH3kyIPdvREMw3x3%qBOW3^!_#b6K+QsV8~J3 zY!8z9LE5g0tdSj9FO!i67`W(9z=vIrB(#AM2;rzCMK-CTMwinO53$H=I1#Vs>O)~! 
z^9FOBlj$X~X~MLJL@9F_@ISLoUkzBTL8|^u3Jb!;lqxIYCAb;0~a|(AoqtLih;^HZJumfQZ@IvY9OuQQq+T2MwABhY#*r14Q9z2%Ug> z*B&hjgnTY;9suK~)_L@>(VOp^^WG$tJRpDQf}+zu?>aS+ZIX*TngEZb38M-ZP7|Ux zq$o@oz!IYD%eDID-nZCQJ$qEG3hsI@>>;N3}<@@2;j{oy^cOOs+3y&5y zP6#hsOsqzrtYethAE#_xk|YY`pm;xzWn-d+yHUbyvFy~(LdR$x=kHAqBj^NG6J%6v z;%K?RJ6Ocrr{lBu@{|~dR}}X{%B*PDHEf^*00^ozjs`Idbl}^|B~<9eUK>U3EN@+( zQ|k>HXNHuX9gx7;|D1WgCh#X8j7?n#6p8aYn_;zf9qe2|?)GfZwC42|S!sW1uWpZ+72_h!$-E=+8s05%GTieBSx=ZmV6R0@cBZE%VMTtX!;U zeIkZBn8darzb1Zw{gLasyD{lI(^x>Nk(;Lxf6g6KomoU^^P?5^=XI+@5r;MTn_of6 zkPN>X1Rc{UkH0=2NRX{}>e2ptbJSFPPf^Y1SempmA-TjaHR=8fk0p9Gpm>}f03XL$ z=&uJYUKyC}@ywLTe5b$}F%100Wi_*AYi&U0xPFj{o_S`@IggIr&9dR_8ty|uWgiZ> zDz+Al>VpStb@ONu=?6!p7rJqFTUG?F=1h00ZIjDLcHc_vLv+@$c=D5e(Hg;B9zj!7S>X9WSmY9X4_3Keukse*#Z zP0h^0elo(G9z30&s3t=i8X5Ibc)eDthio!^G1H`TenD}Z`vSHVx>Au>iVWEbwvlB! z@mmDA&bdYFc4enmW4x6Qz{bE3-@wSSmAM>Qo5i<;Q)NE0l$%t#i<5qBu4XQ5Z9)AZ zqOZAt(GA%GB>M^3Fu@uG89_WCF<-R<4;@%}u4IRZ{z$JXMJ<3^rju9ZFjiWz&{bQr zi6z|4d7Q4AW^`qVd>#dvTN&L8?37)!D0p6cKpZ1N+~lrc_DbIx(B|yr{4rW236s5+ zHn1H^g8B6!-(*Xmu2kxQnG=aQfmOm`69p^fs zE0Fv9C<|Jjr$dDlU^(@;-H?`=f^ytxHk{CRJId-{Mlyx27W#VeD1m@NvK?oK>sb9Zx28*E|HzU66b z{pk;J1~@!`0r2NB8q;wnyET`QKFH3^G^VXMZFG~7GeOMca+B9c`EJ8Xi z2taR$XkL;mE%k4F0l(F*9lyGuXxgLmZ zb8Tv$_)g&_Tm$5ev$@-s0j$(=!#8T*LhGlpuvI!d>uG6A129~(V?+`qY! 
zzS<9UNuGJzVZhosv@o03X5gpXd~jkMx+EZn zk@qS^;zXJ+4_FovQ$XbyK5o(#RZ>SANQvj$%pn|R2A}Fh>3MGTndt@1lh+;`$p=Vp zQIxpJQui|j8ybuZX1{=h*1#~pNW^FYDS&W_zR)l~=Xb}VnfIBU^=Eik@j0M_8h+j1 zTun8M^GrqKt8qBuX5p!ESe9!FQP_?rs*hm@lYp|^j*NS^QQ{;ve~T(n6%<4#-A89F z)m8ssQnAwr?Nm5(j!-kr5yJRIF%G!P{}m*8ov<#ISvM{~678Sn%($W7k~uj?UjE3j zTB)*HyGuj{yN8hVd+ z#GQ`GN@yjdn1ESzjaRuUf7M{iAv=d z2Ep+y*;m-5^c9`UwusuD?X>D|7t!;DSeDQtPO*9`oO#-dK;BFyTjsJVB&sCCVJdnv ziQTDKz}IeRtAq7&=*I65wmtwk7_JRvx2@BWr1SYD?yBfE?SZ7KU+lJou!4MUa1FO< zJQw~H(*TP_Bh3-tQ5}+@WdW=x4>j=Ak)A5S(bjHBud5W&S>TxYj)96FaHQ6^(RoD) zF_*cB{nsDk?36=hhyAz4Q~GT})$^OTIS=K5cGC_mi$ZaHBo4>2cYvT8guwSH0(nC% z>m*`oNB2ap`xZ%VO5b0So&I6XHHzC{Y-ysf==FcVf5Da-yH<~b6{ZRQ^4=llLk>{% zi9qQU)(z;>*1{rfa#aEgLpHHWc`Sl9EP-HH9Q`rnh2Eg3S;R}XFobi3DpOoeBuA5A zL49g`Odi=D7my<}^8;Yu)uX2tZK@0^MQ%3V$C0O}bU~E1k~a5{X`prCW5f(_mbJ96 zejqq?)*UgswM%pERDC-EPfHl~X%_F-yh!rP@z6|Tfx=4)nhnJU?jN{mf`uV3LPP}n z40jyjimV8MEL*Z_GD|pzUSw8ux^R-%4X<$-PHu+96NK5FrvZ3L6sGT)2Y=^WKCI5F z?8ZW2%5tsr_2pOJu(e;T3c>mtUBc8p@U`6h;NPluw$9lZS=I2d*PwAghJ-4)gczxU zZKa}u12&H#E5?5Fx`B18<4L$XzZzGHHPnSJY3+K?cWcSkBTmG`i4P>gbFx1cGsM_X#IP#Y z&Shn$H&kM0;?yh#Q7R|cRq+l48J|w}(yT+wxY>_m$dIO(7Rgj!=xQACaIe5Ms$tvt z=U6tQe*%1WgINy<=L=v&Mv}@QaDU|EQpe^bt?HB)WtbnK%Wi}GdX}kg|LIQ=wpW7CV?eI$;~CG)wl0 z_Y@H1AqG8liJ8`Vsd%9HY=hup&Z8A&T~s$JQF|VJ@@P4aL+?9vWou{Ukj{Z(rK>Xc z*=E|pDfc#&s1gQGleUx>jOZ+?q3R`4tCJOX!96m#F|}blw))=}Xi4qkNA`#vBl+;lCjOk*>y5{iRDq4KCEXh$=&(z|ZW zjXvkME@N%nvhnY7#bzEWg^9YIqSMqPFllXbBK2X~m-x;Bp*h22EDF@+*FG-8ynT44 zA$}Ja=HOX1lD8{ceEJDXAQqhXCp=`F62c5x=N-_v9;a$!_z|DNm0O>rI9)~Yu#ow~OiW602H$%u@RQ@bXB`v2kTEu-QHm#$GH zxVw9BG7Jn3!QCaeLvRQXTqn4@3{D6h+}+(Bf(CaB5*+U2ocrDHzVEN@)$5V!n$@%R zQ?+*$n=+*C--AdOLNwPwI77p%DwqSZ{23*Yv+`b9`g%mL81$2eMCWVRZo%4Wl2(ZK zf&zZ*HS_&#&Ua!@bDM-Tpc!xmd5P)KUmOx7t;I}78i!Ca^AkK1_oh{M!Y`G`HRg8* zt|mz8h>x1X16-3PSfVT+vZm4wIo+gVeq(CT($6nSFnF(L8vQM*CjbadT*TJXsO-uC z_b#6tu`9sK%w6Ud8H{d(iqKl6$nh+Arg)kRaFwH6L$kV@f3WmlTN}w)h(P-@(^j)H z&#%?7RW=mMj>xerc8D{T;u>Ql7#^@?5x5`5ho%o8(% 
z3csGK=y2JrwdmEQhHfiTu~Hr%kQs(Ha>)wja{<9~87)Cv9$pz6>b@l|@=G+0DvuZC z;|BQW>`;-6x(B2&htdsW8jmQL>{annpzXT)JEcwpXB2;~y4|B#?71jep2~*O@l$(5 zvM&i|S**PM^RyvW8{NVB!%xVOZu{)04z*4(t>OuHpiK}oCnv9KlZD%D9yE4=(3v>w z@E%Mh4{)(iYSp>dqrzyigHAybhJwwn?X6&5{kKWrBmo5kD$i;VkpsS3^ z8V$$z%I%j_M{~$_yi^-g_i1d^Gnf68tq%-K-6U7{=o0<)dTt^t;$a%%U^!r9eJ@Yc zQM_UP8tG}X4;a_$q!cESX3*rULGI~O8lWZ_SB7icvTpNb$?&~y>Z|JVzrS(jgVy+c z*6zN1xc^gbQ5)Gw-}T3*RS_w#5BMV5Ks~BAn9Fu@^BMmbUO=b}aYDj8HGeGYc3bs` z4J*-r#t)Y3s`e@L%&gg%U&X$P{zJBe@@`Ew$~P@se|wv(0vQ_$4NUv{E96H^E*uNf zJoenLg_jrtqQPR(cbzq~_q`1n}_j z19`cqdANB2KpuWhh!`R)0c1J=P7$K!2lpP5{tXUD$pzr$;O5}xg^mINp!;zCn+xIu zfH--1xp*MI7vc3Gl>TrW5c2>yAQLxqeIO492RAhjhzG#W!3Bg4=LB&BpaKd`9_S*R zTmWtkAm9IQD~w>cP>6OQ9FT#J0|4UV;^T!*<^%$GKwNy#$^85PAQvYe2x=yf55NiH zfcznbCH!Y4GlcFNoEtAcn+U+(_H-Pb=g&#`#Nh~)8+4DWc{`>(Z>nEmAmrBC{itT?K` zB}W3zujngq8BKB#0Cf37&vq|9Lnr3$EVdLH2GKQIUP%&Hil&6G&yQp0XFW_E&sV?K z;olLc?ckxDjQE%nnK#G>Bzla^#nz4*C8rlrY0p(ERW;u{+!>2b)L8x^&^cps*R4`V zhph~pWuHOZ6R6N~LU~O+;APYT(@3kZJZ6Cjq?ClSi3OBIaugX;R&P)sHvSeRamq)h z*jd^-tzy0vrSw&wNduyU-WJv;by6IrT6xV6{o+dgjC!KZeDF|dYXY}+yd1B3$mX?$ zt;g}|Wj=jeKNb6tbiK@H#WsKY-0vtX2dS#@K8bLD%1v!sUFY>kY-MJyGUu8$KuQ4{ zj0&^c2NCuSQjn)@f4|M9x}}C;Kqvs5RNCh_q2Fz2=x&cTzy;d&<=yWKCet zL$kQTfgx|xGlAFJrnAvF$V7X2E5v2F6}ed*n_PkUdn0fuNQ`aW=3 zs+rYL^L&Gx_cq~#tDI1A-%U5Q?U9xIIx+pDad025eS7F~v$>GD+|IQWsZM8r|}-x(cz{H+;9*4=kI` zyqalO?zk~#TAP8h)mTAVo%NRB(D+PYT!fL{)h_VcZJVPE;R;=|bIRA|AME))se<&6 zsqe^DeVJCQa68{lJVe^tvfl@frs(v(#+bhJp{>o3{*>(TeoBNb=gSLx{DdA~l^wnP zpc*Z{`SZRN@Tr02gQitD9fNm~sa_6IYisDrectq#?-xuVx01`s=M4K9um+@RxU9C# zpOrr+^5Z85+}9yfM|}^oH#t?D*k9smRTI3LgEr0BW^+DiCf@1~(JV5-IsET~SQsh3 zOMr-(WRvbpc?-X*1g&7$il`aS_8Alntqo)uG8>N68>l;P=UIhP`#((`(tcL};utzv zhPBlDzU0~|=gf?IpBKRQf|LB0EdS!!ZL_W#^e-SOWDw>GwF+`6ekmJxXMJ2hZzIw* z(IFYeFE&~E6wSk1zaOg)+!Z7>tcTALF`teo@Y9@RsCC9J5Q`U`MZp!GGuciY-0u03 z?#ln_Oi%~&uBcIc6k-I~63=XKxJqDbxRzB^)T6nx-duZ4Ebcz3+{F0i&uZ^w z`IVVl^vmQ>t)|`=QMFv%yd?Ggh7rGEgfF=ZBi38;h zkEpxUCbfqOf68oRFkTMJ{Co`j)lWvp3^`Q-4L#6~POt!q7dY>OA34j!`<95D^nvqZ 
zuB366m&!{lU3}Yxw;SeX)cY{JG1@Pd6Sf#{Fv!o+F1dGu8>d^ER_gLOOk378*_9!o znH7DS)x$=Z*S!qMob@O$O|WN;g{yjwPiLf5_C7bLPN*eMdO#viJEXAvS2n`07*mHc z7fqDlLEf*wFTv8n0}K=jhX-l>^9|ZT(m}7Nu9I^s$=tTChdebR$V_v#L3MLtQ}%V= zYa&N6t6&ld3C2WKvZ&bks-R>kRUT@ohpaq=WIt5dhdw7hZdLDJg7ywb*vU+cR|7m| zf3Tpf(qB!w;{7r)z#{{FD>k+@!b&(13;heB$}q}Lg50deK@Ia;iR^l3jv$s@f4sD{r_x z>??N|WU9t4b4UCo0b=G%oh}C%^Ujd}zLYm{>!V2rD;u;bkqGmg%t|aFGmZ z^DIfBz?{2A?N7`tS~PKCn@Q}b{^OoB^a1UOY-GCDA*DkDF_|~or7@croW6DGzS;FF z+n6~Fz#ugu31_?1tdj7f<{)NCilfg@P(4X3!oV(LhuDzySIyuXEQqoJQp5NT4+5VB z=K>KqhDC+rheE$tI5Ws}78LrNQo=Gqa$R5%sh}te0L3ZL>c-Siq{hv`%gN144dUX3 z!c1-uCkLb^5Kbu76&4>7hXBV31z-QP1Caj|MCRoBU&ZkC^&JhGaQuH8JO7I^zrZ2G zCH`{y!6ll_7nAuN>!cy2F@A0obGqwy+#NsBdF5`sEv#LQUJeaEFXl)(O!862NTf>X|06Oy)l@WS2})nVfMxYu`%7|dDv>P-65z#*;- zH}D;I*+Y;*2N*#D*O?t)fizW(eIJ(_pfoFE&EWR4F5x2nT~f|+JU(2d3J+!h$EOLl zsn+t4^zK}GFJ=!+S#Y-7%9O?FW_mOx1*>OhNH>JjHd<#|p%DXQM2bhq`GvPJx@nMn zB^o@}T8+z{`3cSiF$(563L0L$_CncVk;277>P+_(4_2g6S$idQ*vG1*!LsFoRMWCv@tRjts{nva3q3>~2 z3nu8XW7`xUuxg;r$khXu@!QQroMCE$+^RkA*UU~zd5s~L{3+8T48ZgabQ9=FQ?F+wnz;g3C4Q9SrZ(od{Fvcle_)JGjgxm>?WM~nHB z%;y+j$J80K|9BqbcJ1b$7Ny`RPbNOMJd~G^v9fOnU>)Ssc}$9rslN-B*ILb#ED|`l zqe+eD3TO4z86)XfMh%&vVlHQRNAvT!-Pw?8<{#|yI4DvaiOo9ECN!6r!9y9Puz2c) zx2oa!H#0NFMm?Ha>N=jJvwAkk>{<|F*~Dcwhoo3*dYhgvhZ(UROpkwcVgBqV7a}Al z7Rr@YgK>uAKb#%>z;V$bo}CC4i?Dwu2h%~x^K*f34Fta}gD&GCgu`$~PWVTf{BhUy zSlo6Z@NW@XdK&QayU@l4%bzTT%g2gO5*k~)2l0Y5Ntcvmo&E0&klp^Q-~E2R+0H2U zJeMc?l@oMEPl^8vofF-jN`J|D6lhWuq9)DG%nsHrE~Mc?>tgnf+-tI<%cxKtw5b)&|BTbX#pn=nWUdTL*e~r4wu|w`4l## z{E)p$F3rWr`AWT?I&tz8o-~|KQ%2|2f~hYuKZYwfoVq?VhfNW8Skg<~Y&FcyIGnc; zSXzS}E4=Nb^!oHMFELN+0c=^PZ9Z2xJ6s8_fTD>UW|;TubC=lUS+tTnQC%O^R!D^e z0?x!~Bm==hA75n9&Z7svF>q=z?PG*dCx!d?U&(u;oRTc8jxH`XG{)9<#3i2h!E4S~ zb`|_+TPzBm2){R}Urb5M5Za9=3-J(yO(6m&@(9Zp8Th?}#bFUtKEMrsMAc|Hpc!PA z*9u8aZE7<*zx~$ekE4C)K=09eFDF^(Rh1}E?S0ZrRJ} zb<)rCMXtN+FSV}XTW=qCCs=0HQNue}a(W=ENpE3_dV}&q17Nb+eoe=3Gth^bXD1-( zDG*fcfdO`oI9ffJGVXYUUncxZFC7n_bf(JEo<)NgjAxoonCgJ~Ek8%FYH`O~%pcsw 
z$5NTvaZHh}#!u?x(6t}YAW7I`B_6=~#m<)gxjkZ&|QmXMje z{V0)|PSuKhlPdOG7l5B?i1^X@z5p#kTey^M<7?!zR?amF=jD6>j}DItv-=nrmiq~S zgHfe!c)K4P(r;S1tF*-Gx>O;}T-T$^SEk?rs9k}>*fz~UKM*2hg(yw(6Gr0}FWEwPN_M?rf5uwAS;i?bx zAr~*631)TR!BZN(lO<#1sOBwv(S%vR=u#=Rv!li}`Bedk&fIVMsyiEHsLN4lg+OnD{ev5~D%5>YXQQG3TiWrLXh9zPM)vaiV^Vx;_ zNwWPLS?W@JGZOS6o2?lIOukjE$!U6o4~wy4VSP{~fd%psc;@M8;YDi>C!yca(~iVn zxsPfVY#(zG4D&cbk@``p)X`-$u`9T@T;3+di;Se`hWt{g>csEs2;GpS)5DM zSB2y3ERJ@Bi;zarEW#U}Y0ssqIXa6Q zRA3Wgg6(2wks_GA@{M(I`gGDFi1w)41W1d9+y^!DFMxVZHR$}FilJDUmvugywmD^+37D~ zG}k}};Gp|^49?w&%O^%8VS3rv(Q8kX#Bb|!e9S=`%JKN%gDUT`F&#fQ^x72itp&jk zw`7dbH~rT;m_L;Zf%V@c%u}`%W+3T_Er*Pnq$(!(S%l4$jIJxY9|THubCazOWyBr| z^1o!HC!$;ehY`Kv=$&0R+RaoB-HTpxt@iFs4c3z4>dex2)epnIa*nQ)^T|Cuf&-#- z#rJEP)2u|mlrd4d{<|~HY4i&JZW;EVj}OqHw_e}e2#GudSxM$0k9Id6qIR2q1U{rE zA9b<@9QahPMIvOa#oi+7mpHRBSt`pqVBG*5V5WqKmz?lCReK58Ld-;`W7I==UXgD^ z`)M{;J?+^Ib-qE9p(7IkAx-ctYNC=Y*#Vj0wQORwnW7v!PnGF-%9qZ-I3ls_^zj#V z@Lbt%$;QeMkJWjaOz~tlbfGz?R^*Q5y|-U!jZ*9H@TfV8NUF&+wCBdtkekWoQssFA z!c<%yoDVpyIDcn}I#Pgk$9REPXehZ1Bl*gSt-h6azq$>g9YzUPbF%D91!IZjXcX1J zePvVJ5x=*a@QApqv@Pt&b>e*fl34}Z>M#tra}SOWan2%_apP|{;QKnW?cmF}hM8W9 zwJmQM@N3m%JA20q3Hz)Hc$8UvY0qX5zdJv`-5)_ju-!u$CRB+(T$6tIJe35yd`3ph$G|uz-n*cq zcONnR#-tu%vLaikqMW(F^~*7O`&gj=KGa*GklZR>@g!{)^RRM-;IuIBQh4}h;!gi} zx6x{gKISfm_!8JAeF7PVq2uwElHzUSjqYK1w)MS~n_mIG*bX@Hw=8jVJB?h4;5gv( z!g|)g<@heS;&BDphucOu(3h@?DtFBBHX{hTH`hl1!*4HMmi?Q!wePZLvZ=@C=uMPl zbiLNU*B_Bw`Q2uDo#gn)BoQfVLN$$iKYbRBCyzpB%SgG?xB9xSfMVrsc(-yDWZwR6 zAr^$*MDiiLZ=^M+n}tTWzU_XdeP``|-~jEKzS%^Q&ki5MyM7zG z+D_Op%70V1L^hTsyA!3vls)CFZacTV9ryIx-{FCnbyfEKIdYP@H)=2C;Jv#g3SJ2wH93Jpu@`kTR& zH_QDR-6gx5!-ncS{^=9{F?}_6Zt4R&F%vV$k=B*!o*F+D$XPi`&V;@fe-1ORw}@f% zXLjwcKgYSz?5^AT!8pDHCS>(scqIMn8;B_o9uHDn2-Qqy`NJy0@$*9hMB$xafsh2I zcmHCqy#JFDIia^6N7jV%um4vreA>Q~EG7yvoL+(KF&CCHz$t?otJbU#!}mj$@XhZj zr~8`u4fWX(%y_)_fRTr7C)rm%AE4c9JIpHz;V0yT_UyR0xiC4_uNiN_+vT+DRS!?5 z;_CR>G$Jx(_Q=@Vir8TMZ#=^>;iI~eO4p7TMZ9z1qJ9+|0vn|OOBnOypu->qd|>gE 
zW0xX3*7uv|v9}^T*3qAjadq}_B?t~F&PG0#uBi3IEkZSlVQ3$C^la>ou&DW&tqV zBpBqR2lbC;M%$(A%zKbXHCujAw^Ny0(w$rN%2E|8nTSi$mJEgGu@Q3>$iSx_jK7|5 zAi+e_Xz|>Q>1P37ILw=V3Di?4mOc9UVyt1LPHqT%%Vn*)e>(YmP(^>GZ-v(J;pgK~ z$+EEgF4!+#SIT(lJgQJ$Is`XdXaN(5m_|ie3;{o!Ux@hLhWb4?W%#dgFrR}nsEiEb zclmf6ZY)0>ov?S_zdnK466626HyxyyhI3tT!z#gupF4w{=gs3Ja&qtZ4re@MeRvZG zcc|oi1W!>URsnz7zm4Vm)oW>oGFp~+bT;e<=Bi#FCJw5)41}@?QyR)Ek~5Zq6pV)i zB(<-+u4KG~Ka=_lJIq2>{I--F$gcp&-ic2xAe z3wp@9FiSWL`at*)KC#@wx@l8vIqN9K#n236-_qz3+29IMCalXL+@E=b+dpgCCj*|1 zz~eyS6sb+fW?59rqY)wD9o^D|4OaassBzbY;g<>RpUrlTwi6e>3x-+(GH%(ozd$eR zEn;HuOQOV%C6#CZ-pv?HgtM$D$~{&%QXtJZk(>{ z@e#(aPY5P3RzM)_`?%pReGk`fJcpg4Mh8D=&6}Gsm$K1f{%Z8zp77ZB-Ns(*Yzmkg z10L=hC8O){@I(36gR(e$u@dFQQ_wAUo{gx%>_o9)gC5K_pJ_YIoejLb7>eA{oztzq zd>(_p;|o|ZXwTJA+xff@6P=rs^;sz~d9=uU%XCj#(d4N;a=>duc*91qXAiv#bgW!a zN@^z6xeJmDe8U(r@BHY*j9&hxM%*x)H~Qz6UmvOBy|dbD3J()qw`+`-qQ@wgeVUXv zIAmkGy+tS)V9zzeM)`F+syB8|`~9Hrg?tD$jI24cS*1xQY5-I5s7}=U<3TgrrIgA* zx1i_qFV=4P*(BC(VnwXH2L#~9cDU$5!*ACTAI&MPavfo*R`dU$`wTWNdraLOhWtUV zQud7$(a+#VcMx26b897_ZXIq*f`|8i0(*ZJ+^vCCSekBEW1c>20%;4)7?TNqhsB}*^kANf zrZlmi!F3;$bqs-%2e;@tIpz$GvV}9!9uy$29kR3He#Iq+UxSJizlTg12H{9JHOpzK zOS9L2tI?2XQ3P3(p-62)N)Q^c9Pv=WRYx*HegSD_N`2uhA-S5u`lHJcD7zH9IC;(J zsjxnsZFfWc*NXOZ@6ImB`cm9<3|z$}#tP(3?M3tY;3S5J0Dk5d4Uw6W1zrN~YQCG% z+hDLLHq%M)p0>bXhYpck4ChPO`Oe3}_F{ipb3rz~?4iKk3&aZLUqkzDmW4y9=O?z} zNw_vK-Q!c{3*}Sq2SnUfmyQHfu%!e1E>io78t-%)-KQVUVXh6IO!<9Rz%=@p!gAk( zY3l^r***w_G#<+%8Gf8@m8eqLu75ZBh;6uv7Sctzxe%Li^znS=(m8Fz>lZQGoVYyJ zsco;TXDsDkYQ08AsgoIqE}DYZC| z)j}tw)!+150cRw?9OX)q7O+H^b!++3-{9Y!l5kAsLi_YrJC*3|Vd0vkA}X2d)LGh< zvmAp`d`6>Z1nkD6%YNIw0AS8OUaVH?7uvfs(bkLxB` zk?~k^^Sb!D$I#Ps+up*w%N~ZovZebis@aR}PU6SP8|}Ap<1u?nU+{>|fcDQrvs{}! 
zLImPyLkr)LP4O5sx5JW)vHK0PA@{+iowf0Mcj%X|%Gnvv?K3a@z%Ov+FL%E^3nsYc8|Y&}zaD$DJd>Ng1F5p4aj!rA4qH$kVSDH={3Mme0kYP%> zz@77YI{6JZJB}ilzcIToZ93!hJemD2>AtD=io5nOAQR6WH06du?x=U~=PvqCh%!>Q zsqEr&+Sz33lHF#vBw0l*X%7VZZEn%aSj(%bb9&@)en{F5UT21us8klvIAiL%0|Ap` zv@fcrk;#QlEP>Zj2&bmA*ckIz*mJv8|8zi!6hm&KVmQOv z-~Xmn*bX@?7Au1>+6j*-qdWf&k%_f*i!~zS7;3<25k4?=dtW|d$v)u)3)wq*ueVe{ zf^N+~j4q>_)ASqsay&s)Cq$rE1r|%H-D7dvrsyJElKsTwONF;||4~Uh%KghZntcE$ zuN#49iGWySE6?W|*z=)s4;6E0pNMqUkJ5*UOFr?|?+(T!#`XGmrFC9sf!|~CAqTK& zLMW?c|1?}1C;26-Y_MU8(i7Vm6rqa#+!BzSMO~FAr~o4kb`F*%w+pCtu6|M8iK*rp zT(;r7F2JJn8-40vkT_1pi~Bs0LUky#HIG;_%8q7U#HnlBfGnjedh$DW7x?_2@`-nP3ij+FyYk-ivY{%!nbGRVJG2YNuff#HIHNco z9w)Y>b@}UQpd-`>+ktrAO)LliO-aa2@&pOFKu8$~4Q7)iuv znwf!GIu|liya=hi8n_MVoxFq%HGCyZ?L+eV9%Wt6kC{~)oh=qAJbU)^F%79-nPAc^S*)?Wk$&`R2wQcPDDhqZLL-kUVPQ9fJF`4b zv&Z7B1b3xKNU}<;YszLO#b17m_+tjcd*+&y*`MGw49uT9c8usI8TK+nqTeR`%+B(| zne_M6o03CN9IUJj^PIQQ{gkHu3J7>ExjS2)_tqXQe+Cm{j^NN;VZImoaKvJko!$nHC}kGSUf3Snjj zyQqqpES`1@s*-(<=P4Z}cWvh405$568B+#;#ZF(R1MpBcOr zf!3g|TZId}?WK{!`<7_X_J*Z{>d&E;v|^(zznJEy#aq0NTK>)H zBpbL24WT$&q`*}npJ`%A4rfILzI^8klr+bgBbrjVN%Rly7QfWQTxN&jsN~5mPUkD% zKdaz~YB*{aI<7t>`dt^vX^7SeTc4>o<^BKzrOtTIa|?N$&UxADxdBePsoFQ_aD#$& z&S7?>Y)S_E2a57MhBAHP6l`n=SuuPww5H&88qpO6Uu5Qn|bStHh6s#uND(Hb-scOf~3L4MJS*-)@g|Sjk zpOV#&{`N+EYR~8{t7nyuYyc&Lj^SLwyOJXJMK|-+W?UCVNC}iqys-%8tGhR|T9IXE zmoeCt5~9P$in<~qA9oXSOoX(~=~y)1#Srx^(&DJP`&aMT?z6#!d=I?cTjJ4$L97xm zyOAa?=j;px?RFQ67$X=^HemIda9EnWi@56{!?I0ptFUhNo>$Y>YYDDxs#x29vgAE6XfWezXE@g^We}ZL*~I zI)iyGx?e3(QUj`59bo6RPqEOW*p|a#?fqQ3vyCZdiB7gJt&a6b9AoaATPTl7|6x1F z_=N3W@W%b?#pm+Vf&z{cm84M-11xD)7-K`vMdP*z>iDmOGf#gqLJVI2ZoMq1lwaMx zTt0;?5;nMpcBOWminQ-JZdx_*gM=#cqQyvlr+Op}&9<>$GsV$ADuGj7ndv&7#(lhH zavx0Dx&qm2bB1Rk@BAt{vrlTr+A8H-e#E0>gTb>(r%e^NQd;R@9=(J}yPCph9vR>{ zhm_CqltsUuc&-I)*>PLV8h>+bxDK{BT3+3+Cv*5*z4CygmmnuNLt8~cI-6O)QChIN zUZ0PS)q}|iJczUfein()_UcbMi*Ne!<1X0TReGkjN?ED3Z4dAejruhF<&oiLP@&Lh z0Pj)Rep=Db=FU#G1C-LJONbU2xY4P5IIN2*#WfmL1@Gi!9~{{!ZuG}3{kY6qu-_>` 
z`Md`|G)doRowj7mRWlVToY{ZcM6kJZSYr?{Fgk$C=4m|wgqUDf5qHR3L^D=f`&D$` z4qv5W^}dhzp$$770u;s!*3v!6s!4q92D_TPWl}s!TGXVE=GpgBax*#EoV#P+h#EQ1 z4h`y0OVrmmG2_B>I9{c%+~Cbu{EDR(QOv2v_ZVLPBfgfUVssV)dfsUx)FirrBFXlV9f_&vHFJNh z&ZDL~$QJ%i_&fKC6}**$yGPv*o540IQo^W^&c%gTo4g&t0b7Er$#S~3A$mVE%Aqzu ze$S5Wkxt$%tI1_SU(DfGdsDuT`s87c z`Rg)z`)WQ){Qja>~8;?xkpsN-_^Us>W2KBEl0Xym8oU^(U-sD?X12c zw|Eb(@1;65RwYGW%71bKt8ZS76f_qk>XRjG`vPw;rOU2UMJ08s@}RWMyhvAQ~TJHP5h<%+Ej(5*EI?>a?px^fK-mY{1sKnr__Kkb`Dpdy}z{*9Nj)x{>33)Y$M`1 z{h-&V<5wxc05TF*OVYS;-26-a>95fpkB=3h3xeosL)-;u*po6l^PhD2^&d4@%@4ZP zd|$lpuDHH*p&z{I5O>+w2P25-#K^0CP8tkx^x_Q13|d z3J9yv!OL}cGNi*n)SLHrjq#^PzGK9W8f%;3;_}?i%V5vS>2XT)8Dy0dUqpLsx@4Cv zh3^qCG3`c%SaSK2I;Bf&yPG+C>=91i*Glkov`kz?RNmp+$KDgA0d)TS_wSyp7w+pn zwh;l)23b z;N%4IK&jrJVM!t2udw)VoIps8Has&JN<;sja#kqQh~hs?qnas1MHf#-YVz-)XpHh0 z^AUQ*9FNph74;#zTJGOoUobD~d{WzFWDvK%&m`0>K^`cjenV|(Vca@ zSmNADc8)YzzJHI$Y#E{RO!{_?WRrfYrvvgHNOswLw(bFQzyRJ`%8%41Ca(UO3)`Bw zbVk0dfQ*0rl1%{{Y(m=ZETGy=ES&34nPUjQvon<&MrO~%y&$Y*>Q~4ej`PlH8cbD`!X7U`)vXbziz?CJDs{Q&KqT-9#8Ly`i8Q?w0-t#C__)~pL>g< zTf9#KB!ZS;m(a%E=L2ncxPZ>{T?bC>jX@4PE>@@8NOaZ9Knz0N^?@}7Y%tz zO_G$m-Vw)o(n=iyD;^4!b*Wi$HW{hhHcs(HBk=NIpn1V3?1uK|)jM`0@LYZJUurdA zXrXAjnYk> z81U@5?iZ@U7wYK;XqPLj6tt)0UK?Fy&1Ce`rKYzi+scH^4*qWOm(@>}Ahj z(}l;tl!vx{ODiRQ)z&J@UOt+Ta&V1t15yLU=}{f1xxS{k_8UwKx#91(%XO71_7QSD zZksGIpBQ;sh-=q&@8zS*Whc_nmI_G(h@AGf$?Gj2$K1#$2+g)ux zG*toKl2>l3)Z<)k?ebd@Wu!i?Fsb3!-lw4NQ~YQTxdL2(}QdQcN3K0c2e|D z6ES8R0Yiqll;TnJNUYoXUz}?Bc5;zq)#9jj0htA8BKRgJm>=5USxzlpDC<8*$6-f=I%Fvf6#H~I7c;|MCp4K)#8c?12(37dRRoXvp)!wcq80t$j z6-+6W<;;^U9_iQH1;rR0t~!)Wf;IREFy*4gLL0TlVy&)V;zP=VzphX@8`92JpUO-d z##_T~p1-R?rD3lVM<{5ZdnRSc&MTvTT)X~=-V+pZ@#%8QMktVYKqmZ_NLW(b@qV`= zR?<6yU|@dK?Jtel6HB4WqB-q@6r}XQN#QY_ZNO^O`;!;pgVhS@+5ky6zgyvdH_y*Pvy_i3Z!x$u@#vZyZfDeSp;{Sb z5vjt9gD`9ztOVg;s5cK}J#$1^qD%=GY}G5p<6)fxg337S%_t{@J) z5$>G8KJ*!LO89qdG2rIyu$`!h^yci9eu|DATIXwc3{dIEjDb0YvdFdswhwK%7$w{N zsd=Bj8hZ=rOA-1@p6!?Lj-}!1Hk19J37%Y|M-9W$z$`UGMKQpQz3E4-s~YfW*vNlz 
zZt||bYu$9+?PvwHcYu5=Fu>R3STz?S9u{-YTSmV^g%3Qx7=o2}ea|b4-j;Go@S25b zyfsfIvxb`k0^eMEcZ&H;6T-d#-OiZuw^_ZLT_vQ_O4*VoH{^OM|7Le zHY+y3fvVaD;A8>&koGRYT7JkEfB`++bN85E4{7|VymH>2KM zik8U>PI4bJDeJBy2od#Nu9F~ck!(hg>!0>gLb2SdwcXc~xAl=EWjWxY?sles0@$Bx zm0(-k@qOdIqZNlSZK!#VMH7KnxX8tY8!`=d&_TwUp8=L2wXNPcfk9}_2C>$wZr6M_ z={ah`pWXN+IvMW(Z+267E|LDl(_7;BHPsif$puE|E<|#APMMSdWwIhFCuiiIpb#kusc1^C0M?9Yyf<@9-*| zi;q~{8QEuON;%7{Sq`ba#VT$n>y6kH8_v3Ho*A6_-Bc9LLLnH9SLxfXKQ;Q@#t+AD zCjBg@dN>!ciO;UZ$z(daNts2;H0RDv$tP){vVt={rIeaDo0@xu*Ep4S6`YVYv*7`f zgs)}3?>%Ubp(q5~8h??!c0_$KBR^fUF`{2If~BeM^rweibei?Tq=w|d9f+qwI;vVA zHWZ9PR7dGQl4it_!j49+$W2qb?jYR)n4l<-7M^}+ zLc8n$tVnm|c6syzytTTdgA&d~Xy^zqt5bPjatn0tpWZ)kGs@`-$8~7No#&T5IVj~- zWdt!y=2Po=A1uPV-bOcx$r@rZIwC7Zr-6gWc``ypSS;zjeK$!!wUHy+`y_nDT>Wde z+k`)X5h?px!W9lI*Sg`#ewTLl1>y3{g!c0q{RXd(wZ!)qHK&AOw_v5rXoNVoN2O5q zvP~Ec8Hy%6tu?csy_%~1JKcBE5)yg3J4x|O3zbBJ4ma}&EJKTjSc^E?J|P=`FhcNR z%tu~stAY^MxGqQ4&k;^3*9+#y^K(b&#{Wlrdrjh>cuK(^&RdXjWwb^oH)x((>03s# zQNg4a&*3I?!P8bZ;edrovW~}pBC28K+y>mFL{LIW`e1E~t#iPBHF4t| zkE6Wym#Y52bB)3Qw_67hQ^wNjNT8Ld|H&ipi@Rmf&f}w2aetkN z@!Qo{mr2h=j=(O0KiTA-$Ta0`2QDE;`=W>hXdjy4a*FM3Q7OxF@e<1 z5t>Kyd5e6Fu8$paRG5(sBZLh|(LrW&YQ;)ges`X1I1HG5c`3W9D=PL8$d+1>pq+_c zb#F1ytSt61NxpwWzhouGrjfiD_w)n_p{skc{$Up*cbmOYcosrmL8Gd;`VuXjhE3f+ zBY_PAiS2_!1-GqkEnK>PzZJIKD!l25gONu;n?X>(2F|{<8HYxFMCO0J@^9{oxzf7* z9Z+qdpx@*Q`|ekLp}Ll}Tr{>gBU$xPcPqRPBAZk=t__I_RjlmeXI6rKzV|add`&=< z9{GdU2iQS7MShqM@4T6j{t8V0red7U&eyT zV@x!RMP8jLlcsQMGzW8N3FfTGP2U-{#SxZhEJsAu{Mq?H;Gj?SJOZ)Y zb-8+pS1E`R+bo!G1522fP*>tiDF``O??a5+5lNhBbw1)f7b_QW=uB&F*V(Um|AfXi#;m^+k%WI?$|Uf$$ii}x z>o1Rs9do;$f7s=r24mcD-uDzP;z|9Y49}b`eo%b`^d%1cG%vjvea!KKk10VYpTV{{Fh#X4fj>7id`aJ?dx6YgVF_oJC2 za7SY%Xq+HwNBNzlV+~FtN{I9lo-wlCc4g7tR5C(~$GWaNk&87Mz95Sk8S;)bq2~D| z0+P`wHWijJIT5Co`EDZd8?i5l`TybSETiJumUfMMAh^4`ySqEVCAhm=7EW-t;O-8= z-Q9w_I|O&Q?6bdnzj5x5?qA(KdaN~9&8nJjJ?|!i_`_@F4QO}_dwdg-!j)2Ccak;& z5C6~N=5+v5n9h-pqCsXMgrgtk8&0N3lET&ezQutIlku`NL8P%%&>7AU)0={TW{{wp3F^Uuoh4a~2C177nAnaZYR 
z1>$3^a<-j9mG~19do(*EdMbhFq_`Oop47;>63#C?5Z){}5UtBDt9&(%ds0}YE`95R z$V~~Y->G?LE!ZexIG&e|{A>F{E{}nvfNMvVwJ+heL(sUk(D!wLwN3!r+T%q!GxEgf zQ!LzNDD1D+ye?T-6iCr%JE=Z7yHCN2%Uw0pU8cl-J-S%dGk3 z?cJU-Ih$BoR8&-rBGTh;roT{+&$hPhb>5qQBjlsiZH!Wf0zbTHQ!4)2lRg2d5-RJ5 z$Mo9$dLj`Wm51LtkQ!6>JaO~b>f};lLOVjSEjff<6o&qHf7?uRb~Nj{^tQ(A?sKd z<_qU-))o{EO1Ignd2sfltF=W(7kl*c+AF13c=(momWQax6{cnI#H!)J2p?kNU3_*ae6FfrqB9MpEVBF}PucOuZx;*N?Z6C+=~sYtcK-@G@XUBK6$ zKl^pL@kdOl8R+~`HxdRa=~YMG!8~9CPs1ypXcvdhwW`q`I#TUtsp87?2NexilnNDk z)!kd>3+>g?Sb5Kxmhwp~yCcKyo3L(V32K-MWzb0UX%>Q*4{%Q!be5%c+~|8ItM{SC zVS7#y$`$eY`l)L)kl;1A4;6qp*an^J1dHFlo!-R9N7$icXCndRS)nIk-6v!Xz$)u( zC;g2OSnKrWCD=g1?4srE6l`A)DtazTIr|`fYo3d5OjuVWY+k24T{y1Rc0cx%?h+r< z;tTonv!x*8i>wk0g$r+`UfWmWS|{EfqTn%t3Vp?-s(ay7zfnZeoNP&G3BlY9N^LWe zV_QWd5ZK`K_j?5BJy?JF&JS=@+OhwUGha{szoslR%YT}(DIHUv3TIzNP%@hTZOgKK zLOwW{xj48#K^Fhpmi;7KFmSW6GjTJe%s_!+|EDjTqPY%+m~yV0i2lzYC@jGKUs+eC zimH`x%(f#<2x}dRVsy^BliQ4-~_bV#+WEZr)#sZE0>AJUf>R|C{}CZTy@4N`n5+fojjj=Yc9K z8~1LUVOZ~5!N>Az*xZR zFdF4{c(>?b1_2nzzzR*YrAN-ME|Ef;LIJ|wn}3Oq1pm_@ANMnUe0eYkJv9IR`nU)n zJv7hMeS5P;7pQK4a)cbmKi$OrKMW5 z^tAtv@ZV-Du$8`&yB>9h?Voc+kTF2?Dbi{I&np#|W*@E!?4vO_Mu{ZsT7}3Kekmdy zO);kK8Idc?le!A#Y1sEkr6=U~^>lD7sQKSn^VQmz$q zZaL;hCfu~SdjB4^m8QS8(W~@{C#Cnmq$CGb=E;%nCP&4dkG72()2-sX#F)kof21*i zURrIBL}?$$C!3Lo9Ol;k7guD@@KcESnhk__b5v)&9O?XY{>SU8!N=8*qCGBV{*ZSL z)t-l#;_;@3go5O_OCwU4%wz#u(&+WPLs80KSW%)PJ`ELd8iTj@8>DK0OidqfAcXPh z%jUq6>~?)(aG)+k!2`;?Un6zZ4%t3*4+jr>brrwUOw1iUUSc$}jNw3|wAx`->X)Ixx4EI7!Zm{Rp z#eg&wj8C@eVzo`A5@RAK1A3`?0c8#rk>dRRhEeeC>KWbf?6NG&T#sgO>4QkPCqR%^Kjgpy1`IbGGf z`-1cSlBQ?O1>?^6}eIVQz7+qiryO#1kSx&RXKKrz9Nx zqaGrf{_)dNFYTR)WN#^P7h~9Hx8@Hah(t+Fh8)>iZK$_zw!_CgtvwP2go`;Ncta*;aq8lyuK$PamDSEZy<$ z338cgCK7?dS( z#m^MXP!b%73J-QUK(8Pg?&=O}=?T{j}IJ>J} zEm|H|!fgT;W9Y$`?HymEJz)oLWn#9g9TjwDs5|1QK<9gcmF51jwbAegs}xfHl{ltu zr{Q0YqvG$0q2VtKrSbK1uIA+mr-phOY%r6^5j&>Npz#H#q%d$VxG?O$thk+!`F4D@ zB$R)6FhbMuHMQR4!p{4}v14C-pctpV@==c-OpFldR?Yv*u~c)X2SawnA6qhy5! 
zv4I5{tdUQ|i;|CZ-G&AEd1tH|0FctRId-_1__4KhOt3Ez;fAm3;H4vB5EBLJR#_QT zzV3iX;|^GqsFA}+3+4}Lj^qMTwT1D<{y^)4%H8f4{Gl*tZm20Te==BegN_XyzBX+q z;`TTxj$VRETzZwFG?wYqTK#=ws)jNL*11c*-5K@Sbxf{rWg96$t>uri{Vn1zUjFu) zO6PlX=;FaGyxVD%Y%THO8vICKVu;hJSh`7|W)TWbhtK7;O^st&{@kOPkzWEwzuj1`uRy$s#y4V#c)_kmAC2 z<+Q%m(-cZe@(mpIrpuTN_^DC4k0|6C_xMSj5=5O2eBt9LNSfhap8n{$L%9IUD$WJW zD5&rhnH}W3=X)SM_>gC92GAsm`PEoW{Ph*2z}!nH)(TMTf_WP=VofANp;MUP8+J5u zQ$n)fhWO-+^T=$Wp`*-06za#K@!7U97pnfsObub^mlX4=D?~l%Iq|isV6$V1!<-8V zEGOVM^b1iselGrR(JA;>e+aAw?(PBA&Y1%FK&(B$L!VJt#`-IUmT_I}BVQl<-!RaKm4luO^a4Um*moz9te_eFa5~cXuHEs+DB6h_7S83$U7AUuRRw+=DrQ8u;=!4ogtA%ua&6Vu5=SMk5=RIwr@}y4 z24+CI{xq7t0Sh`It(}4yhqjjtT3ei72{kvhM*h^ys_gqu zGK*Q*I+vL73p4T#&SGY-JPH+J*mmoa@D7Hji;s*Nxl223i^Yrr_q=_KOs9yA zLN|WsmPNC9P{ZWAx}Y)r7FH78cO*-NsR18CpDL6>OPh8iH!>hEZ{>#S_;M0J3pvJc zawNrYRN?$aM#G$N8ZKnmskPOTL^OHKP$_#8YkQbn)xuu}nd)i0vel8?T-<<_yV{l< zR|rY+F)puA013+PhvrU6jm@>6IV<}`jDFEvK7Wmkah*D05Nboi7whip9|8;`@G;>F zip;Nv>VX!gs}%qysrJ|7<}k1ld~`n@*yv)JxshdOGszgqUBQyZn36<%0ks##%mGuo zC>~t98~H5GH`IFlR+Ru2pLyrFG2g_MTIhBA7!+f~wNN(j*@M_M(9*W+z5-chH_ft> zpE6NS`Q>g+yT{=|q>^p}7fzW_#$ezr#xjOr49s6u{ICJGAWp{KHcFO1GIT4Q2*Qfz z)xCY?lgX~X$?KVi|Kfoc?LLb>l#L!j?;!WG{_O3S&ZpePHQu|Ta<79bhTqvc{19me z!A6&%#QKhns6q%lPN8%~?G~Lz{bgvc2oTO-?MwOAsjboe0j76_PhVd3h-6R~qo-J! 
z5o6;r`*IAxWJi^Z-KO1KP>7jNmwLF+JFNwk6n)$Fuv>FF*+;$U>>b0NHVBV2TJ(M& zPH+pj%+#UIyG9TENPA;Pg#vg8SXm%^bE8W&@k)pPoOQ3S_H3)LV=Ximzr*w zieq$n$jnRE8YDBJYcM^2*d~c;bY_b;HsVCWh93vi-tYBlZ?LSpmV>PPA-Ym0vY$?3 zDNw3tMr5XOiDk1k*$U<)UfMs6X;)~E)*-y2djv@|U=wjuzV{)n6fhUIz0acX&1Im( z{yfnkajs`vPT;;6>2=dK%Pu|dEX=bPcVV{vBO&g9ORdPTX?j|qbupYB@1x%q7O*EY ze)ar0LJ`S7N;igm7h8=shb(|#^v2Jk_6Pne_V|9SK}#^=go#jvyJY#jaUz95v`Gr5 z7kx1vYrpNf$FOPb%$UZkHxt@;705pDfD4rVuGl8tZb~J;=LC3gI!ge!c?wKMz@E7JH4_b6*8eUAg^5xh2)dSSZM3qOR)aejuFv%G~w`-wl9F?B3>EPx1xkc6kG z>i)e7mV{I4{u)RjJg~bluH%pVkuj_B4|TL6fYbq3 ze+vPJ)eOZT7EunYOLw@pZIo?`7NVZb-~(x;YsvTDa*zMue~b5j6;&xPho8lJBq}Ir zip9n!@&8jHMFlW(GyQ)~&N$1TW-VvFou9;iO$=9fru$r}I1*cb9`@JudnF?2 zQ?id370$zzb!mKP`jg;uY#9>j_?^PwtldMfzcK%cqkecSuK$bspTz3u-?o13xGXST zur`#xL|2{kzv+M+0%?wg0TSaa$;q4ELfbA(A3EVr{}o4_|G(N#z(=6{nBk$@f4R^8^J2Wv&1j%R*M{#`Ha z3kBScb{?Vao!$4I{Jn|NQ>-{HnKGA28?l4EYd`zqYnc?~|6ahxP*kg$#PlhIm*cM> zeg;ZlR+W4G}h<5K)~HmV)OA4=^NUMQ9Od6ePzgvKKckNl_gGTT%X0g$Bc{{6Nix zFiSp+)tRQKVjg>om!HlibN;`MNLQA5E?;AT-v&)QY|N4~JR%(F;_Q0EMSKm1c|}YL z;#Hv(*Kwt*ZfD_TM~_l;p4R>f1qLy)th}uXm>I*Ie=80Wngt9dCg$Km5jS!J4elHA zScGk=S>icdLLzK$$-ur0rcF859*(*nAdWHD!+UKe+zmwUjWX*yTqPkok`L&iapE2T z;;UM|3#EWYS4H7eV+1T1X2Dc;BDQBlXAM^SU<9Ap|wz zIg?WcgVTc3cX8x}%zt)(X_xGpjCY*?-po68s=f)lqqcLGKkGEVT{p(d*6vp&qb+(a z8DfLs2f7_VDf;P+gAg~S|Eg|G@2b{OM+hsz1;<2eSZ=OKI1^LWc<(J*fd~!=+%%TA zsJjtdHDu*L!RgUUfFc%isLsaY1mJUT(OSxdl``wq8WT#Yj^OYT_b5OY6oFj=sltrO z%~i}t(*f13p9!H1#TpB7aFSbqFY_9{qg^yt6V6vfq&`29G$wroRFx{1m%(+H}0(T-cD^_zVc!J@h(0Y_EoJj{E| z%%ha9PK^H0?u6OX(LDR<%$pR1-hi7}c}!yE&yOD%+aQV^Uq#0D>2^#2J{^bkUyEQ( zCIqH-e|L>e>ZYg@Z=H~Rl21S42P{zw(8fRQ(}+}kJ%R-1QGN~7xD~_{M2v^F5LgXr z3`(unl~QE}8LX60d3{4&YfRy+OmUi*c#yKDLotw5U4VgNI{xT9tw2Gb{}xEJ)dZ6q ziHj8oK3&GfN!Dlt#};`5a5*2rU^N)$G*teCUE3WQlVahX&ce<|ZMc@m0bhm%?fL?$ z`yB>9P+GAi4i}u&9(;s;MPWn5V%>#wzM!6LllTDk0ZlS}Gadu=tB%_7{Q|FT`<(4J z&~_cN=+*et2m=3Tv6pLDmd1u111q7iHeCqS@2ok!gh!cA>%#?vI*h!++ ztuEA8a}Ng{r!rE1o{?=#OWe~n5l3CK=T3Xmu5zq-wWl{1SH5Q4S_ABu32bsAgRrDU+AaHeE7&=q*8)j?#v1UZp 
zbIhaF8&YX}X}V7L{W)+1GGgm)hwYd7yOZDV`n_xqAw)W~_GWIFrVBm}>Qhzr(6Q|M zY=L!Ok4+@Pwz00DV?o4A&RlAo&dKbT6~;CuE_pwveRvw&HXU=ouy1(*zpD2UOgFN# z^Y45!mfn*69za6!NB;){uDL<3Um57uSiMu;W))#fl4IYG$#73Tbp@AV0#)JN_sJGG z8i6nCu19;M2KCS<;rV?@SH)awuVx}LG5eY6+<9OFk zCY^>gW(%h(|I_C7-cdvYwv!0gyv*z5vZ;jEJtnqQ? zksj8Khp?>_vb&i*Vb>RL`P@*jdFZEa6_`!wH5W<$aaj8-8UAtT;l5XVxzKxv&ntUn?{-lst(P(O_eZ3W#&ll6vTYaY zgRjR)+aEroF`KN|HIWsxmy`2qT!6de3Yr$~Vq=16z`;K#K@ex=svd>UwtU;Qqswxti2;IRylFtIq@sI{dBlwieNj69kP(QCWKBhj^aeR$Uvazk_eWW4nz<#m{o#uW;p1{}H=kMSd z1U3L=DvJ8h_t3`+Bb&x@8|*y%Ez=Px3FXH)Z)r^iov|k znYs25M~3=2gH5$%EoWJDDeK^bTRO7*c+%i>3|M)~-wrX~M2^B=+G!la@yAT_a9VUJ zC9vBuuug-JRRRp&wY8!UEf)-QDe>6JRa61S1=!`utoG&nbbce>=mOojQKj=WR|#A0 z_`+4}hYUJjiOlJjqO5zm2Kr`xQYc%+T1(-!V!iEEXRqi6Gne<}XD>3l^U>9;b2o%v zRK@Gg{d2qM?_H619Dl>&xd_mwXQYRfe6{)s?AMxcO`}wE|EIXqpvv|x%xU!pVrNo{Yy7c*bK{d34~mkmrNUC9l}#p z+NfT5m@jaVVUn8&ushoG3M)QP23kOmqQp|p^`Ce6BDueZYiR3C!G5vdK|U$<1b{=M z`t1WgO;6A?j+lHGX{*UvH|V;mGP>RAZDbi1el@@A5lt(2Uxj1YFoTQ9(TsXxJR47q zoHl0ZKxa6Q+J)L4FA_0iE4Wiqxr8fw?GlID(Ql`HI}wH-F{5Q)=?6VwEbf3SAvpqO zBXiMZthfW8H%KMlH`N#QJcq7|Odry(bTM(1$Uh#!$F=dU1@+~|su<#}cbZdaj5<=w znC2raQ99aFlSNvk@H)@-r`l59Agja7HM#f(Le3vd-F+t4<8BVxyLo7KfB#v$4#~@y z=f*aYLZCBV1__z%2lq!tvX3Krb4n-3tB0cM<;l&w`D1xGd8L_@``TnOUIFBO2iU~xAj!OO zc||}3p&Oy#MK;0nc1zo%?Dr2oz6z9t;)m8}fshY4D?%n7m~k9e2+<6a$33n>&Rv8X zRb)F3YK8a+sOzC-d14V;jp<#jnB?7oJyfh(8Y;{u~s=&>I14w(iyEg^BPqM%Ed-Q(1jq)1RhYzDPd)BsCZWYFyB$bI_qIQVT|j6jj{F7M z4gb~}b@_8BFIt*hWa0Yf(}cl-)Hxkgltm`%p3%|f`&j2Mz^LC3wlm@P`#*CM4Iv2-atWPiEg`}#;rypi3ih@?Uzm>LQSNp(oJAGSCeI436T4o%KR z?Kb^oGzDWIlHO9KiE5DZ?z{8kl^g%bbfTM;%CGy>A~rR8?~7|;XNB~-+~VV=FA(nI zmZTPkI;)Gefb6Bndh{{A*8IX;Rkru_cjL*M^B=1v=Y8jiQB*;Ol3(2<{F314=%q)s zgF`IT5wl<3jN3?)*};#ELA1C0cz1RQFE<6=bb0dJ0x9TmcCE}RW_oDsU)3UKTxYgm zoSz43nU6igVVP%U$n}32i)XkU*ku2>*$f!fnM(tcCP@#hA0sy6|mOg#(? 
zc@B3B09n`bUgQLXH9GYY=~bN1^(ESvZxBvJn5E&{ywu7}AKnd5F2}PHV>*0e8tcuf9~qq| zNf|1G-P$4yA@X~bT6LqG@m>*7XbaN1$#9Pa%tnG7A{!mTGSCJ#GtjLr%&R;VdYg)P=m9MzY6rIk&MNQKPmH!z0^XOD%_>FS} zmv3VMdKT_a!-gJ}uKE|BmZ~IEcc!=4UX`|a_2XU4^-c!3gvfG~Y}17cz2BV^MNS(^ zz==L*H5R5`&i8%K3m%(6z0p3=8G587I-6oyThy+@M1H-Z(+c}WdrAW}(?nygq?N>E z!SixwDrPuix}0jHyoM*_YmykL{(;0SwCs9>oA5Xjn0pb zPLu)`T(ZPhU|M}hCLiEGG{Eyg3to2$HGYRFjQd+L=qFrwngpUFtMtp7VmGKcb%3lV zHZ*`|W_g@$7$IJtZV>v~Go{-;OWIE<7e3AN+^o2-C$Zwk~>Zz?A zgzJ<1D04mP^q+_W#=@Fn2=y6qkcq$@Kf}*|EFV8zME+a+IMssyA-;?xGTs&Nwacj zX<&o;*`PtDKQ*8UY3LITLs$T#FbfnyzU1n@O96mjDv^j2&ts@ba1@|CaQjsaO-9-l zhE0OYIuZftdLhO}ESKjn z5sMP+nSH2qmJy_~4t9~2y!pG@LlLK|47G(CIN_(o==X7sR6yY)sg-NXJZ|2h$}A^| zJThofzo3CjX(3mIp%2(~+7PDp07TxLf|#0cPN>_!rV5NiZJsOZVxvv>E>#C z&OVgz{rbk>Q`%X+PSp>O z$6G^#`jQbsY8-Cs`?*jg;Pq*DdYtcEx^~L@*K)J_K%u1gIxw@&G^=^6JKe)w?(@Fj z_@JX0lg_S!wHwO*CdnAQny2?&V|RU7KJo%sVhr;P}lLg@#(3 z)R72D0!PG3hMAXoV8LNc6|9%t__;L1)@DfqWAt&Rw-@P0?db&X*1R($Y+LNu#V9WF zh0nakZ$1{s@?iTg5Ln-)%Vebuk?SEvHRc)3!?A8V3ybMO8xk$-baS^2;fffDfKydpiFq{!zYYfDP8cIA?|aB261Yl(f{PiqxWw&FiR_ zJA={QZJVRF$~2pe?6IUuGh0SWvdB#C&A`oiks8aDI-G>x9#4WH46 zO4WvJrQPHY+r2lw!u?&CbSG=DTTrhmJJx8OCoi|;>)qe2+NBL^HD59qt#h*>@rjgAlpbsW-d%{9xw+;%2OQJGZ88Os5MCBHYYo82HD`L7PVWw{rCyCq9 zt^^4rNq=8X0Y1CUjs2f)eLr0+9e1+#HQ%E1&tE*0`1Po+M2%Pl}|Lvp{{|S04vL4<4E+p4cIlwi|zNHl}$;V*?J3cdjsLm z zpbxzLs!}Y8;_z6a&pT7m-YNmOJiZzMU1>04mBMae%*|N`tx~+iN7$(8FKu-lt=Y$8 z#gD7q7yt<1A+-q;UA3K5n16sjoTUqApeSp3%|doIio>uPS`LYpJwkALB*~>Zn%=-%fEr zaS0_~O3%jPLceXPdE%M`>RHqkLNTmSL(%;gE;wJZtQ7qaG4!8;DLmPd4Mv-{Apz9~gG5sg6r_F8u;XlyYw597EQ9S}oQq!j-b0f4n}RrS%@(Q!XY|tk7WMK| zj5>VX@n)NivwQOvEVeAq6+kBg$qonUv2|AN<=q7wlX^)zMyeU8H*K4Hlu&cGsw{U# zOqMOa9=HD0r_s8=>52U}quQCUNabsM|z+ETQG=oHgJ#YHRF(?5%omO|EE{KS3HlxFE8EG`V69R+e(56>!QBm#f#c3wmWZ zWjR=WN9E{Yb$*$wsipW_UCZ7{b^fET!d)0o4-4J}!X6CG+k~}BzSkkDzsIoxDQH4$zu8VpW|5td~ zAsutsX}PfqoO!$`7MN@mknL-8yJaP_k-xMGaAT9K2{YRKkpSUK&All9-Hpps>rF)E zz94S}%2E3u6k`MfEe4m;7%AK?6X+~5NEeDZOkzrL3!hH!y|+WledUI!9U1KZY#Y?>{O50fKm 
z(|^pU$Sb@-KA5T27N3-1DFkJaK!laQ}#b5(crl z^eCeJRh~X#-5#YB4s#TRhWsiHM-+_)f=2~{;t!TXr~+~>jJQX|OfA=(w?k2~(JBpr zX4&nq1=<&SM+6}AHQu`CgPk7p?gY%Tk=y>(Cll358ukCMep>{h;)BHhxU!rjl8vDk zd2U{xl%X&bdC=CW2JG*#s2@a;h0`i97=PQ~$aF}6o>kL%_-jNJ8?i0K&@Wk%-!BaR z2Wceuo~VUwU!;tPf-hhDVc{D|o)kj}cnj7>rDRy2sx<&S44|Er8hnZ4LKaoCPu5*& z^J(^;dtqrQX?fR`%#dL#@0y3{Cpq6Ijdk>wk?>Xz(h^k`y32>pAX`|fyPCL)q|niM zU^1kN4cZn$>6aYN>qp)wA&iGO7mmj|v<$(eT-yu0>rrsU%9BC~0hh)SP=SyNa;dv@ z&&D!V+ys!N5yW$>e|P>Q+cYyVigQaMd&AR7?^P(CT%95MePz&8*rdAC1`j;YX-Y^o zqVvK%kUgpSuzTv*QdLLe{!YL%z{q#3_a<bI zjN4K#{;p0N+J+wZ?I|qRiEk0*m_*!EIz)lTFdv|7`I!y#&}+fJ9en_$vak@=iDkw% zYrAMBOM~zr@~p<0%dPNKZNnMZFU9U5hs$$|dla|K>LAk~qZ(@1%Ub4YyH2iPF@Ndt z{hxsO*N|TZFH8HS%^uBI@y#lagfyP*#VjHBTXJIMhB(8fKJy>j)Of61tKn6aNZz$U-1K!c?&2 z@+?G{f_84h=5!TdPBRZAQFta!V{I-&5%3!c^p9LUAP|i%@JG=J_Oy zFXlI`SO-&F@l#%X;fYXQ;tl(gXZUcK(T(V9kD`Y}es!gHiJS!`+*TwN!lfZR3UR-% zpafb?=%=q1*^l01b=Y5LwnsS>K%NjYqWnKnM-P`UPDe<^`5pN9Yx<8$=ohkQK~&2tsc1H z&+_?0A!4#OZ>G0sQ1!hyfc?oEsdHDn^W{TX`259+l~om+YW5O+yJ`z9udd+18>1`7 z!~XHAwUr`%!j&wDHnh%qy)#ReND9xXC0FmCs@e%SgsolgB}@EGFL`ZtdaLBvB(b7= z-#8{=@lPdE+=R)}uMI7`Zwg9Hnqe0iU{AeI(Z;c5?g}c>d449u07tm4Ll{?lu{PfI zL^C>NuD^YCr|okj&8E5)gA%kyXJ~NeQ=>AZC%4qjf<(C-;#Y6q+k!>71_Iogga6&?jB;l<}#M z9?Ow&@+d|g7O=*J05=7cYyD($N0rt~%xF@fT=CJ{nP60o(~x(fv$_0IJjP)K(x2-o zs-iMfOw8ZqC+RVkKm~NzR%v0^OF`m;TD3MDI-zt6r!0?g!sp*$Bw43bH}_!EBmG9( zr2pOyCrx6Uvf8zn+Ny#4nG}zwW^{dE)1+y31-_k)s~fZfjoxE)n5$Br_hnVCP@_Zc zNAH+Zvkg6q^m?+KtTZ?|bUT#2QGX}pn#omg=X4}-C#QX4JM0WtGMwt{R;&Ex#=k9^ zTL_cnvMtyj{m+G?mVJDcfe_rrp091HLP5rV#z#W9ywBkJzajqtq2Ieqee; z<02&b$}_D7Cjz3we(mGnrIqeBHEYb}+o62a`h7BcNe4IDIL)eV{`iDC67(hB4c{1! 
zMmCXyXRa8nHCLM`B5LOKJ%_RTG#i39=eF=%tgZJc&L#q+8!O&Y)Z-S83|_m~TkN_8 zQXTIu8anC){<{03jz-u|k~VC_GcymMiGUpOGs9fRtnpBsXd|PAln;E+A#a*uB>dbI zGS&=Sx6{R!T}dv_;d%m$e*k`bg8d*|yq_;S^5VqJ8n)a5=81zzBxaA}(XYtGq6qrW zEK*`_KYPYv6`AEju|Nib_52(SF}IXe{J9|7?3!!fi)w<`4LRw~5HacB+5ycae9W%T z?=5RA=&r#%b9m>geug>kRl(NITpNNx^=ccg)uI-T`E}kcCd^FjP&@}^Cax+m_+KOt_& zVT)kv`@dQBIuiYNK=IS>)4+Q63IBGYEd|!%eZAT-HSZjZxf-A3$3$mo#w7la`3@!A zqu;W$Nqb~Pml0ySwP{jkWb+a$@*W$FZgsye3=ybEnfv_v5iS$K!h{5;W8$#R49SW- zQ-+lX9n%Rtid>Z*ozxMTrVv1lNR!KKPBmx5eNc`#((H-|ji+vZ zu)H}!qFA6?mo|cWSPjX|dOEmnjo$$i9WXROk_;J%JGMoiZ8~+YCAnGy;6#|*Rh2xx zf7Zzaqf%-rgH-1UX`7ID0s9W^edmQ&9_O?6jlj_+uYTw5VvLOqYZr$=Mn{?YXeV@? z?%*b`nN#1Te@m;!5&=`%hP%&D4^(#399`DB)-22qS)pC7{kZc*?b6vcFOS2U^rOTm zq`O7y^)xfPA9pSYHYIOn+BF(Xn|b0d>;^1dYHUxh)^!I+kJfckUvTC|fpm`>yI~?31b(^YWs6Rw+2V=WrDwi0!23lm7k)B z(d3?JK2O`y;KH_Ly|=L~4b+pjVqbi8Oh@)4C$h2=p&aHHY3 znw}Et)VSihNd*)w8RD+I0pI_mRFb*PLV%l*R?;hz5k5(cOv`jqb^{rp+ZmMBD-%-( zrEvjD*XZPK`a8D)fMOJdFai4V0!{?TWQ! zPwD-EJA%3lpmLk~R=jn6R|iVn@S8k1;T@eD8w+1#hb{2dWQ82JrLCi*3z%YHY?^2) zO|)WXox_Gx5{nPpX>bzyWj-ANV0DH)_)3VDM`Ayeo3`L()rZm5Ly?9@fVmNV&P9ImgBJLJ4O)HBOrB@@dv; zKQ33J{S869<6r5Y@_NH3VTsV!EX`ubFj@;+f54ms{2rf(_J)LlBykAbbU*DIayP)d zDf3*iNya*1LZP;{9iXmFxW9%|h#i5r&81*7e+SFSrxw5FK@ z<0Hor!YYRSz)5?NSn7><0^jA6J%@Q{?$2Kq3ftltPu&+vgIvEFNDCSMo?zML-epoN zNq)!VgB0x-1(lVVIyG%f*O|`1CnS9NqD9LHaG9^a?|)gAokE|ViI_r{()P!{(0t(=;fvv(%n0*Y->YgY zY$7MJPNHSpOeqR7#dG#FF)YI(<5&FWL z&W)aFcIbx+ri=gmrkT&7M$GjJQ?Kgo^j`^wqX+!|AWT_4QFi~h!F-A7fy|kN_l?CK3Quyn*eV0R%%l2r0>UjL|Imb$8%1~|Ss9PyF(-w% zS&A5B^#Su{{*?oA3gG5~`feC;)De-I(4Xi3yZy5IVEZTH*{K0*dH#V_JFMOr!>&Ub z1peR&OZXxa1UN<@tiosmRj7EH`sfx}pi}K)I;#xfg{d;w&@Y~bCO`IET9?X^QHYB3 z((AsdFP=onczkKID>yFkjL}+ ze&6v!i<;2v#ut8z5@Zw(_PP4yXX_uaKics}BzkLmGmNd``jhCAW9!|227kT0{c*{^ zy{SF@-lw|~;n>t;TaRA4eIh{roTPDLJ*<+le^I@v<$PN@CXq4QJlF$h?^!dj-?u#B zI7Y2SzN(#7dH~^*&O;l>o77pB1BbKfDXK#Y9_shB0{5-s{J(b@9?unqu!8GiH*%3g z;Dp!SYCwZbBk6J0r7PCClUG1p=CivYbnoS<)j=D z*x>-90U@dW@BxzZMv3#!5<>g55Blf4=F>d&Kg);pA2T89PeBlC3e8t=T;L}PwQ_f_ 
zPym{ICS@@$-KT4izBxXcg|^M}hY)p?nS^s3V_T!44yp3aHZi$Mi2B$I)LsNKn)6@8 z)xMa6(wWhOWR3V#zJl#cpDdiyBqWn|Yj7G9D8x!=b&wU!;|6tP?Q-9jo7xupce@=Q z!isBc?|XVb=5QjeA}UZ@AK>G4qkSWA_WS`z&!;+3i?WYlAq?woAkd1f1>bzPU@8%U zm7956tXREWum2YE{_=mgddKKWn{I13?%3?uw%xI9+qSdQv2EMv*tTukNykpU-1l?N zH{R#`t#R!dRkin3d#yF+no|v=K9v|K%Pn2-oaibRmbozW+tMzVkNP4JFR&3=@nDe- z8>L+@Y!x2W#I?+9nUNy5fsV66UhCLed*xIBQ_O%HT8Xd{?FVf`^Y{?#AYk5wB0q*u z1ggObMd<>yUg;T4>co*3$@ha?^Ej>Nr{flbBg0D98+nijMl{r)o+qT6^>&Dcx}}F+ zPiDSoSWee5Zb7EYtdo_`o14w6PJ%9OHa*)$HUTRha~XneiX5Y-y#Ar|^(Zh_28iU- zpOGesl&>wVzn`r{77i&xHUPdCuOd9OWtT@*Ud>gY;KWFzwZ?ror^DO)-di?eg$j6_ z#1C0NrCQ};QI4BN;xIzZQ0pm7O)I7qz_M_6T~ScMfH>JY%#Cg=0uQ;_{t}{`GJ(EL zNRl;}MNo`8iE+GCXALn;b^D@a_b>h z$DU5psSAa&=ui{pPXY3``hu|OA=e|C#-MOMEb7VHSBO%Z1@hQ;Vdi7Q&OwY*u+bv% zboh9Xqb#(wqeY=3oLN5FHoJVfsiTWS;I?eVf164HSAr9drtU$ih-EW}SP}OF>(vK! z5?%7rSbN-^Hc#VUMM!@aM%v(cY^_ZW)z)=kF5qKJ_W?=Ok9CdVe{pIsAHp8%A>g+8CVS07RmuRlVR+}(N; z5#4DH{mEA*Ft{c*y6BONut!gcqpo!Ja^Vx>W5>ex^lqnDTVGYg7en2+xjJlP0HOn3 z{KQE>s_+juxdF#YkXR?$uzCMQQ(h1PP%Pit6&Qs2&KGq|c1+&Rld3EF0{X3ENdCP~8@VwJs(!4&*HK7P$9>6$tv|t zD{RjHFdbnOSmq){{q;}8%;|j`r^Q;5z3h`)a0+QpKfdgVs~YC2J6(`1jm%hoPBxqR z!ryCdVG!uy33%YI81Q?a<8fD|Hgq5lTgNpu-+FvASK*CU9)Qt25%}(5b%D)|g3POk z8dL#JUw2*YmtAf*JU=SbtsXZ(F$*C5?@&2&W3F@b+Q)e*HUS?s+@bjjDn86(9wZ?1 z1(-Xl1Z$sKAp+o%Lo5d&;H&7jf@8wnKBT5>W}SdOXJx0>GOdqIz}2hrnL~L2-v9jB zyYWyRz!54(K9R$fZk*4QJzNf*sp2u{dVlIW(YJ;i3H&{)Ww_b>7I$V?Ic!|JGCh01 z-VPrLnOgpkPZwrMiWziJ3G*SF*wNkUac4kHb`8IoLS40M8=!`*m@A?KWYsIq_w8>s zb2cS471)St!~S*Op5da$Lp?WPe=vUD{rUWU)AVZio(`|w=5lMR~&m zGyP0~+C%JTmM5f|+aqU=rpCD?Ux`U)m-w6lPcc0RG|-MHFxD1gu7kpI_|}IpX{izJ`klh z)#1zG&1`BP5zX5-cqid9=6|PPpLOTaawhYc)D2Tu!!Q4Gh!)|EtG+Jx5!WcGc@yh0 z0ifu`r99228RX$xRYrSn9{ZTPYvw%4bcDyy0VMG@-f<2*8HA!&X9iHLq%8mzC11P` z@>3Z6Qkn4|j9s6n&3;)d9Q#9Be~T!hN2%cvSS1)4yCt2W?u{qp>88zR9%P4*5vlCv z??o{ZS(g1|?ZHP-8^0TUEKU#v#lYdw@5LWD9P5*(q{Wow$w;>HGG;X_sFkWmY)`Uj zO}>F|FOdP_#x=FF{)rq(fH=VU##`VAGG%OQ#Ska!I)(}R{b)`IXQ$`3xiygs)0R*l 
zVhpo{%QAE^L*UT8Zmg>G^?YoR9}Ro-Y(WUpU*$h#Nx)XfA-jbFV@lloMrf&wSh1Gw zngYUDq-{2EdR>6G*Vk7Y=Cna&Y8A^ujwsYWhh>;*ev@mBX)5E=$BqEci8gC)H26Ch z=SMbtCSKjHH%Y0iqps7R6^;HYbYZNg0;VMCCn30b0j=eW< z0}bY2>+z@qIAlhCV>Z<6Y#a3bqjI(TEb_Lp*}`K{u$$1*;;WvnyNgqHe~&JYwi zcKutUwDOpmTK?IEm+H>(AAE| zNhcUNXl2dc>?>>tO1s<0zOsvZb+mb=PPbmb)7dbSQz~0Ext4%=^QM=N)K6}~T9^?y zK2LM(0PVV5>(4BFDSb`OwDemg_{?{jq&i4dY@a$HKQV3gk0Rx<;2O3e>e#Y=KQfN_U{9{4N7k2Vwpn*^Xb%-->md|HlSlQcWuYmmStOM~`tj$c1D) z-kO7^VRSCOIT5rdkk1-nQ(jz??c+;+HT%Zo(RehN&pDX${6(~hZi(E=0leLA#@Fx{ zhLIxF`QiL+i=Prid-%Y;B42m?u>+w?`btG5WeXzlPy>}{2xK-ai6agm^w;tz0Vb^tFu?h=a>egx%QrOS8;nC^X~bv`QhEQm0QbR zzBlYCnT|T-1dQ?cv;tjL zQ6i3o&sxVN*;df*gWDAZ1~e84gWJY9-Zp0BeX%JxP#9>@;hlXZXcU05M@;oVA! zAC0+_ysRjq@Pgl}nN^)we{m`Nky5ynOdcTFuKT0MgG7(S8nh4=l0-H?`Et@P{^Al2 zE$d=7EB<46d3zhM*?Bd3HK|+|eoSlNQ&-z$>`mAWuW=9C3$4MXcoDeAfk>=u7)U&u zO{hmWFS?@L^VC>9)ro%;RBpDcasWXDh$}_yOduiokq<#wQDhjG8fG~-PWb|gH(*I@ zJ4dS`=1g)Hh#;KxS2RPxsAR=_TYOKWN5FKSp>v{V zA$FwqgzMRaEUJ!$^HoXX%7!&VC+1@BWr>NBB~jJhFHDQ>99~aMvnSjmO>>CNoSGIt z@jf%^TLC*7!B(8BGnh{upC&)7bHrA$+bJ zr%~#(v3n+u=)A7~?5wHX_bg$b=-4`YVAa?D)8q*pP%daJSj$9;^Q3CmjW5+cwF<2C zY0U6yoR&qCaHrzZ%+5%h@B>x73R8jTuRU*7W3h(o+c@cDXSQvp%crQp;LbRG7S;M= z0oVsyG%L9jJnYB8{C@di|M&;Xm}FN2qq`Sr)RIR5#G6_FS*8PDX(O1LPmrbh4Xf&p z8OT6vLd0y$m8L^IyYR-6|3pWa)Zfw3`%h3AVAlUl{r)Kk{^R-v4Pg5()ucOW{ES9+THfF z9Fp)t@$f`ZnRwrA%Gcol6IOEQoGrhFYziXy|@^Ujgen#@rx57LwE`?+}ty z(cikn!6Of*X4J-)G~kp$zQyw)vRCx+fR#vz1;yM&pgfeMUV2a(7#{huX9B7LR_ERy z9=}Vev+U4ZnkvDh1cC*g)nd3wgBY7uHrpR9B~Dha5*z#n9CYPka@?-3zh3*<5w_!y zc=6l-?Fn-&oxj^Vd(9p6d%-Gr-GyWdp)E_k9KK{{x!paJnWGJSV=jU2t8edq1G06_ zGycq&7uI6QePm;2*=CE+ADeMM$uyi`Mc?t9z9`{UU?cEuer%TvzqyE5REQsEr{b_a z^yhSc-RyQ*NSf8o-N(_Yt!$?E{fL7ouNo9b-lOqIB>Hd|%}x!@QENC!B{Nvk{T32m znpM`wJT5+DqSsSMdcE%78HvQ&Tu7 zEyt`%?UGQTA+%11<)>A_J)#wzHvmnjj&av7a;T-xv)TyN*7qft7^TSwE`nh7vwAx_ zvqc7R(uw#op)#*avcxz=7LA&);d)W+garg?AkF@!iXvvNH#;((@D9&i1Q<)}QjA=d zAD2-*r=rOP=TmNB=J9|b1X9K$UFF->+crRIvTZJ?4KyiO>^9y`k?_AhetuS590^Tv4HncT=RlF=hBGdi&a@(!Md@} 
z3IY1hs+6>vgRqyx@zlyba>hJ^HPx*YZf`?zh{X8{la&HA2$&r35 z+o?S#>YWxMUGG8nCKM_vB+s z8?XY#tma56l^+=qmcab$XyW5U?_k`@p5thc%YayyD zJugX{NOV<7Xz7HKMEdl8lVM^y(?ZH_Ni)7r56<#uB44zMKbi0IXBgR%^ZGY}zfs?B)h^ z?KQ4$UY7+tQ)!45{Pg>(xdu~aaWAy=uEP`_iW4j7tafzvYTjJCEm%iurbVN39S@eO z3gMaFa?gLX%1nK60F0jPYVtFwiI${j{-|S?Hpnzkh;n z4%DfF*oKCE0PTr9MY6oy(OC(~ycuLlbHjP37E!WgaU))po=UEjr=OWv(zug3?a!4LH&~JL6+87nJKXS!tuCLrSdOne0V{9Q_ww zKKOZtI{Rbswd^Pfu(68mK3T;&4Fb|R`EO9woro8t(dTt3Q!d-dVykSg2DP6wTc zmP?5t577GA+44HC4~|qa=^!Z7j(wR>TKYQwTQ}0Nl=D-t1#PMgc)1sytk+||&WFQxoX~WI(j1ak8e;8YCPa>GJ^2w~wrH<)=`62zB zt$V_8(QwASwPE{O@Xr3w(mp9?*)Yhz&h5^wc3ZvQvE3oMv|rwU)Pm9Hmkx& z-f(k4ntO^$v{SO=pktMyRjnNL-4Qe!E+}pWY{YvC*U`#ZMM6Al4(Sqila%rH%pqEc z;X6oJj%zft>(5Sa?7k7drWxCbrRx3a(%bg(&yiGmlMQ#OVLJ8mTayRXU9~BUR>3n3 zz&S*>ONo#x#Kib+PI}r6%+`6bd)|d`2&%AuP|r9oih|{1IX(BRz`2d4-LU7js;h3_ zu;c2eC|z<^Wbgr)(&|g}9Ft;V9uV9ZMIWv8Le@GNPG-!gEQ2Fw0@s9Xm%FuNl8&8; z@Y;N_aR0{0Tq+XWe6=xuR2+;VBej4gU}J7jhTK}}llliQ?emVO@1X^CzBMRGZH0L^ z$$r>2K9-G${T%hdGt926`Y zIn(vYW}fZJpZ zN70((Xar*~w|Dys8o02U@nxj%M2#V(iS%dx%eRvM7hCfg&9|FWaKgjwsKg7~Q08dT z(v+xDb}7U$NEGU)#kT)e>)s#?ZC!JoTXiVPJ}4f&u_1+&Icp8P$<*TRL@NYO23JmP zEBJ8?H%3Ybx7vV21gI=jGXtFzK+gITzj@GpIAgFQtu6|U_s6d2VpO-XHrdbqr3^19`Koh{9c#9`_R7dK5-2| zDxGP#G3|6BvI-RthADO@DH9Ci+_BW#_LfNuVz+he;SI5I}D+b)yu18*z~ zM?AN}O51&>-~;M~et%ZER!`+maeeCnr-Zz}rFR_+s~gG318#f9;rECk6ZjXRtrys_ zyTKrrEQIi6o_PD!;V5$Gucyy{ zc$_4BAPrL02iUP+YkNPJ@W0=UK9qyzvs;j>3 z^7k?@BmE}=%scrf<@+Dpi1}aKC}k!k9}2+0{9obUq`FK54m)D!Q_YE!zKO~yHoA~u z2G}+mO`yIbrd$WvA6fO|FQIHcO{xcHwsu$eY=h!$cU=jqB5&4Fb)&`^g;mASe2i_Dw7@*x&Zlea zdr}AE!Dy^3Of9ZsxY& zx!-)7>W`1k7p&R%X_L1cM?Uw6FJ&j6S`S}z8+h5fTRXGw0O_wt=`kzS&FZaLRX5`v z=XOto7oI$0$MgU#yKOo|eq!=;f-rg!)Q)1g_aYIreNV{v9}Iu z$yEJ^!H3CsztSy-F23-1Ke*uY%$$J^L1n!$1y}?tCe?A?gkI+_ec!}oRQU%nCu3P5 z)QVDE$V1%*cBG9!wrNK88&(^KFr%&rpl#f@PqiC9l^m@MPdc!assMYmaIr{$u!tsT zFY84Tz$K1C0%xyJz@hH@FezEA?6+EB7AQ(JM5o6C$w$JUHk*5t^tU^#kphAzUc#f_ zAw08Rng4q2tbj3I+BYFm&wFkYU(F=zJ<|T&%6Y@O?o2@%7RQZwB5O$~@nzQp_c6Tg 
z*61}kPg;HDLOLw-*BU@QzF4Pk(p!nvqiRqBsPIbe2_oZhCwZ9pm;FZc>tR-?bN8CQ za3_=0C|BCp_0T9;sq`w7E+rTXgvr17tJ=i0N8#{KvUL$d5?)&gbY&kvPO>0!`H_1R z>6$Oy^8Fk_8X{6qAx^a0cAB`{2VLTjeL}*``p+`uco1Rp3Sv=q~ z0q1MH;FsPFSnKU0r^3wW^noMU(GQXH4QV{;1rD2#*jzl5|z`LTKZH(MjD9zP4l$560R zx-=XQXbEfO#bl|5B77m7ipzu9Bb{#;yJS=|jM19>2;h|jJk>At3*rlv= z2q00YYeHH78eH)+JLbjFw+-Pv{c2r%ucO0ND(&D*tntqNGT$exnM} z@EQ>RF;Zv#7f*Hg2Ti2_e7nf~*M*v;x^B6_favq2cGCK1EYgpQ{j&C*G3lC%z;4m1*6r$Rp+~G z7OBiEHZ&F2Woldjk#Q8{Q24CoWme;_vqLYyQ`y^U(G6-98IYl3=#Y5~;~zP^(z&Wu*o}3d1!hS4Z+6%hU^Qr@3%@dtW^DSXTqSPM7tBy(K zIu~v*NhR3e@v?o1E+Yl>FQ!+67Ot?KWg;Olmkn09OCP(&+p0HN`v6-h<#P~#SL1iZ zQ(|pn^z&fRD$=N6+|zLS0d|syH~yLr13g zzL^$6dgb0h+1C(QqG-;?2i`-#`CLhcps}xt1wBG9BB_}tbYSfvr8(P_M?Bg*nZEll3K&W~ zDfh|#sH!liewV+{-US4Y%%Al8y68NGAwv%}GEy|njPVKl1dbra^c;d%PT2s89p*2D zpcZXa6A^V_9JaAf1ccV}=>NPr$L!y)j*r(rIwUOrVss8EyV{BWp{B7g{2u{trEyDb zM%c|a>JGYrIGWLTgH#c5LgN(syu>*XW*b2anY4p1!n*4MJhF%;^d>yMWaH5xNLh@N z2u8B{@}T`)H4X@Is+|I2*~BiN<2y|81l3VZ!@~hnb5<%%!9ZzS63IT{+?zwOXVh#V z?ixT+JV>d9@B}B+lHnX8CUz%J(70VH0bggZ7y?U(W|@9GX<;Q{gn{5G;UrBhDzP~{ zjJ5tJXfd3&0g#a6rc>B)pM)g-Cu@YO!$nd5^H#6!%&znG=njY|0KV%z&54wj!qh)V zCDf|pbJMDEuJ%b@^A+z+8q?01H7LC_h7<5u9F}aHrojSDQVxc-cYw9w+tuCGe)A?B zvpWtC)S0(+j{@H?-L`H9fVa#aX{|Zu1UOUx(M2HR8g#V7eR!B?+?5 z^Wd>h>i2dMCDn=RW&EQ9uMY&L-5Gj+liGd_|8*q~P`azR_J6-eV3z-Y2rU1C2o5RZ zqTdBZ8E0VJ?|AaRG1f~>4Z96CMBn9_87IG)4g~$$Uvwlcx@Q9-GhjaI6(qGh@?TerthBI-k4pmz8wz!n37Sf#73DYgPBcZ zPx=L4z8^f=7nphb$9DaAV3YY{%i@^yNrv9WnaC!kajqTsg~YPpCWtT9g!96-Eyk}Q zO=7ssS$hJ{V)c_{cxa*%x zatFau#q7a9O~RHWC0+fCvZi8KU7{;~6@&GGoJ`mJ7z*3xJj+fNE4qzj5=o6;3%z;u zZE^SUczK^h3!HiIeQwB0+_XM4h0!TR8)GEI{Na*hr?8nk9GT`xt8D#+SwsKUq%MTY z8cb{L@wiTpzwJxW1lf#s&ICAEuu=d#-Yd4P_f4mOs=8`|U5=f;zWDLht=C2P77Ih_ zyl3LG8V7PJF^&?t2KlW#_;$U2Y>rp34^C@);P-%2N^ZHY@2K_Fho-FYnPjXC3uK6} zM7=UUv)(ybBKY`ZL_KkhY3%30N3(6ydiFjZEBurb1* zHm3#$Jzqg^=|d?rxC_9&;Hng2?1~wc*B&z@tuWM;b+v-Fp^Z5BeYA+*stE-dTw>3tfj@OOR>oEY-1!bAkGScqM>rY5?wh^cvF&7bpbq 
z=I)YY1urBewEdW*or0&+0`{Mf0wB@hLm5yFys3qLAwBdCP(KSB5kz>k)9a}JZpfHI@#3VfK7wm*lk9E&S+eU!fSDrWsLbQ7Em zM@S4M2qi#A_bq_S_O?|z_GT0BdXsoa(<*QdJKXvBwPDLA3g0nTLPNI+U|Su8Ug^yA zDw@Rs$v+`AAP5FcAc2sFE; z6X_mO0+>*C?H<&UKE?gd#UZhIb%sB2T>!F{vU8Xp|tco~M*4*~k@dgSWX^$f!XN+--16 zR+eh;Ph$gS>v`Q+dA0N;CbBi3o={!aLz0!O;m8RRt<(<&)cLbYaAg$s?mKpAY)!4@ zWh*FFt_;ZUcgV#3P=y+++WLYs(I7o2_$d!VrJ7VdP97r6YIHk)d5FLHz|wIp8*KdG z%sBH*4|Eknrv%>gG zh8_>}AY1`mOC@hF6J4LT=GeLF7-}EG66myJDFtfFsqAzSo0{lu@8AiPatuaIJRNxB zLOwbUQQ;P^UZkXzgS*m$9&{gP9I@lYHM!{Lt&vLEQ-xQePzrnX1Jph9&R#X_+I;UHgyzXUdHH4&+Zt1cE$Bu96gv$bFEWv%tt_tED3 zM+T7=K8_k8MCf9X@M~XRliejLGK@485$Oj2#SsO*I6TweVzQ`W;PhDYv(}2UBdcDK z;nV>_?N2~;r}S&9xbF#(#HkZh6AmL@q6kAm4lFrY_Si20n<|H)s_s9jKWF0gBopIh zX=P8bomH}42a#=JV*O}*c*ZmcBISRw0ev^;<}ps5R1-i3{9 z;EszLttaf~)(CV-P*B|V`$C#H3i3t7*U;{!)KTY*pZr+QJo~APN4%9s`yld8otPYZ z$mv7^v+X@5Guu-3clz)l@Su49@}7(2h5iOre0ssm@|8g|n(`-V#(T<;yZH3pdj7tZ zusqSOtuAp6n^;dBg*%^&6`zQ}ZqHL3K(1bU%wlLRXz`6hA;kTJn=nBE02Yrg+heL< zGdqWF{TNCpMzyuo1=4uW)iDpRSfKRC-^)F4fY=e>$n}vTVX~|t%i(a&Lv6U{at)#lHCrD0JPwC%jiuRChYA)`{4KSqf>rK21SiyU%c%Y zAbmL*KiYfW`l4w|iHd>GAAub@yO_8)DZv>$Sxn|jcn9R~5svHL6Je+(b=hd)z9 zlFl*h^Fp2@fJpsp@=fjAgQ{dl^?T8~(yM&F$2@h;(|>&Fri7}0{ZHtZ=K=RW{AHGZ zQAme>n-Ws=i@vK+Z2u={_*N;xX8(u3Ji+-3HIC>32p!^9aQ>2V5Iv&lq}R1h;CJC* z@fSYDLwervuKT(Wy$*325@{EI++2_T;ShjpmF~0cyzpkcBI0=w3)G$H+hbP!u^|> zVey`Wwe=qA$4>ob8Aq@+2a(g*H~p)Y-BsH<3QXiP_;5UJdwRg;#$U_1Rcsj`<`5O3 z?Qc92XFlpb_#?ZdPmG;TR6e3AY*cb3GsdBo6@Gh0y0}hXJWmneO+s*6k zY}2du`O4twJgU)tV}3FlZsi2P@!QESUUrJPgY!<#ozHesz4^;bj^L%c-0LgFeYa&Zfy3q@o59n-DwM&q_mx1K8zRH&prpA5x^?k8G62&F zJQRv&)qR$0ZQ`M6bDamHvVGtVhf1*0VM^{>L0O4czYGKuNSgw(_2UIF{H?A>87NLm zhHP_5+1So=n)g|6n@((dpL(No3{4-w%bt{N;a+;qA=@Cubej+@rQv_ZwY@Yak-gd_ z6vi5y!7CczLEVz5-7tIfH}@Ca=0#C~1{ggYRA&k_a_2;=5ZZORrMJ1MkMa?@LO=p# z>r)gVS0g85|Jh%h45>B1ds-%4_lSRoMdI7Hi1MP4FM8L$u~?pkj&lE^(CY%KbB5=L zS1F8KB_aO9tT4WkyE)I~6aaVKHlR*!ee=U$L$4D_kqIPDo?URr7v5ei6%8$OH7WOL zpz0z?+RZlk7dICFp6co98hZr6IT+aGeVfm|W;vUAtZ{bzP8kWH^0f^K`^w5}bZwy? 
zWW!;@sCV+J^x&nuVnz$L%nlYU27Y1Dz4Vz$CL?bIo)e%JFg(sUCFYFcek8sHZRn=_ zgO6MBC;?i1TY)#$x3e!294VQ{WH{$2$g!YjYFQoaq6-~0wrcKDEq*3GjrhkWHh5dv z4HbS?XNWjjE{_`EX$>#`{@gr%SR6^|mpvXM#{GPD;YocaZGj7kEd=a@GQlbl*EBSZ z!QA=Dnk#=r1aACpM>U3rQyN^8cgMn1vc`}STvEFswVN=n3yk?as>rY@c2-JX_}+PQ zgmw)g$>F}Mh81E4+-bvXq9L4Nhy|3fIs_9xWx3}(&Y2k?z1BjfvFAq9B2I<;914g@)UtZ7#KJv`rHCQfVQ#+MT()=h3gC0yaam$d$OUs^h2Vf%-$p&wiuFcDoQ6Zo$lGD z4uJgyALxq{s1g=8nMH#C_Ha-GfBN>}%ogLIj)=A@DcoUTj<4v4@hBx`->=!_8uS{y zE}u8gw&AFU+g!_pYJ>8tMWbI;YJYb#>0`L{d_zJ2nlSYUb3Iike2I)lKO=2$&Vz{A zE9+zrh_r|Ges1Jdk6xR|)?jebHJDelGM$Mep0>8u zcLc9irsIroeYi!)!>b%Jv&wDZc&~_%$?OpkbJ5UfZOO^WMfW-L6f1Ihea9G*T??iY z7D58)1yZDwfkiE}uH;&eR*20YV^zi}!3A+`j7#3$&UGrr7Qr zq71P(8QG_aXquJSrSr>b!iWd7h(Rj6ellqO7|i!+NzqCd(Qyl3CwI4Rvk&Mm!LTz@ z?{^&EZQMY@p>Gqn1CqEUCcrsaKaD}8Bd{=GXuP02b0@&Z-NW7M??V*IfV^d88s>f` z#ZiMw7g)yo^qU@hj9lc*X$nFJx4F4R`8y`mLy=;noV0pA3-ioHMu77`838QOwwkfd z2pWbk-~~K1>`Y#O1+J$dWgT4KzyDY;K=9YuWFIn`sX!3g06mBx4Il;_4%)V=Q~}K~ zXe?c@x`n_qLQ;ds{Z}N|VY2!OCpotAM;|;IgfftKZS1{09x;2@ersHDU+BmaTpF2U zKj}Pnq_$-b!X_bAEn2Op)uuiU>XPt5U!ospf49ZQAAdNY?9amp&>x$a*}<~~ zx8=%=3i@OE(yOO~8G!!%>zvQO`!U-+sqPML&u=J_tKNxn(cwVGLI!^{PxMR!zl5Xs z^7fuXKa4zYtT(zexK>t+eYOi3#1_pe2Ndr4<6KE1i-!rumg)U1d%$VR4RSyC4DgcF z3V)7w)Em5ZSe7#OaT=5rAqV3s${eG#6Q~0e6WJ0c9Q%su{Q*t!K6e+ZDRMa&qXXv! 
zex^Ne_M?T1%KbeCiXLcZaX@2HfvVo>$1LT<*TFfki)^35oj-jYHQQ1Yh&qO)B*$xh z2KO@=*b_Kblkj*J@xHCQ>G1+I6UhvMW05(Ax_2!5^gNp16?a*Ir>{=3ac~Wy8B^O~ zRhIf@9&j?*g#i>rxzqvGDVo_*rMRb#!FtcqUEw+FE3X|5S6UJ#LY?9;(n6shMKkI(rSybaU$8Pa&@*| zgv*s;Y}Zn_@su?+x;|N(K_*}MVO`G2YY04cVI_SwPL4k*}YN`EGZoxr==(>iDtw!$8}jk4o*Et zMQ$)OnWie>b$6QI=dJF~UP&rUWhP@_MI=BdmN?9hWs!=KmV-><5?MUlm)1forx3AT z$!sJle0&{i`9^_TFW=P)r*y#Lun<*hfe#UWfdOP7BquXi{yNxihzi#iglZF4p95;B zh`{r5=x9%S#SNnXE~JkOr?+?pDkz{1I*+Z;7%~5T@crt|4&eC-8@{U6iS7gO-a>Jyo-_>NhefW` zBHj+iOB{J3WZG+M>Hq~VsCFM8r=)!p&ixe4GhTJzB9=O=Ui*%r6zbpwdJ#u~yAK_s zSM=pES6Lz@f6u9ElDjXBu*#AnRX>1KDi0luHood2;BuOV>sANWdwqT86d z5mgGAI-7udrm@&*&LZN`-x?`qI62`si?@?^N7c32;1S0kuXhsq-V*2*JR7%?X@#PL z#t7aVJBnJMAxS#C>883~p*BYb%Z#%kYxvu=0nNE{2dQ4OQf~vJwIU?N3a zQ9e>Hs@+nUO{oZCx%VY!CV6F7RciX6^4|8J8s>k?8wkz6vQ%tnUm89Wv<8NNCJkB(#$6`$_ z;V@IlByeh(K%ocDtl;t%<0NF4HeLJjJURV9U087ke3ar;=z+kd?@-}*AHa7^_4T=? zL=WKe@p;Jt;NbbBcbgA+xfHRE-&8p@xpDzrWu=gF3iryA7%>a!2VeYsu&9NoU+^s(BvVcbXZe2u#{1UOrmM7B;Pu8 zsYaKR#FSKIHa#!iiPL~Q^xV_YJ+{0_1i{ZE9e7kxQbn=(K;I2?^t3(8_UmzcTnTK@RZIVpno(F~49LGxVFr{xH|RT%g6T8RHB0FsV61ZluLIQj zF~6PLf+DDQKhI+BF6Wl}2aO>5FDndSEB5C529p^4y0%y7e0a4JP1Wq*EHV4PC$ax$ zlAVyIeOY)Ja`MYhHl;SKdH)~3Kz;2<1oDBy1l@F5TXyNaaBTF=z<=~ zrQ)Mq>6#XbE)i_kcm=kfB>tK3-ZKKkG}@}O-@&LQ|kKuCZ$*90?GhReY; zU*E;eJfxFe;gYN9rVzQYm+m^>tvlqD&s%ay3 zIw*-dtj{jw{*}ougSONIHpfD6&JAQTgAk0PNrR?8I1DDSQ&l%l+5? 
z=QP%zJh=1A1Wq%AmC!c2eh&TlUiU`V^g5CZoa8oHI#cwosTN12AnjI4Bbfi)@%nD@ zYmnW3F~k3}(e?e}*P?&M{@+RdYYgx|Gym7%|1(w-w4TkuS)Xy{$OJ)zDHP@Sv8miZ zin7!*eJ%NdJx6(2hWSIcxU%-xCKv1Sm+R8pk4wly$F5pSu|nK=*9@5J_pD*U#!2AM zpJGn@jF&JpUXW%lR0U2Xn}-#;dGnEgh7*Xp&)2~qt>dEl&eJvnvp1#Rww!1qiAN7m z$9LT69_RFzC7H{hO$n041fqi>)ovXbI(Or)dT#5Bn$<590=(f!M=hwnFp>_m>K!|O zLks4&xG&C({tiDj9>u^bdTri1Ha}j-Ipw+ja;=TBSS}ge+SYHY?edNO zKIe0^5yi=oevtXLf}R%pw)O)6UIYe08jHu1r-VpZ3T*1=+m}@|K&zO>vgwzsewYkJ zM(Ws-{jQHl8NEK;wnt3l&D8)c^5#AKOoX2pgweAVPDF<^6)qsJQ?we;T$f=dix$el z!<^xF&18e&XEiH9b>9PK9JQINZ1WBfb0{ZGBcx3yu*S^svPQ1^`&2Goj0P z##&4iB10oV%f6w&k-=0?s!QbKX``3#8Onu>F)b0{x$v4pqn+ z4S1*|sb29%Fz0Jr$!Ra@!Q+d_F`mOZcZBcscwxiqwhnZ2+sZbW%wePy9dx^8fIXQZvabSl zCELWdA=9X3uQP8aL61L)Yx5^hspve3YVw&_+t26WCuMh@{;3}u(pZ^|2qBPB0k>|wRsko0`c)*aU-ijtzy4z*EU!X%Ls+Yg zstfBXlo=7ORziU&Gq>Z*25}-kLMzLzS>Sb*9<@MN>^y>DTMrc?Qcf4{A`k+M#$F>NGZ*p{}gfkO}_ar@<$+|AT0k;5lGQ6 z1^u>tW@h<6**vz@cQ$|d_8IPSCmxMW6b&$s`mg45?mXshB_oqG{uRPDt}oZldu$}6VlXCIQ3JXxDL!tr zJj$o{^UdGA!oW=5cXxNcprfKSxjzisTqXB~4XCs0ClmKq`U&E6#Qkmf?Y2UjxQT0D zn`VBd8S^K%p{`@j7UYJ1@zlY^^{~%_%8ZJDl6$&OZw&+<0A^|C!THIu=2pL(VFWdv z_Vd^K2Ie~b`d+yORYH5}DSksOQ{uw<;wc&->uO;<#4jv;thi90UF9x(Mk`;!+0&Bf z_q!A&`K6`H zkF&KQQ8p#!1#k-_Wcrzs$S4Wg2EX01oADgTX>l)_zQ0|(OpnkF$ZBK}-8F`UW=PQ$ zMoExDnIm8}xP-#Q6v93~Tah1zU_)<727=+&;d7qY3IWw5Pg<=hUi4VK=&UgcCru+UCs$B&!q! 
zd8bb{l#zJGjp^qp`EuaL+A>FAoIuio{N$yFM96dYH-KfnCm=DXL=$-Gn?ApxLX5Np)u*`nFip9#)`E3it`TW<}{}3HEl-bdm zT-A>k5AeCgqAK(Cdr0c}U3<|wmY_+j=AG> zY}>ZYj&0kvaq~Oxx#v6Y9pl^oY>cWkQFDzwSJgA0X_bF$R(zX({X!D5wkqGo4pCST zBZy<-q0s#{M%exJ+72O=9c`-9d$AUK%FUU!7wiahFq84NgUdfa1b=kAlRgKP=8bi$ zKc2ahPJYwFcf`eZxixo_G2zVjtDeYT+{6oYVD(<9Wd&fQf>AK%$GuyR*Y$qJuGjTi zHbRWhjQo`6CA5~C3Z!Lt@qQxL(tUlVucQ4%U;hX%tGuT?c4X%P_~r zmF>Z{MR0wgM`|{twhH8tW`#WL!_oJuf4G z?vDysbJJ`qn3DNK=HQnyN@%kR=9{_KGGS2uM^!*Pil(3TGOpJG>DjW`gKG6I!?o&K zhu(I1(YadDh+&HA=j(Z#$7Qy^XR!c}7swOBx{(!{hZ^vguNFpFI=TfCi-BQ!!7TtQ zl}VlU1QZhtveippAnSGu2y}XFVsHCLz9PRD0VebAHHuitEA|+-c?u7hcMd~;@zm?% z(jMU2rhJ+%#18tjZ^zTqsa+>Bckh<;$(Y0JoG5Z5{)>9(cJYVU8Bk`gFJA?hmeX*j z4j1`;$9>db8gqbI5cLtt#Kh2D|IY}IDr6qdm1JR!xycfr%f&Qy{1=F2OS~eI%4RZm zd>a0zz5LC=Z#?#7Hi`kXynNlE9D{*U(! z8#L8wdy})Is=taM?fFqyaUuFjOp0Qmx8=mclAvfnop7@;&3#GsA}q#AxUCK|gLDCi?G?OjPx^BLEGz~hs{7SSR>(!P{$*nIjgQ~Vxf;H3U%t1&ERWT%mR!gsmc8v z`G7`odX%!7d8cAwBPwt)&vM|DC*2?7IENaK&YcH~4Oi}9@=0<*duilT1AYQft4K^L z{cwM}MtLXYK+a9@<ftZi3JXFl)D?%*-S{9R%eT-1Dq|lryypl4glZ zH@j$b!Vpp;^&Cb}i?C}Y?A-*ouZ-~v*UudyY3N8GcE83-7|Cm9hHfQ6H#fEA)BvL+ zw%XtUolb0kEIf~ca#$`?ni62c(lU`pIYFULzH^LfcSg9QK7e?&!9{_A2PTiB2#BQG zEyZ{%uIn?;<8zSAuT%X&Ref1%o;cK}-c|5z)>ooQddui_TtlxHSqFj@<2<4Nh*}Rv2pz*(&v=>j3l){Y zYF|+qc;=u6NofyYU=D-_#t2NrgHkgkzajLo>L(b{pU1MW7Vs_5bJpR6XqYR?11qQP zCV_8&v8R&^afL;`EbhhfF|Xh~ACDVWCl_P(LREEkJPolDc%1%Pls0YO@y0$SX)S+{W(_cC#zdfqExq3 zpszv^V>ebR&jm2louq*8PtqS}><&Mx`+J zf?9r60(_3L=hRxSZM~gyCVX+Mo#{C;>;kLhUI;M&Mb1jU+TG1i$reX~LTf`|QO|7q z8U|+2Y34KZ)<$Pl(Bo8R#gx1Z}`+`?PIf zagd-#$vd-ta4xLo_l?R%T1;Q;xZ-P6w#bt|9|BGL>2CZT*1zU*v^=XQq_xB;s5q0+ z`10EUG^^!#sfS$AfOFGEWb1GriY2u-K?NUAP{Q#0D)z0xg%7!UlWy*jV8gFR(g9-@ zCqnA5PM|#QFmoH7KSE8A57`B=!>;>KcyX`~Rm|T|(mf#0P4+&UH_oD^yo!Bk3!1$X zuUks=J^Jm93Po%FHvMTUw|ASSJ&&ZgSGK zx)S5DmjY$#$H*yZ6nu_@IuQ)hv8@#yO%A;5ghji<2k+q`ScR{6VSnR-EgKZKYgXX{ zfGy#+Z;`$_S$pQlw%)BSw2&LUN1?|Hy+Iy4@V;TEiz3*tPg%cC;OYbZCJ5O5`VZt^ zO!<^L{5Ux8ACzLmVFPx7WKUy42@uBAA~#6-iFWGr$E$w>aW}ekMq~VA48({yk(DAu 
zG3jmxRWGBF=>si_y%O7KnAjeh^6InA0gERTP_k&q$R7%PiY)!22)U`YgpDy^0I%nsbAMN+AE00Orp5!p(d@5)DWQ?UIY69>cf@9Hz+cX|Dtt3P4;>X-)p zjf=4b4M-+40o6&4wF9N6W@6){V`gIFU|=C);$)^{WM=(tsYArX!9>T#$;QCR`u&lS zj)jepgOfA4)*pl?$q5)Onbs7PHrd?%8-$CW3XJznal!e&q0|?eHddtK2%nqXdHj$~ zO)6!-7(g*EhX;3v6Qz_&Rijyg z6=%=TrwOYC-3T)l_U;*Y5R^E6oRB+=L!3OKcM?K6^?~PS;0Iu=dR`ki0k*+?RU|}2 z3|R7<_J-DWF0Kv!-02AH6gX{00!cxG_IjXl>PJ*ZcOz6s6YW2r74Jl0j7mQw&#~y? zf?9%A2C)db;}JsOi!exHi*;qk*2bGDVSQX+7LcWbYEopv)hk}bz$xpx)iEBIJkUAn zkMCO=m^_+vreME|)+JjM1I!sCfg93ShKHY+>riv!zGr(ITBnYMf(;C zfY;0HUv_O~XeQv5l7=q{5=%EW!*7_WMK-Pcweq5NIaXWlkrQ`60CCVJc<#1!+LPJ) z!$JWx+?DiiZLdXu>%_*mj&`ri#S$yfJ#Gv>9k*yvy{@XUx;;(RtLIcFn%|4YIN9#I zjUHP=MWe=#KYb{yl08=w7&{+qb~q=UEx$Le9z~nWipTEE*ftYAo$W9@j{owdQ!O1> z`8sz==v66x*!3CmJ3lHfBxh}-6@Il9-HRZW$wFgMVyJQfHU&-FHblqGsU6^*5Yjhf7~ksa%bH3wk_c4$RoxO$O4HdI4xtlcvP%I z)2Hk;U)Hz*B2$Ju4JaN!Eab3+L8$_M41|u<_=pZwyJlkIR$FyM-OOe|&aOulV}|Fy zx^szCK#qJcR82|~-PYcqX-mdPfal1LkrjrA_z|e%^9vM+Ad5>ya-HIfJJ}3b zi3G(H zo51OWcu#VngL$wyGg@m_uLmsCqI_UEs&wA;?BWK%?u>HZTd}+3C>Z%vfYQ8X~4AK$J z%;+6Fv`iXi!z7 z6IF=W2U8+dmppNXMCBs&^4Hh4c3_k-_qw8Aiht+vNq@@sly}-(le+U7GO6FxusIOb zjKzN`gPU;D7>$01pTeic8CeF1VN)442%k0Pg;8_))<75ro0c-OA5?k8>vJ$|3jVF^ zUO{#`E81d8J9!V*`rG!_h@0V5!i3edJ24s9D|hc+_V?D*7JCcPd#=$(kl+0W7j&f( zyj|rl&m9fkm<~Hv%@(b4MG~j`09!j&&IdUj!)P95d)o)+x5vUAHH;Mix$fH8ew`ik zBwI~8qzj>k2T#y9<4ES%=JQdIG%b))Ns4apO|`i0sc&)PRCb=l`rjs5S58TZbHA(t z+BgWkQg7nFsFUz(R2wgOPpZSwcaj?WsTH9%Hj?lip?Wx5o_}!mHV0qO6}$CE383`y zfT`@4W?Fu0n%};gZnppG@g&F z<6yt$dMbkeFsnLr`Ht}XV-lvJ05yy^o=FYh;jmq(BU|d9ktA>J5gA}|xzPC~3~?w}P_-3L zTs=oJ2oD9`D7u_C8*}JIfmVCbRv)QMB1b`w+fLRybB(U6$ap%cnx-c4w*?e2zXd{= z6FNX{MS2gn_<_zIoMir$rhu@r{zD%73xt&=S#TVL7=@LMh=E9nh>4Z`-|rm%5RV%s z%nbb24x+FcYybG14)AzZjT^*cjRA7#Z2Wn^g-MercsUQOL z1^$m7F2Bq#M3KTcse-Umr6=>c@EezUN-07~?>vuj6f=S^ACwz@UD!i!d$sK9?o1zk zheNn%KpcceJl`h@z=xl5NSk4H#c^83lirJi8?cj#q+Ld#5{6xg=6b~6mF2_t@zxOG zD#$q0dUr_^m@xiFzf*%{!2RWIsJi${4{Yy9@5|F89JNw8v0IHHzL>>C>I@u}OtzH8 zEROriY|OD!#k{ 
zEC9B~+L}%VVbV298GvW`+XbBbhV2%w=89+eDe~S_r6!SUWp6Fs{Y(R@JoLkNyrbv$ z=4_wm+oP7OvSoyk|Uxv(5b)09-?9QTFtBf#sjp(um=E|y_ zy{>Mpk}tX{uEKq0k+(mr7WH%i6SrrUW`I=gm2G!kTl{ML6zKrQ72CM(BzO8R9)7^3 zr!+(_NOV?QWxLPoncnB;funPB+>Fz(G~>$yYd3&1MT~MHAsTmxP}D@YhA49GPlVJ9 zx)5}#-W>^X!hFFzR(>E$`6TBawdStR=R_C{%Ep6%k6rCFD zDb|8j@5VNAXGiqr_l%3OMkS`^`+jt_yV@i6I|+KhmK5EvGw;rYS1IbqO!bfkgY0I? z{VR|2IGsmK!2_Bz#g%23wY_IWu=i($grgW_q8S_6N7G_7%1l!$=|c& z1EIy99tTsJol-Nc%uqymP>rz;WFKA@cnE(Et&%6Sh)87>+*@H-4mRnq12`?Ok$eU% z@c)XK&0rT$Zg>Qcruo}ju-_Ye>#3b~2Vowzm?ff&M0XL)M`K(ZB<;8fBs;SjZ%*Z* zI?5}n%9uwx!^gGaYc`d=5( zy;^jt_~6iPktEVyh4)tfRw?4epTEpey3NMI0zR-6 zqptdoWwf%YL+~of1-BWNFoK0T(~v>u4f@G_E}#qmsQ*$}f>qRVsIHQQnmx1@+`Rk@53EZIev5TkCcuaMI&C+*Z+zO)GGzjXDpEX(ID zOcVNVUzL?cJ!V)^*%t+}*?;TCm{-bSk19;9usN&Zyzam%N-baZ?el@?~Oy>J`Cj1yN?zk{wE~97q#Wy zxuN*~RaH;i^Ltot;l^~xcjVar{||dpY^f<_bu|h$2Uyr@y?r*h(pk&WECNLJ*EI#7 z4E;^}=5G!cZJVQXMm`ql{Nc#8s$9-0dDB|Uikwyo5jxWEO17K$A~$$O_uZYE z&K$|NSf@@99kA${2mkO!rQ`dtUua%7A%M+RxldRvSOF|<;2S>n9E%?wIhF;PY}iV0b z<|~^yoQqtz1V%1rO1RS!-xjp#xp;IU^hqQt8zQ{uDpANLd|pT~_gy%sB^$&L%(iSh zz;vc5K?D4})6*YH2;f5AU~Ga(z3_1MxgjanATS3RDHNOh=2p zd`YrIxtU>@a2$!hF$=TlMj4Kh5O*hC_{@`sd8RVlAApd@R}eEG$fkYL-cXKXZy4;` zZ%p12>E7ZbK+Qm)lmJkr2VIw$d(*8yR>nwe_%yK?k*_t z{%pe}oY^;BXtO<8lb&xSN4~|N$UiC^gpw}d%(la1tDn~wssT1ItF%5AGU&C6eCthK zyk69kg+3~6alG*`ux>XzY)zHq<}c&K>;3He=dFp(w-Vt4wRoTeSJq(&FQiLIJM21G z)8Adx1(KHCa9PY>PR`q&i>38vq1tVHY=K|w;%UR5gSbwjep@>E@##v4Ui(sRW`_~N zy&hxKd>1H1K?70_JVhMqz3jfaXtpAg3*p+UOf>os?FzQ8sVSA1z7-b(# zF7G{2I00B|y9XOOTPGL(XNZe-N%MS;rJ zOLpqA`uX}O2&(LKHP(7`yZPx&*?|iHWI)a>Q%_X{3o7my?$r{Mk0BX4w`QHp9E5uG zJUI~MH=Q@-#1Kwe8$#8dK6O@V2)kb) zcn9iuhU1v)c@oqzp6Ivf3FoS_R6Z~Exg31VY_#2=bl?8?l_F7d%YB}fTC2ro(@@)EnN=Df6|yCxXB%H@bsqYe72zLdAGlp>3Z%oaKIAic@}5Junc%*ODH50 zwjv*<4-dJuIs||xse8Q;E=7VTBTM?Ce)kmdlfnEq#Dn#JARfSM|3huU#LV`;K^_yD z($V`($lv+(^h(b(O&2r`&YvBkNi5C^=3Wwn-zgUx4k+Vf>PO!-8`aM!g&5)+o$;Z(0*GjM3@1-Bs2FX`gwoY{7%ZUe=U@$Rf}0XTy=qrw?>CbnNug; z#2c+MG8R=pZE_C7l(>dwPDiT4K!Nkmz4ZHE0jg2f@Fk8Kr(-LcVb$7Ck3<_$a@FX? 
zPELPSaP0+=_8TsnISaf~Rd~QQ-Wo?}*Q~1YSC^?)>PF6fkMNLg|QB#W9`>opeVr7hUpyvwvmK_mNiKOf>;PNo9 zvEJM2n3Tr92vX*@{q4=J%M*4OQ9h)*BQ{P>{O-^kd?d$)v=W-Fnznh_K-z19Pl@o0 zi+7#S=Pd?L1FBEToFzsYG+82U8>Q6Uj_6x1;zV||E54%5Qj#Y-5M`V+`N36(P!1aPqH`B3s) zFSL9k3?FcCnv`yf40h0jj2{6XGH*{YOXKL0DOPg%F0E=xGf(cEb~yuLeUB?RfIz2G z%@+nSbLi0Da{z^wp)Y;}(^ugzelCCtjahivr|wCo#BG_eu*ZtG>JcMB z*ug;|Str3MJK*&rdaFA%wL z*iHDLNNAfvj_{U3?9Xt{JYaeNzkL2su0K@HFu-|v(75xF^75vcA7g5TbF$1@(<<9@hjQRa~=nCNF$v@SzzX-~B z2ZlJ>9`Mq_%(c$?h>x2AST)K4KFYPB>~E4?H-rHstSNgBdn={Bd4WwR7ctXd{Tg)3 z81gkVRufuM%R;pZqN{JER6p3lL$4!`0(Xkw>iHnU^P1vkn4~NZFrwr; zv~UiZ@6}HU4I;=&UqkU;BA*#d{p<8g0};hZZ`oibW7HAZ0iE><5G}bc|H40$k~rMg z!aH%o?ZFzd*%%Z z{v3vb#;muRtFkYdDRs%*?~Sr^nOd7W4(2>8uj)h03M#PttLZ*Mh025a1Ya4w?WqIhLYSGlf5;EFwtCGfYt0!v$7wiG%Xe)5@$z5X^VYKwvyy-NE%TciAd#+|#C(a#0>?&s9d zZH>Ua{uRPmQLheW2JFKh;>^afx-@R^#Atm~qti`4Y_M1G-Jj(8t@O9^H^;4L+yFbN z*R}tmXn`Iv$pXo~VEhs6QGyrXP^B7)aa6pe6f zUe5bC^7EB@!E_!s`C=rh_{Z0DHaM$7SG1;owD;`KH#*;8v5Ws<730{d997e7F61k^ z`q5#9LJl5%8E|_vt7Rs4D#OBhxRol@?`dViGq|CVg8?|i94Y>aVrG^>7(|FLkbrk)=^OdrDl?^^>Ml@eD({n(&mKIXkU1 zHLN*mPW&pmUWlpb=VZ7tf1MAND>|u@f`=Uisa<-&@iUNBj!-awr{wZ|7e_?iTx( zF^Hc22`en1rFwCrCs2Jmm3;3BAUDE(7ohcrw>V)u=2EBN{vo0w@Mb7URa$#(FQJ-b zIaoMCp!q%ML%Bm?lp#!!7QZX68l_MLmAbQn(Hj6L9VG&*h3O?ZGeXwigBbNCt#Q&? 
zMUr9kMHR(XuV)}z2@W01!K25c{p!&hkyD6|GsmDD{H=DM7ZlDoblVN~FRG&r?A*7iPF_(X7}?nQ zezpLJ$q2J;j&Xm(i9BOoz43)@!xgRgY<;cn&>&}5;F?+RKCB4(rajGbCB7;c%4hCH zaSoUy3cM+=SX~fAlmy<5Qq+_Cb;dkN>tS96;j$9wbku8g*Inu=HL2U93(YHx+~>*- z4Yit22`BRU+~PhrIZtKRx)q=^#<0|V0R;n|jfmjF@XcLK+<{d?CARx%1UOaF(-{wxd=Kdzs_9 z4MvK0`$QWgLfpUz9iNUf?}4G5B)m;4^l;Y(wZ54F0JHKe`XtX(eeY;Gc!K-_Wn%kx z>l%raf8lBw8UA;bEbdwrZu5VH>|dM z3tKm2zN+8{?jP8JzFHTvR>!^d;oNzWn>&#mKraQd(09Ss5PN$wH)TiSGkqkM?d;ys z^OH+)=k<)gt29erV2+S>2Mj=n5*o)(6V|FB$;bW4-(#ZYjhw}>E{i^7W*QVI+_G>2DBQzoeNo*IVv;UJAf z`+?c8DWG`l;Lk!wJ`9YGjKMNU0K$o7$3aQe;ddpch|IV_Q|U@#%mwi+j_S5t68*9#Pr zW}sd-wz%NguOS@(3^da^H4bnDnCI%Ws~VSs*9_9qv*9e_CNN--$-sz!64vtFR`^xb z2QKhWw;@IpJuw5mS-V6u1KrgwN-Sg=os<51wU79_rCdSM*X{)V=Lc@>M~orGU5z2E zRsQKw?5D)zc9=zm>+mVUT1f)htaRml#OYnP&(qx#JvWv2@-63^c0No2pc4sbr%4YFdBZL^&x8A%aA>LJ!gjb)_RsP-Gh1P12qvIjJeKbLtwQs{uqt( z5w`*7EYe|w@AC6=@7GlK#UpU=VY-t@9CGo?>hi!EYT~g;iy%Am6BK_E<8v1Htl?)q zqgrfY!p$@7)tI_}>MdONjQjgVBYm@PS6IXet zmtK@6b3skD--67eXWn#-*O&krvy^(L-a5>1NSGzgtEjyG<`Ncv>4nV4_y8Ivy7}18 zyz5k%e-B{orJ@W|%*1|2gjajK-F2toPW)0w@W}C?SW<$vxI+L9UQup7_@SFWWiB05 z?;s`~LP>3|gmhm0h=m&zs%9EZOwMTC6wjvXY-9^@iM13ZdD7ESM3M>!xHzuiXd&F-e}Ar-4nKXrixH+Kr;#iA`~yc#6k zeW;>DGsNgV^~yfG`YP@{JxS4tS$z_m#kSv&1tzi_;;d#=<`NYi<=`4J zkUJM89#3vMXZ0BC9PU9~N(jeu9p0Y2;;w1O)}B6MzDd6n%^Qj78SdiU`Pd<*xF4DQ zHP4TuR}fnh-^{#tI5(2NZ(>_ED+GA5zzl$vTBP>iEn`*L%v^MfCXIjMAJqbb$~S$l z*PQDwZJRuH9v|L;aqLNhVpJH+0^*hDOBN*(lG_4f4oAvw?uum|RJD5GbA6vq&;Rv$ zMn9xq9;-uA9^2eU3eT2~BNnoKOX$d=%FfIVj!}uGH-B0IF#2Tp& zW{6PMBNPRSYd53=$*f2KVISX)YDA^E;5eayC+qoqQe5#TwaGFOch_Q&3L6mSPyT$K zbn)QM)ent}u4Mr+GN>$z^#qgsqXm4XE~t2*3B+;nNh zvX@xUuOta+(tjL@Lrw5AU)A7V%wPOe2F^Y*F))CsP2&K_WOH~sfF0rbkRXEOj&yKq zJ5naA7r)j+crOTAPN8)5SP0ryAy$;ahm3mYdI-r(c`?oy_5wmzeY1y z2=lK-&qexj!Z`V#UjuNtM!~}V8>iU;4;c?rYKpqHY8C;&kTUZ-(Kc!yx5uXFmf*j{ zkZ0KjgG>__n<76_Cd$TmD%i`HJqf2Pgv7_3Xg1Y%I_h-y;C)_Yah(9`D?v%Zy}nnV zLWD?4ge7oL&j##p(#Q0!POy6i&nHX}WzM|`64JTr_g46|FkVDNFI9OMc*8{AGFL!V zC+B%05i?}dac@Qf1}L_0 
z2q4KSY6lA9rSWcPCN2OFbLm59M+le=nu`*lXFw)s3@acfQX8_>8phBYOBB*uf}g6; zD?!HeRKB2}s`|%012c;3R6HQW5t(N-a1?^VrV!HfsuV$lg()wzh0A`W1_}?!D8SXL zV$rM$pwft4%^WAv;j2_=FuuA^Wn*WM9*N-s)Lk&PYn3q0T zDvhaO9Wned9R)Vx35^6I+^91(%dXMdenl-Ctf180F^@|xYJf7p5rVM$-SoHg*Mfu$ z^JY(oz6zXyn3&@ZOw41_X*pu%Pd-w$S-e!cG6XLuvlG2+F~V1&DsLG=4#q8vqrb=ym)U_C1bAN^^G zZ4}bnrltn;EL(oNfPy5P6ILN3x^9fcrIlC0NAZHoE_ygXjgtV{-rIYf&8+yr-sA^w zwU&p3rsU!vCGTkiItE!RV%)>dwwx$zj=Mu6p{!mKRUQ%YdPfk2I?$kN0y^QcI1f^mYpDgkPi+RUegkJ?z+2ViX zBnox7@UGzLe258Mudcngw7FP&Uv}IANrcH2jd~_=cQ1z6+9(h~TShiQ8PO|GTQ57K%ujK@ zyVIr1jdE8II&64e98!PjfPJZgVYXQ8OmI0TM6xkW`a!}MXG2zW-2$v-&ho1KIcsp0 zArS@#<_Er^zPhYMtcg`1(SX-m!m}~TRImzw2YexBjXHM4hs=>Y`HOI7LGaWZ7;aE; zlfp>#1wt|cpQ|y@Qg=$7P2j?LnexO;bXfCiVb+0-IF*#i&=f=ckL`y9t8zYJ{*4%` zCWoNeNxcwV_Ck*^d__A|V;v3rBAnT9rFCaZ@%kmu^D5Xbi=QEqk>i9 zlsls>j}*m1$w_?DLL2wT-VSArd%7bWyA4F8Avm*xQd;0`NQsQP?fL?}9ie+|{nmKz zhz-^t<=Vm|8w>p49F$cW$1zSaWTmNqNw@FQbKR;mXTFcIou@6m?l&9Gt}Wh==*6+O zJ-cWj<%w+`9vsZJMuau^*S-&C`T^_^2}a=r8%jBrz=#({;oQUT&=MvlRw7m=W;zCDCT30+B32eQI#vc&1}4`34@nyh4#N0- z6}DstF<^243p4Zo1EkgYzgyn_KuTV|Atexo%Qf|42xk=rLt+T^7x?78)*##=?;mKC zd|@hzN>9VlKr5*hjU|S)T~u`{KjSogOt0_4`a_hxeot!UPc9)AXUkdFrS*18%w!CV z*==7H$2Y`3XsSCtS}Fl19ge1w+Cv@5NYSJ?Eq?y2Kg!<9?&AEoLp#-?Cm&03ejIN_ zia*@sz^8cTe1Cu6JJ}QGtG$c<;^P$*c2s^CqAICwINTjiOiOv}8@UUKzl~o~#8P=* zM;p^zw--Fv@&Yr=O>z)$v}3u%N*v zXvQGKuAs;md>WB0LpP{x?+&>DoVvG(+XZ^+`;1hgTQ1qJA5i6=o40#Zm2$8fB~5vi zBLsk`bceKD4(0+7bzz>KQy4M^JPN&#+sUuXbhO3(dQdrI^7j`<*2Z=9dE2?a$?krB zU9;nd%)Wj+x&Ht)diPNLrUyu=yw~MAOu5wzruJ|GKoA>9Z|puRw?8XiJs|FBgN*AsW1Z$SXyHW`K4E|V6Pwow)b%X+wc zu7d?R9IGp`SAX3u6x($Pw7GZKf7v4oOr^%JI#Ra!`d}Eh3vuW2;_kz7g?7vE=(KpZ z^mGp3?j)cRB+y49A%+f)J2wP)tf-Zrb&X-1m~Q^C zU;=XmCsV2RKs3dODZAx5t*%DNo_eR?z@Qr}Pg3_aK`DSv0ftm#o&YTwY zRV_&L_N1C^6S5l2xrU={uIPOEssk{lcj(o$72p>C(fN(fQ#s=aoEwEOw|?JKvsxlCAHkTiQ}`k#npN} zvWa<5%H}FN#^B}A{4Jcm9T)x!RpAq$l~|vshM}otPMa6>7H@{6laZ{~wzg`+6lk6- z{%{9i)}Rlb;YUwH(-2mm;DO8^H(=*V&Hw#;yFv0XC=oYZ1a3f3ZIk7AB)-eTm8zapVHC5N 
z;aQch()n14Pco#;rC8Jw2No+VQ3(n~Lr7g3l%j^QPMo(KFBgqOxK#~D9-cIMpRy`H zJaKrIBEWKA8C}?w%MTYdhOLkNx(H(&2@tDM1&q{$qHk6j$H#su*=1IcEq5f61WVWqg#@MP&$pu%>YWy_ZxaoOxQSTFqWSwB15R+o~ENCwMOE z52O@AE0qhbpBVF=W!TeMD1RzsV__EExH6@ltXu&PYT>pAbnMa z2`ttA9Q#1Hjk1@??jfi?5v!Q5u#M96Vhxj;t%de41trj;>EgaK_>F|W6}1Im{*OSC zQN>}x5?l7fkt~(}S>B_eadd-H#or?W--BY~oc)6ik=fv6V9@U(rAaDJwy2OJ{JVN7 ze#$F+X5wJc{;^{zTINP$JjE>($t^O-sKSvnMmDj9{B!{xEK{ff!mp3Spd8Q+jQbxH z@j0IPli8nVSz68$rM^RGSd<@twL_K9!;Dc2W{0tg{WZ=+(4N%C;S5VH2(DtG2i+KD z2g14xL~>?w*&A$gw4R{%A(^~=A;vlA8@IfY0()T?!ln3Bn(7&bePXt(m67p;pLF1_ zoEo4RG6y8-^zD+V6`N@o1$GLhNnu#sQ{qrYkt2VJ-{Rvl+(S?^m4xE~p-8RVQt$-WM4}xFzKBIWATGZtc~c}|p(n|L*VP%wBB1|4``(hAae_HrHj@*Sz^ z*njhOqA)}xHlV;{HZFQ%*$|6Bnqkx_+390JLMx#%27$L8;bDx&8Ds{*gpD0h8T=yN zi7SoiU!GtP4NHbTo)%yL2ptnr%n7F!$vy=h~SMQ%U0OR%u5 zFVTjI4Os&6adS?K85eyeDaUgqz6q!8FI+f(sC{*SNR#_ zyL``Jw@F!7<6pXdv2gpApUo3h!Op=eiNB`jSRx!dGj-fg0I(OaD+^6(Ma0kyr@y`=Q( z;M<4Z5NSX0)v}G-AR)Pg2+vzaEsF<*3-$*>1JZv#Kwqr#Wc7UioOfrye$)HIhmrhd^3>f+f0;sv)oN2gie> z1Kf~YcAYF(q+V+}p#tIKUO_m)8(1R~=30CVmcr_mc;plfaYkx&>Dy1g|EK=ms;D(P zG2$#N0bFqKQFIM+>+$RIPl`KIy3z!(KoY^)c!giR@%!dBj7M3QHx8udS}~2E{pm7G08bZ` z>FbvfJ^A=26Ju*^+3_FUs{&DhjDx;-^#kK9cQmMIt{-G!Buz+sZh}WEk!Mq|K#ve) z_424`K><-1G6`^re+9Q!B(&R}{iap2e7^sORw*ay4&svR0d_l&ajvKEn;B{0`eX;N zD}^O+gA2bddy3g3`sRky((uhmgu}B`DR#7B|Z^FIuHr-dqt4lQ8( zAC>_9LL3?8{4Y51CujVglFi|I|DSpNrzYDkKC)Ct=@5?18(TOGevZBpOb%^%kMn|( zE!QTh)Q3+Tv!MTJSae!WF|xz|G6I;+)F(jh|2PcL@&BI~HV1icPj-Gji9zi3ZoQ$d<)`nbEJvB8{%Z#Lf@~iog1af*QJa>MK zrCPSi4$WOFP8usE@s1~*RYUel9+~|g_hxo5#}gcpHd{T3+11DXnsuerbSX;b3K~nl zR5_CKt-|lEb{c6J4Pk~#a$-iNWJb7EZu2~5$Dwg&2(?a7Ym8XeL-;UOkmhfYl2=DhSm;i2CPNsx0^al{syJXij16Y$x0 z&pEBiCS^zG!(ahaY%h}rWRj;dNn;VGC2*z*%( zk0b3(>BK3-I=f@qaL_nUNN`Y_H0KKXQkMaFct=+-hgCnWn&l(YeR})Xy(DXB7owb$ zH0}wV+n<|>w_>G@ZY%3CKZ&T>5Nabgzg@ddAHgqiq@6WPz%L~~^LhtMuSQ z`Oo7WH%Fa>oRpyXSq2}I`u#ug%e%Gc<;(r;nX=2L=$r2tS>HZu`Q~ixrgPO<-b#NI zjwh!xjf<#+qufH{%VWsfV zbS33RRlc0DM3R_29qG53QjU{uD@%$_F*`#@3|3%YO`+ly3n`i-ur2D%usIAmx#CR0_k9KnNnsuz4ftBZ 
zk@T|zLOtobFN6aSELg1uK?!1FV&?i^|8rfg_Q&>LW=XXRYXR2qK8#P${{;&UrU?89 z!0Qj&A&WsBAB;@#BSWdENv!G% z@nW&y6dipYdE3_F*pIIHd-2g#RE9C|A2RNZF~t1tkg#WZH)_T@{45gBqZ2(e`C^$X z^*X^vf(JZK4M`|4Al>L;YIO^-!L6ZY#!+ELS*j-);a+DBH#Sx(?N0=jqUUboT!cwv zKpjK%8ZiDjhOPRhhK--DrAKBy;bzo)Uz|PMfbHA%p!_|coC8UrZa`Z-ZJ%^u^(|SI z;D_FugCG^7Llu=DF=wEpvfUfKgTi(-5%eo%%=!s*4fisaCPQNJeQmDL#Ev#mrK|ka zs_XOKDhAQ{S+E{`WOiC)>x#P!MfLb^X^`rNKT+prCwg7i#f?Vp*QHA~A$D%$J4#b) zzr_X4a%EM$^Tn};yAx5gW445FO_w;Q`-AV&IXPj3aiL4S$&m=mQmf0b@vRF!4Wv#> zTT1k77!VcG68zwmu1A`Y8k0+-R2bUQf?$RXBhNqUiY9pkS`vyw( zcQB~geKYNnLlshEh0iMH{3himNqcoOm_Rlje=jUbmoANA$<=2OlgP_OG3Qf-jh@#+ zjcVBISY^SMX5oSon1kCti;+PT8S=>;2g`(nHG@P*A+#_Sb4k>KvWQj&{y`$~}H$@9HsK}c;JUjF*k+xqOzJG|!j%K6rPS!Ci~ z`<35jqqXwWy2!>8@7~9(xo-lOn^j~zygEP)g&VYh^*lrs7i3LThFThMQP{RN6@VY(QAR(yGt#Xnlc@)>ayx`tkEurP1 z;xJZZ`A0cRd@m8Nov0c@J^vG&r~BJyV|T1CH(UpxET*s`hmNqM-lSGG*naxT&t`mJ zn~>)1EZpQt)={dd6khg$Mh~D|UL2P`>e`K}+_67g&p$(`L5u4Q?xq@FFwZpSxI;N& zhOQfXx8bxk+@7h!#JImJeS1ENEQ}ImZIqY#a+IeHGhlw3MjwxuE!bzreUg{)2cC^# zW<|cUEa4Rg@3`H$w|PmLgISuUigo(p`^TSua(_mNJ39IvoMhcWp3_<*%356lwA)7k3i#3Xs#%siftlov#1tg4kW6VMDay_348cNWH!tI za^a=hN=&v?F6*Cd0>$6{D7`*~+J|}!cPhp%#6#m1nanQiFJwx^<{Ye89#Qt|bN%oQ z^(3nVakzyGjI;HgUjgGQSj~Umh>7j~#nf;neHVbR2j^f+s{R8;1!DexebmKUjKbx( z($UM_|4;zrz|!|4!zDXzD?@{6u0nS7ipSzo|MCd3zND{_D9*oJK#IX067MT5QRi43 zy+eQfYIMO*^SOHNGu{30@@gwSxv0Ve!;QIb;9k;&Md#yEI8EYBa%X9MjUR+5Wiu->}QDnzSJM~HHl`)KYg8DUcEl= zeB3O|>VQ7G8gqEdFN zZpYx3jP7p3vGlyJYYZtytbIhxj!O@*x$>1l53R3VX3BO)xI>-?KT8HqB(Urj7JL&S z?XEM!s=b(X+Upx%|9ia4k-)O@;IgrOwz}F*I>H2fHz;pH2b(g`(>(yqn^oOIsH%LpPOZnH(^3QD9|-vgSFh(Z`_LYi+2*LZ(+T#Ufc%zXxzm|o0V&_ z3Y7jdR=&T8RYouLgh%Pw$l-r`&m-QzCiVM$5{ftP zkb^U6$gYRNi`%dSm>#5tQlQ-i>h~no;fzND6-S5I^r^c0#6C&4K)rI)u>(9B~qbgXZI?5s|9s!pHCx*v#%WOYgY zchvp3L?tEDRMy%hw*KSsL5WLHqXaT|XB9nhHuZ~?^Uhz`n`>kIgD%aZ7MMPgVpiz# zTxNO+f9z%JOD>mh7O)>m)pH`vF^2|?lu{w{&BXn-5r<3(ZFVM3K(f-*6la*%Vf&+^ z1$tTJY{Mh{5Bi=xFBBi`zwBu3DSPND1pRUgIvEW@tOG*FIONSqg63QxIEW3nSqe*k 
zv!&`cS|}5Dxx_Ah6gEv|&S+RTg`^SJBio-xiEv}K|H~*(Y+a!m#ZK}tOYIBJHN z|I;Vx%NQRfYN?zUBhb!gx-|?&XqM8bWS1_NpfA^2D168m(+($mz2U+nzoB*nZ%}nb zLX53oYN|CCgYm-fb5}y{%BB|y$Z%0k`l^raXb~)}r-jkm4ds!=^TNM+8Fy9b2qBWi zPc6`pqyIg#IPj>iX=R^`J{goj`tmmBM4X1Tv6|d}mi>xa>;Y0|3CXKSi;EK0TS(D2 z3^s5pkRb9*Y(`wObn@yjP49W(-oDfIAFG=6S&>&y-HAXOP$1CqokP%K7D0f5*a`Nq z9;;ezQprZ}nz(ujW^$d2jogdJwED46dTjC0qA6XbG%*s7pu=Pq z^>K28jY4nHzb=@p)}EWpcgK0wIdG5XdqQ0=zl*W%YnVB!FjFOvihk7IcDLRLphjr&tTt4v zU^fx$7aU0WaHySOpS-XAnFujXWvwE7-rJktc2K}pKIqSPmxC+}L1N+|{|Gc=<$XFy zNAWtp=RG&jz0fDq{94o}7W%TbR@u2S3Y-S0KbrlMl_4hLYEU$+hUqPymTQ$g1KN58 zmz##~sFDWCV#LDSw;n`5D zaHyDoo<4xjQcEc(l`hx%1x%u@-pN)eh--KMbU><4Cv>t=e&T%gG$>8*`^0L?_AOVD z$5MH)k?(DDz0>>I$Z3SI_Ny&I{g`?;9Se!cs&}l*unBwE?#k2G(ZUnwjz?^68!luZat}Yq{E4+UB}N3bkbE*G}V|zfIVS=OEaxIvUNsp2}K?rcW-3Bk+Tz zqYb;msA+yWl(d6i=Pt@$BXmEgvD!%W%M4@X@W<^bM>j}pArZ)EK+(J`oygBF>@l#l z0JSF^ZAjvPbDEgzq6owL`r1M=+@#}h<`9(s;AGpmN2Q1AI5v|k?=T%Mnzps`DUE6z zrq?6Qd!%1_lKLtb&H+!gIdTjIqzx?lkQivWOxS<{>EBG%8Sj6V(i++P_%uA;`E(?u zOm`opw&v+PCqAfAt7=%@5s`=wN+M1i1l777wcM@e#5{}%%aS_U`caFWOsC2iUVsE6~LQM5k|r@+_7W3VNBQ^3fE;)m2>{0a*~a*{$C zTtYg_u98F1RQo;Qzg{D3|82ZDNbTr2ELlm|*Gy_=L|IdiHQ#<}NNx_*fhu%trH{L| z=9|W|EoYZqK6LpdEY0uRCcfYF8O76%e<_OCbe1V$Q~vI9n||N#!Wqs0Bd{7ff3-+! 
z%>NL+MqpkpZFweU(f%{pTgK@gbS{lS{-q^X@)ct~g*AjRa9a_hlZ~H{u|LU$uYB~S zwXRQqY|f$D4jnpHpsE|(6$zmfjs8ItB~wfvZ;Kw1|5P8Mn9zPu(6r+{unS{Yq zx3#}7dLabZjdgSxmY`(hi?#m4jJh15$GH@rsz*pRn`O8JOr1w8jt@EMYAJOZ zT1{BXNln&iw0sP6k@gdBvYThDmNYcs|oq(ST6G3!~Cg)caL1wex^gpN^Oid_l zDkKQvb2o+bti#gLu)j}H?}g<5P*qLE)>N{5q&qKG@Zv?HwC2x=&CQmC^5=vpskD*F zUAC-yeQ1F0+i?1C=71_pz|`?EpHcpO4V2;*5vXuJ)g6e45`Ln`n~yN!q@*y`!&^~= zzn347w6j@-v;46o;la4#4ib||Y3=IfpOP={p#h)a#L33!gww(Cq<%qn)HM)+xo^&M z&3DtR{*L)#Xq&c~kf{XEFtd$8Q`rB~-%OWE6`ie1-n^Yv^#)Bk$l`@d=MI-2UHG96 z547Zq*C)otxc@aKjlg(=hYj~km`XdNRe5WmunxIbXfC8_eKNP6n$J|3<#4swyg6sf zjSY*JO(#P%3NAz1&Wx^eQ**|vxz(|ENzP5xg^fby(Zk7N;lZquyd(Qoz0r+s>#+(u zo~I+<56XJ+_4AG@(e2b;Bl*h7&GJB{DyYRyX4^gsA^bY}*qf3zbzir^;L^6Rw^!bP zcR^ve9FakhvHLtkjg-jU+XNg~eYsdve(8S5J2aG#2+TJ+7gktAabITK&UFViHs^3M zO(kgjFUf|gVCf!+5BvuOH>PD-?8_=3)N}-g>$?JRN3^;V;*}uio#*>Lg$H|&fquex z5}`*{0c_UYoqRo6)RKBr?XO>nALxi-mxqEHw`aez3)XSM*mB~nr7+=LabOlO6$>+h zlNea~sU%Z3p-@0p^spKperkHj9VS#u>WwQ>g`@C!7{dtE$aR~4$t5FHk*ImpepcU? z6Allj>+}{)SUb)AfKp0pCXDR03W{4l&1KAIq*l6OYbTt@1REX8N^OOE)&5i9aG-!5 zyj_nvtpoeb#b#X2c~H3HSA1oF%VAH78qCZlgd$NpvXcq(x|u@tl3SZrQhRkZR@C1o+T$J?kJnEszT(FEAznmG#0TA7CeYVIzp z?Rz&$ji}!%*Y{TVHewjE;*A@8iu=Q!M{R6xf>Bn&r*Xnsp>iCo3=X z1W^bu{?18^iFD$$>3Oc|3D2uYFJNnE@6M$Yvr;w_KKKOLLG<&`B z6W@dAJN4LP5@VAN*OiP9RyU8tO3<`98UD(vr}I~Q*8dpG=nfA*fL7RD!3_d;Ve#+jZf0tx_!!k~9uL#l>v-WG6;TNL$M(}$H4A|Z zhR4JL1W0kRb8v7Gv9kd@JdE$bQbbG~O!QpLNiCfau1TKr!0QYf8`u9n7<`(s8co{l z(zkxAc2oO(^5Pl7oONR%oa{KM>??3G_-Z0|1RKnhJw@m3Z7@2&1hrJzCGRX_wBOHd zVj0y%rHu@Eyt&O-Wf`WolOyphK|eZwCcUTk@OpE&8_$XL1)%C(Lm&hVj3)(*4`}Ct zh7YrYjVSvPpJHsdG_BGLC!UQE*3w=swnKXFLkN*hpPQeI*eQ};^vj(F%&n-aH5Nkd z&Ida2*7%llJsyVTLZX$OD~_%hU^)m5>!LJ!tM^ciydS(_Z`HRCMzmin6iyiW!b_cZ z*WaKyI64EQ`j_-m!wHcl4P%XzDdGn}n%)3Yi+WtGYKpP6f2BzBhOH#kyiZ(UYw<=D zqs8vrrRzGRTW$0h9NNlgHMWVORx_3DI@z`@6CV}YPd7V!bO@c^kI%TptG^(A` z9XLZhUZf4wGlh3Vq>~De;vJC*o>M(w0F&e=dy{qPEjY{EMs003+&2Nc0a1HEpf-F2 z$|PXCf>VTKZmX7KQVQ4mu7VP$8Y#<%_5JZUAN+~Hr-hlk*UMqUxqf3mk`vM)yVK!%`z}B=ZhttEubW_g|o~CiA8*YOne8p#9S; 
zC@W&zo4T2Nn^6%H2Ocd`BBoJK$Y~bOtqBn9n zPl&Eb;sU#CX;-u`j13v|r-fg$3!fk;ah*SPgokP2al@`-0E&F(Du?4g_$BlE103!F z5$2>(*bp>+T?)3qCJcVyaC!p!!$?V8(y!kTM4*onB>)bR2)`}@kb0Deseqosh6Dk* zx&&+i1BidcQeH00FBPE9Irv{)U_kQNP``mN!mq0DNIim!;<(oyx)qk8!ja1oxc;*ZqB(D5!LP}2j=4cj?GX>+lN20;lt28}&aHbdb%XJQKg z9`FVF)5GNf(aI)9;|E3SPf~#SWqKHm?#>fX;*v`nPWt{IU)-HPfsco29T-}hCj*)% zw_!X8)>l(0IKX3Z_P~pf2l8@@BWw22L6QQ%0E5c@{s7KTfNwm8+eb&MR}EGUKw&&K zEj$FB#lqwV0u(x6X7U4i>d^{9be_P-Qnnn-4T#!QY$nFcSU@jq>dOrn*)OUQ2KwN` z$^~HJ#uju47lY9m)g}MI%RM+mO=W9e9mD<+*YE>?LrgOcI-qq-bE--d&{-yw zsPcm#%-8Pf9Zctm+87ls68Z0lOn$)Fph4a`YO$S3!94?cE)st>YoSB{E>DW=Aruei z2?(MDtKg1kTKpWKVU>u&f0D`Xh&rbHa{LCU6lIb?dUZhLR3>RF2;)Np=!#{z*a9yGTFe&^JyFb)j43$cTP@tS+vO)G?THxFVXd}O@Cj_$$ zRh#$ykeE3lwi4fqwtq6sKhYZ@0b3MQqG0WTNhiAKok9of{2u9t^wguA$o`=H0}Rp^ zoE!G6{k@(4bCKgWFnq{+5eF#&7#|a6V4YWdPypQLCP7wt0jS#Lx7d!-Un+ia5&*$; zJ1eiCP6#sUC`VEL3ZT6xfA1ssP8Dzms7b(!^{f8#Vs{s6G4u(+3mlc#l)kO%d_}TG zK)fvCT}MVv0Cq&@Pnt6CGTj>*ebj(&=}$@kn1H^nM3{rNos3D9%#)PjNFrM|f`7v6_E9f6G$@G)MUP?w?L1(DJ}M3JZ0hvjZ9% zt3*-e1kmLdUX7#RLkdecNPUCpmRI4*`GG`oF1wqR!nUvjK!Jjf(2NPp0h13Tp{j#a z-KjFvkN*cW5)d~mU&&VD52S{=q9GI!XGUcJSVa9WV7mS-OQH+3Nx}X#8r#qVBBLw- zqMU#TAC<@bA4sSR>V0%5_z}U3z{;qt*VxS>At^oF{?nJw_V}xNoi>3OMvwm-k$Hq#EPg`ph5f(c?!DU|>k97*{a6>-%|hc&&S?aGH@ZQATP1iX z3Y6?z?-(hSsr(a>ll`wy037j$oXGo4GAffF?{wHWu!#e!T3S5+pNb~`hKqShf4_@) z%K5LTd7p!B0Aa{%!?^Y#{x8mq390YP#n}n)&Ojtby3!`7-5v_41X9q@0nz*rpyi~X zYLBP`yp*K%53oF$ZeXJaeySjVW$v-Hh+kcMHORufAZtF%GJZhhss>o6^5?&OTff4!trQNN{63i_69Vfy1(OUiTQL?B(op*I>k$7Gc_e*27XJ+Pt@ z^`kiI|K9(SzcTsN2*R)!R1Wc9bFu&a&||vY0p;AbnD5#tf^R$ctAurBfQd}6uKd(m zOWe8RZTqNi%QkWyuY)Ju4)t@`=uZr`-M5Rg30G+ER9j*XClB1>i<98`ObnajiV(Lf zWINDv<=e|Xzqj1>nh)>Gb1u`_BNZ=SA1S55*dz?7_q*BnmufzZ`-d5JL~E*`1c&0p zhnx!y&aaQBc0o1w65gF!af^RtSIYN$*JdL?&34c0p^q54=e}m0>suW#m4_MA)40z7 zJxs@{n7GzpmK`%Vo_>S7dmZu4GUyrL(tIw27ZNh6o6vN5wK;usdvJec>hM@U!UH8; z*4}~EGS>&38*nSoE>ADD`Qr0U7PZy5=D#k?gv&UW%k_Jvtv(Oe^LFCjrv9m9eqz+M zYw6eb3K=gi*U#nSnPf&?&Ifwym~;lZmlP-CrY}Yeu^DaPqRv`SZFNr 
zSkq*#ub6E+m{b{d^*Us|`*G?yx&New(Y}L-Id6N@a+To3o8ah9NqmnDTq03UvJA0SHsqiMKvQFfqCU&7dyuw zq=gS#_7>|hwT!pntZco`TbJs4Nh(zw4=|X7cb3{u=VXr*Z_ZHN0QmM%T^Kw5v z@%)lCI_LS7Nsl(N)H+au-{9K4jAZ}#M|hpDVVO#}RV)U|g|Z`U2903UvnG?f z^m{hbKT@0Sr3`bzY=Tx-K-LrAqdcj%`g@?XUn8(;t&~(8n8O!+meZ@)$S>E;pPK6eLm0B zC)Zu_rEyO!G}L6?@@E?0BkVU?Eo6wdO>2C%;}K{joI70*lXwL`=43NZ4+!TpST*>X zagVY*^cX}YgIXVUw1tl9>@dx026KjM^wVw)ALnwOwX9_Wq`GHAJg_;wS*54s20G!Eey9Pq6WdLi|i6eNa*j-3~}!sj~?e~L@q@VUSb z-XnVG-Svl9bz4Qf%Gc6um~w_S(Rxs;Jqx@QGBlqUB^t;-jdnE(_kw*bskajF`6^#G z-Av4rG*tfTPDfdA_1(<#ETjMVW~tixOIwM{>F)u=bq?Vl_=hLWCSfa}XV+FLk`+yC zS%WS~t1WCx?I6fPhoR2-pIfw>KKS^%2u`LsNm^HH*8&z?jY|xOva7RSOtfkCB3*{+ z=2B1;)JnLDM=b>lemE8xrSngldtwac=R1C9J|`jo)sfGj7WFm+8n?a-RHPyiRcpZ^ z(8~x&4AZManBu&6Xj~hBAc&^-(Jtn!=FT*o)aKxc8@fmnhh4UUU(3FnD8E70cpaKo z->rk0%gOop)naQp8Er*$(1I5ei?LCmO}JAX){qDhgN7+;e)YM;Dz zI*))nI~(CUG2u7;c0;^>=$PTL`MD)iH>68v(4(N^YevUVg0{p2$alP7MIW=3{7fy% zXvs&6t(A5wnc(t1a0Ud=G*SgGwA|YQ@%r|g@wyIslY1c$L!R5v!^OFJ46(R4PLEq? zer5u`NBTnP>!r|;TB#b3Y@#jO6OQ3qK7}F=-p~EKr}+$Vn`Kj+gzCcM2QS^{c2D|= z-~%IDCxgY{QtXR;&}aaG+AOVV`SrrRLu|YLwxxl)9fc5CRKe^E#;dD$Q%WG3hwOIJ z?Ku37^I=?&y7c90doaJN=;@0SUnrY*5CNA1=3h zY0)J36*|pInu>z(%E&GM=SVPqRq@H)T91pG8=gy{?GU6#5>Z;^qzOl%2BXtLzo$J7 zJeQO5!0|QPGe{=LHyeFKMSF69z|mo5c6EG^3&_N@bUC!Je+CKS5DBP0l_hNL)-jkh z@>(%gBdw2vULEqXvjNdIIc+hxHNP@;D{^cJNj&f7oidxb;jv)bcHdO2KOLZnFv!s% zP-ZQ8ScUtnX!3wutyqn0y!;oR5~k+^z3vkwl2DoxL9sQXR?Az=GK;v8_NQQJ?Da~< zrJ~mRS7#*iE{;CioCB*~8G2?sUT%vd4Ru({BW`D;^kcj?al&L2+x;$fXABkhK6WnH^_r*>y{8G013C7CCzK1M%Isi;piei#3Wa^M`B%LK(>FRG8bF=&_d_ATj^Ntwwc z%SV=r!ldzs9GefOIJ{l0RcXIJsHv`djZY@^Hum0Gbe=9lW5@W3zPE&AZ zF>nJs9{z%mkrny)r9Db6WV}-2YO|%0U3?9U>)f#SEY}j-uuJqt!uu?e^oGNCxgzyO z!~5?3!nYTlikp6(xW!?(bHlBaJqarHXmd1a0$H|(9PB?kk{Zk<5-cTE&eL&RVG#e- za}C#>v6er*5x=`r=JJY47V=s^%+ny`LoQ6nz|AaMOUTMKsFmFjbIa1v#{q*8hdrgFRb9wxhxbt_(oZ}CCg7g3!DeB z0~N|zwG?=3qK}_;D9^b}n(}8spFY&IjNC+q$0lsLdvIoL`!13`Ji}bv+oXAIUb~)V zE#>aSsr7{u5JXkj`R>|bOE1f;(w@OH=pVs(F}keI1XxUZp(Qu8IM-g>+&8}*kD2^v 
zU#(Bq_FZVo-x^*w8>r;rp;&8EU0AY}1f@c``X;S9t4S4c#v(cl6tq1Vm^AVVaeNsn zjH^&f6Fhbvu@pIYhS*y@a3j# z_gLl7R`2EubEaCBPJV6$hvwOBtDH=exf1MRVGrsEtr8lp)yds;w$-L`cEpqOZseZ47 z6=^zlYC4<74*Ad_h$f?QHKEqtr2t7S#mYLrFVN}mL{H80Xn8up=~Q_+&OvQ=-^^-c zPASso@MQgPBEjwSv5vO0?p4uf9i)`@_iLYVIL&BQLmJ1je;IFGVQr$Low#X~jgsj? zqDH6sfW;|FA6?qpL}nb$KsjFAkSC0ptG<5uqmC5@OiR)ulg~6W%SE6x@~HR3?mTvNsq!SP%G~E^pz@XQa#|9n6u0ztk_Y+l}cNMql?Y zIl_C;u&bX1b8eT1M~AQOGJxD@RhR!RQ#p@3gjKF+u2(?VIIUZ8V<<*92LiT#_VKeC`})Yy84F*ATn7qOr70-V)_MlcCrPY&}aW<7Rq= zce}tJ%gga6Mwic;sx_=*yEz^@8@_w%TXs2+IW2)34H7=?M7i4WpUz*3W1-iOoYZ zu%}F}Y52@S({oN{pU^CEo?mNdWMInh(+AE#V1k`SP5-5-A=&vaO^tTa$6W~KFKqOj zY^=vTohoP^2b6=@Ac+ZE1TJbGjE_r~gogh!+55v-j;}`xIk~Fc~ z?$3APHTT^#H)GChX)aZ^im_t9vXEn6uSMa`1B9J;L0sNn{p>Lh^l+G3C89w&$H#&^ zoBk?D(cQ4y?KhhTd02`d|0&3KE=jhg?Mk_jEabPZzwdP>dcZi(qZFyh%McE1@Q9-? z_viJH2nqB2wi(`dc+qk`sywEciyH|{ptBx4KL|VYO zZ*IH2Jsfww-mzr2g7{vZkp#Y`kF5TID^7=q4VWL2D%_46SVIGqd&&u9Iq&1odl_aw z|GvFQcDm1?hr+s8Sc`*qZ*goTf;btwdxVzleO-KFZt~kdyb*lecYoX7oc5XGv#PoC z$ewu4@xF~ZB={^@>xdTABd0Pa%Bhwg(J(Krl?QnZ?FZj!Z z&(*?qo%wJ-j{!fZaA~#cP}r>j_o3PEOL}|@4;7t%=vp;j#1afVe(Tw047$TrH|n2W zDkFJvy64L*(A%^e$U?J#(EL@bMVzBN0=4km^r`OhnF}=Kbe(>_>x{h1wRfJ})(7K} zG>~?Xf93WRxX=C;c$|lKDcyjQ|4Vp+fOnzk$6j9VYG@A#nKBgpD|$}N(smu$$DlED zm2agK=qM`9(cM(Kq2H1;40l8trZ}yQrhEvD#ve06?s&tfyS34@Ofn`vc-W$?g_tB& zeqr~~sBV+|nb!ioKa$#JFgxVlJnD(w$h|+#%x4vc__#_kB`vI($lTwaqE2!cdYS`< z5Hzw=S{GXgGL|LX+ZOylojmeIQ#ylJEnXyzC2WyX-14y~r-+xPM zMM+#a4JA5_S5fTlsV8&P7R%EpZftcw=p1@EKOJhxXAz}zsX`S05xbA3?nX90T_Ki# zxH+|LufOgiI2|NJc#!%dQ(Pp6#P3Qj9W9|;N4+a^5Lt1~^}z^RY95{{OR6xWO?7cf zi@sy~?(T5y_;dy<6rtouK1VmMCq=>?h2;**v^2dKON#PT)czqg6X_p94Q;`SMU^u> zV7^4_AyLQt@Pqr%x5)sRB!tuICsw59QWp(13{D-SWO4Gy7{DXQ zyU6Vp1o9G!W*=8%@p_6qvi<+y4yk3$tzIIXX z!&81*5#&w;_c|2`3S8w%1mEjp$T^?SlffC;6zDcS;r===z0wJmWi(||k2`SdO3B^V zG^Iy8Siw3oDBbwEX^cJbJn@sVJ;j%1giDjxiGGUagL3vs96=r*iP10o_c5aBzpi@?wpeo*1O?2e8R*hY%!JisE zucj)EGE;WEx6A5(gD2?9E8!8deYcC3dzk(P%QsTFerNZ}Zg;j~p%j-)$h*e^g%w$) 
z%5qQ8z7=bzm-B>~VA@qtIw)y``ygg&Ff&^b(pgmQ2x*;$`G;KhIjFYK$9HMDuNBhF zmd{e`JHfdX5*f!#8I%#Uso{Jv$%?u79YZ>n;B?4g9TeK5uH#3GP+vXRzNUmo`k|IU z_`4pheF}}%)C{Hz=O1Y;1x%SzP)mqrW^~F%6@_{Jq^S*9(4&Tz|Hn|P1+RN{kL^NL zp%qW3vXyL2X_Lg6W3a?z}t|s-6gsFIMEf6ko39fYBh!l#b&5kS^nDMN;raCQ9SJ|OjWB?;Bsr! zs)(O;19!r2QYg_{*Taz!6vt8ImdNaDLNq2#FJmC^G0mmSC-tT4)*}K1)&G*M&R@=nF4Y}26c&WLZ@i==dM^~j&6!8keI~1CiYEtH|61< zWkji9-eTx;8NJm;%tRMn+bIS_WoYd4ohHjp)lOI4)*C4)a{v3e&LQnJ)y}&ZIo(CH ziH=Z@k?G6Cu=Nv|dR6dAr6Ug(kFWUjUeqxxD#+KbT|*#KRi{FRW}m=Q>L2Y%NA`2& z(vL6K!3RX+z5QuZAPHLhnv-QE^12xWj`?7m#4Ghcg`Y}c2HiB+Ztye=tYJgc*&la; zb-L+c3b$PY-MS;v3P!;j%VFDO?6yq<9d$c~%3Qg)R@_dnQFRq%4;WW$gF8#(;A1VJ zhnVbC58YfGq4TPOQD2uyx6V&?N_sEP_cxAs!>^@U(*Y)RP;PrJ^Y&^+nnG*`_hGP3 z*TalvKOd~aTAHUfwXg^?QU0?@$<8!R_W} zR##P4FNB`B@&`tS)b`w-X#?(>bw)*hD%Kvz@*5^fhSbl=SfdCl)Vje@V7NAI=B8zB zBV7r0s@WVUAe$kNRdxepjpc7%q}tZ8o2?BpuM>Xj5)f-;HbN4a0+MnLUeAP%JREn4 zCLi560k)j1)P+TuLa#Y22_``~-!|47sh1eegVeGsA)0fOTSt?^<}-A4^JP9$Nle8i z#c~+K2Z!yc?22(wiOIUKgKC%HDQF3a>TF@~@UEadkdW)*FZFus8x!^_2@a=r~5wXq;A&uIB7P2pLeAK@GY`vO6Cii9!m(S3xUK zau{1%^Qvxhoy}*(7OAAV$YyE0C%$t&8aC_;DpNtU%8!V%k)Oa~Tbowb$tt8Nv7-jb zxZ*4}N5mN_P;T!8y^5_VMw`SB;f`yLS+zURfCSM6nkU`IbMe;A_wB;;{+wNwVB9*> z(g>U>FpR@qUJDlcdwT}eTugsLS_aiv9m6eC2BH!0t_Cpo|Jdd$^$Ci06ND*|)n+*5GkNruTbGrn3nfg$Qw&iBTl~^ZI zzd@@=Cp@|35Ht~j!>M?=oBa|r@nWlXnr(bwtHM!Xot@GqQ&mypiYW?WhjrPpSot_e zY$EvR2%gDyqJPCi3?nX#@eMd5RxK4&0&&ykw|6v!>&tFASLRzAND9=mEq1UJ?<;2b zR2g3-<6K$@MMLnvvnUH{#rN(h$&Dv&<%4F-asQqvj?dN}wB^6SmK&62J5?YTK*VB^ z`HWWhqNRPtY~&EiX?ONDR$ehPrHP&w`6_G3NK!HQ&>Z{C(Bd=+?GuJLD9Q%$PaVg)LAjb7C5nj;N zXbZU=?Vb#3X_45Uc+FppJY!J#(x6^Wx6i-S4&}5iSibgF;^2e26Y$eD{szxs99<>K zE{=g22tvmX++w{JhIe%GLuOBRhBkPH90pS0gfU9Koj|@T4fP~enK=IwA~fU1KM*LFzj_gosng#0lkZ&gMgkVv zZt^8k_4vXFA}Gw$C=g0S9Ir}|8(lsHcB@`<@~~b{xgU%BqesR1NDvq2U@{bep^&7+ zQreS%U$%1GZ}91~GX(j6_z@-+!NizYru}t7ZOLZk1_}c7FxsevI{{l?81bEl?3_5I zOK8!Qi5c(Y&^&sUpcLO9lkppfDjiJkFIcH+VELB=(6voQ306P7QQXov?X+9|Z+TWF zp#lVja?vmdqf`Cn=dx%mLD&=0_#%FL0e^}ot1kC9B>)i}; 
z{<76+whtwiOgK_+U(&ehk=4!6ShMQb7f&oqTo1Tg2lmEIW>}&%sxr=6#^;ATNFsZ$_fNc{jbxHv*{YqcwDYQ z>(^>C*B`?6Iz9-2)v{VgXDvfN?e7_s&eWWM_9Tl}{?qii zt&Nv=|Ek}R5CO!m{z}y?`!5*kF=_ffF2#}cC4~-&aIcsD^7`-2b-a}`*Vh@wAdsSU zG4f*KAI)vv$Tc}8ce|_J_0IQfWcS5B{A&CrMvi}w=eRjUw3I?ed-&=)L2t{4Azye> zLOTDRAL8Qcv@E^tb=?a`%zd3*F%G?4xmo3LKG zzacmqN{q>&f*Q@co_O!_MJu2dvp}B0-KzA~C_+7;bqty(_`?nx+fxY;cclY)+XMXT z;7?xGr(>6+M$vSvgTiZofA(Zv!rZNs1-ytt!FA@Z2)A9p#Z`)3Oqqy@u=dxJS#J@Fg0k-XzMIs2@gUE6Y#R#Y| z7V+n`H?)Ls1c1S(=Ie)V9)u*v6eK`W5uOrgX6)99j)}h9l4m=1!ra-EXXC@p1;+Ko zdcS_42T=e$5GOG9!=o~@(ZXKY7o|peD^oOucg!*A;+2?^D)K=?^!7ngVNn#}7-K?t za4I7*)2OCw&`+sD>B_UE#W;dND@W!rh#2^nEJnn`xBaqYOsEA^S!7iGj-`xPFyzl` zo^EYH#>y@@Ykj%)F3|9sY_K=mGO;X&8ick|F5cV)UjmI7L?$iIhD`|rv>H{-)Aj>X zsl}jDqd0L!oIrOMoJ|)Tg2I+(;|yk_k;bDCZ~g#L0zF_1*ph^y0|{tCw2$ub%QD6d z0T?^m{>z1bi6D{cPnZ(u?+?IaC20(TBNX94D&_+I5&ciivFD<}Tu{PSUd0M&`+3q` zqc(SCMAr*{UI8)~c|HIZYZ@pI_vsQ=`xxf#)h;2N?Y>;g3rEZyL476zabOgqE7u6D z9jrK1D36QV(0sjbU;A?{A;;jZh_bpM0nMAzVy>8Uc}x0R*!_7IR1CpcgFXT`6wH9{ zvMADb`IR(BfZ_q?r1M7C7ns$)lu$P1E!j&ZQAjo5_#xro&kMB8iOk{LR$#-lVFG6; zN;R5UNO7>Pg&5)c4|sY1?WQK}i~r}tTs zV=oEWhNvYp{2Fabq0cClBTc1u#d` zM>F1hVFnm2y8Dwn8>S>GIZi`Bay+#pE0_xDt89#zz^u0+=n@`Hd9*ND#D?RSWf?f> zAnAL-@GfJ70enq-FO(H>z@(CifC;KH`3*@$7(o2XvZG2j;p5r=Mb%qI)zLI+);N>71;GPYXBe7{zM0`<4x`gct5TBS&_mPyi4x48-N|( ze3BTyGLZkqr+>n_Y@j?KRp{$u03!MWOn@jr^dI254@JT$axDE=zdtfOHJ8q@zQgRV zcO=y>;C8NmI1OZq|JQQoNCupQH|zu$jhzHrBs`>&nu0vykwNkfW0MpxuNLfQzg|dH zt3s0Z#->W12nbz)-M~cjT|zev)g_?4Di(Q~Us-V2DI73TNI&fY`k*T@0B{ZtB7g(@ zW-tgmrO2EXrtEJwvNCAXA0#nhW&WvvSy5HL7G;*4)$%coC?=SHD#LQ5&lBl;bp=OJ zixsawk%O2nm$7epx1)#(;ujSmB2-l?5R`;sUU-RCiNpgAf&m%;B47!t=;)ApcnMzJ z36zJ3O4O2Eo(Q@kyJ_3^M_ZA(VVR0|fudj03V>0wOn1~~JzT8ThDn}=v)nmwZ3P23Y zr{SHlAxr_V9h_5JA0;(lnNJDSQfvwg6a(*#lprc8Hk8xfwyUV`bfR+k016{e|Anb= z6||eRu(Afj`cKFzQLw!44~U0~^zM{T8KU7KbIb$(1**s;S`fg?5LyE;GHV+a0N%d(Vuxt2kUVQ8|#bFlhYJ8=pP!+F@`X+dQ*=}w}ngu!J4-xDHM@)a61;v9i6fk}D=+=*5Z)&)IVWZ~-nYVUM=Lb4tc{j6mEW?^jMRmmp*w 
z(F8U)lqDHf{?qx{;lE4xyQQ9~|7o5O>*x|eaccSx=h6pRRKGr}(3|{g<+D;svB9b4 z*8!e*zf=Gokp5Xk_3KMwiZSC+8=R_iAq9YQyq^EqPZ~pcTvFEBD9%Py1wtL9st3|cGZ zed9LzFZL~w?E=Ga>~mQjsB`R_OzdUN$j!vf3Is$cKeEC;4DFabn1}V+dC3{+Q3JZty31n>c?t!Dg7~i37+(!!g#E=(n zkD)SrF=y{ahgA_lkUp{C;+yyV^*-}Zli|`XHHYgXz1K|vf7JHd^V!>NarSG=_Wd%G zV09R>YcwXm6Fi3?eI|HM^4tDxodq)0c-K&I$dL4k^;?S-V}ws%Vv|vl!N9m{K z`IY1j(4HO9NldofiCk5hDf@LJ|LyPV2J)D4y3WtO!)qc$pm=>$+`lU?VyDLgWL;w2 z`t9%=w_yZ%_4T36;x0som%J7AQF}JMC%PF@elLHdJ2%Efmh;!9jIUZmeE`3VFWOu) z)|j)RW340`*7y!(-f>Mb>pb%Io1DCr4v6Jjofbc+CBL;@1r=}ejqkraACplnaY5aF zYv^SAcy*8kVj$A2bN9p?p(1B`{80Pl68Xv|G037 z8-kEAOq>KkHi z+AJIv&Tso)!S-)qPkel}=Qo*T^7&_xPZwK+;^XJTp#5JG>Nf3HnUA+|kI&I@0*F($ z(HCL9!{}hCSIUUY&M^2m6T_hn-a5Fq7#q<J~$oSdlvq3EX*GilC4y}-%0^w8AX z*h<~SftS6bv__&GCq(6BgC#~r3A6A@Nd7=7(6TH08c=e&sp3Q(cg5vXldo-VM&D9Y zd8kJE{1U?n2h9C-_=-pNx|gy~CV1-ZYVPU98+&L*jK{P0qsv@n_(&S@hEUf-`)7uI zBMlCXMcW1e!rH{M;xD%*G80t-_2OfW^vo-v`f|S(NANGFr<(+)I60S4s+YB`93ot- zK~3mu^^H$sW-C{vc6P{OO;)qQ{FPpy;7AX>Dz zsEMz|et|+88E;x}da?u6>)wr_%V_(H5WCepKFJk`ZUY~!Tb*vt^}ks>b~pQY*uJDq zcCO&uDd^9_CfU`l*sriR?5wb&lbZdBWw^WlJ9OgP?%1xAQmz6K!@v6K=;Cnr6yd|3 zRe$BZ7~0l&)z+|#y~k~}!E)th0eai&8cykmISDtwY3_jO?++DxnF?mz(70S2)LE%S z5+Hxdo3kb;h%XHIzx_V}bymj8I>1fbH z!y&M43Y<$*jxBy5t29@CsBT9GA~)6;T$yPXGCvGIBF?sGV+ z%&K~MfhpFf-z*z#v_7~A5H2@e@DZ=v{c_`Xjcdg}Q|)_VJrQ3qf@Z)v2G!x&%Gy** zw1{ken6^T+<2KV-bTjEmu0J|S^ZoKehiSnpLpDFVYS=FJOXzP@?Pd1oEL*!rU)ABY zdSBgw~l2Gq2UdoV#kuL|;DsQW1R0A&Qg5^(^-= z*E&6~zuLS?-1Tk+j^-QU8#jxhN4D+iN{3VR#4jgy2zlf(R?g(5*C6~rwSHLr)}Yl+ zFQ9(A_>W;VE{|5J)#QO+`aW78KdkK6qXWg!DhIwZy1^P$Nq6v=R18d22e-JtE}VXv zXj`lzI`M?gnMjI>sp6+Jg!hp5f2$j?Px3wH@ae_D`2rlnH-Lnk{)5KmA^x=4v{9JptAjyWifLEjzVg-j;QAL>KJ6 z57Fs&SKgdFxdVeC`PUu)Xd#jt>)O=G7*SFldQ42Lwo+nWQqQ5>M7qR zxNDo|5miz5t*PSXu{^7&!-#dKb~bD(hQr6Z{-mS$7prqj-jZwPR%-}X-KAC%P9bJ> z$6L;ZO7(u1lq|>00qxVv3-~wET=wAB4YcCtcRdb?65!7R>sh*qEbUD3(o)4Q)}SJ4Xd*iv1nN%>D1B_x zNv|0ruO1_^OkK1kZlJGq+j;V0uY|am_|%@3)C9@Z*BDB>yi(o7ttrr$Tp47zva+zq 
zf^EuNZ^m*4ZLG=N$RK2qNTwE`yXgNxhqrlhRsIbo+F_&Tzx1lq8<1W_VgiHySKS@R zt}z>ek$z<1V*4+>%EkUKy$baEp@T6dXTCtRC69waBK%kGJ?s^tFS!&2X#QJf!om7K zdALj^S>R-Pgw8wlYdSjVv?2>3*I2mPl^v-6OOX=GJh}c$ksgZw*ptj|-}z-M%&{vG z>+}ob<*3hwFaUnp9MvpzPcO}ukk&jdXCxVx*=%%x)`XZjWfSxcshx zi7F3ddLu&Wry!|m3Hd{Tq|MYFkHWU;q z1Y3%9@|QTeVhK+~teNuZV^4`Xd>49vQH=@@_brJhyX;ytHWZQP9mIjdPephWTVtnW zQzi%`a4yc|Z_p6x?|IVq5^+G*R2?|njFb6)eKVJ#SpK;QHF#rReY~#6MzWAb5(PZm z2JLz8r-ATrpp#L0w{0)a8}Vgnv6&dF*BuuJ+~+$Mx8gZ7t+}IoCn@%|^r`R-c~+3; zg!Wm!D1TQwZ--!ErNOKLW|Dku zbd(LTgbd6AFihYM6oAhTD%6qG6#5oP1ZM3+DKSS<(<`>mAt{xb z=&M2`-H{4XKRz-q@W~j4wahC)`qZL2A*vhxh)S`t(=`lRYI*#QFE(zgMjnz9_nddH z9KHlvQTYCYmRWoh_?jEuteUBxpn+O(BrzqwFpo)YoLOc>%GJ9O=0H!p2DJ@otWB0$9AJvYu1)09@eo_rLV76Te#qR;UX;2x zFxXcNZzgRkDQ$RxVKFJRuj3m2Gf4r+dErFB-|U{4&pq~yl8Emlo;`F{gIhS1H|4;Fx-BBpampuajKZ^4QzKld>6bY z(Ne4JM`10W$-NIm&NGpB@mdA9X|kkP%_JM z0Rd=pWq*(bQeZ&;xTXNi`}@_&0LK1mWM4;Qjsq2&(I# zeccR<{V8FS%IkPV1E{u${m#;7L`nNx=z5?PxCgs{+IBPWV9Y+8)RYWH0pJ&rDs2ON z#z6o&;B)(J#%u+ehf>@47Z@-}hl5B>=c}c(5s2w9<9^^6R>?5+5ck2Lo)^^zlS21} z11R&-z2y7)?0@KDE z+(Y{JsIlA|iH$!;jou#)#mP+B0fLxrO*kg)A_vQ~9p3aTSLu8H@L{hCwsX zY5sZW-3Ugc<(48p|J?{JFw8ezCT4t3a`m$e4|N1DWx1enGjQ;;3Om3T5xi83^|Aw0 z<7{6_f4@1NrxG=MF0`kSv>br!m{Mjb0;n~VR)&fzFie2|&bfT&0|1~zf-e^^#Pw3@ zfLAsv^2+g0r6~X(1>j>68@~pXq1GHh&W4tus(QytD<(9mcu#|yDByTc_okQet9!?` z#J%^$wtOoSQ@Ie@6Qi;LmEMy$838%09q_M6z^0NJ4B#0z#Rgz1SrdU#$_q5)2Vk~3 zsBM57+uc;j%7Dw6ls1ikdEf{DPCk;n_lFM@QUM@ib56!=5;!Y5mrW}GTdZp#4Ny9( zv*&$BuI-+Br_tCi0z47&3&5_y#6M(#NWAI)DRX~R_Bw9pkY0!Zn1keFd4Z(}CUHeD z!56@5B5Ggf!T{JvHB~Z0OoSp%RS-qo28}Rz0~B#ZqKwkFMdmR@ zz+gS2Jt|Qpe*(|x%JCEdMKluu%l6h4{o@<0N&_HyZUhVjgoVI=Hm&gYc}K22RXJYV zc3@pW@zGQ?a&NA}@}GMW0=zP@oqLj7J(ZVKO^BkFGE{B>jg^Bkv3^6(Kw?dchjy4+G_h3KKMlbglfa;?`V|=rg8{;p0DMFvJD$i|L~IA@Jb{43 z;SfyA?bo?%LgLRgBU0J~#MdHt##tYX=@Faly{tf+S`J877zWRBjkR(t<+U#NH=g==DJ z@A3!()o;v{&ONe~K@vbO;Wt3f0$dr!cbrv)0-)+`pIAv&i3+1&DoF*R&Vd{dmHv|8 zepCgydfTgw7XmnrbWwa~g?mf`7~Z&IKRU+fT*&UfxKwQW&dH8R7I+QRd{zR2z&^dQ 
zlr}iAF+q75DwM!57e^tzKQhnv9FV1mKXR=xU6~lheGU+zT-tC14giN3hIzz7l;97Z z3k{ag*uZ(mGQ+Eq!2=h(cQ!i#Snn++w0C%J2F3v)fD`C_j{nE}A-zm+8^hpJLL;QW z@B{L{JEa)j=^|NZ3E;nuL(RX(u?%gceLMb)U1tw4hEQanWlZp4_L}RLk;s$0^}GC2_ax20NVviRy(HmEx-75?EJ`dTG_raBO#E9W z_WN8Y;IQm<^;}3#)8eBfVv62B3GKob(tR)se()eaU4JRdjQE6#H3JOhP^8+BM?bo6;SWdh_Tb$TJI+-c`M)BMNfKYF}naa z$j7IZ?{h6@aTF%|NtcgP+6ID#4l(J&btiYSMY6cW<*N-JdRxXuJFm~}9P!)vz(Uvl z&bl|~akg5CW}|l@Il=qV_53mGE*Se{WF{lvN#IT&l-9FdjcvUTHK>oI(`9gcakies zaPq{w@+(HSk!8)m`rx-Jm7@nGWeLh?Y_vRR>`gopUR?+mT7d^%JvNl<&|EE3b?zwiu_htVo zy;@Gcle=Ss)<=Qe6pQ8d5yl(Tgk7!F#P`vpX|Kuas{VVZ=@jDR$>PKK2Z{?J5G`9h z;gh4Mxmi&o>QHX7>-s0XCq-Mr$)~sdyJhiX)U}z%+N|*zfowL*tMb3ax0Z+!VDeJGU#yMBJA7pi`2|Jj!JymO`H zO^3wp?&}m6{6wZxqSjbTCbi;~0@7cI@2^mB{)5%cfZkLo^>de}9+~B$Z3=X?o1DF2 z@`s1tXZ7HY74tBhNBGArwNlLCvy2C)aF$z|fTHi)Y2lq_n-(>3V`A!9^+cV+Z^AYu zUmIQOuEy#F{gtzXF1Kc-SE4nD-Q;+WoaYL6tz-EUTbn(HF1{(bNH#2I%j{|@{KQ5JlGRlOf*AW(P62wAJ&kNebadxNt zU1TvHePi67>$PW+kg1|}K3FJsKYkHroEfwQJS-B*43jbc-ACTwD0w;q?E{M= z9_Elwr zMB^aEOabL~kG7>2jFrej#)urZE1~wJJ4XKGy{2MrVw9?(J?^FNgqy8`eAnNOC3tUl z>l>$7R8DG$A9MMqs%vh=2Tuu4@5=ASoQ#_`Poo%*OIOo3PGgL;KpvJwHNPRej;(Fl zMQ_fhel9R7& zlB{p6jz7qK89Z(n0zF}9Y-<<)@)S7cqj9$p@wCj2A&qj)IpeRY98=L+S!lWb?aTjE zIOJm6&au95cx_+smj9sg!^W3z?d>wi({M^&rsPQ8YUi1CK4nbg3ZktVBF5#RwKL;* zK%F)Cp&rSBrNoSQm#>T2R|2WI>h2}+UB|4aqBbv4;j zGC69oD3z;8Kq9quy>@N@Ie_e6zoua9=KWfJ8m-ffO%*n+s9|8a6&$3lTCek(l@=^q z)%a@ePJLg0wd>HKUn$2M!qle0=cZR@mfs^(w@)1LjvK`b~!d(Bao-j94@~;eVpq}vmmOAgy(zL_pMDbmyUflm+%0-anM@oYIgGXLY z7gBZ_#$p>1>IatGo8UBQ^3dNZD(L|2kF^barXfz=tY2}VC97$3^z{PI1-QyXFNXHM zkp;5?HKV^TeNF2uyz7I`wC|xZ3(X0>CGy50a0es7oFT~rG2tn;ZtcJ9ZSPgfuAYIz zQx;y?eo&CYft^op1`oM^e6YFlh7m^EcwO=)7eg<i z)b(KH(Qr=NOiLA3D~mbE-?vqZ%TaA%ceY94j?V`U^hn9yJLKFjyIt>i6074Z-$KpM^gdaO5`vj%2M4v|qh+rt|8us|K2F0d zV?F$s$nb0(eg@IQ4mREylpt>AR8Cmf55G^j3=c}>vPav(k8oGr4Wpeb z=;+w~$lq#;21=1RO9hbIe(iaTTwxD&hlc(JMpT;(vaG_zj}dN4A5$Zl2J2xR!^x^H z`x}h>^QH}>PO{JL)KFc@rj(+o-co_ZRagop2O2f-)0!4JBJrEtHdTRo#0A%pwH~N) 
zMEkGhZ&>hHE*{%^uGDF6M%oh|q~3s}RH-f4&noA$=JC~0(REk#4tY^(IF@?*+hHMi z5ACfEJNc@e5^YgUSKiq%O|z`gGRiPF{6!&e-{eXs@x%jzGCa?F9}cjJby0_Bq^kvG zUYy$4bG`(Goz)As^!xvj)e_W}LUaLPl0cdG2w(r+Y0G2AO0d)KaKGBa{3w+kxc#b; zqXNTUSE=tlk7_T`j@{$yI0x>rQKc_21rbg8gL33?AH(_0h15y-CA~kxV%IrVJ*ho% zwGFFo&P2Gc?=yatN!t!zgTyMMBgD{4z(lBLMG~)eoTX+_P!he%WB}$YVc!LaY~VtY zJ6Cy!$njyB>`;uhp=;N=7~|e-r(N2{v3YL{Bu}?~38h7KBMWlLj)lOzVY+9c*#EBdIXQ)sy>>j+&A7$)aUIKMCAv=uPMV z)L~z$WB(2~HPtt+W>#J6x)W-UFD~K;+{~YSkdha6@hB>O=`fy1ExGt*k?3zV0zT~UJ8T3swR znO^TDU8%*{_v$v`d8_7|adIyiJ77M}^kA63^aYjO#Z*Q8NTA_WlJQ3lXbPsy^LX<5 zA*ZcdV2!`^m{+`@vmbdd9=!#zl&Eo@m~QxYt|6V7Tvo6Z#iDG9o*jg1?K-FcL5)Rd z#Pp8Nt2T0o(bfv8(6|Fh%M>N6Qf{)J(o}5SLhXSiQ?tfHa*vLaPTqc-TKZ1MHr2zl zav>8vbIB%(r`yT{X{-zq6n|Lmnlxv^-vmb6td6NXw>?61?03n)oXvN8b@MQq{ABIt zV50%Nx1C}&EqZT99r*{N#_lhV-)2Yi<{cDgkXpbX&r}|T@eA}^19V@gM=fSFiw$zS zZ`?Q{0?NJ3SZI5lkq#a7!em8n27L0wP+|KcK{A7eExXB>I$qD9h#xM+b?e9jg}9Lz zv~ySdeX3yI>#D??hLM+U{ zE-sn^k?2xH5Ww{Df0}~zOwd8Tj&XjW@DPI{VOpz;JB`JOXX=s#2P<=qW>LG-fgrut#}vc+JiKGi#&L z&yFQ2{RQ$PGQ}FHJmn3UvONzm!s{)uT>+vR%pv5$q@-B^#t`vq; z4nf$-gxZoyyCW0KxQl2HM9HJ&$;MG|$)lU3uIrhk|{=)cuUJROgmr(i zmKduQ)+B$0xRc?=zFdVQO2MEh(1acQaM}lwRS?$q*VFQae}=aoOW+_}^@C_?jGV;5 zH_v69l-&%@w))7p%83(EkhJR~iFBeYsN7jjNF{kbNx<(UK=aheG{j6aE-16!Bql)Q zk!%6kERZfhW$VL=;jKqXVqw<;&X6jihrj!>eFwU~7&$a6B>gP1Ct%WkrH2e7fI#>c4pjmkM6j|^ z&bcK5h7mio6ftv12(cjo)+PDA4or*!(WrU9L~cJ09v5>6v|dXwPI&>=Zq(rPM8X*j z{yxHLr z4R@)Q!^03-?a~+xCed)5AI>_VaI^L8D%5Yyzxj;ikyx~D>oZdOoX;{qKEYF<7Ya={ zQMD~$Rpb5d>#3)Q{D)34_(q6Z^o3)-T|Cm~1ifQrlPvPz!F!JDQ`(mmK7EKY?J0C< zjJEiFB7FHEUi+b$3rqDZgh1W#98t}EpGOYMNoRE4HJX>q#2WSdu7Gb_$r2GP=<7q( z9K;qsqq`KwWzZGW#wKO}OyCc05C=4mUUs5NHTmzfp%U`;%h*Ko;hPLe?x1K*9TYh# z_%3h)CCWn{_aL!SlM**WXm*r?nZ+^xUhAzB|yPmB9f z`;_#)ph0K*iCi2v6E?$wX<_}0wq9Dn1uVT5N}TX_pKyzCq6=Idp<99wNtJV)@dsX5 z;9T;#%R{a*bUua39umo+InUS+rwRM1^f;lv#0W^Xd0P>Qnah?N@k-I+YoW6#7=~^d z+XYa#?*S#%Df%h2aWfNt`U5LI?JWZk+i@TVG@?Rh#ARbIzsgP@Xy85}+1C46w?~Ra zVo1rv_LjnNoR9MNNPGr0P@O5wHnMK9GH__M&sWbwM$Iz`C7rgyEn5a`w;$*!TeBT7 
zDF6L>L?_rrif?x{`NAFVnB%07#Kx_RDt8OxXx5qu-cTv&sPzHEmvcz?r``o8JQ~)t zIzw=9329}b*h4~1M)6@nrDHQW=aEU)uxmSY>Aa)y*Cq194LV>qlx9XUmY(CS>_vS& zd*vVCw;|Mqt}fzH$kO7_xRG2ScTso!T12fcP^R;#MBvkyh}*W~;o3C8vV-VpN|oJ> zL--^<)Tz#4*%c0Y{3nu&-&E>YxQP}HJv=6>bJ<-9_q5Lt&jI*QpuZ;YUr@O7YW3yt z5PD(Hmvt;qR9107%seAyj|sUOHHiaR)~&kCmx3mQ>XsvgJd2S!5U5-9r^VD0_HaAK z(q0y=b8@x}$8_>F>+nm|>mL(bn1*l%e(1^!gA@z)5giF*eE8r-gyc^>O&T{I3A|MT zHfEhawS1Q8u1^&5*Qhj9)S*tsFu*`UpioP~^o8OwrtNV9*@MwZWs48_i1P>HK_keW z$ius#L{`9goI(@bDbCVi<_E^C%S=n@2kYVa(h%&0-29wzweLav$P-hAqitTqIH|jp z2*jxBRrE*^rHIQxo!ZpWRiBsLjLj_E1ctdWMwLnSwAsx!MymbK71dz!-&Cyv;Q6v@ z^=>FixR}O3EsO42Bd$67b;em#5n4aWif98lwqGk&A10 zZw?UxuPmL#ap@mV^0k(@_9}JGPHV_g-^-Pglh%oH`vS^D*v{Z1`H-!$%jP6~lQZkj zsp!YAu8i#<{==6&iX1boQupPGU4zDT=^{90+C(GEaxf|v13W|9P5!h2193jfrOTB? zqthpl+TyIx36(f0Hu&7Q@&*o&z|NN*&?g{fqFt_FvbNa{dj|0SVbcSdesD1he)X1@Lvb51rjXRPYHVY?j;hNos z@7*l#A7@fsKWl%XN(2EdKd0|dPUDwBxa%J6+o@Lvau;c2dL)+p2rAd@BA0VRzv>?? zoQKZi@%WsG)^R7HcVeh;zDptp+X^61;|S)9O?|N^T65@Ix+YXfC(X)57<6R)s){>h z3hIru1U*?B$=wX07AJw^=;@O zBCIuzFJ>3e8+Bffd0R~&yrn8Ub5n_kO;z*fJCHXkePr(V-Nv`R*@6jU%-`ow0dH2+ zsaUsj*am@^kjHO4ldE4K;@wlydqusMK(xfowwoajhvw`9RCJ$5ZGp|+G}ppQrJKh+ zUZ$CUyK@M7_G_RBp|F+>*5RuvbnxxB(n-ocX(g~WemrE`Eecd3;lf=lKf!`#VoR{h z_wlKV#Y@}tLO3}k@-5e<#S&@?Vy-SU^Hy%Lwep!Epm#|%ZM~6kRIAXP`K4TM1Nxgl zGRd1RO>QPmoO^{M9Md!Taqn?0)mX~xn)}`|!{L6_)kjpCGhGtXrd5Bm3AwVw)YmOH zRi(ddWj|}yX|4aO=Cn}t7;crjif-7I*%u$(&Zd@o8Qm4GU=T&|c@68iL>ZyJ zD0?{(yJEQ0n5Je3EAH1aO3zEeTonT;Ise%+vUVmE2;yr4AF)X~NgdXQJOsy1?<^qH8RAba_m(+?Pa7;IidtQF_&R z2!2~)t&C+X6it(B&*x~mC?BV~!!K)C;t@8^c#VA|m)5J+l+eFm>{!iJ^a0rfCqpm7 z7}AuW_)qbwH70so$Xt`C+4dUv4Ga>kcO8ErN^}F2$f>npYL3b^8q21OaQ^Z5Tw6%w z+N3q4{<2fS7-HD3L$;z?GlDJgQC+h#AD zv9U#X=8id(ar}bDZ%qrM6A#Km3l!MAPs~~~$w5xK*c4b$tX9{eL5m=Ot~EAWTUV)X zTceyQ+YNS3@oi@8%HKp#_%8H32~~_Og9qk;7W`H5fP8!1p|9Uy5>Jl#vy?tg$~|Fi;7&eBU0b~@ZD7-VDV3|#8m@$*dU^fP?(@Jd z?`=Jl6ouzl2&w4%h0iuH`HMtsa<{ca_u19Y@d6}oB5S72ndd0jyp2UIzJM&u3r`XP1|h%`Q#LNlh?J 
zgQ`v^P8pXTmmL->G|*YB%BO#_;CTiG*BNQAEOXio*%hq$^rPK?G_J16t?vi)X7O?MkkA_7TJ1 zT5O*8N00WUcfFr^n2SZdAqE0F%_(%ZQ$A7ZE{El&Z0}EpN@Lx6k zWGQAy#NU=AWh&c*{*V&`*aCUfyp=xZ$aUFWP?F# zmX||>wD|?29UcgN4CIZ84`Tl?|CaLFY20>a|Fkb4*++8t1b!eGL(+~s`G-lsHj6`v zi|mlL@_=aO82`~@*_={`X>^G+=gV17XpcYk2PF$|OL_!L5dGTuVc*``UN(?34#1L= z+F$#SlfZ-m{L+L%xOkc!n%E_T<@SEIyZRDDp>(oBdLj_EhLp&{q5~5Pd1kVI|+{a~|sj2b*EE<^!lf5eq#v(Ktd=gY~0ply|8T@!M;V0D3to6;Yu zLWh-)IcDv}Q(3;^_{TQQ%g|t8e%?L5KFSME)0{M#S2Z34sh}m?@<`ySguy-sQJr$8 zWf331;DQmO&I)G29m6Hf516H7MXkNEuj&z_Fi#gtfTdy4@Vr% zj(cswXGi7kRCtB)KHlc>PigHz=dxjv)e=vYoz{vC)WR`s^gG zr$XR_^p~(!dPkIE&3Ms?4U6vk4FU}l41GB0USdPoLaMyn#=4mgI>yxU&5#>U4-qrO{o+mo%jqte>y@XP3x{P`NYijuUKM=&%+Pod>{W7{bl(gdb&to@%o`+DO$;=B>tQ%{lL~m zTNs?Tq@;C6b9h>NbGT+7IU!Igx7dMTN*#P#)^+KK*6kOF}9dq3^0j+7LpZRmRqUCKP3P4(^{M;hs6qC1sw+qL#`gbw-Y z7*v0MdG@g^G0hNa`-FD6!~;j~HQn*e8CwrlPR@jCezfq}jPt9wtuk1CWoyXJO3jQe z%xRrrCoyeZ&84^{Xm@z{Ubn%q*#Y|LMdL8y0GVAOdt9Df;b9v~%8Z#1Ro{!)J>-+x zr=G(pu}_pYBY2iLrKR~gLfOta2%nO1FVM+0m(P^E+GdpP6!x1sj+?~Tl3X}#$9Bz4 zBb;L_?0LJ#;v9;VB@6*h{IXfI2|C6L1&|I@y;Xnc10SDKDnYe$B;RE;g z$0S@fUHsbUe8Ta!=i-mymp0P1H{`z(aWmmqCtS2l_|XO17XrRJJ_`OqS9eh7#Ev;( zhUd16pmjdmdiauucy26YkNYH03_iX}n0Wwko^muS6%v>Pn7I)s)x1*JtP-g&IdtM_ zLO-2NP^3Iy-oN*QL8Ne47L!AFY^T0}ZIx@z_4!5igg<6f*`A#}-v(&_ZZ~%!GhS`q zepCdFA%|G3ApNi_1!Dp1CTc_b$~TZc?0i49G|KX-XEXT)iux`VGu(svR)T#reDqp0&+u9J^YiMNOZ50@uxOHBoV{D-yiSI0 z1++ln+3^}5{-@Kh)2L=$x9G#@>tBb*9tVG1@6!&hZJd0ZA7A@vYjLrletd%=Zl*&~M9*Bb(2^Qgkx0gdwmUdT(*;iT< zfvUC>zbe-}DwfrEPxda158|(Quy}v&oH7}y-R$!%)fHYYU+SD_l@cp!mb-uz@qBys zJPgR2|0{}1lPKTIQ(9Py@D#E)@a)4JIB6GG532VFPghbWb!hlI_d_Fj>nFIbzme9D zr}-?(JDkXP7LI%74>MU);F$`0`#xM-^p-mV4PEojf>+LWVW|s^QK)dQdAIY;WiDOI zIx8f&UcE8aP1r1B?zM`xS&DDCe)Qnj*Ur(FD4m`+mnX=ag4NCr$EAIb)Gir{hxJ?5 z2cQO4dAuc4X<#tUMZx4B?SZpVAohf+@;nS&sO)^($^a7oSq#@?@e+ z@mQApLW`)wtvf|aMf*fU3)|*Y-wz_#7zJ9cp@tTd3P7^!)8uv@lVL8Xt3&l%+ z#(w?t3rGFRY3pg(%`a|`gAc5o5Fb7X?^6Z1rHRF4X$1pQ?fCTK6I#8Ev?jJE?irx? 
zl@}d(OJ0h271{3*^Jm`*k>kjhbm#XP2ey9P=Ga&{fxC@4djGZ>G1l=aTW+ks|0{n$ zQ_aCb^m{8NxEL$3h^w(P+iG`c-VndKi8(Wo)IG~zghJgH|GSwiWeZhlEm@+T_O}Um zOXWJ{w9+@npQRIVnjSq)zf1VJX~084s+%*Y*c}!=Zbnag7z+MeY?7jO!*SvMR&ksZ zJ)?(NsQVO%5}j@M$U9_ZZ29%(Qh(Yz8Q4nT3dr$_@d~941Cd#mYsdC}nseBW-7&xM zLMv0ozWPkB5AXSXv{7kn50|@_mltrPF-#vuSQT}Yn-Qy~AQxlUMkv=VtPs89B4}rN zW$E)MzrTi?ourQ8h;@YY$H$!ZmhXXLIvza!Wo{5es=Fj%P>@?ebU+3{JKFRHE>x9> z^6#|*&dr(Z&jhLQ-xeHT8dM!Pxs92Li|c=0DVf;1F)JK@*SdPApyV*gmGb<$yOB66 zCVqw@ED$ytBD)DGBWQ5y+`aiL>NjRAN-{7L90zU`NWgp2PZ16ilTqz$65c-9tyvW( zQqp%pGN%`uWVTml-i*SqtlxlPvOt8cL(qP(si4y;hpn6gb-uk8dL6wUe(FS4^Qh#1 zBI?EwMIlfQ@)Ho)=yE;F7CL-A2CWIP*1+j^B~vvBG>;I-cr8K+aRL)-b`@vkP5q?E)wzm8&UE{ss->S9(*aRcobAB zq!-(wg@DNbZInhh{X!z77Y|U8s5O~>t-wpv(E;c3km5=mU9#aP#uADkA1FrQnGig| z`Xa*34`9z9`m|GEVW?41YXRgAAN}1_v`(Dl{-WLlWL(*1j`@yfGJf z=pBssd;IbU#~c5(^2?ae=uOBw`JWah>ncoqB&>NOy=qaG0hQM)B~M8IRrx{I;4B)r%PHo)dG07IR0($PT0v%At6!dZN{1cQ;QDjRm^gM zAU|Q64VQp_unl{}=1OL<4Q%~mg@fHj_DAV5Q!xM5&mc;Ot}1rki+>p#Ju<6@jXQpl1Cr?uG%e(YguE@NulR-=Bq&=&@v z7w~?49-fw$-PAbr59zJCxLHF81}1d}P69V0|B0}p;4H;&t&umG&Xw8gp}Axr6M%;C6XyVl))kuVwuPK$Z@u0`N~;QM+R9MUn~a1aqlue#*5Mu_ zUoB*z$U)>-DcDiAdbXsu9)$i6IudG=9 zLH$)rwX(fay+5{!L6%{oLt{B8+16$@oq6V={B9sp-m`3Huyvq=_cFUr&8UFzF`cfA z7@L_kSO7dt$Hd;h@kp_pxUcJ>H&2#mRGDbx!fY{Ktdf^w(<&!gIFy>O4pyhO(OEzm zme*@lzd;Ui9E#UHn8&X9EBqZ550g*L0Hc##`N38yyjth|W!A;iiZ3E3+~!xWv2x060|;75tH}2;md+kT`dekenIcpT zY@cEw3WkufkpY4SU}5}U=<8Br(;Alp39#L5lnt89HYY*i-x{(be6CcYJU{cBKZC)Q za>dT}_^X(tvLS7Az5SlcUtt9ADR*_&JLVxFXZf8U_!9<-I!FTlZPMpy#Zpx6p2k@Q z;^meMfX^cxp2u?V778QchvXHI1JqC}187j=YEV=D`ZeEqjo-$aou^vB27U${sfdt= zNbe|^y(ORBHqS=63GlqK)11vI>*AwSS7|GogH+h5lOk1_Qil5j=E=uwY}^#J3Tsv= zykH~U{b6o2mu_4s5iNp%_aKexEI4?Pfl4GS?J?o~`UHIzI>Pib0=tpv8y@|l06Yxp@XQ^}E*iO^5R9qbJ?nfhCd!5N`>KIKI?8?j(!7&&V*p$;dHZCWpej zJh(KI72_n5S{%XG6`31ZI}@Hg1z=yVx1o}MjvyGXzWKX&>(3C?e^&i@X5_ZS< zXGyM@y*lG5kXF-NYW9_E#Dmg5mqzSx^{5khQ0l3h9pq9++p*2{+1Sh)1~e$XPTrY> zsfTE$tfO^t!j@p%*OIZ5xir*+t^Fn%RS3B4Uu@a%>Q5ip=`wv{VzdWrBReT#4L6dV 
zy2&(4dHtD1O{#I=0bin3@VJ*;kfk^QQ}fuPD;vWLNh-|_Q;h(5npFF0tBK~ZA^{~UKhy(SujndcbM|Y4K!fce!)e;&NwK=Y;PSN z(fi=!J9JlM`WQc5^52p6U2oTDMks#{4>o+q_-mqQGePIkk|UFA%Ah*Y6&1$Pqi}3{ zy{tbni^xO7^x~AY)DW$1_-?oFYrBf;ax^n}shIRZ!4G}Ax}xBn00HXIbAmcz7Y%jp zO-A)UJS_L=IAM@WFBUvfT;cIlo|D`x{0YUj7%#3|5aa~yM4;Ct@_Ey9;F$ziPgHst zs&GcCb~chgC4})uqj@o$>nmW^I%Cz3Um$KT@VjgP!In)cdWhiZCALpZe({We3`m_k; zxMLWAR+536bB8M8fJgQ%}z5ly{H=Nh;4ZV_qLXJ6(>c(X+0QO zJHH&*n!W!u((bmirDkBTT9>D%aP3R*#HvHTPT_&pVtRSVyR|C*K1aKvLcMTK@QoIR z3{bkx&N`)obZ`o{VH<<)wsFCEj`uK8d;$07t&pbF>Vu*Eqg_)6VPX7_c#Vbezv4Bv z|Ahtq@wkIBA$1wD3H?Xhy_9ZZIR9dQv5^l z{c4i0MPB`*@Fx=Br{e8TA|U0_F?6A>HM{*H2Cx&3Z97~4R|Uv%unxr0kXTLVQ0uuN~OJ9 z->wc8zRfj0U6OdM;@LlyT6sR~MT<7Vza^pLJOq|MJFXfPACHYYl5j)xeVM%;H~ThS zzmO!MTG?0CKWZZL$P-94Uo>?Cq)=c|f{o<==y}GOfLXer$+(L-jEo|F!PocvYi|GL zWNK&&W17;b4+fP|ruZE>kl_m~{m*>9XvfB)rqIG9=g=a9XqtsjHa3J-WHzPSfh zB#nE8q=jQW*`0f!y2N*~oq?=o=t`0GZ}$b{B#Ncw9?ZHAIdZJ)x-S=zh8O(2qp+%Y zjQ8E9eBD=sz7DK!5%Agk=t}6rk`yQv|4YOMC|JJcap(ij&)kGWJL$YL1Y9R#N=d^1 z1Ve%VE?ED%HGQ&ty4J7;QDYkg@*7;n zM8509N%jd>wJ>_-Q_pq5E{WN#DGc!Tg$&)GZww(NRU+#$Z0~`~DNLe4y~s zP#TrXUtJMp$;53cTiUAME2hy`6(yz*!eR59%OLK+YXr?Sa?iYv+e^+8F+%6c6#iY3 zN=o&;0+7#9rezQQ<4&Macv0?qiIh%oGqz5q>5rFIA`v1E9Wl@Rvil$$og3;3g~s_8 zHPypJb1G4Tf^M%3%P~1QOu~D!R|#ZuFz5jlt8=a$gz_(E=h;cz{fEm8>UVS=n#%G{kLI6z{g7_W{#!W_SDyW2;%h?Amb>+*dBw| zMLQbuk3Cbdo%D$#OC8TqDaX*5$lTZPa07nO`|rOBNI9&oN?oSVw? 
zT{aA+ipE~9=i0vvee@)NY)lMdyNRVr-)Zk#?%me69<8yr%#@|0*BunfW425FOQ-%IM<^Nt`&H1GJoF za#H9F+ii?tw(~6Y&#G+HHW}sh&${4k+#<1>pIlaQ_@J<8Q8F?D;jME7r8=AB+L;cE z(K6peIb)TqVZn8-)ZK@g^jR7lP!}Yq-ZOH=B%k2wNZ01SgVZlAGt3#q4;`aNs~}p3 zxcV4>_lGFjAuyTch|Hz;{z-Hs0*v3Vu8vIzCJR^RKq6d&bcosbX_;P$&nN3#dzR5Q=nw44pl>dnFv|<1OMTqUi-m?4@s} ze{K=|XRZ)y(YiOYAa+$be)BOVP)SX7wWA?M zs02M;oEn>@J2@YB*5OnpvoUfG^A)a>-eXQ#%}jM07!&E)&WZ6HtdQU&J$2@_P{wGd z3d&t&x?%7($|TAvY0dN!W%p2kMQb4RFMN>70CkBp{#85tG0SkkZ;0FgrK$LnAusr3 zEh4y8aTZ1g_}wOG_P_9d=P+DQ6h=mQQm<5i#MFsXTA6s#J9zKyU1@+=1cdRs}D1I?w)R)LsCii5x}ZJ)c4^3$o=1tY~$mLZuUwI%Gr*`tMl zziaf6d=xMW^?!qNsEA^;_^f7)=mTe*T{Q)3$!$Y?q|MS{@Kwd6PQeC`^}G*GlqKrU z8CcBi%Pjm@UwhD)vuR~D#AL&SoONpL-w}#etZUfjTCj%!Y&eGj9lu0-wgvZWXMs3_ zmtoJR-fs`#9rYE%q8 zZ|E^ayK=Y zotiw#Xk^L;v~wy1ew3drr)jUt^{fEVz|EPlTKs+Qe2eG}FAS=~P-AWD5Y}*%hUqwW z5z{AF$8)y-vrvtjQ8QkRsNb{z89CpFbO&d~?$Iv{f^X~)Hs2bVZG;#jf7xq4UwUx2 zqBtlwZY1TLe3hA7&hn8m-oj3P@{T5!R-e&qFBK#L5EwBYa_^ioqr#XvVDEpfsGV+J z63sc$J>wqsl-bY{bUwKk;#xb^1B2l`adFV{Uwqx2bFHozswI-uAl53~??2yWlg+q_ zCCAoXV+GzMd>9+=I`1b0H_A;E4lExlz%92g3x6n6xg^_S^VJp}9&%6}wNn2b_CBKl zeyVW-m_01Q)qFPCwG5iZox2-vK%S!4FL@CIGmE!Hwwb>j)S?L}7J-W%PO+a&SL}ebZ28z#vXYm6=G)ow zV|vYOj{tdKW!P4VNRC^Go#-3bRF-hdf~P9@zCUqrh9^aoum4rD(z3AP^&1%`Bqbb@ z%53@d4t4Q`;Y|Q^YhV_I%}P1siT4x);kD*nj`4nJ#bYg(5bKk*+usKoH(MOy)_SR& z2EEfOV-cEbrXh*RdZ#W%-28@flQW0WFA&O<<2IdoCK&8LRBfbzYZ+SyCMPIcV)zt* zWL341B1w9Om!#uWB#2JZ{Cx1RZ4NlsZ$W)fuUJY%z&QM2bTHwsuH35a&fbm9+E-5XH(gk<1te- zg%?mAu6rL7_N+S{GEgqqa|@$-R_D7u?N8{noNm+p$|*nmsGgk39pY@31x7l;@y>l(xL|CmLtd zF&YS%ER}wPbp&6qOh|!A|9*l1v!%rO0;~OVxD~Aa&b*f}{DyS?N!kKsXJJnvQ2q|T zPk^U|2(=Lk55P4UuSF#a*_^FnC}Sk?D=^-It4Uc*CTiepr&_vh!ws7J52pM@cA8 zX~Lqgh3E6r{rT`FTcqTgd=U3|sy!coq{K`@elAN%6uo==w2i=U4Zc6T{psZ%5Pekj z?Uzx|$00dTq@2#E89LoR4$N9H$z!B^X_ahZYqbO~{JO&n4Lfrfq>Tw*C453d2kiuV zvP$->uMr9s#qARtyw3YAfqzO)-ng{Hp_O}ZOuo8&P=ubiWl6S6G7$k16_*2;V?ku$ zI8~#`y9t5`RLgm`A$sX=ixJRQQ@nFdp>=XvmXf^wrGiqSHo!~UWVkkU$9F}= zo79)#&(+_(f7jnurqxbZaIGRbIxXvj3%p&vR^V1?id{C7Xzjb~BUQc{v 
zD^H>M3^5OV1R*R}q>N4`z%gnFrsOmaBXzCPsMlwPeBY!>2CH98U9B)-m=1GV&5sKN zRgvIP?WIT%Rr&_SN-anNc)xkMxhrlK_?GKgg^aEg0LG2aUwp2(Zv zN&ht_uZz~h!XXy7tRv6@5jk>T4#5p9XDxgqGM$fE<<|p*KEfPWHZ21=j$wF}#$ZHx z9+~xN7J==QMm*kKWeWg4Z$51sF-rsS4Ks4HZ!{OC*T6k?uRY{q0J#;8iffpA^o!8b zbw$CiyPEQ)bn4^5-NFTr*+<(7Vp+jcjRrwXE#UsQn`D` z08*%?GX`e<({JDjX)n-CapU09^=QGN_37N;Vp=|hn15@p2<9Z&9b*M6uLw4{*ks3? z5ruzr0N|Y7BXI%a8Ey5k@x4tjxZ5;a(oHLxw2R$YCAiM=OI!&qCrkW_o=12W#H*Z( zT3b{MW%q)AjbDBXsa2@y2Y()d{g#dFZJQMhHdFUvRvpbBC7+u-D#*7%X!+m~u@xrtrmGS}KuzGyr6^ zr+WnI7t#P?@8c#o-!BuoFAr z3-+D?4TNn1&4a$A9;FXuejRMUP&I;=HP#?s+95Lj&);#@iu8gX%I8y;3W`MXRYJf0 z*MtC(Zz2BNndYrGMYu}nsi93SSeOhmBn5RHwP`IX9!UVJ zK}429Fku#%ImXg)7EigJG*R^NRK0~o^a&iW`*!tM2dl&H2O0u-hy9Gm&LUUGz zO&#`|3?Vvk0;d9^X`+Dpp#dh?NoU@g_ALO7-rov)f4H~RVu{MGq4Hxu74ODQO7S_w9L$)L;m3nwVIb5)PVjzNYD-d zlo+)IW;@(=o%@x(E7jomXxyRt@lyQVu|!cAq(V{{9AYbVTwn0lqmgchAB)~+G~AB!_f z8I4=Q+wxJh`QuGp5v4)fPFx<|F8~uDX6@E~LWM=tLFu_B$6ctHi^g@G9c*fw(?eGIkJk-u> z85!fSRCb!A@q8*#*%Ti27qvn`MG#qHL3N1`$injlq4<1++I<42OpGdb6Rbk-8m-F zORoX9^w}yl7z?%&n%5yvxwHaY9VD%v+JTqG!ho&Sf;3Bezjbh`!M(c%&Ap_+^+vcs zLyo9$%zPSu<5*b4*QDNqmsL|4_-Owarz9h0IlwhN4yz@7@c))?YQ(m;ZMCXv`rtKa0g2XCAb5|2O_aJ-;zC7}p-Kw3o0l2!A( zJUl2+P#as_T--Uu0-iE;nj!1SjXql%k?Xgv1x=d9!HMnwF`fF&Lan)w?)qOqEc?QGZKB}ib4QziF?XBTHq}UpF;T~ z3m>keT&-u`(ZML98=7@9Hc1zOYwOic1=0()h+DpAcq?_+A`Es4)kf-kRZW-?lQP_R zO35W#sUBKML`EpCGQW_{*yigBz2iN%@rNc7Y@3 zNDhQcwY(n#l?FfrIsUL@bpaCGEX8RRl!Z&me~4;tONU-4+VTzwb2Xi9n-gm;|#XaWa>Y5bX}R%@V-s76Q^BIY#zS%W-o@m>qT9C zqgb(+Skivgru&dT;I`3x{_ZM(g_d{nT2|)hXNSO5K5}e-9sWJPKDVNX`Rp58vHYu5 zRY~7b7Y-=oU_iC)&X`R-f$>D=Zjf~>#G7}=g`>xGS(cL!H~5WcZhY{IqY_dIu}!XO z?ls~)yi*(0yj6Cm`vy?=P6) zR*hQzXF+b!MahiZBgW%52E|BLQ%kvcp1w=OCHg%%IW&E`fOT=!k`WGV@>HV1d zAAR@n<&QL|pKX655J$vsCyfY7voENg99NXO>$Az_+K$uKwptjwLPBl0uaT+kJ@Q)~ zKidI;Q?fNK)@TPCKGL1W$&=V4hQJNi@SxFwCAP+t1L@8pI#RY+HorKBlCn6qf0qaD zx0)_JqaAy`?8^h*mfzecNs;^WZ2e%1mSSo-_ibBY>NQd7bDXv=&gy_=t+v~fjp!K0 zW%7_pCG$-bvb&P2pL&t@u3InYD$O^fW2*rKb7Wz!XO%Yn1y!9^-q1^QjAp%~uH5BV 
z_a=kkS+uOI;p#8dea)X_%Xs4m;)JCvk(1JrDf<{2;6K99PaKN0&&j;ovHL1~g<5bE zH}G~dBex4lTCP6umDq{U(s^5i+EDF0LJmvI2)2XY#z?xrX{>+0YfXz&-|!e>7j6LN zC>&=tmCZT#+1s(CRoLjtq}m~Aj!hSa%;@_Es-I}-e~PjaPt{4%oJr{E)xjdT@_M#z zx`Y3?A;?T+-JYWi)Vd|&j?jzGE3ec58V27xcu1BZ*`d#QTZy`%MB%*I@a$q~lWlTE z=OP$BxV0Rql~>!)@0f8}QV%;JcpwKjLR-N_?=$PuG+!av ztGL^#4=Jj6rFSpce6lk-dLvhXN#y!`dN#sN#=RB=2eWxDS4`TIi{3ZFcomH3-$U#S z_GdZh`1a`UfEvlj6xu&SD^7m^JJ-GggR@jCcR+2y$PXY5vhJN2CC zN%^BKd0aJ+sBt%&h9m<&>0ZWbw}BJpsxM0XG6=Ukd!hWqY`NpwET%^Qg&3Zg^Bp= zG4Om1WT#&tR;<`w7dFC9B~i;P#!(Na39MQ_=(c%wvJZL+5{(`9FROPQYsfvvQzRBuqL;>Kp;yZ3JBi-63@XX3Vo|fA*mKEO-xM zz2p39H{=of2azN32tE-g*i~d3dHimz<~S0fVSHKDs`98-G{@+aMwF@&>c~yUmxhl6 z(L)jKo3y>b455ddd4k&OWZkbHtI%o}2Ia5)J75oRiC55=hi>m;ZWNnge?s8G4#k83 zR}cYU=%m_B*W!>na)ILFa94&Fn;BA;v$4*0S-rQlr5%Zzvqw^%TXrZRW$2LscOz*f z6-$nikordyz9=!qKA2co+rRE>M*K2e6&?!P^t-mprpN&%FWQSfb8IEt2VuvTEjoW2 zeDaiCMnCh%8e7M_s{cIKJC`*Z8gaFx`$hsn%2S}_wt_UErz~Q5RRe;R+56B9YGZp~ z;=Z$pfM`OUr(&y}jQq(X-=GtO?2QXDs8&t*%EgOy*a^4+;S78B7w>z`_uR`ZE6T%f zQBDMB?ng8oDB8svjhrnhhs_#5Xv&z@qqRpOX9sU9`|+rv;+ixIQ@`w-q!`ioTKfTZ z9NxC~%c?lqUbxX6*>(PKJ;$adBw;b_vNdlqJmW#i$^8KhBN(k*#M%sf*s8{I#K$W9 zzv`^yhn{e7iQkSU%pyhyLOmpHck75GGR^Cks|zlFP7poeMj*AA8bFPhl91wCb4uV* z*?Uw>X@|R5Zp~E}j_M%f!e&=>;{O4BXq@F{k-5M;u8s}RdJ#oh>XOi zhu2dqc{Gm?79{#_EKqj`SpsQ+lseDD73Qm{Z2j!!NgKYP+_($Ey`{5YV?VeMYmj3C zv$OKv3%A5|?r#y4G4x}o(?p{^0X!tUufn}AEnH38okXQN@F& zIPTw_N7)=bU$7V|P_sPk0-qgk4fA8)mDe5v4@BF(JKOR+4`brYTbKE&>|jui*8n0) zAlumB&~Vzk$2Wc7jSthOYF*DQaTPslg7<$lg}OPcAdjcVDcP>}*@CAZ@?Wc&?s?3r zZ;m-7a+0=En1aBERI{9gQLaN|!E#T%u5d$4oT0s1!6mE7A1{L@bM{w_#B{iI=LX$> z4O~myxv#?%atrzK-ygG06!I4V?*e#=kiUp7w!e7Z$}SYy|4crO?%CtJ<)G(T^E{h* z!xdyqKWgBhLm2x%#_$dgcl$nl?mpSv?pr=RxoH*evfnU#k$p=4atEbt7VkQ`B>fye zvUp>?iYqm)-E!V*Ef`NWzi+HIHu9;8mS%1cl~1ENtr560R~NYVer!Df_!Eq|C)fly zW&RaN;bO(#YZp;L0_L3Iqk+G;Y2eO6pHGm}unNa0TYgk&%qTljxLR)mv1IbeZJxWg z&0@9hG9Mdb@SyUzPsWqjztfVNyAJ_e5C~lH^Ey$+6QOgVaM@O}jUl@mKM7p(7S{&? 
zImUO_WF$mf5I;%4)I^H`qB*z|w7~ge_^!x3RFHYqHbvy1ePegc&waTWSj+V6pQ~WBc0<5pRh`d=Yp@ zG&pYiqg?eD_}DZ(%ozO2;uugGZ{IU7X#a0lL+r7gIrx#4IRWHAfSOH2X3e47_UB`N zh&^PJV7Nn>=E2Ver6`Rqxzh}m{Ag*c*!nQ%^vt+xk*Jf5##%G;J_OwZTinfX3M~`S zn@AjRsxTtbpA(Y62dX=G4`O5b8u)1acf^ogKexppW6Be$#?LhZtmE6!ZfnmloT_CO zn5?l_>R3Is3;Mf=0DNPIaJS?1oWW?AwS-|znP{%-8r4Wog&h*N8qI%fS}f+L>9EY$ z{uUiK#B4dN0VY+Rskx8>m?twc4k}KBVG59JgXSgs796U7B@45J*uyLqdw z$kH5|Q*RQdDa=(BV&oDIi$&Tsy=8<$Iv%OmQF&1B-?=MN114$|4&^3VBU5f>!c-E$ zJ@}E%OJn1PCGLWN30~Tgs4&BRHIXGWb*D^FFhfM!)nJ=#NSeAjU`z(^;2Xg_iC)29 z15Wa-KP5{CGQW{>^LRJHSQdnd*hk5YxbwK4eguPzJkhLziBefD^Na!zFY;C(0xE*? z6cz+|+c&RRz->Y4)?ugdAI7-l*K;!CgtOqiH1#-2b5oj$OWecwAZ?Rv@scn3g7m(> zss*5w&N?b=*pdirS01yfvfDmi!`*z|XlZjd-(~8`T3I!lt}6=2G;dxxm?VC;*9hdb zA&>aR137`@UA8umy|7a4->9@Emp>j~xZi>_d7If103Fkjo-)6cGk8(_xWlOy-iw=X z-LQ3$edmX8r1Nh^3H?Jdw#Tpl*)(G<-qTBn`udsLCe2Co%BX(n5}mcCBWBy`>ja#l zzopvC7UsgI1)8p)EAY5oyu=P+V4(&&113YK9sAl@ct@VQWnwy&zIH!V<}2It+U&`5 z7i@2$0Y)p@^lJ?)ObJM$LkA}Nq~<+QLZ>^J9`T;r;$G)d1$v5%=o7&iz-aL5wX$w0 z&clbas&uwu$q+c<|KGzg)yCHBit&Ak6Z&$2tr*qiBc7WQ|I$74`0&|aWS?^S=M^cd zb(tI2N9cXnLv>0Z&`RNAJF0L6u8B85GUxr=nx4asGuqphk%B8|C8qDKww8ZaRHWWB}SCWt=(nb8_bKGY?BFf!Mwiwj4`aTSi{k2euYa?4SRE zHoaJhayY&AUZKr^lZ)T=XBv0xZXhg|Hn(Vg6vKC>cDu!vn;JRMGWfe93(gqFZ6afO z>JJ*!aGs@7~yufM_-Ts`X?!8^NI$x5BV4qjh zm7jHS7p61}K+yZ!?tNq4N<;;)9&UV{W6|ZvcRJ9m(3mE<`76_@zI~Z$8`@rpBxWYf zPzmqgKh-K!=&40#diLh-h+m%cm=oouT7B7v!QZBlLdBx&buU zI!#qXCPK1CT95eI8t=rb1n$~3H$ricgs_cSnL|1~blpbl*y)PujVj~e&<_f0>QZ0K z*xCo#24@NuuxNCd=Vt>#x=LYD*dL9>EJdPjE1ouN)4-f+^!OC^`KmP1hVI2Izb@Tv~nw;Dk>Ce;0uwK;+ z$7oDBH}tpgs+-EptHE z*B61tO;)0q^F3i)Wy@RkQcjMP^`Gt0oe^C9sA?JSBqn}~uRmS3h;Zh`?nfV_i*tx- zns`Mhq>th{RTBe9ZDTW|DJ;}UgRiVwa4nr*KG2}=J;;UyyR;#8?ie~Yxw=6)comNL zN1vBX<=C9Zy7GHee=bb^WU|{|W#`|{$LaCoz{48FWZ@9m@tkGt&f=lVN*JX?G;BtB z&GI$MH&1l=y>oF_ST_W)&tP8jgTEZN{VKm!2!2UtNBj#oR{m_mBhxZZn4%}0ug5xF zApW3Hdyq$%+ePO>eHus>T@bVHJ}=R=vhrWs24>%zAp)Hm*#qIOB-3Ej#>OU=!Q?~vO6D6Wg= zB}gp6h7ieupEo$s=q(vWzvRWWd?u_5aH^N-G9 
zo)L*C^M3F~zdiN$=cqcQ33v=#5OsI$UiTXW!px@b42Qym`R)DiL-S(?B!?|ghb)~j z`g-$pVDZCQ`i823SAqs3_xv~{^mMWWi#&aOwrkn)pWAPj@At<{OD;=+JRFO!qCbU3 zYv~}xu5M`78y=WBXaDhLunw8gC$N+hgwt#x9-$HC;bmRwV8P~6o%H_ ze(y3sUz7B_$MctszTZJ3Amz(MTb6$er2d&m^1c5ONJ}$IkJo*XpgkB@EO=;DVxO-1 z_N;lC!QR&>%pS3y0H>19^3omVuDZ#4ntp)i^NPYV&oIx>BgDtsbLx6tgYwDUJKOiR z=P9`OpmJWTt3vYd^HLb&%!!jFD)q?z3)fyBUSO*(0bVToB8OH1z{u4kbl*2PS`0UY zq2zDg;xK09@z;n=om5IQvGG+}!%BY(-P&YyA9~!ROSgPH!!BVtQ_{mj7_w7;M3MLO zq5!8&vg)k#Ao%E%@aWZ7AR_ThQT;)VwYf7uR-eY}b0nWRtt-ndbC-J;dDnzLqoF4i zn}0JxPaB2~<=x||K@_E_WsHnr1yEhv=KYQ2KpNO~?|lGW5+ zv&j$m1FLF4?3lWqVO)$4zB<)1@g^D1>Z=C`_j$qp`M9$iga>CeNVb?>!#WTr>A|HP zk725rL9+$tBH1%ux4fmOND@5ZYdvW;o=$@#CVT`sgi=%pKroWj|4C&u+EH-BY>M_r6JP`7qe6I`>(EEBqg6d%@mqh=!l#%2!{0OHLx%

srr~uaY$Q(7=b{R!1nEgP)q>c zDT8E~`v{2zpe_`$lS|eQ{Y$~*GzuQqnMmxCB*;7;!N|lIPaUE;&CHDKoZ#;hi2p== z_dxE|o9#Fx$$NK&X|Q@Yna`i`*yA?6;F^KC3R2$Ri9fkUrj?=@rxhu=wM%Nf)v!iH zf>TXY$9|n5<(D2T&vy&QRBK&QgT*IR6_^I|Hah(*y=TI=t^yV0Nm zJ$8gyWykM-ATgR&I~$OGWSu6*anhcIVLM>sHOwhiE98$FKLzG^kq(N+Vqc5JK!+&o zDycol(FmrM$npzq!n%OH@S`K5?gh|Vsp`w2q}zBYE(cLR!8*;hGmy(PSRMORDI;- zMRrA+jFTg5f0ksvd)olhhxX~?rR#U7p8jOMqo+@NJYA-)3vC{e^La48v1P2~drNl6 zU*KT(Dn)e!aMo;z?3&2g( z5qOR^rz(_JC%yWAKbMH4LI5guPrR`MyHle``%V!1EIc?HT93d^W566Wc23e+b|1K%rj2<>5g70(D%Sp0X{Qq#GV$`@ zfHJCZ9T#0DN9%zN6vBujYmXrHJV24k%mX48_XW-;ccMjZ}BE`-zj$;!y0dv6|o>3w-*rP*F(Mo*dyH4k@p z5_R~pp`n&OJ*6be45WL?bOyoWzY(odtii{6vZpk65>(m?dSmoCh_f#|>;X1>5@@wu?mlhV4e& z!8-oa4#xSv+f}CWw6w@S#ckCmni?`|4U+#Bx24|+{^Ygk4}(ma{Q53#YrA~e&$}G= zl6@t6NqdMBPiLu1VfMF*oAA9qb43tBS+fXbkXr9zT5Y8yBm*DqVyvOIycLG|`6Ca@ zIB0+=@A)I4-PykCRXU6i5Sq6aChYf<%(@Kf&gCk5^CcYU<340vE01Z>&@5m<0_jUY-i58E?P~STseATo07iOv`h21ViG~x2hdKL+f

u3dr`__&xHK@mu z7c{$j0whF9q0&$X#+nlCW3LnaoYAc^tn{uKNdRM>L>Hk-(8gE>;JZ-Arc8RM;4K_V zW+W)^Blz&+a_?vjfz@WSADp;SW`s1e>&KomBEm8e@Ylm)2Y$ z&@o`TQy|61DrLkXP$J1IWu2P$-+6kCDFlGWPJ77yo$EKI`fmn43)6o+U685&6#xMk zS^jrqmpYR6xEvV&jLaEmb^}uoi4h<2kuxC>72FkNB6l%40C*3J^#04=!z8XkT&+{< zAUPwDZlsJ@^WX`iD{7H08rQc9?ESdWU<=ax@9q2P&9FaLW56Us} zTO#7>?uhc8qWiYJ3yguCj19>6ecZ?-j30bAp|NYQDi4XL!3Yg!_1S6JWbDiWYX$g% z@A*0XtqPJ4S`FnP?Mn>uHT5U*^K_{Z3ludn6xrhl69z;MgF+@f=ryWDO~R{gMNLl9 zmUI@6xfhw_{QyqN@`%-$!Bf2t1jU9^0p1P#ofu(?T?pEXLsVg5@pH1b`iE7(qFxt5fOsp<750McIrEU3;%#7Bb@K|+3wkq z)cd*nTe99ZJP1OUwg7!88e8q7iF}LVJ>QW^kGEI)A5bv^gJ>Y&NcJPE4$+BQ;xrt? zl+^xAWg^OHJ|Lz)Tr7OPg4~x_t763W@sfBQdwhVpW&7q3gq8iG&a0?u-gCE|HtJA4ONV4*CepW%G$AVqZMnUV${g+v- zl-2;SD-xUjy38wXQz~%N;Xc*P7~W4vn^G2ua~3U;j~|!Q0v(=DW)0V4qAx$aXS2lC zcCgpr+|}WqhQ#^8D0i#<^izg+ZatrIu8jfMuulPj1mS3XU`M=LPy2JY#fFuf)l)vE zcpr@KoFT?aF{8y|)S|5=mYLYq1l@(z>6-`87R=$1xSu~m@u4x7cDapRi~+!M@~EikCyV~wT+*js2NyJ`7O<)o@(aC=dc1$ z4*GLgUHB?uU8#SN+^w{GcN<3t)?CmQcsBivo+LI~SZnaZ>&28U2P27_uoNZYMGnBS zn0+@b4xWJHl|yQ;H7^o5i&v#85B;^w5lbf-SP>pprEkvPo_rzf6z9BABU$?KK17oq zVqVB$8fe)1WCLTXZTVe$%=Y7FdzBDiKjITT($YJ%dAQZEf2!_pwEjenie4$OGs)S^ z)wocQW-(Dn3S~ugvvD6$rD>M5@urQ2em#=}jx3TCH1ueG@9Ij!u>KvE!l0>Ehq0pg zU{JF9^>)Ym+fxz-I%*KBs`%kvU2OlLup*QpE!rVYLyaQs7)6T)7jc|0ifT2$a3cbM z2$lIXtp0VWe8yy5P!v)4!#p@XxA@&&;A21KdW+=PC$B|2;LZ|}RCa)6D6T%UbhTxz zJNolEboFXv6@SE>b5-OnN-XDEvMX*Q7ug~$%UO1d!@Vj8YXE7=g1ChwFv?0T!kUgr z%_%dpRT~xyN}1re_g32qH9rgxGaV5_W4DH5rc10Q*&FF#MfkXUQ9qHU-L8m)ZrMvn z)-30~qEu7aiPt30+zc~VJKDY;hj3xabrZ0z5{LAO?Yr6|kClNoCgt(b7;^=>l^LhE z(P%i7#o0EkDuu83_bKwhS^*IyC7=WDp&yEzR~`F?^E7hSGQz}uLsSGn7Cz`t^!Vyi zRuf!+rkQKeTEv*js+_k`M~)oFs~KMFlVSz_QUs@OlcJVvvA}K|W~8PCDvNfVV!*(+ zZQ7FFaL;t>Wii-J)=w*BtkvG&cAq#We<`{$ht!dZrFqBzI47$AG+Qz8`Thh}ldb%ajm&z?*g)L&^{XX}54G7g*l#Y}o$39<&$<&$ zKK#Vp43A~$YgKJYpnga3QZ*Z6z))uKI?^9+*%n&6syN<0e=I-R&|=){hz>!X;ph({ z%bcWdMuLXeZO7M~h_~}$U%nA^|Nh8BUQHDxg7B7D*3T-PN;4ko^3{P#6g=!0+^aX8 zG>C}}(=j6D8$bTc_HdutqAwp-3ixQd+^M7xC6CuWDfob!Usl3z{qFC(?StFBc3rpq 
z>XL+@pR(ArpVzQO2(AY|+WEbizLemhlkqi5?)9e8ax|P;C*f|h=nh21LsBNKnCK%= zKY2v{s^zAfR&sCgp!MRb-#;Ai0R!dXl8vD$n8AU|UC(v3LrQLZM@vpEM{Px(3A?ZQ zbG#nkYrx4f-K?dcu%6y%l9m`W%-EwlZ0*)9^0uX=Y^5dpq{QP|l({+G))owTUm8z} zQ6qMG`{5FfN_p+iC{5pT`F+WG^7x>EoLaVLRuxz0!gIf@VM-mw{EWO3iu0HvTe4|Q zB7Q~?)+-L-s(bTj+oaEVys7Ie`n*=#wOTca&xj_R%UVU3z2XkoT7bQsA3jdRjq-)w zbx2h~|FB$ne1k`+?(NdJWZ3j2KX%P;=d@K*T1lf7^VMjX=oHjm&K)JNIHDWaf19eZ z=4MYZ2kA}S(1aZB@n;t%VhYnQeD6_FkpFUDW*#fx-GfG%Wz~&L{AxqVOuf}Tn}Zr| zTE~mAEsLtj+G*BqnKDMRKAX#p=KW@@;$t~~ve{gvb`8_-T4zdN-g*9}&Q<4Ef+i0d z36nCH<1Nr35BdTZT}I^IJ@_Qb!`FbE@NFS1GZB@!)na8t!hYj}NTa66*5_mr-^Xqf z`t|XpZHhzk{mq|w_>YtdX%^f`xf+)ef0H>D@BSx~;qVM8jui=M`oV%Hi|; z_YLANkm>|RZq;hkOwIiYcx_ijo01XW!_ad4cm+%Nw6lPfS)9RytEzGb3Uel)w$1d%@A1@r+$rc-tfRBx9 z+ex74SEbl85sJT6J$0^tL+2!Yc-$FlV#N(5#fG<*620q#{(lhL=fDYp#nnsqylEB7 z-8(R1=N~Nli>C(%lR5@G9(dNFs^M6e5T^Pokm%~y_0M`k6ULJS@yr?#&qrM5&sv5i zk;JcLK*;U54OwP1ltOTV!*30Lcan(l*ItDHzLgBVrVI+aKpjHYBN+7G&xnuXv1rME zp0QzS7{yUc8<1j-`b?~_d@8W^lr+^fX%vkN(FA`(wOt{a=(2|O<_@2tS4{TR7#5o#)(5K(FnyRO}r>A1@L(*p{(&`d1R!Q`=NkyNA zD_ggRB~B+No3lKd;>MpW551qBJNChzVw6kuDcQT}Ciu8g7>(@xCBy9uVeL***6xst z_9sq%@bmWB{$sxLbB%fUoVUbq^0uq8@ST3fmzT^&;wonu@n$W5FkVuoFNw^=wkrKQ zK8WmWZ>LPb!$rQu;dYo+suvc`;jg@hpFwOY33AJetdGqm`!_T-)^3j7AuQ0iqvmiW zZ6b;BtA3N6C~}_c z$E2MqCyKAo|Lodw!u2aNk3)SI1_#Ueno-2zwj;#kK1aMGgcxqc8o<*PLedX2A6^Nf zci0}vm7!7M66SsR@a1OB>krtAC!;21Ki_}m>Z9&9-p9+?j#rg=!RaQGAQWx+fT4go zrp&_hmHIZJ@x0_L#y)<|mW4L))$Q}7CcnZ}G0JPbSFV#X>WJ!ltqS*#C5}kU zJxgZwd$MVrFzSn+AEE#HQByHiD#I&+^hj(f8%<9J>6${kDv>7g*{Af=rVj^cxOo>A zLVsB3ItV9y(>J0e4v+L$8m?xTQlqoFL9Uo9luIm>8~RNxR(+>JcJ+}~dJ1%j;F7V=`^CAv<}N;LO4+I%qZ7G5A;xcVWMWLUpOp=W^V z9*+~3H^WCn(L3w=N{`CXW^aNbl(2>PSIv4;MVBs2u~NL2;uI7mg}Q63a|j8}xzq*E z;n!AsPH!#Q@^3Df4GoGCiFa#}>g0^Xvr1f6ds{&$h;n4fjo_0INmC=Wly9 zjUA(Wp1&=Q>W#v;kz}m61j?$n1|7NP?{5FrD}dh|P zlvv4H@w{{9b?QA1M`{zci;q$@I+CF^T?P!-@ZHiY%_GARVJk&k0*&NHL$!Y8-+sD@ zmf=vkFvD|#-X|4b8O?6^K>78caqt}9J)T%B}R&hg#VT)CBS2oEX@#?D}Z9Vhy!So+WYbwq-7sMo=oGrwqQicy%-^SF#XK_vRc~Y+40KaD!yRZZMy5HR3tVW6ETHDp 
zXBxlzM*k{4gf+>nyCDDbN}GXR$K4dEg00SJQjIMx3OOHHJnxu?sUUc+FvM(QxXs1v z)V*r9ROb8PW3{TSz(Do8{2{_qsf*cPI**yTdjyR$2`^NAh|f1_z5P?$W?ioAV0A8ecSw*BJR#Wrg`JYA7kT!`^+S*6%Hca?elCMav5oBSyVrqfw7>ma4Rs zIO#~Y%eeS?Qj0i(k339-bNXXit?pF_yBCOd2|aofp;s*U@uCjqhRd?a(=2w9c`Yih zncMA6%4lh`lKqs|#h-n7xzvzlr$3c2o^V~8pXnMbx=N+>Lo4$E`_n*t(Stj2ZJMUf z;|!i{nyrM>)pI^dc)ej{9igokMdw?t_~}k9LBVZ$;&-?56_ncpUn>!|jhy_{djECY z%gSFJJl>xk%BA8BU8`Pn`j?-TRhHL|o%>OPFh8z)}qOT(+*@cW~GC|5Q7mo-6#w85i+8<0t8&Fh#Li z+@StjcDdCAwi&dzm_7}!t6vkl_;X=O$mv?}dTOP&?nut1DfRFD3llz0DD#>} z%Nry$hzZXn>oUA-cv}08j8S)RT$A&KW{O8u<*uf8TCS(mhkE@ATcU_ZK36lB!d-<;JOlLOsL;p}YEeu=vxG+`3*=&R)jl72ky ztfqnSmK@y|mn8CR>+2mXw$AsDH0yJg3$9lOSl`j*oGG3)sgKhP47HN>82K7BilB*m zkXjUIVduFaG>Xq}W1yjs6L}vysZDuDljtI|>ZgmUITR${UtaQF>!6V(^e6Oi85q4r z{W{9@0r4}Lg6N~iKP8uGQ+EVkb+b6WV2XHKd_2;HrqG#oMuSU)H*A=0tW=)j&D}vI zif+^JgfFMow%!os&D~Imr^+v!6ilSyok>k^dcsCqgNwXAi1#Hay>3%&U^q7$sKr;t z_ecM-iBOah-Qv*;h}XSTiX^(};{53Q5sIe+a`Rv=e@2eEr%g9NL7lMb^-AG*V%?mb zhS&o3Cvjld$7e&|ljnU%bRj!mz+8W)3mfOJ{#9{;F|XLm!!EH$;$Fxz zy=H-edv_~Jg1zyHOg$ZGJN0h{jb1I?Furb78~SLNMXZriOy^o|VCv&`bb6cJ#Cv6J z4uMPKUoUih(q2EQaf&x9((!?Dlbpe|ddH=Uj-mInGt-Ysjrf&3%JUeK>qv_a5{+pQ z5}EH~RWCOtHmh5ox!Ugb$M1pD=SyM1Rko_kR0R$_R|>W49PlbLsn=AD@* z&Nvx{Y~+i)N9M&FWE(*z0^nXE-ZIHdxa`_g{F@HMAFaZ)quP3mCx^(sQ?wfejm|cX ze!Y3su9jFtG_CXoa~FXQbH@fIM7HYjySHx<7*$1Kgm1<>>IlQYsg4mAm)W@@yX}kD z3QH35!OisR;+gp4W_f)oBV4DmooV8iCf_oOM^YtqFbc6$21wa{V3ylT&3)EwJAy zx>415bUhpGPm()KDn`io|Ho?)d=(qQ9z}PXW3kNw=qi%eyJz4f?qXo&0o%?l+t*OHnRb=KjquZc9+3noE(DqUG8@v=CcfV*T}X$=X%^ zdpj4B^R~)lZbY2BJetImW=%2g_m!u2dyvYA_H6a*7xy1f+4%TG-}(4cm#auG=Falh zS_dL}8+y|#+N*cl!es((M9<@z4e?a(=-)r8>?CI`PEx^F%Xm*}-7fCJ9?NT?7z`ks z@=B?|Kll&* zy;S#1Ur&Zb&BP<@FQ*Xlc_9T0TMO3w$CE^Br7O1SP1ba6YCZAY4I9@LUm&xxkm~dc z<;mbCgPV=`M=DM)XIr`Xc)#giiE4G8wCU`Yukc$9e;FgORmf|4W;e9eua&_ z@Ug_ytF6L!N#E|tHt}b6Y_J7n&iqMD*a~6XzI`uprxFRl)y-^9Ze6+5K@&Lx<;c^2Tz}t7cPxF;T9%7zI`*z;C`>2Rk5C}d9%s&jJ%Cx6TDS!peI?)vQeI3+1vhGqTd|( 
z&lxwes*|Jf5m(Gj?1Dr~yYt@`b6=5bc8Vmu<2sqwa-y~KZb7TsrI2UC)bH%h`j)TYGCJ#V+M(usa}{I!44 zhxJc16Uu(}ax^iGuDi9u=59u&DBNdMfDy&O%IBmaRJW_V_gsZJ$81y)A z_N;{O$u!bO&3Ww=@;fu#Qs&#EWgc%V+{aScWa~5hMP4wGg~rW1SERBZ)N7XfwkY`K zRxC_6Wx^11>F3!aEmOU!G@ve`KSkt7LsBDB5&AfJ$1yRz;MZ372Wk}IIqCTwjBx7X z(?pA5h0Q7i%N8VO2GT6=na?D?p$y^$wPe8Qm^BbUFANav5%+)-!Br{}x#(933G z1S;CuJt@JOxz{>o5TnCTQx5=^(VM;C)>Kl$hPo3{`bZLETy# z<9DBRU3UM>GNpiene)RS-*;!NCuL#Cl;zpl=|5g;6!c=cX&=ZbN7+;@NZ);Lit>)6 z)>*YgtQ{_Di)o9ljS*?`_-Pdfn74xe?{F9$X$ZzOg1;3cfhp{Nv_t(^GZ6o_}A^4X$@LcRq@ZhO_Y{(Z}PBGA&rdxkG$+2xe4eI-fe3IOc||UZZ^|b}p~! zq0a_cr@^_AGh9(0I_vM?2k=`(d8*;U$4ZDLzm)!jKWtAga#xsohE=W3wSBg8XbMYj zb(PVv`dx^|ir%qDNmmqQJ$=(>`aov`ACtJ*bM=e}S~}l}NB2Z5@tD$yFVR|-_`=uS zE$mdj&%{bA`hxBTQ^|57IL(anu;lE)3!yfYLY5tL!U?*Wf@!s`l5AA2in6J3ZlVS? zowuM+{oSGw@X3F|R zy|2w|)t?FV`o;4dr834BOuZ<%j_+Dki#tB%Ag=Z4#5snF(ql=*x;a6U=W@J!F4a<# zQj?FvY3C=H1mZ1=J;z?MHBqT3=tus zm*LXtp*EG*K5YJ;XSksA(ukhx2fQ%tee86 zFcq_HXPPAH(Vb}*LGhS~;e|#ihae0@lO-}HTtn!`L!`A{{wJbeqgIDA!P?VG7WEG*0jCgNZ zy(^;6(#_>syr`n!07=b$^&@@yMDw`hbXa;xIDNoQWc0Bwsh%N|ar_l+JeS0VWOE2y zjx(oOo#VXav*qt{`a@gue2NSeA=zg zW2;4~#M)lWuo{g~3K!gy?$x?9{@DDEHWW=$*8JMek&kluh*kRYaHA|Jk(HA&KLfAR zV7HXgj)QaZeM>U14R>Ss3dAfsu8wv@O?4(svt3s2w9(tv-zc)Bkg&(QEjy{^S^xYb z++uUQU65Th;Pki4?|!~%EXnMsyzQSuQKXmA_h!T1$!mm8CgW~}xcZ$PEfpsT?@Q>A zrH?0U@9SS0Y&Aey@iMuzn4GMowMCiZS6ClZ^L`{GGb(5gr-l^7mERE2C%InA5*B30 zu)r1`p{97MxneA?a_>`IFdbFNQzl*VM6$R%f)c6_j!KSE4IKtIw)NF}`Y!}8F8Ib} z&OLJuzLqX(@Iap?(?0TN|0g}EBeHrWGq#WaG*_fVc)$6$)9w&B9pILZ?S47m8HS%L zNpVfdK$R1k;VbM<7G{+@r+xMA^!ZK|p&=bQ+~ogr&24z+e2L~6ChEF+3db?Y?f1}1 zYD}=6(EF-uC(>Mn?m5I`Zl`_J*0Gd1QSGuYWAk1p+&21dS~kvNsL{U*e4f=ruOW&w ztGbmzMm%2I6`3E@Biy7&W{~C5HsFFEuDq|#@5?#*FW+x80Dl>9+Sz@;VmnHgXq3&Uv~n zwt+g9kxp;)=m(@$$TiY*4yzbQkCNEXGAsPD>3!8t3=0pNJPi|CF1N>=<(T!Z#J~B* z*u9w6_iXl_$e>rDewu_`_X=V1rPUY5oRx_m^-0X4Z+g*OFE8$C?7?m&Rxo)dxA(Di z_v$wt_vgqfzktd4a&kC=FF}9@Q^2Og^?tPFS6@%?9SznSw-QW)C|QP%ev#?;E^V%N z#=c|vG1=*R-V}v~EPR#jt}d(zD0xc!UH&JMQ^AGWr%fLm%Y9?s7je=^cX;L0+h5*} 
zKXUY6at2LMUdWzo_f2n0I{o-+o@9u7j{Yynh?;^gW}erbBV`JV#z-X$lplErnA|Fo zv7WqpUvBHBfAG`q3gIC+h|fY5cPM(_yr4wzGp}NuHY%q2s;pHija38t2|XJLe-GdV z-y6{7Y)&MGd&h`Ocz$=e%)iDn2yWcuy56R@)U?bJD8dq_)t%wj!dft}VH)U=DH#14 zYt`{|xnRX2ce$9aUKDlRdv+?)y+oUo`^>Y8h)_0mBE)2QoTtgWZ0q}1M>RqigjAe= zsmBOA$l5it*knI-)o~ELhMSeN(kncISJhVfT3BG&ksX#^r#uuM;`Ceg^SMbOcQ+Grl*2xc%}7>!3_DOQ)zV zVmsuUdf|mWisP)LmZzRR(7J^&FRQd7?sKSk+IBMgj9{|V{TLh8Rh@xpVng!p%yW%S zT@Jc+nz;K@dQb@$jLzuQkhZQTdT?aE$~m&~^%cG}QG_JgOe8E-|LZVhX* zPVDhd4AX|)9Jh~OVNld!uw3gXIPd#0N5e7J%jRR^djj!%6{9n-ZlJ?&tfaSsSXxH( zE%%oN>Fh(fxF>Dh6Sf%R8NCl9HPeS_Ogmj8x&vK)M#6IHww>( z4KQ4`KmX!kAaVK&E$L+5OIQW_$!4v3oy3@Cd8Ax-kP>0P%MoPPg(wWM=#6A=MGe{< zmw5lH)P7%!UDJHC@*G#FS=&Ufzn;)rJZ)#4CCl+U9o7Y6c>u9o%yLNA3xhi7ftG-$0`ht7R2+N z4;DaCy-d5SN8T3@{-y)3xSx93Z_~A#qOF^zM?ZAe zZ#-CWTYjWE=PIK%cQqAFUn561UiUs(%6RlM$tY+4H!mw|hq@)tV`k@jFHTF%22?*K z^*Sp?G$qs+a;mdqz@=6+_;uA!7lGHm@Z+hep}CH;T<)Atn7H1NCDX9<@sGwhIpo|9 zq2x9fBfj^!4fm@d+)W|tjfLCGiTDRE28|0!>(7;c+w{Cgq^9D!_9WBy!t?5;d*dht zPL$4U%0{632z1|@T#Y{TMUODkz2C9WofFC%;!AF6629wa$j6d&Z}+_MqT+tvz`Bk1 z$)Y^c^E1IL?%Rz~b(t%iTR6TUx9p=RU5W&&6L@$G4yd_FyO1YavLzO8TZ5#GjsCEd5Vv2H3wImg;d}IpEnb#DqzD{}Rk_GU zIzN7v<7`uOorU9BcNOe-6m%4OTyE5g)kU9Azv*PQBWF|31IB ze{uR%h~aY8`Vqr9!sHyuVS>mTHSO9v&r2@HYh;e5izF$n)vQr@e*KQVpPPuns;g#@ zslPr#w)s2rL%o&thY$P%OFVJMB3-j4+DfUUX$aXGS+BpZqteQqYj6rcm56-$ENp=P z?s*&A?Kq)c$k&GyR#%*yzbA0bkZmDKNnh79rCHlX9are&Bs6zwX)OFsk_H1EgbJ{b z9GkFAW1u<{$TZS*J1NWK`DML6otr0Csmyg3&m&B;HA>Y;djG_}BzAF&6FoJ88DRI@ zn9B}rl6vBK%Z5SZYsPp^bIO`XIQ?@D{HX>TRa5b38va^)_5>9_r3)8xMfejrUfaHZ z8}sy=aRBMDQx*M#;j*L#FG4QeHF|zSpdI$7NktJve@wJHIV*aKv>?H4gwiO9>M7Q` zaHVswdGMlgt9-D~XF2D>zSHGo#pN7&&83D6^DGte4mHE$SLBq?+2qa?6N1AU!O8fe z1xW$Pa9C#u;fG%ri;LwcUw3Z1)0>SP4H4(tYAKo7hl)5Xjmp_vnXb*0I^VpZ&|_BCmWAgGP}@9vK6FSdp!;l5TyPPm@>F)?hOql8 z#jV*8-RINkV=5=)Zhmj~`zqzHWb#Jex-047cgh^HcgmPxRimRyZ@XJ>usQxRDHZat%n#J_*8C zBI5*Z)0>pA`L1m+lL@}Gg4QkU=iPI~EXYPOEnB_Is3&~N{~*V?+o5pjXqzz~e-%7m zMvK+yBt~Tlo5F}Gu{*!+9~(1^(zoF`-9ZvpSlXghN>RJwYaL9-7ZUTWHZApDu*xw? 
zZlkXeUrv21k6QgTC`Z|TMV#?x#?HyI=rUhjl4PqA+Mvn3-bq^_WF~$z^39?? z{>V9Pr5jdU3w|cz$1a5)>+fJsPIIXdsSC+uTtJL)!GX&Jbk|}xFZVn)+i3e)-|1%b z{m*!P8THZ{EXPE8bB)4 zT#w@q@sW3GCw@C0n;A41#;;6rS2Qt`Gp)XS^`7WBB2}c%DKjYp{cvDfz*C-w=gJoX zq7y$NU0Vgxrv3|v`Q{n7coXL;*tn<(e%T6s2<|3{pQkiQD~OYk^jUICpu48owYm0P_+*P=2uX;QJW%%M7`FuF*2Yv*#~-M6iF zH%qsFElmc|Jr~*{H?TIZn$}#}_`ySg(r|sQsX=Ei_GSTZ{YW5ETYN;B_ipq#E%)Dl zHb!6G!Jl5=yhmT7<&Z6ds*}-fw$IvPoSWDQ*jXK&52f|_z50P#>bu>H$hNu_X^$3> z`bWh@Zf+KVbKj+_q?!!W!xEJmG6YRjtv05Yi|-V3?`)6X(HuVS9aVhCd`LSJO8Y^~ zO;(STpB~u(Xg;%bdt)VFdz+2Cp5l~?nOXbIQ#%^%xHhW0#|G;2I-R#KSGG;)wx2S& z=)AH~B6ga~^9df)d9q@DiF1{`&I$JY^R@G&SF$e4PQqhv+v{WoT`Z$X`CU4Q7MGjf zN&`fi*`g{~!muh1bS+w%Yl`+Ei@zQ*bh7;Pqxhqu*Nh)fTeF(3>1}GO+emp?ALV5H zP(Qdt)=~}Jn*{oEJL^AbS;?O;j6c>(ApLgT=A~#YCv7;|#@8!>-z)*;{|QHL%XX?~ zqhj`0w~ne_3+3<1z9P5&l|HI9Ll5V|z^t{1`*$Bq5vc^;sNv_8V4zi}d8}Z`W$G!H z@A>Z5@S_99Dr*#NQz1Sqn4m;TR-qOUn(nkyhzPAqhGkRgI}$TqS>& z_1)Af(e2SMCmHTi$?>6ZMP~W?5@yV5Pd+C*z+SgZE%MUF|CBR*$<-QR=jrAui@$G| z1`gktH7xbEttEYPN#%R9%kx-@KP`XiT&ER#1jdAe{P1rqNkoP;-Jv*A-HEhdRv-K0 z9H*Jr;{BOeD!Wr70-1}Pd|#cs^RxZ+&XZD2*A2<2@o42zv811Cw_OL)HNP(gBiG2P z+aiNp-@i%zk9C7x8Pb_;TBiOWCrG`tCmp*`(>qN<)kL(W@m)K|gvIv~82u>RLdAod9hXySFD^YQAWRPsms`~%P$v-O z;HP!3$`;A#LuTFgV(t$}UU&ksq?sl@TDUqbmXV0L;IdY%iGqKqw4q0FLt3zhtDJTs zo8fm|amz4Mmh&F20iyj6FO13>8^ji#;Nf*UrQc40=XN0!AZ^~Y)dvvT=fpsC2NH!ZYISmy=@H~VzXUO^HeJN!g|vwxxlvFZr&d6IIms2 z+52c(cQ*O_^XtRz_P;ul?8{Pq`Nhg3Q_@2V7ax8mnH%}&XWco39n72O`&GnSoSyAJ zaZA^P4nNbumdrZUvpm4nCgPMVGcw4o^KP)4J8Hc#$XD`e->*{1Dx6I@IOc@f;g`F4 zNdNbjCv^wAs)7=Nzulfr(uRbBL#z9K2RbVbqdO<#?J`|{MsY~FWunQ=#)G7T-rpfN zix)C?>xgvjO%QuJr}b%IbCVDM+@tY<{td@Qk|z9eHu@H&_;53E6P1CyPT4D)w00xQ z(P+v)%mWp#`i*mAz66V1O|mh0G^^O^*w0>ktv+l5d6AP@w}=>UU7 zjZnV|UP^I8vXja~e-88dmtFg0;>-BxkrD-c!&`Xg%f+EZf!rWuh%kbdH?(JMQ-KO+c+i-{sJjRr1>;c8@|6`l_v zj)=>`&{XYR^VqdHy&p0xDOY*Sb$|6W`${fn;Ctd;aGgogDJOGFf|FyAsx2DnR}JeY z)IPVr^Q<`%=uyskUujC)I*Rp={*vM1NHo4llr$$Ri|GOVC&Z|V54)dU@$yi+)6EKB~Vyo+LG;^WT~KR>dpCh<+2z@ 
zm%hI5$-pr`v&0%AQ+q^=VBM2p=_;FM!J1=+__xyC{o6Vj*8~@gPl&8rW!32xRGBS% z+uVC3-DqRPdi|YA#k+>=6KpBD-yHfW{%)CK%bFqQ!eh@v zwXtM-w01E(z%7|Jb$ak5LpXYg>Z*M9K^zE_3v2UpwapEn{quZa>xB?(J) zfBk}gR3!0R?o`wbzV$mrR5zv`T+1&|=(S69CXeh-c6FncyG$tJNYGo$!`~f(x<@B< zOng>VbMUKf&!BPdxzK2_Im1t@Yfj^Ai=tv`ontJvWFqn+5zO$UN!T;#=Yjn>`c@JR z9Qk4IA6#&WwN)?q_KKSLes$#ymbd3*1flW1Q2fB3oF`|=oHE1JyaZ`xeEUf}L#8U@ z3tn_twe`R2C%DQc+4_R2J7x{%Tu<&V!b$O>8KS%Y70goDZ_WtsBJ1&J6eT^H)DW7?^Ezjoj zuO-JaCw4l{XYeDx(#T?R-Y=wIBc#1}F=as5=MSHcO0ia=lVC03+_vxB^pKCyb<1&a zhd>?sd#??;=h@ZkirkZcb5>8rnc1`wp0?_MPyE=RO2ZvtRjHn4%nG^vi&8gxZpI}( z{VTpSI{)VF7lR!%CP7;XV|QM-3N?yS@F_LYq>#MOX-V_&bGM_f{!~PMg)tzeaAU5H zMkGhywyb1u5KRy#($y1`<+@c6fBBt#Wy%S~Ww8w%QR@#bJGC>*a4(ncg}ldXY(4{o z$A^|&O?nkH&)f`7?^2c$ymCR0>xt=wOnyxp{IZ*5!HHMt+Wk4!gHN$i?!s~J!4m>s z#U;G6#A{!o<(2t#qxPX~7jsZ!x>yCnY{jQ-ZUMhO<`Q&gX^RiF3=m$~WDi)HZ5kOl@q~Iw*&WD!9u1;E_N~ySuG4eWH6_7Uv>EMvzho zbPrDucsBLn`&ypYv#;d*I5UemP_Rm;V8Olq_k|xHL3vAV99Cn2T4QQgI5O0)@KkfS zg~ZjbFG)^6Bhs5@N8K9G;YjamFLq6C?a*7uH3@(#E~Rt4Ci$*K(TbiEAe?c!Jhb=SY8y2Cwj>xQCV3}#21d!iqI<7e)l%MB0jzoQO?a6;c@mhX5N{@Tu0VaEBZporJK&Z|CyRQI{Je=h~U6eIRL zf66PwShM1*Pgy&nQe=pVMp;+=`~21VxK)oUb=t$&McnUY2RXCz&N)8{@;dp|5^pKZ z+u#*uS)+V|EfnmR_~J>e_|KyfPw`n&X#KbrriH89eBV#xn)oB^p)i#?dVR&}hO)Vz zW(po=bWeQpg=D=p0OZ{qq)j`It>FrL3lW`#C5QXQMKN6QGpJM|My{nV%Z zceXW8zlcCQ)&($f=VK>B?GLwFQ?1JyZPs)*8GOX$?ZmJ2-Vv2mJ;Q3{WMA>E2Gcz$A}ntfM3QDA60HEnTF<_T|R2`u8u6 z9$k*FgvXcUT24$}yx~8wwRW26)AHWu{X}S~{Zm%ZFgTgg1)eQ~7-9KR=n}2W?swde zN@!WS*4bcPI#630qn zFjy3fs)Crf_gC8I4}(5D4-LCiK>Tp(dq@S66qk~O7v4jxbQshj7QiHUqXKN|bUB1J z`MW8MBzl)=)1wmSojt+6Q8*w1HybaW|15B@Q|un8y`5tB_qNA1_W#N*>H~Bf{#gzo zNu|Yc7#t3RfpdExCUFezf5~8^4)b>)vVCDl!!#cOxA7Ga27yA0gGZ6JRsf0ZbFx23 z5-#5z#Qg!nkjjWl<3=(-T^6&Wu|Is~= zIoweR_?P+!oluhk(v$%-|5r-b10iAdc{vc$z99C6v~LdoVE_Z)uYtH=uPTTIo&>Z> zCDGz&lnl^Hdj-TwC=KU*1eezV@33tpq{e+%tQgF`?d=O41>dR$8(*)4H0ZGZSO*4J zYc<$wWEG?e`_(`+MBtsI8n7qoYDh`^un}Vqt9Jk5|8Ol)3kktl)sQCmVo+f%BnhKx zAT?O34x*txYzLCCN*%Dmk2R1M_dnAA$6}=5js1}jY)}jK`i~0!87>WH)bA07;F(%T 
zo%SEY!D3+L1~A}S9fbOS8#Y?1%fO*w{3l2P zCjSJ+IyOLZB+^K6BuW|uYk!4UiEt?R$tSRzxdsSFCJmqhiADqb*(rq>xpDh0eZc)8 z-Qgs6=`zCNO%QkdB6zZ4Vk4w|^uIcS+gicZsU~pnF4rgM94RnA8Jv{VUxZ=g!R^yJ z&^+$H69Jq7+7Zowsn6d5-NsK43g8S518;wV*#7l2@HTS`7^>a`om7_u?EfE>kv>qy zf%Ffyvu}KRVv_pThGgKZ7D$j5Eso;hIcuQHV`*je7q#L75G=4@GsLQf11N!(K_0?1 z0BHy05BaC${{H^6-$S~W#KIR_A$}T^IL~>7i#+F?oju$=+$>$-i!Bf@84iaPM@r$) z$X(ZEA;L++{>{L5CtD!~X0$kuiI9@B8;_Iabx+G{Cc->g_Fm2&@W*CIkPMAMiOXQ6 zao{QLa}Xmh2KSGX;|@8wBnp1;8R9>MIUsr6#`|E`?tt*F5Y#LkZFs|_&ewT# zZQQ)C|J7N zU2(Q_v))}q*Yk?Im7Dz)8y*j5*tHelELH|9jzghwFi#hNvIlJtHO)VKW8i0P5I+`o zKu}$cN7uvC+TIz^Q?R*a<6*-C1Y~J#>EglTVQ0f*ZFAMq^O^^bzSeoT@iRDb1}h_u z#Ytk2d$vg@`45%?jxE}OiEXq(az`Q_g`XiwQVbRYOdBf+YtKL|M=-z=LOLKce7X(dh#5sPlHrg*meNv^k}z)< z#0*n(LNtU}ShN%HQ`-g!!zVi+iWqic7C3Mi_{zmp9I*LOg#U(+40P={SSrF|olK-6r+!X-Ng)YDl@N;_jcn8Ex zh>59WV}RQT5lryYK8TFxfQJL~KRjOw3IExh|JU6sIzV{(JD*GvEhUbT0=tKo-$P7v zlK+^mq!hf^1y~I1hUftTk-++$5CaGs4;vsx5=m)s?7r17^GN+;ga0g#hM_MY5!xj& z-3cJJy%}O8#-gOeQ7D+J3*=P0>8##Wu&lw0RcWY0@1-lUjfj@ zbVDLUK$?{s5EF@{xQwJEN@h=XQvU~kV4tsms?ZmRIVOmUkyvUk?=<=XvCHqv`GDDd zUH`>NENb7P{{}5A`Y>wl1}*AdK(_k}#Ii?rFLIIgKpbL+>`>+(J3I(~d%;i!1IP4& zMBpaKK$}u%aV!$n z{0eb^+^4J$xLER6i1Y77Qj8QvTm}iOn0E!FLn{3c9T;QWzc+>ymg@!b-K9bbq6g{o6bdDdk^y;3Kg0@W_5pX5f)fWoBBS32amUaQ^X+;n z9gG|NdqrswTv_ge_`!-mYmBfcV4g?%A8kn;ayMxl%rpdqe6Jr8C6Y$Q&=McprFjf2 z6fy*uMh!s6_TFcN9R>hj+d~jF5fBXN5O~2e3}B>f0FnShV8THNOZyK-fq~D50at#5 zkkBp}?W0Wa`$33HexHebvyeU%-(>#ru)`e7pkRd&z_IKQL>seooarbUixrnbBBgN$ zfFgY;6v-grt=;iOLjX{Ay^$1)lL2KB;COTbVx~P<`@mzQ;lxo;?qG%?5jcDV=)VY$JV($-uxcs9r1t;J zF8-6AAMAKvJTm|5;@H1Q!hb71j1-(O3rzFMIM6uHD8#h82+$)VDF!Jng$2L`YcnF4 zBrr1n8%@CJ>4!@gng30iz#8zvIAHq06u_~%QHYrcjrj`GX7= z_L%^3aG8T3SbYrQIT(l9O&4%d;QB7y!s`zF-s2E2*bA`1Mc}yK(G+m`I7G*_Z#f5=IFQZ0ufuZRfb#yb!!!YL?9Sg^C&ma# z1shC2bpI+6F!=EsU@Ch8Bm}@OnPMyiDR(`97FL`3%aGX=xD28R8LS96{Oh~0<0KIH z;uP?0EKKtq$m!!G#2WLEk}-xyg6hA~6C_M~Lf+FO7P&9uJ@iBVyAwF`JK){o8zdCd zj%GZHlR<+LPFfnoa}X$5;Q0}-=;>*o}rGvjf$ycWg?RXF-jVQ 
z1nl1_Jjj2*`@l%|R|KibELahUk&W>0e2}XE9|S=Twje_=(IF4x#;*Nk>`u5e4VK-7 zLSiYfMUb^Vn}%3V9@y%ESpOG2u&960<4}-6Ny2J#fH3np2Wbpsrs~Tvuj5rdvTL}Db9evPjz#1m*6&4t~EjO=<<+w7h_kf9|(Ne^&N8YXPoRr0}N2Ul^Mc_1N6zA133ulaKPMM)ou~&8)S+Im}VZr{++={jKYHW zgN7~Ufv`xHfIA>z)+I2md>)eDPmaidl>+ER0m1&Y9n?Rz14vgcfztEp0&wZw^yWa5 zLu3e6iM>aw9Mm}=P1^I9J^7&zlfnS=bYA`|KO~57yQlz~G$1dNmXhA}5N0C41Je%> zZITxuED;6`q75TL2J}{>;e|zziaq`T!lWdey*os63F6vK2FO5%M;wWj!r)dJ!B#<#Wg@}g#L-efMT8^>X1IF=qNP4$Q)m=CwE{#PvkaXil#bD+WF(V9 zi6hZ6yRN${6mIlkoBTKNfunu`$5;6Q^uBA72OvVji$xzs>3=7JhS4fmc<2Y{r~LI2 zG)PUPWnk_Vpya;|vj4^jFc*Hk3Y6!!0tx(OlZhB9j*|qTEPoDSrb8dj6LyDR`32}w z{sdhZ+~0_@-%phJ39-xVOJLsx(fcp=6o)$`o_~rqtTep+3uHOPKOv?Ax_gP;G@#3U z$N~=9lm{jbDmUWaKuRyyKuj@M1*r+{Z{EQ~EG3SX20HfR-W-+*|$7 zGDv_Cpxw5+z1=6FoZ1BDxw#H@om0 zesUY+wDiBhFXwGQ0|5wuboBo>Z92%B{=@ygO=1p(1?)eN2Sja4h~R=rb|5A(&=KR| zIqzm;3CerR>pcG{`gvU3oUhvdrys#%%mY9;#uYI9zZ#)A)ZoN|(ja{YZ0!6WNE8%V z0EEbpAc&x`QlJvq6Z<}A|B=W4CafT1i!(y-!}(hP{Ad0^)Y^EC#gWKr2IQ24W+^Vqq>K z1UIaH1i=7nbpip95F*&%GD3vtZtYLXi^cvEQxAH;dpu#KU`7Zql}L!--KD?RH{g*! z?1R|ra5F2%4np#euY%|1f*hPcd@e*f zcwlz~00x*J27n1pM}Tmo1P>tm$C7dWe+3XHc=FHGaEJi3|2@`%%nL+>NbCY0zzC5q zq`ZND2hn991Ze!r=l^%efuL;5e?|wt1yDmq|B1+unvWY2(;zMivPRth!Qml@9atX; zVlHF|0IENh0>=)C`4Dq8M1ZK@84_fzONfxA5F-O{!QcMK=x;awH_idcWC#WS(aV4K zjQhV5!GE9rR?`20p8nt*RmcE3us$My@o&1^e;>2uBLWEjIBq6%00|`XQbd6W8X5^e z@y{R#cnE6oATm&Fwzaf+3AazYRpK29DkNG-U6d{LDGLnYriQ} z$W9w!0MH?~O*>&gw3!kOVqAY|+COOzB$^Xo{F}`Dv(FHGtQre|jsN$~{);>PyLQOV zVq-!`EujI3($*ku703#SylChELhOH0z`yeVp1*<<_!B0?Epmdh{=DOl4j_e0|0mW! zP+JJt%K;We0uZ483qSrdBL^6W1rY@R<1a;rIAKUF2r0}Utv5Kq7z7YS|DOaB!tnT? 
z7m_jlBL63M_=ARUf=4j`1Z4jb|95r&B;fxr$pxmyf@ohpHpHHJF#qrS?;v`}19rv) z5TXBz4F1C$7dQe3GFJyCfDK~ye;_FY2qpnJzkCNVtv}pB%4aVe2uUC=#8v5F0Wkkm zmk`@l{QsWu^8JH=0{^w3f7$%6obg}m1G0HUk!aRHitId~j+$Z;Aw_aBt)PYrTH z@F&27pZ|=(#sg^l69m}**dqHoNOndDphx_FNd=e<5n{aQgaDM}HE4M7CLZAZ-$)n_ zM71CY1EivaP$mB#_!DxGz<*|gyqw?xLI4Sv_znVv6yO6?|B%3i=Y=2-+>nE3$dvfB z|C3|=4}rh6Nmv;C$q*t#|0xKxWJ>^0glIV=P{VU`|Ix#De+3Q>;QyeO|I5qYViguf zv~V6u=3QIV4Xa1dxGv*nyn?)t2zA72Zo>bq`1oNw+ciN*`jB(ZrB613#;{0~%29^1;X!Q3+Ir~qz~+MmdF?V!!Y@)!(?F?+ z;9t!w&!nY~;2UN6PevH^3%?|wJQ>}&4Lmz#AiqI>2SvbgdRCV z_P>VJmTvmo_r`zwjZXn8A*NVoM?d=bt>ft4Wb**OOhjmxIIfVS4EI_kOim&S*^jLZwYT6(-#vs+@# z=cQ#{> z_caj=KW?_5*szPqJHH%it2km`&z4(9s<6)AMINa6QD$3#Bs$z9a8A5>RiZJNrVpg= zFi@YvGhct+zRn_@(zNzl31%YytRNfzjSir%*5Mw_kdg@|ROLAqlc29di;oj;Q)x%n z>=EP^YsM-Gy`IW@FGBomzTGAG3!d|7!xx8Q?Orw9-kPKZb}dYJfN(Wz;+}4R`-zW% zZmd^vdY79QL`s{d5|Sx4#DL{4=#F(u!cr7pI#F^_ocCFzRK#lXBF<&BGDGm)Bkwu0n2xFcc1*cN=ZOzLo{nWsqu9Pba+L1e$h$M)fqsD&vgVaxTw7Q=7xv8n&Aw=VJ+c#bBd;)>TT$;?-7;SUByS zGzIC)PKyt}c_u@}V_B6;bQ|}&k=R16r`n2)cZxm`(+}u`UhP7@n(YF$)F2Y?N5PEf zU9^!14T`t|$Up<1aWN4U)Do*0S>!Up=LrsKfew%HRu#^T%UH64f~n;5El8^woe>fR zwM&ek6!wq=UGpzF6qgY9-vlEm85#v&=+jAZ)b5`q7_?VXiRDItIL`%Kbh}cru1ZOZAhS|q(e#_ zJZM%`wS3AJ4SBenv3>{HA2>jsr_Yq9Z^%L7)QNPQEw5>pMcp8e9mQz&K6SpPHru|F z-(jzI+m@?+RB=y>MT2F`IG<6kz0uNWD2a8g_8Wt_ZM58cakgQ5#rrP~Dca!Ci4uUk zB0N2`gQ_*-Axo737|BD+X87Z<3+eoZKM20TWGu7j{fl&M$D^b9&pj~RRVvlon$#hlpX=zFm8Ty9Q#$EQ^ zNuRkG$CmpmOmh?sHT98WfVA)D@$km8+1Rp+bW}bhoSZ(0CwRqbO|?~+dayTRFM@Ei z{9FaBlf;un!Oay0h+o_+0E|yR38#J62ET(3uc*&(s6;k1(n)>;Hkyrnk4n13g2ivU zqmY*DHIT0`_iQzG!aDgZc4+axzbPTm4)_g!u&bFiI;j;a80Whe&ZRXsJB(v7!msh& zuu3mcUr#fzsx321mH&qi1?x&xP6^0cSx`P(2 zE4w@dY0gxx+GYJ-VKTL@kj+|>j+vs^p0l*!5!^_&!34e|-7q3@)UM5{Th0XT_j!9q zk7b%FYGMR@>}$#NmTxUx)Z=bKL8^r5ryVG48MnuVYVs}ZPJe0%i?vj&g6^T6@h z^+s+#gXw7>QM!>d7ULfkr-AL%W~&8gk|LPRUp6S6VHfZfL+^hf?20-exekBqhXG5g z2Xva=C$r4fdw+}`wz|&3o%hgKnP)6*j~5|g7&6qz6*2qfrgRil;xPuY4=pz{K&m2} z%#bS@35`IIF58%XT*jiB*a;eVFN`Jqa$lVoemUkfK6p}HM{?2t?Zdv4IbJ>>5^s;w 
zAWztTTn<;3$KBtCxDWL~iBD5$E>FTyZbNe1`w`Q%p=^&#ns&=rTIc#khV;cW`ee#pSw?#v(r_IHI(~ zVS_0FX*=T-(U$1-uj0?0z7qiNmdobNm(%j_s(e^r~cvbK{fSWwE|n~Y@7O#&lB1DiTYp5 z=*hDzxbuwK#~JmjM_a0=TkOSDQw5#QF^96*>=r8NbHBHOBx=4AX#L85dgAj2&~W}f z)zvIvYPqdDuwHSCdg$w0O12!$G&ax?PR`=iX6OxJ$j@V)MB_Ox$*gTf``+5}${Hbi zmlTH8+oXzAYXIfox+R`oZWe~+`dd7^$#kRwwYdQ3J5pkmxjsN?dQ)X5V(upqPhE&VWQ8<-}|@4?(_&bY>fF%+haqw3_8swRwM65|4O7osQ3GJ zzx!_Lq3n;;1gqDbB@)$J{yyfk>zo|$Amp@rFY~x+tDp&Q^Dov8s)Pngs(V!W&hv`e zhW?!6WWR|gIz8k4Vh-Mt>LL1u>Qdu*D1XVnMU=Y{2cehew-y#&fB0a|G_bG`RGw08 z!}ktpFQxvQx|+?G%I&&|Vgf;@2EQ&w?=6GAVNIr8DX-gk}z++rPJM@r=V_|(I-ZcyuNAd z)ErJLOVD|?jlJq#_Ym}i-llhnbtEJDo#-I(9i6jnEThP)O2UWcYhu5V*`(v?=4koP zN-Vm9-|JhkU0k>lvFB zRW-VGi7P~(40xIgTidT+XltK2;c5G=+i0ZLd<0#|X&ZV=<$R312q`ah=GTPK7)p-D zn-D<$&ZrortBJc&OEVre43)R4LaDE&(W=3I7ift4%1MB9-Ak{h%I@Oqx3^2F>z!eL zKFiQ|eSO{6mpl}vXrAU@Q!t`pVDHSV+A!AdcEq8pT4S#(do{MjeRB0~xs|4+m<$`_b5L-Q zjtxdg-lu&%(2_~3?3VwmIm$Gd*UY?HQo+#-X8y>dxja9b^cDXxBqB>}_zC9cH)3J$ zvJ?QJ7?7K-O0eo<@(9i>Z=bEI;`9Q5M zs(Xd}hBK?7)2T=w_inipus(A!2vV2otvVrPz@S#F6&n@mH8GS9o9is9%0xzQ*Uvws zevw}9X4?CWTCL&M zSYPzRN7EPJaUajFbkIquD`ytbJ_I$1Bo%*T0y9(7Jc=-lLj}aX(>M$9m;4kpo%)fl zU$I4TW!>vEa|QOran3J@h^!Tl4CM^Xg=YA>`ApX{%B%`^wc>5l?oQZmLk|U%!l`E0 zu_VRcA4h5rQkAK)npy!gG<%$fic-U2Qp@u}t=yp%4H4*;ufoig31j1oWn~~%d~bO0 zcoGYJI-k>3cA{aE_7Z(gc`jX50;@f5+2bmx!gmaIP22O+IbgYC#y|PcVk2>{5d2K^ zn6yHNH*zqcjoqW6J(|bHmfKoh*wx_YZ7d-rC9!e6_bm)jVV_msz@XJ@X|-A2LQ1N( zI5fdZf8k3e6ArljkkQpd(T>Q9~{maW#-7EdH&E>TB^)^Ftw8h>}bDD1N-CIR- zrL5IS8+t~)Z5`I{V-$8#72q)z9BFXZ;YdaFCD$WSKv`Kz=0dqU>B)9fg&sS!E;)2; z{Ijq371>w?QEAE`#Ty*O4Co&4NEF?U8Fzwi&UML}d{l)N*97=(92XSC9HCE%N2%gY z5g|0Kd|sK#th0JLuj9vrbfA4kQb!<6+CNV8v*RkKS(;e6esPw=VK1fdG`l=h?x@v% zicFZvSW76JlJ4D1G~SpV-Pa=Yfy*kz zLoUgsW5YSQS&BGpNmXCdg&n#W_XL+#4IF)JY|c+@3_Of7I!Odt(7q>kAPRs?;?g7( z$QFvFkt+$&pMEMD8h)$*r|rpFnI98Z-Vk@v>12J8&ob<);=dKn8*q#=I{@tY{Co-t-AB%eY&bf?=CPw)ZUECZ}b)=(E|Oyd_Vxi%T` z_n13dr>OJE7Z|NOa3&ekR`M?x6IV3L^uM}CG<@lMBE)Ya;DX5^OV)c 
zwhR>npeiA1V}n-AV_RNtZ1S-{9nA1~qkMMiVa61%0gD)D#FQjavBQht+TQy`lAR=PsRzIOLQ21MWRx1MBmryyNKPvDw-g^-pmxDLohd^F47L|y)N~)+ zARaE)5^vWq{3$SzN35mE`VOWNmjB3Q9d^Q%WOh!HLW_IOzOwEbM0 zppa$ucJ)H5CnsyS(Vs6Tx@97~E66g!6jhi39mv-z zQK428$ldtyd3GAi3;r%A@yjJB8W-p z$K}G#)MQLU!mG@^9H2Gscbk4(T&jv$d-GTIOC`giYL3j1i&_=A0WXQN*S<&42_IjJ zjXcK7?_^`1-LKgm6qKe*l{5Ac55H@Sc}CMwE#7J^!^A1kKZwW@XI>LcA@pE|72&7# z%+=?^c^IJWe2jb@gJMM|8zJF+t%HgEUWivM@O$m){D|K7v$GW22iA#}gt$65{&5H} z+98s!DR?5jwdtuG!0PMpYh9KIL^o&QFp;Ea^v;H=EzoIbz5gQj`nEVW8PB3Bw4TS; zA&qlLvlLu+f`mjE2OG?1lj5sXMEo`?XnVdb5J&@a5Y^e(9wyT{4;8#yUPE(j&e{`NS^GE#i7%np?wBlSpGs{@#na&R!srlK>UXqo3_`5 z#7nb!q>ArPl{^m8$D5R4&}`okPz_$?dKSBNl08UjuX|zWqXwWrKHD_`r|FvjxB1+! zXF+&@BmknbbPuLU!9#%V37R+5N9LdB=09h5>Kc)jA;Urq3Jw9CbGu&S59I;?Hgvz8 ze7sI%jMl(qsWZ%b{2AH>ks|GMCR0864IvMOxSkr#1bJT3Ai%3vgpugWooUx>&g+;? z^;uqTkkC63yiQQI&gF{EyZtjPQ?U_|e&}>j&x$esB}kzCD} zR>*Pf9~GI=|7^gk6m$LyUh8JSxt3VJ&GRch(g)lZc@W#1A0l)Qf#^t$u>(PmeLCcj z@j3z&xL-FFYj>eUlo~~G-CiKpB|n;n*g(#5075VZzgikkx;+A zHQR}<0d39`a28B(vpZnflM{DD13>5q{bpT@>z$#KgK&!*vtli<*8*F92L;py7seuK z(g$%3I4elrFxig-D=G0M8+Ujo1#2%=1LN>BVH<|Z%vmaQH(33c6Yf~kt5UO!&$>dyn7a<7=K13`{pru*^9XK=do#)(TrIO7OSUTb>+F#9#8tdV>8A zNLxtxw$bwv20+W(uY~#uA`VxeSeCNCe@zx_FkQ|g@HlKHGYk9@!%FOX?J|4`Jrt&| zl5XB^djZ|Wb;zBT1RIUL?ytQXT%o@7TD`HL9DgJeK^w%$gH`ZjRG}5dNvGQefgVE0 zwf)53{mRZ)V9uc1?)W(hdDeS(H6)gCukj%_;`O=o?bx9(5Qgg~UepAFYo8UuYt8|p z&=YNe5D0FQP<6V74#h6tpp&z=6t!8@`*f*C4|0_?@)cjmWWcFSFMrK5A#GQbc zk4vYQZwYIj0R2wC*V~%!gHY`oJxO~puE5H&MC(t4^-#8xNM)6Aq@UtOL0Z;{dq_v8~{zNe^P@D3mFDw`xZ){ic zA%_OWqw#NL&?h%QaAy9Zj_8l;i^px0<3no3wC4SpPeeb^dvPP`^%B!;c?8z+KD{N>w} z;S;~f+#3U?ZGNcV_lH`YyNA(%Vgo8Fo<$6u>L0EY67@|6CZ^kI~q)st0=8-y$ z*gNz_t8jjvz=o*i!%AaZTSa(%jt{fDhn^YNI#baM2$E5Z~5dUm{~nV2?~d6XX6pU8N~0WGYs6 z5ZVbKU*fAF%f3n5X1EjyD$E%5CIMwGLp@Zc{%@pvBx$x_O;}Sd@?P5)@yREPx3Ozu zBKJt;n%!q_??e}Qa=-TtTw8Uo7q0J-Irsd=+0shmBqRy^oG`JpdXVy~uf*S`e1*Z? 
zw>YNw5~9iN zFxXJiD*nzfDxcw@cZVw(*U>qS5ul(c9*>#b3-L5KzU~x&MUeAT!`n4g!+ZC}-fQzL z>DO<+>Gp_0jpH}Q<0+k40&f`3lUiml$_G+YeBdNKsGtXOLuO%UBrp;^Ujt`}FhG)* zv=k*J!gQ?^%_Z$n>~J8JL-7&hdybSTBe zVsx!A3F6Zq@{|c-(}GCpPsmc>o~J*k5E2J^>JY-}bv5CsbpX>-;KGZ$>fJhzl2MzA zVFJY_Ep${4L^0PfWK?pJ!bAlqltBuF*FMD*WUY@BQpFS*)A$Bx zT|^3Rsdv7W0oNfHtpm?Ld5RP_0nf1%1<7M!$XvXFhYB1N2*X=KKKQCwLxY$TFJ-aB zY$%x(C1E{fdOMl6rzOQR@d;CCgViRzhhdB;6=`~=Dd<{-)T)=9JKT7;lRtNje^^ma zb9_Edwj)ae`3z-$VO?rGP3zb9F?zgR$S}_NVc#{u$#(p4efFxep2=r>;-N~fuG*wo2YUAO8rh3NL9(fJ`|X6l;mfg5!9^bnni ztDwg6^+o05OWNLrRBlsE#a@BCh%eq*y7287PN~S)9H( zmd?$f^&Li4)6O6rBPY#w$qVDw<8n3;fv>mvLlsEEZI65N+SQ%5hMx^= zN*xw+stA_LLsy{>c-S<}%RJhZc_upSw>dW>`mqoKu@{vB{H+&1l6gSB$^J!&{Fc`h zXLmSxzT2&d%3z0>jx_Do^UTpcHa}e-{dvcaH?Gf{IKO$m8x`IEzMS#;S$T!<^7C5TkBi0cX0P8KgFR3E zOLli0Jl70=j;PLfej)QJfZWgJvS)SgFJjPWc3JBya$oLiG;#=vTx~AfFWq`Sa(F4g zW}oE14)3izHmaT1a8+E=2i z`0!NZB}TWSY;J%0XMH>%kWroXecO71jk+ET zTm$o0)QYpHvN>V&c!F^3%;MD(w0gANWPMEvhDkR^SYrVTr1jqO_bub@^1y86#q#w1*WItd2?#+)jSm+ z+aN-#+7$z$$oFSg30#xeKswWBLJR%Xl8@yA_QDOLE9=t*WXDci7MVM>{26p5Lo(Se zimgW9YHto_#>yKlmBZyuQp0D)#&t?d<}#eA!*}?LTF9CrzSMtPca+Z&%D(x;qOrBoDm~4Stqo`{WF0kRc&C02W=+jxjN%fMbF<1Ux z@`J9BK&BV2;pzkZ+FYr^_r4nkn4W;|hBhigVbt@P#TM_(ZCnm#Nvp&tYM3Mn#QcF< z?ywGutWwdMWao-RnEv0SgQsw&7x}-6edI`(mU$>-0hweh+g<7Vb2#Z(T1UY7lDz0F z&f7KAOVvxKZmi}b3H&g4-j^QBFA+$eBoLT2>X6;Pr~0>?%zsKUqL^=qFz&#vQs0geHxgKpw%)a+ zWgJU`3;H7K*02x`%(Vo?H*b^^ACkpn616~z3{m>>Uwl7cARTJB&Mtn1B3B*->JnYJ z1D4fWR5}+HR(SD0q9UK%vJfi!gTfh>QZzhKezy<>V~%~0_&&dZ;r@Fkz`bSuz!mNp zLz&Ui_wxmCWzFmn3fbl_sP8<3OsBlbg+=kBWr=h;@J|$`Fcd%{l z0CDnSBXHTME4C2^D{!4Uo}DK40ST&D@@2f9S)(QFY{q8~lNEg$`kdx{-OhZfvE;dx_Ejl10vdnYWo^M&VQ)dlRbH4p@IZ_$Ty|Hnt zQA+-?QeHzO#Bj;tpm)SWWF{6oo-7;k$@2%-;+b2BTdd*G`Mj|4@M6X!{aO!$E+~Bc zzKeZ+@$zzBZnwqFg3Wa^iR#>9u{-|E=r8D90?7i;W&uxr$vL z7jf+mj)&qZk*{?>C<&FFRF?1&`{AG&8n}tMmWberj+{H{R(Q})QGU4Tr4H>!W$YN0 zaF{sE_`3HOc#X1uICe$Tqq&agCkCCx6W;zBcT_Ir#J(T_E;JDl2|SOV(;6@DekV9- zbqKae6|m$kE?m2Hjh)7K_{u4Z4An7zc!WO-losd-?lQVuGRl&iFL5N1ORryxTn~`y 
zKGpuM&zvihT+?$dH=sT>Ur<)tLxZqL>Do}L?to0RdzEgd!sPP4&_;VOn-~T{}^B~~RD-lOO1JG^>kqyFnX=6Ys?k>y-?P~}lTvRswCdC;{6?b^jG zKbE4bSv61j+dG+T;q(!hZ|26#rsZU_ZdTCaTh_h6^s5X9DEV6bFW_Pg7M893dLeQi z{j)2t_--sMUyiorxZGSgD=W}&(m9!mSCUBsXL*cUIJE7LzO(7U=ax@fZgQU!g_{>C- zD$8UXzRgPFPMw%KMyBqDf%7+vl{RyXrmt`4qJ_+9a~j2zjRBs)ngww$-HLs|DQspc z1frC}&4zty(mLTz52HFys9V5u7`>?{Y-0o+h?XhiCN!DV{L*P^r!jM{z?8FE{P1(6 zZJxHHi>uh$Q5^{1+NkjHyWTnK?C?{X4DT}G=>^GQd^({(5=`z^NI4Vk(QSBka3iZc zIWch@KG#qg(^r`z4hfKt8F=IPy01IHB7e&9){glKRgqp0-Nqr_`fMwqQa*Rqrv)10 zZ9(v_xMI$ucpK`-faodGIbRqF$1fEdvvb4?N4BXP+E&ZNwW$3*KZ1l*6@jxAze4qT zU|7w_@sG}xo1YNC9x6LL;c9SJqnWU}Sdr&Yp79$iGx`fwDmy7Z_8}`y3d; zxoPac`26C}nUaz=vo;EF?j4j%oAJ^D%VP}MJI>J~BE?9zVQt5R^2SZt^R`u2-+jW} z;IeFar_9eTYUV(Cojewi)#{c%zdvHFOJ`{BC6t;ETYoRoa;m1UZ{vs^b-aj{%M|qS zSFn_wySocF=gMeQ__4Hs^(l=%bhP9h0Wq!Y@Wy0bpe#-Fryj0mUm!`pq+nX(y})Ud zB>y*=k}!+)I3>j|r3{CNUv{}_s%iFAP(QU(27D2x9dSbec@yU*FxXF(5R-6xWaKP9 zcdRdlaeqh|VJAijHRbS@q;;{i2y3h!ro7X|WXxI4zSa|8_K{bcx4)Y0d9XmZ;m^v> z6i%Jbmiax~_F@t#{1qTsm zd(z7OZ|YnEF%svZ$qaltMZvdHC?lETX-8VKApZiU6BmG`mm+glmoKaUPgC8Rd=1fk9OSBVC(DUoF7cw(F*^F1$F-w3V`&y`;*seOU^K zZJH-14er)!sC8X4#KayYXNYF4nhr?PEFK*rR>=(u1R7_wx{T?wyvIPU8V`BnWCN4w zVrG_g++=|w;7W=kNW45{5gIO(eRX}t&Tgg|lv!9Y-%_nx?$&F;C8k-($+!^ilRTUt z`T0uEf+fQvbd(qWw2lFXBTItfqxn;&a7VEwjq2yG?wK+m^4x2;*+Wx)q5D-)VCLIq zciQF+yo5!4P6WkdiTxUjlyaTL{JIcv7I()>`P7!?a@O^gGJIQ29{uh1vWh<8bk|!H z5I!|QFr`gwnW;^d_zoAfx=&tLe(ZolOUisDDeyLw&(J=9ooB zwKMPG&<1icM*^8pw1S7MM`2@cVQeJALAuE|j4+>7@)!^w)8>6T`t$`BlPf+RPMcWK zpE=XGGc;?a)gt$i6i{TSA93~TxizMAPcLI@l!ZfAv}<%GxgKc>>BTeby|WQ&FQYC1 zq79p_N&7Q@kb!^j>A@@!o3=@!^DVT@=5T}+GTs{+qlS;O#h;OKrLzONrgk%aK1opSmqf7X&USLs@&a|epY4-UKDQ;&Lp zj~VJ>G=XAcI?tI+?IUxrt%RZ|)^xZX4Qr*346a88iXv&r6kocnqkNefcXrre1^WUe{w+08JuBFLyz9WNPWNaHf2tBjQR9lkjUTr|@>6vtvwFiG&R zmYry##|^5xuiJBOT+Hxd3Tc|IP&J!~JB!b^_(U(j5wU}v+kU_hYrKYRTnN&b+e>WC; zru1E|#DP6DY#M4UaA_r-eE|d;rXLjWQJKfS;`r;kyTYFJ{%;{G1(O8bt|ffJ_|fJH zHg6>@WjhO8Q2VL-<47gCx7csIcPVtg8quDp;1@FJiqGVv$(lQAevwbQ;o*PO?{4#1 
z3=cLwewZy^3=}SED0VZ+5T;gNbaTmA*;5}3dQOaRQMu6UyFnPJW?l!CX!7y%H;ggH z&!3&Cq4@-bd5wx>A0MU?7)}|L4qGdJa#;1pXqeM2O)bBv6@DKqHeD7xVdrQV&BkFr z*&Ock&_mjzBDZc;ht|+bNQvtd+^AWiz1W)Q)~Y_-emQbiL@e%Ip$~ z4O}(l7r(hKuZ%`!^1Da#WN4`io;9K}+52O3=nGiO4UfTr76lHIYs`m6{mEk>WxEbhCMDN?%PrDCX zJ!UTz`urh>dA{T_XavRM!L5yp!lG8G)J^UIIZ|guWwk`B|9MMzW-^~~zf7I*@Red=Fbi@gFTH&T%#b0uPCfUV6Q^GEuE6c!Kg2wDDVv6?RAu0_qLirZZ!EAh1Cd*BmIK}S zC5#6JP8<1q_`wp}D0VatTE+%QzH|~^_@+{=s0UKcTNXC2ruKa>3zjuj*vFT%%rw7U z)=?n}dX}g(dCE!%4{)ef@k(RyyUGg1EP#u?E52HsaU6_`5n9Ln`Pi%VcFHs>J9tl| zkhmrrZv;sS#BVML3LJkyxN8nwC@L|_errLD`ErItAw<3P`Zm-l!WeL*g&z-xC|F;p z%1KRRnm|&sb2m0%1Pkf(n4pyKCi>a>YBZulvp3vJhb92+G#vr^#b-0iM1?Y?ikPI0 zVA>EZHg4U2)1^blhO%2kcnoq`R*cXy9u4|S+p(z(=tA_t<7t5i8h)lv%M7!c9O``r z#!t3lUN?W4q>W)$QNpcF@+0U<4l`lOTqA(c4 zJcyWSFIJg9!aa++d@7n#hU88Ke=OE^_^zI?Qp|2@%R1-Yu2c95Y+`hUrxk!LDh3P` z-)7q`v{rfQGk~U1MMNnuNs%USgVk{s;zuMwdGTQ8MQ=cq91iJBq zmLs?eI43g>nccYg;hzCH(c=;a!Nx{!Tn;GH+w>=C4%OauSsc2$0hXh_!NxZc8U}xp zS|~V~;&y{+!B`7%l>QzFUJY}6@0l+!Wq9c5hSq|&7JmL7EgybL;?U>q-;kt=_(D zJ&91LM}FxP=b`Qmu@7lm;7gcCA>Ncc=+NQ@(Fdn3%;>#SKF<{E4XzJS;lPhWp&Q|c zH~taA;yo#M_o?LX@w@*52vU3ER{8l&TXtrxVD`8ca z-PM*CiH9lPd5`01m(gj@^I|*~Ya_k=055&}tvJrRD z>AC&ma_yefkF6bTBc7}iZ9Dw3=N{vUJ^)!HF0WI0`wa5GCNLtgd0i9R2bVwZ0Z%-w zSRE0@-E2GBmtXF=p7;Y`IwC!{zF)4~lRU8opmoHHbdqhyUphXqzrcc`M0)ae*e*$* zXkHLsNkFk3T^rjMmv{H@tsuPD2vE1kj`5}N6VrP9`%8K;SS)dRajdZF5GuhT$|ZBm z4291L<>rJLO0I**N%EFSf=SAVgUlRRqZFaV5!lI?9N?5B4vzSgaa66uOcX z5g{_NT)GP8l1kW;vKo?#KuLKwNo8bn1YNnb#65F>uB=ufp_+V5F(_^}8E#r8GlVNh zCn6P?j5{r7UmRPHC2&NZQylGv1%E`*w)?I&^DBkj(3Uxg3ntzXmE$&r(Kgj{uQD{X z44h0vh-`@D5P4)GuWSOfY$64R!Zfu+9QpgC+g*BBAYT}qw4GS*<^E_LmvYy~BX z(xjQeBXg=H$#HV4Bv9Sp(k}5eo+~R~e6})oGSXn^E~P8`k~k{`IH@CPWN>d6u{PB} z_8X;kvfW_AF2AeLlC&c^Pg2(4*e*! 
zQY6`UQ23f@Nm+W3K9pVoBdX9ksz4{I$cr(0_%p3)ayi4ShAkmi&TxUHO7pA&WBhO_ zVwvu&y)CMy3fUm8U1FsggP?Zi}HQB}>nssmSu#JSAIAMNUqt zoB^ez#4RP4Lq)!A-L zjGjxTuZ%Y}KS?8surRC~m|75*A6GoD{$BOH`T(Sh-hk?WI-Lrg8l5UpS#r60nR@w5 z*-UxtEbTG!vBEL)vHdaeG23kOtoQ8iS<7SSWAS6kV^doscLaBaTkT_xW7lIuTPAlJ zcN}*vcMys@i#v(CP(y6XrtkWbRGl91F{e(vYog4~kaqTEuW{1@r7Vk5{mc8xw86&RHm6&aPT z=dTxgs5+@Ssb8pEs9mUDsFSIXsgbGvD332!E(0l-Pn1oRvy}-nEN0KrRGX2 zmwr4XaAOrp9xwWN1nq+>l$9%iH!bDHZYVxcB7el;Lu#lrQHnQ-<3`h#U?izMg{dc} zJp+oiAz&%RsZY|HvekpYtt6GDC^wis3~ez+N>%yV(_an=Thh3v-Q9%NM0uta>m#{m zaF^wHW?0;q9Ew&BJ(iU`Cv)8J))WN}Nn60eF-D421q;*RGbh$`XN8Z}JZA-M_3`JW z&u)RJ(=cuks52P+a%j`+{7Q6()H#LWhaeMvMXN)qL#m_O8`Ybs8}^%;8<(4_8-$z4 z8`&GU2lEH)2aN~dgPRXkTcmKXaI$c;aF%fR3z9#pKdHZPTSj|edqR6ed-}S#vAD6^ zYifH;dv<%!x}34Bv6Qi*v4pX_@q1%sry{3(r(&l11(u@3y;)}uy@Ph_` z1oVAIc}n20^vLSa>geF`;0W)qV@lu%?+APdKGHtaK3X_jIN~|vIWoM_zgf8vxcPD8 zd-He$e-nQ5{>Jga>;daR{eko0;{%cp+B3~F&a>x(-2>5s{)51S?*qIK+cVWO)-&fb zQd|5>>bi3LWD&CII|=-#0)j9Tg1D^=W)N*wMz4b9=XdHEHluE|;Tc`K9Gn?~w0R64 zEZOTa_!KDAd$BobMh!lvFqrML?~VFdLaBFYa3Ez!Dd13<6f1Dmr6;nxN_Wu{;1pQ` zW74MBefm3@P0~0<()UIYEotha%1vwusMH5$IZ+g-W_r`{In7MIDsawp*K=@xjs@9R zohrc347std#RMOj_+<2?&y5mVvV7#Ni*w;%{T%PYN!mYhpB~UZ0h>e!IcLy8?><1vutPG@dq>SkF=nR%on$eO`3!OL%YEE^t$pr&kA2jArG3_Y$9>X$qkZB1fPJWav3-ht zlYNYR)qVDTmwkkNSxbSR*mc3)nciXEY2I<(x!#|C6V)YqCwfPEXLyHtr+R~8y>q;S zy_1*vTKihZT64gss{?{Ca210cq7`)o$5t(r(di z)@~K>;TMMphX}g}XUDt8bmNTY(YKt#l(=;vB|4cFI%#`4xnrhvOR}ft9*6>|IsHmj z9~TMi{|{m36kZ7rw(GA`O{ezMwr$(C?XDVAth!U%wr$(CRy}pQI_P>wz(LPFY zuu;OEMQ?D{LT-cAgHUgF)(X2G)1IO$-e6d> z0ZnsKvnfgkht3MKI#Hv+R&&T!hoH&|@e<90rYqWDw$@myv1%Q+HI<{$-Ckg;pR-Y+ zI)bAKYju*diNT)E&TMPlV|CV}KF1z=Yle57ur*B}+F+*EWUVp0=|Y?8!t_lW$aP`T zQyq6@^#m2ojl7leBFd|#&Vr(oowUbQsr{tlI61D()se{ z67DkMQsxro(fkqPQT>tq(anRbD?%VxAW0xfAWI}B|+)TQI2*&{meQSFiK(e)AD z1Nn{O4fDD<)8qfeqWjOj`N(yvXXJ3Mwi0{J>Z zXbot%z;{UKV^ZxG>vI6x(vzHB6bEQ{Fw2hFvs0Ixf6ewY?5Da+wI6|gI8wAlR_)vJ zkZFyrxFYJ4tqo^zvsvv|>$9y5wzzQC#@L-)>cg&0xI1rW2Vd;}>KJNY0pU6_cyhPJ 
zy1Fpd#<{u@&Q86w`_Ilnw-a;qhqg0g?-zoIu@4XQgXCOj9Df7xju-XA_=5Nn_2vjh z2xbU|2&M?e2T-*DcPFzl$Ph7J) z;M?IlqS~W67(f)D5|B~HRQps1U3(2E1*8F50das@K<=Oi5GNQ0ED5FrTY|B`nqW?_ z2N(&g2xbHW9l?ZPL$Dy&9}EE&0h54Dz^GsqFbmij3Mt3<__;(zWHMbW#tWF-%9wh7!A%H$-S`p z6B^CVonZgP^rh&FHyYNuK{K4xx{LD0VLHL=PSm=wH5{_@Ca60>d`I)A>5Dd+t=rSO ztpYyd_NH>(y88<3_H*4TbVqRAVeL$E-7)ym0S>pGJ$7b2|K|8&@6PZ&6ZWPFMH|i3 z?XTU2-(7f7T_3!8ab55CbjMwveE0?+-$MHSM!rK8oJ7855uBxZCeJ$ve>M@Ev3e$Z zCVP2!S9xD}XL)aUcYeQlhkK8BmwAWzGzWfSe5!x4f4cdT^+gB;3nd9f31ta|f58J8 zfrLQ8zKp(rzWBc1ed)V{MuSF^Unzajec63MyOTx}Mk7WuMngtZMq@^EfJ49@;1A#c za17W79Q_-eT<=}~xjwi)zTUq+x<0%Xrj@`C-`cfa_M-Ky^|JQ7_QLhd^AnM-xv3lvO5>| zw@8N?-YS8fHfq+iah;wqYHo58I60e{%)w(PySiL$qqEfHLJ49y_)6lFe8=67Ma_J8 z``0@cy8ij$J$5?BCkfnPdA*BW$n%W;>^8eUAt?I)O!a-lE(rN4ygTmRe?k7FPp9}l z5RiX}dkYhX|BC}jv)%uP)z344ruav`8Zm%oWxznufno>a9wwG2{A6F~6pyJQtBOcO6ZWWVf-x{{)2@(qFc`n3A$4(SFC+Z>|2@tJbpebX~K^ZA57CRU>a%@T=0 z&Bv(R!?w&1Cl^u6dw-krPyuqdn=EO8avi7y2XQPG>rCwXCqGdAZ*uC4_n7-K52LBZ z2WXPmzCZGYJto$yKQi$49!lkpmo<2od`&##-NhE9c(y{k^zEH{lD%zY?*4AQWfJ!n z+?eYN7%g-q6N-A2GVEAb``Pi|2c6j=uW<~Qu>cMLLx7n_(h0%uE^i*@j&<(`FmX$T zbJ?RDJ_6s82wB1~(32`5*mCHN#9C|6mEh9KWNaV>)=afPk?dhy;+!l`Z|xujI=} z1amWw8@Wds|8gz{)sRh!0-IJ%*-r7{Kc+1%wNGfi^Qb1>*RD)cgplR2qGtbAgtU_W zP6yj~$(fa75ws;rtghi*^UV;327xbI4ZQ2Nl6{nq3FnMM+OcV%d}-%DI##G?^T$m4 z2qTdgqf-K947AUmMu#tmX?g9`(5MfWT0_w`sEsJ!V{z|5%0Ds*39L1%1=XWdCg}|^ z*3$6r!*ykwv?dr_VJB4magI;LxlCU|FwLmfnHCL{^>*z^@AqoEs92-SiozruI@oCX zR#A!&MRE6B+r#ALl5wIn8v!O7<+zwNmb*)?aEH_^G1Mt#DSOLq^5kb()(1V$vK&8| za7|@om4r$V8uCfO1C50!1OJQBMeN79ebc506QwK&D3OI!FcwW8rpzs%Vhp6MOpx~_ zUBKXZh(L2DZTeNkMMI}Fh#*57j-6CXHqFW()pxp?%UdH>qEv8(F=_Tw;0BbeoXC=s zZ`0_2T$jqjal=v-QIk;hJ|rBRQ_^p$M5QC+)cE%;h@>yT3do`t=oz|lz2v0uJ_KLQ z-`nErbY3wt#&<5n-E^gz1_K9ozd%*$zjQt!9A2dQ*-n*lF9}(3ynm#qO8%v%jQueFeV{%xmW^WU zX-8eYmmDgf3VbKy-Yt|mj{Zz}`STEwc5snMki7HiEvv~NAE`c(OH>|?1scM=kKO~*3?De!+$!8^ zJ7~~I<@D7`Q?`x)-qlwO$H3LA*F%A_S)umjs#RT8Vcn+6>YT0&A>Z-$7y3KwP>8ma zuA1Z>$1zK*TY>3AlQ$Fk&A%A4ef%Xf!HZsd^g&jnQzHHq-xmhA%q`8}nj6R)HA^%P 
z0p#d?DZm-|$8!19zT)$59IB$UH0;IRXG$@fC<;xYG7q)He%m^{bP`M4B03%5*Sc^- zU3h3yV?UxVh4qU644sxGgR{vmvo|Ce?y*v~L4>ZvCQV8|qJIuOFnG4eQO-unx(+)F z9yckJUJeXL#2wg)1oCd}CVr&I(&VBjv9uFBh611Gt}1?PQjRJ`mAXHpD<=IJ=F+pX z)=jsNRZv+yNSaRe-#kWX#HUuxzL5C>;fF_A2y1jFya*bqgBL3>ECm(ht}JgfQ`6uR zs)ES(Mka32DAw%b7U7}wz<<6&_J{ta_5HJ*Ty3-H1t-YBz~apziS9Z(`3IPgy+Lb< zYXN+iP%PPrJiMB5rFcKLOOrORiuW*yMN9HLYmxGvs>`!|+PYLxeaLP3GtteS7)uJ$ z_|!ofn86=}IOr!~-dcCXZnm8c%7RU70^UaX81>l&o%y}>Ut_(842*W2^ga7(Ts(vS zk$)I`+_L+u&)%wbEalhQdfHCu2|p}dO@M1FR<-&+yFje<&_e#lyj6!|)?=K^b|LTh0>7pFq7VurOm3Teo9atFt&Kt}!e35>m%zw+ z41{qo*8*>>So@IJ9z86B?@2l>c_mJ>&*0RuV@9AG=x>-fZ%HYI^BPQ(kvzpA8jxU> zpfh@R*z4tO>R)sgc#@`&2p+C3%x1&nUK7nK)kwHTrhVB;0k6Gl&yV2VZD#vCK9pm#}G z`_2k&y{7z`Ufj;cdPW?m?T$h6&SK2cG=w6@bYZGJ0B;dBGnll+1+w~UcquCbyt8hd zVF1M>8EUus13 zSCfn{%a;%)e>doJGY-BSnVVo@d>eEEzR*xl-L)7(G_3}jS?uhkbMbiar5dECuDUz za^nS}5~u64GX|njpO|dr6`bL%Gf!yh!99-MS6tz>I?}#3S-CZmq%Dz*lVF7 zUzcleF!xYmxr|yz&RvyNU%i8($kr0)=eqdJ`{WHrc!sMI@*O*!dZ$X zXm>4^rOj|W(ECA*Gj4L0PQ6|p5S7&4_KmdjO(C5)$!E7))b4W6*?yCA9w6t8n+<2k zQiAOES#3U=_`=b3(%WNsR^ZbYPB1r1!}vFypdy(?fktH_S#RxXd#k6~5~2##=MPz0 zs0}nb29>tmMe_HO|5h`FT`XpGj{zfRJ!vyG|A?l3X%{xoEK+j&o2j=7c*a}@g6ovd#`QVW zOD%|7%rrAwXljVsv}jvscA)p4932VGnxRvUs+DAU>I3-TD6Dot3V-El8e2m{5J*4C za-!ZAy~mlgU>)@*5-@0doK7BR4l(hr&{uut1{EFCN)s1L)XZx_75btyYX(rcxn!F; z74A8kgL_Kr%v^eg=p7?Pi!kYuXtfT}<7hi10JF?qO^p_cm6$!iWnqmG{**_j3oP_xo6d$0g^K_&p zt|or7N_`|OBGECJ)H;z=YtXlJGd*Y5DP(1-5n1A6_>Zs%^~;`FB8N0kJjA$nE*TZt z7xOaDdF)f$iA-!s95Hhy!*@Ur3adY@a06)ePgOwuaqvkaks;ZmTabcUtj_Dleib?4 z5Scb+_fk2*o}}aA_i+L^=0z~14O~<~Bgq@P*pMqUu}O@9GKcP=5djbd?Yvj-h%V$m zrt%B_biGPyH*ZT29WSMFo%&B@qMPehzg5)sMw|S3?4T0j>zi58=Fej468$91RsCls1 ztcpxr`%^7i87bppCX*0EIVB`g$EASpBHUSs*(V(@GGgW5w@oKA0UIu-n(7T8Wz0Qv zn*z#E^RcYvS#P^Jx>V^)i`b`3h+vpg_C$-PGD{T%kB)5eJD?xR`ldT08(6Hn;aDo;F&ga#}Q$`ZceIU&5iRF_7 zzr~tFc}v9b{wM{-Volb?TF{({BdF<|p%5J}z3*fO{qQ>$SW&X4&$Cl4wmJP5U0u}J z&Q5ok?DHEhPkI>N{>8jvOdqsUIV4qrgm?&Upg~>hb4S0m+<-C*OtHNhT 
zFF!k|vFg(R8ZRO@On4Fy!B=sK>i7uveu>K#ieJdvGPlf*&)%5h0o(QXxpxNj=6nS$ z4$i!3TtX!_inj3^LqP*1rj-Kj8?zHXA1wBGm>vZfu-p_J|BKr7S0$N2409BkhZd(6 z?|m;8FeLTlEMDaZdG}EAHC<_V;#-0jP>}4(+`d4Y>-N>Ta;PZ z)OX+o>S!qIuyxlxzG^Pfi1jCAR?|7-_?7tgwY}cr%-{`2Li#U|XKD@XfY&__vn)}f z+P}}ETeii(EH!7U+A(jazM2Di)3H^{^m2IMDir**#N1Gqj!<>c>2>=A5%F#@z7g)g zR>T|}RD>F~=$jX4zQL|0M}xt0L+xhpK0|-{f{D%-_pL2xGNcX$wGqOpi=;jWr?c zkY=$8#zr2(L&FtBKHl5wcr?&{2a|dZFiPt5_Wcs;VNqdBQ1MS%(o)fW!d-fOaQ@E{ z=u`0fI_5BHCM5@YcoLH0Tjob36ctDjscH9L73W9Ry7Z<}=C}3XfxaWCh96$VK21Hp zI&Dm8oHILE!)4*l9RAdMkdm=fg}mr1b-rBhEt;0)`VVpK znAYzKO$4!(w&x1136-)8*_l>J1Vat|kDdI_cKE*pUKSo^uK$<7>+9*OKXUWTiK(ZZ z$}pk8NUPc4-dZc`*=qjMbJ3#}e8XYTm*hsZofGh1f!KEj6sr4FQR_fsUMNxGdu35+ zc|;E3?*c3muU9_28-wwH-mAFX>$|(2t&I)8rY5$grmj29YbfIX{)vpB*tLN!gZMvq#>kQ> zGCM+TQRiFw|J)UBteG3HL;ZS^cWq^NiOMin0RP^8HzcMm4l_h;o@;1f8Gyn|5f!Ii zSim@a0^SZnHO6rbRB2J z;RkMf4EtaMJcv@9%_zAHH-F43K{7j$_ArdYuq_-C^5;x$Ibr^%ktOB&GB{l#EN_!{ zw3#+qM3&9*-`^eSJDC2Buu>|)mjPEPM#6WDR%1oRdZ`Yjv7sO~^;2jQ@R)LhY zGla15quAD7xaF?_-0Ar@qC?u;rFW(HI=F9;{jLm1ufkpg7y-1A0+LMst%2u8_FQcd zh1*lv`Py4#01Ymi0WK0@;r&F`9i@Yh=!&~PdN1*MKHCJ7m7I{IzM&M`2q^r!4eJf~ zSSQXezg&A2Ok~VG9j-y_0B5a_>^~7VgU=WKh*%6vr+sM{C?i@+Z1oe*Y}xFL9H9jN zlWMdD7rby=QeVl|r<|_}Y9Y1-i9FM4!}?g(AmxFsaa_Gdn)*%L-e+Mz)-AVW8$UQ9 zd5i;oSO>tZ2f#-MK#d1}ONTnj$oLU@YKI{rn*ejzguzxf)BR9a2L#+SeNB|b_UR7n z@4@{2Ssl!`NA7!uw1QM3+%{M_5Tfq#e=X;YH=8X+^U!F#Bpz4D>_Q7b&w`CjLOV9ud>?< zUy{;*57(gM3zM+hY}MtW1KnbABc!<@KD5&Q(yHAk(zc@su4MIOu>I8Lm) zF#$fkUcGPQ^qAb_LUKs>ZNByISa6-05r^_W^6aj=evM zVq=d#tlun~?b4fB_+CQ=Ter$J5!{htSdX62Jg49hb_*wcSUTLIv?Xe6=k;dQH{mx6pP_ujoo0ZIgp*Fdm zAp(bB#|>jQippJUTwKwlV{G=`-L+AMe~Y(O zE;Iv}v*pHBgId+O+EmD)K0NDK;)Q5M00*6?da>;KxuF?nN4|V#j^-64SPHJz)L`2g z+M1uy%kBbtMF7tiOOOt0ajbe@zzr(%j==Lz;e3SZX|)Sy|ywFuJ4F zR{?l`(R1jxb^;G8Q}Eu_njfF&jXDaxR%d}EYMyO2^{yBH*$Ep|E0I4o8o;=J?KR~@ zJ3Zi%H1!4-*nY?HLaGK}dRA|Dw-y8jc{pDjpmR3#%*^y$mNKkToYu$dTBg+MQnXCE zNi7$_yfs%;_|$jV$vvrl$Lqp05D4Cm`Oo=6h&K#e{yVFT;k(#`@Rkc)H@JGKF3TUN zaS~`-=l|-^{>`#%E|kk}~T962cD?0i)2(I*K^q-E&$kiwN` 
zHhZ?Y<<7(Q>ra)x%a8)sws0@5L@wuc`wVBTUD3azqOLd(!`=&hfuT&2BDBELTK{2( z=6nluAp_>*N^)al(VtVdzZ+*NvF7kV-Y(Llyfho0Il1oYj*3o5)zUWtu+O!%pWxN~ zQl+10l<$>phs5_Kr=D5k`GK5fD(9$aa*RxKhW}iAaL`m7-INC2&nXfTfR&F0Qmx9+ zvpb@1b|p=AxEk;I&O7Rr6=SZBg2ej?pAb6kQeE~kBRrMIU9ZBxFX1m{R>WE$mOJMf zM~y>bNX=ks@BV`UWDkLyvc^y*W>o4jL-~dMwN?v&6`@)54i~k{e`9oYQ~}p>uJF;8 zo3&cI-c-!)u5x6h{FlmwGO(@CRpz4-oTA`Bhebk8X5dtjtkYL+L#F59hUloTxx3g= zXgAc@iQ8NsYs)I__X~Q(9U2ArH#JEnm4T00rb3iJZjV2LM*y}h-(r8iW{06l3vlCT zz<}++DDGr+7br>Ei7LtTP*Ff#al_nR6WK!R400TW_KZ~Rj3W2!E|mbIT@JrV%eiZ|L>Bz-~- zRaF>@VmMdW;^G{vMJh4h=7qbsR$3qNJB|-YGED<0ueC@hImj2$t_!;Z3OcHbOy`ao zBU`fgyqLnhpcr&}DjhcpN9fMG#H+AheTV+aY?K~9Fd+VXAZrFzOYN*uWnnGMN7@cb zSV^hnre{&PrKPPX6N(KD4V@-f-w)LFiihagAS>Uv`dX*o!Rv+vIAXBp0-yw%Jdfpt z!Wj=)9P{>yi@QSYoxE&LR&!hMv{n(BY_E3n&xt-m$?Lm!XmDEVhJX7$P}pzhr&imX zpXzb=LF@M!(ii~CK!Sc8Anzz`(HPo$ms|UiJ&MhX$_+!St2s}NjbwAZ@cmmvy{=Aj zJ7hdV+W9AB{9cS35SKfP8y=|KI;$5plM|1;3e)d%1GU7*fNUG{^bVchnhh`g#2Azu zG<6JeXUw}hkwr4fyxZu|=R8^LY3eXu^>CuAJb9|q2?j0@4LkKMb}KMCQB?(1E#4ciJI7ciC#>lk?G9#YVqM)`4ojWK zn`vm7teC`6Bv9aj*yLOK)I4Uk1xm)w58%|o46vS`pbfyj>U5!?<$2d=G1UPb*gAL` zVbSuSz%b)JS=}0vG=Mo$*8ilGvx5=Xa$K6<|YN(l)=C*1!N~ZotD>`$Z4tW(c??XIjk` z0PDkBw|}_tJMI-kgmZRU0-y;ApVW}_L;}V2`g+9O3Fx5WBB4dF#A!JqCk@b#GLQ1s zCj2SNz~r>lCXw_sVxzMj;jK2+N&KUMjp(9-OV#W#o7OZH!>Mm|kW@-^fCi{Ksm#gA z?UjK&oMgG& zH145`{JQ5W?xVcJE0uDu_rQ+*=S`54ubE@|<8EG#gT7xN(Gzh=+x+72XNQ1XTS^)r zpcPXU$@G`j@LGh8j{J>+@2toMF!N;Ky?4X0cBRz2M)*h`bz0wTulm}K$SWZ1Kwm~0 zReVhfY%lu@U#lgg>}RnT5K}n*ApXDaCKPorP^-$epZqpwZ*se3b{uPm&P`T5En^&K zg=Z7iB&bNOx`pTqnrft2OaU2R0KtGLiNLGp56ivk`m;V;Xx^W3zVNVqB9PkP3`YTI{EE?NFsM7p617_doPUJ_9OuwfFsten{1>-DWvCV;E9xp%$F-Z=;R3z&Hv28^m7#eI+@&6#+dI6N zf=yWTAe}CsgepZ1V4J52y22WxeWhP;QMr(b$Zk+L*8PH@z88}N^1M|`Dzslygqte3 zYr(2Hc9qFx1zhVFy!fQSP~8y$(&*})~3dz``GTlJNH1 z8gc&90nRNO0Z%?iK1i3qOrN{1A4YwRs|o1?zpYRy+7Xoku^m+IG!1-J01oJI33FAx z;W${%9$PaP1KU8afua>94WSibhe^2>PZ^`0LM--x$|r6;7tSj0D$3qH5;ukx6RNXn zn+ltZ7RwbMIYQm_=DnYc4BRkzJU$CMPlO(rW!RntfSz{eKbDLCq$b=r+;sFV&W8Om 
zPrLU)y}{kUy~q#4`lP#Wj@4dFr*BV&%IIHdhQfaNuN43<5u!D%oO9qMbZVV<;K8of z0H!{JRTSVUC~W(I$~Ox8-Y*NAy<0>eKTd9$7uFkMCC2b85PP~77WKcHG$h|6L%cW^vT|o=ZoA#h%=ggDS4>l_x`Wj(1f4n7OiMMrX>fylA+mcbA zVQ$gkTY4TH(*4|>S0(95CuGF6-F7vFhm1VBr+l{9t8yje%!{6QQISH;2y9?>3!~_fBlH4DrAF z_d|XRdc#*So?!`~84nK+biol}G-`NQ4>9H^%YQ79B$|u)K5Ek;-M9QXz#<~ZPCFic z#zi+Sb;iXwKK9%o1Ap7R%m1<1Q0R6TuJbez7L*@ z0C260f3+bv3nCbW3?L_|uH;L;Fsw%MHL#|d@=FZ%k5hvJ{0CfP7Z-@Dc|0*;W z&z8}?>p7VX!yzI<{$O)p0&$H0QLd!(*Ktl%pV!y+d2TC^Zg=M+D{AH_^~jwm)#&SC z(5RN@YjR5H+b+$Q@1IbC-UhGZKY#gvFWqf-zta@KDEw_cXA4i92E4u%&L<^d`(V#q zzJ{}6>{!q*`72~YC!oh#;&4=5?avRJ2bJ9qxfPX~Hbe9CrL`RYwmwg-w|Nx>pUq!O zc#M8WQ+%*Ma9YcQqtV@gRNeNeo^yttgF?P50vEr){X*^3(CeBaA2<;^jL4s_{(~Qw zB-d*(6rpsvZO-57Bl8Kqn}E$)8hB1>yWI8`_=|5Nq~oP?xro?y*}XTS(Dmgp0#_$c zB5?Rf-!7NLQA5P_vT*o zb9SyMfV&K?v#|%FOnCthC8D&uv6-U~bt1=9Y8GJ0zFC+F_d_l(pwkXBVy3Z?R&z7mLgD0FdP9p(P#QaC*+wm(Pe=eDERK)Oc%SZJNmnLG8}6)K5Sj zog@LhMreu01Sd`+r!f$&fllRZ*TJB&KFda(ES0;x;?jl#i@(VXIBCpuVj?qGX+vj~IV}*2qY6<0pewluaStVae8m)Z z+)-v6b-9#L^$CFDc%n!H>5+nVwj(fHAIpX3{*iR3JcXwtf@Y$qQ+l3$B1VesHC?3q zyLirEgA*^dleM`?TzCZ3h>hIx1s0wQPmht+ct)aXOrP~aNK;ruJ)^w}4?U`@>GJix z&L~H;1a4y)$Edu>*($XQiDb>lX$zE86T zc?84EaMyYxmC?($SAUv>Gepd`V!rx^7yd@_Z~@)vgBod(P*Um(s01#rn4HBYvM9`D z^B7Yik;FL{$GIH#o5a~~8dT2;(*k@wd5M&U-wRr-W!h3KxUtwrDpu3^N}-sYg+Bb` zny1H|Li0Z)Cb5XT#j;h(My3Aib*|-a>Xw=Z5m_^_ zUMz#Gq8T!FnzpJ5-H9^LGc*h36IoB?_-AhA)+iE571g0BC!)2*u8UxzaNAFD9Yd-z!P zMBTI}$YQ!PUd#H_7AtUl8 z+6db-V`;jbg(6B@{vQF4Q;*r3tteKRl*y$M;g{_`Dqz9vUb>u5<`OAIi+xXZO3g4w z1ZSl_3CoE7;ID16qY80_`SuUB1=Ofy{>*wg3<4P(ddZmSOzMj<`3=>$X)^Dnmu^o zO9Jf8$sHE#47}r3s`BmRgbAHX9AFGsCaEplqA{yUPE4vFO=THc@o;hk{@m3=YDQ4t z(qiLFZkOlH5_kjIX|kPC8486tweHh4%GGk(+9yeCbjljMx*r9jI4xJHK#-l{71NgZ zwmIz>jN}3zVQ!3sSeOUVtSLz6(`ui|R*P!%iZJAaC0uw3zqVO94;rFr*mxw%VxV*) z=+E6{XDl^#YB6mvAdG|^OFF%`i(Kl8Fj~I9ahNFe7r6pqg2*J~X0;h#`FU+dQL!Jx zrUt##vvE22Tp|O+0dST}E^*9+_rDiCk{C}^F9ws4S_TPjd0k-2sL#=`l4C2HIjCv9 z-(A8o#S_|i9uECEv%ilbUEaBTLXpoIyV`REH(CpR8Hf&R5#y_E8mlC&>^_5FAG_6V{_d;tXq< 
z@O~=M6X7RdvieQV@os%8kN^KQO8?*|{RXhJi%0nLLZ25GzTfqNj-f*-{?KWz3i)kQOPy zhL||e64CIw#TE|{py^P~*!Y3eX`}H*BJJZBy0T?Dh7aQDs&U7XDb2>$mvJrhYh*SK z3H5;Nb{Wm3pd=V2TAMZIgM22mmNNbilAID0nzAVmg=`kPm1+xFGcKEoPm@&VI7OJqn;ZMwhQ(~PB(|S&_B@yK!yEDw#f(){>ajpyqHQ*VS#1@mwNuipg<<^H-(p9~a%Y*m zg)!d0f@SVeQO;(h`v20OoldMm=TpPYzZ%$4CdVgZ8!cq7qOF1{3UXBJ zsyh?6YMU^7OSVG6PZD^&E4&?~PKPO9rw$v=Vo zlns9RR&7$rir%ZHOkp?<+{3gptX91ItOt*2)iP(lW$`B1MuId?@Rb`W&a8>qGVjK= zX^R#U5vd?wok**Z_?8Yuu20()NM+YA9WAKMW^r=tH!+iZ5Oe{9GoGRq1Wi>73DzuYlrVOe(%kNpSk{D;OaN4v8nx4)g1A~~niT_?7RYy?t3M0$8@(E4x9`ezW?C zG|4;f)J15Smu|T*YW^{^I7t`24cg;$B@G!yMbj5LotAM~=x*``E?KhFjT!0PAh7aN zW34LaI(FjzEPKY*%3jw~-xf%pDX|(&Kg0SXD-HF0W>a)xv{=+oi}-@6Dk(5zzEZX9 zhlGZLm^{gzsi~!%3NG`;Q0;F;mVx9!5(CKuuF_kzR{BGZVg9rDHG;)13uw9v zrOcKF;dMh4J{*9*+)@)3`9}@roE0Tmu(&>lnBm`3?Oa)Mh|vF;Ppaq`>?b z%#>ydImXQKi{!^;YPsN_IZ8>Mqqmw%>i*(f6!`T~w?(b#Jl9Ic@|2XAo}GUV4Lb}i zQkW@mgQGJ_(^(u+39Kezniy)+v~7G3;uX1m(*uksuGmTuMEK|!&Qpre_qq8j z&>|078%2gNN^lKG1&x6oa4D&c-o-zoq9xRigDU9>v?3vo@~zcp=Dq9WLVQwm{7B7_ zqpoV*B580{k|9`N}`5rs$9EE7k9;F>bJ5*6kDXRZn1MY0vuutP;8U=;(_|MntH6V z*|Nz+U0{D+mlY}8iBx#a+HR14gnQYNjLu3!4F(t?RPen~{-1x94*!*=C#r=FnsQ27 zj4+kabRUjjXx-gZf~8s=PrJczmiO*QRJw{%$+Aby8vaceB4^T~{F=Rwj1eQxy@b!A zI;U$c|FJ6e*z(a}`%12=PLEq`lPxx$TJpc-4MinJjE&swZxs#t5OHrRbmU>3t7O*V zC@EmDZim577|D;}=1B%WGGumrxbbH05c8I}89# zi7gak&K>RxCY2Y%XJnfsXiOhuTYuKe-Qx6daab%zL>ktpo1}cV$ZhBrr6`R<-26>N z9%+p)pAgabD`U|S8Fu?uFxwen=Es}mQE?$|fF}5(HI!XrrkCkE3NZJFmC`?a&`8v%a z8$Ug@${N6tk42w8q_Kul$j70FoSMimrqGt~u7k1b*R4lt4&t0g z;_d%657#NobqaykFF6knWsJU#@Xv7aJK(2u0LHkm(9gZynyqiGK_bjht$`-MdIIJE zXxaR4w)qgc`H;Vi|C2QSW@!weX$hkp_xHZnqvOMQ+yKf$ zU)oY6pFPddh~ZR;5n12$bc>9mfl*ceQTCHl2cc01(>P&p!N7093gh>mK z8?e}Oae{6~(}%8w5rPnc7J?LlT?;S@up4mS`{e}Lj{1%Uf^vlE`_>j9u&3#S*G}-S zhzR8w*fk)qXZY97iKw0E9ftoue|SPDTmM3g9NV8TLUT6hSeRKq-{LemDT-Pyx5Vt#A+y!EJCDZiftHp%SW~8fu^x>YyGP zpb?s&8Cswf+MpdepcA^F8+xD@`k)^M;0|~=JOb{7N5Wk&2tzOoBQOd_U<}58VFD&$ z3Z`KOW?>GF!ZA1w^RNIX;3O=<5-h_hcoaMu9s|DvkA=s<hBF8th!Dbvpej_2YEUg&fEJ=UbQoHM>d|7=8s1Y@x 
zX0!|~M=Q`uv*1-cep zhptCApc~OI(M{-P^ec1=+JkOIx1rn79q3MU7rGnWgYHGYM)#rn(QnWL=(p%W^bq?<>yaKPpt8fcmjo09{ zcpYxVZMYqG;Pto@cj0c_gE!!fxEBlXCcGKLd>_6a{{}yRe~TZ)58>b8hw&r$QT!Nw96y1d#J|T+ z;ivI4_*wi1{2cxxejfh`{~7PaFW?vPOZa8{3Vs#;1;2)0$8X>_@mu(9{8#)Aeiy%o z|Ayax$A8Bk;1BUf_+$JD{uF_Ku#nlk(0?OMelOK^ElMBg3B9N@^f+x`31R_Tt}`aH;^02 zFUd{hX7Vd?3)w?%CAX2=$sOcQau>Oq+(Ygqzb5yQ`^j&}1LU{lLGlp!9eJ2MLLMcL zk;lmsab zMcyWVCGU`T$$R8)99?miTEABwvV#odSE?n80+p+LC%P~3ee?*4xx z?%p2c zo?4ecH47A%5!VV70x>d|pj`zae>gcfG73IE3UhRFWnpa!c%0392Y3@l+VIYAa%p$D#MbJ6XC;Fn$#>85Ki~8GzlYJz&dkpH&fDMFg+K&Be}oc!1Vv;Q z=H(Y~W^oP@gkv)FHdj<8Il9)@ZX^g)1oxAhrZ;vKOIn^J2+jq#KhxIO(?vuO!7zU` z+=sVy_O_n1t$Ldvc!LBHzp1^YvH6?tjCnA24XV2xIz(Ud(qTOg+#A}b&z`^HZNp|1J75zTTX6;TvBQQxGKKUWD6Vc0T@t zD__Ut6E#7EugPZ%lH45``n$sRTp{WSE@30c2gVT;igKY%FdD?&f5~-17wx}Ndx=(3 zg198fqqrO{NpboJV)^igtI9`eiEKivHFDQ61<1+Qq4U`U^7(x5^fqo8yE6_i6-5m_ zLE?IhxJ~5|D}j(S)K1He+ksDp_-vq zLe)Z@0<{)u8Pr8kf5*VwQ&2bHH8{%w4peGA(SYx{uM^?iY9bbD3AFW`zYz(X9zqB0 zB76_9i|0qf+}C}d;BlB2%g(EY=X!`f7|()x0aOWpJrNmLUrPA*c_eTx zr7q#U`u>UU0gqy655TnmaLt2j8O*7Gb|zFI;GIFH`kJ6Ee};A@&_)dHP^kIv+>iJf zm@fnzn_*lA_avSRaD{M<#B*WI_~D%5R1q@b|HD7>I??t28i(P3pmm@AQa{lBE6}hn zfvzKQ#eKkoOaC`80?%VrprFd&7qM3ku@|letTRv&h;(8EQA5-bSNRhDIR3#PZjdWzTF~`iBDgsCf8pTs!Gj?=A)7*O38aE*!3M!U z1pf$iKq-Jy7dkU^F_ia(BH>73zwn&!2hn`dRgo_&D{NcXxo}r_U-&68S6n1sDZVcL zQR0+Llbn$TNvowx1LcYT#d7g~seE^@$dD{dHbzztWySwcu7dR3_T5IWaXY{RMu5%M zQ$|;^f7|I$$Yp#A|MK*)xCgZxYk4D+l5#jRtT{PWsHlxfaH&U>m6b*$4%%`oxZ9G0 z<#8pYCojN?LP8BIQl?=m>V_{*X1f(+iZwA+n#xS^C|nkEn%Yh2k)cr5)HqnV>){Vh_3 zYCxkpea+Nwz*o|sA1;EBIu#0~*vM|l<8sOsO1RY*J^FsE-{Zr7C=EmBmO5;*;Djr9pR&NoJ zMQc0CrZ0S6+R(ri@w1rs_iPn0oKsIMTJvsrUmr&wN2$>}p>3mzmkw?cM(DeY&veo zPZPKYZf-ttvF6FCQ%~2V%zt@pu_s27Tz=+axzjtN{>iq}=j)&7n0lhxdXu#7PIXYNVd1^-8y2AX)GMYX(eeBWZMXz@sx_sj6q}U}>7SG$U`T3=5 zPQ@tVnPrO>*5%gJ#a>_0nmNCBf6bf`bEv5_e*|-RO?zc&apd}k>!!^ZcWOcJHxIS1 z%sgISu)b~D-bsJG@a`t7A(AUd-_cMsu{X`$>s6gOyfFW8W5<36SjI6R*=knL2?AS~sIL?;$tx%}B`mg4uE9#>q8L?1Yu9uu`>uC8-PeFOi26e3#oT 
z0g_os>!U3jA7kG7Vd0)~ovL&Zw>hD-bqh1^edc!znnCB^K^<4#KeguYP8{WJCP!0U zu+mt#G2YW`9v{nkpoy6=e}!5l}A5?2W&rkk4;Fn#P0_Ws?@XzLE<^98g-W=|`1=eBKR zj+96LecDrZCSZ#*X__RD% zQ8KyvGgL7)XU`SpGaq z8P+-uYsFySp#z)iS7*LHAYULpBwyzMNurKV8oY@bk#_ft<~>GQ_13OKAK6L{|CoiQ zbk-E85tkXDe^D-a^you}=X9TY?Y&KHZTp{PzD^0ZTQEm}R@DH<0XVx^I>Fis7Jx3m zUyx#!Vyt0-edzI)FgE=5w9_jN)a1UfxUXvqb7yr^l1`--&sWk3t&ix{n#3KWv=w`b z9-6$Xol~-U`?QMsZM*Gf7oK|PNS-O$9K;>L6YTD+e<)3fj?0M&nYf~&ZRugmJuU3~ z9AFdzo)a`)FV>57F64v@qRyQ?BiniYE?q7&dKQd#!EJwY7~Gx1g~9ba<~x3JaXYHLF)GHr8<4X<^0w*H<^) zJauf{v4&?*^jTEI)Oz#!D<|K|O*;7K@t<&he_I9-z#-VHfYl`I<3Nu2rZ>qyFd1EF zJF~XiR^^Q5mKlG#$hi_}oh$%tSp*ob0gP3Ik-(X$ahTz{&pZwH9!lwu?_gc!>U|Tg z9X)up^Z2RMvOk@^)LGq&?DMndwzl@U?C#3Z>!)`vGZvD^SM04`@zT??%650G8r9mp zfANjp#-94qH#!$rOr1BkBDp<@`Fp{k$q(&XP*apT4QM$Ac5?(UQxYbKXRt~3__^i@ zh$IPAMHneRRxC+7m)(3P{_&+7ueC0G?TL9?K7MWAMbatB?Jci+xGrasbx9OW&OwHg zEgwDi^xC6qjt~5m>3wJ#d2ZRL##{6EfA0SMylOK*J_UAX6Lkvc2-Xb^vpmw_R!A3! z_RL8+x%f(*@|z=Tj#rxNZ{=c{*Z`LDdFnK*roVqR>R0S0QU_L^_@Du$PG|cU&i?7A zpPNz~1ELQ(4O9>CmG8HvxX>KWz;AJ-dd7Y~7UYB`&U5NG+Xx=El`i~`Q#Wvpf7&zH z$Vn#GGuCRf1??EmB;XYre4kM1;G1I!D~^HPfxyi#2Y0vPX9%bjk3XD5uf_B20_7UhiJoFfm=3}r7lw6(i8Yw3=$Ho7y)V6QCcJ~6+xqj+cg!PVdA zB(dh5jhw?F>pul}MB(Tb>G-S|e}W~&eiAv6->kCq6p++W?{^Kc=~I|-<5bQp^pPHY zkUF8#`}vsExEY^*fnK{&VbUb=X*w*zww2Q`@aU>BT$<)sb>>M(NE*C<#?e+RWTc;h zF2xXuL<&Ik>jF=&bFm>Hw%%a$bPm5er`a&fPZ+*9pxdl%rk-A3(Af8ve^0!9&T+Id z+Enf&OAa>e*uBWRke-yfW$UQRFEo#y-Tmyv>dPCmY9q-rF}V#ZTh5K6-TGNn=MtTn zjxanouXSG-pFd(*`Me{Fftis9=2vW~;&6eJB@h#a0ad`KA*r5)2qBOp5EF9rTCBPf z17;6K+rxQBqsjVoU;m#Lf6x8$(G|bz)wD8SpJz^O^HGU zNQqP^FB*0Ug2*`o(6D_H>+n9n^0RJ7haIYu_1qpvbXY|0VS*_FE%YQxm~Bet(X>b; z89R=-5gwQFX-~T?D<^Kwz#W?{QL9uNs%#v2m`Uz*#IM_ z0j+5FQ&Ab7(>%ES9!b&TOw%v|cMO{bJAiGF^I->Id+1-mYG@zi{09I7N-RS3eXPUn z!EJ0COStbAC5H9&IGVn@9`ByN3j_=SnAg~4BSs#ae?75Nr88@tCZ!=ViG{*7u}gcW z{^sKP2^n^kCSklgw~E?51PSaK_XB`565D>i)%1ry=)Y23?Dh~I*2dvJ5sAaiX7v5S z6E)t2X-=t&F|!z0y%&e=Ki8UM)QCN zTTJPFf6^Tr;U|7}Gt(&TMB(G(Vw8H&7R87H**VxnJZRl>unUL_k&=4ACd-WhsbVdI 
zo3leVd*D-NH7#c?P-|z6Hayg^yy>we?PwmK*+i#kXHy;6`p_sLyLa%(N{w7*o#Uq! z?^}QZCQrwb=YHZ~r9x;J_lFH5s=)hwBt6>3f5h3q_Q^!0kmjH-&~4P0R0Fn<9-ZWU z*;b=hc>l$jGdOp#CV{4twX>+X^>NWAdf-D2dpo%1?!cOXwFL5g;`;~ac^Qs~C_pLs zKKau>h$CzLB2gEWiQ%CSFeZk6%dotfF ze+|YKA6uCDY>plYG}mIouEW0`_Gm=R=1&rFIHclVq9C-KS-{+3K4X^u`K2Gu&RD%+ z`ZF*6xOxVdjhW1Q%o|KQ+JG|9$ZMyI`;Raem^06;LJ24bH5^|Bkg(@XWOdDqZR*d+ z&br;ZNwnggv*n|MQO5FQ?|I<0EE0H)f4({U?xqi~+Gpjs#zwb4QnaGVIhtI^%<0nr zuTwO$sZQJ~eR{#+w?%~^A^ZAjA1RgMy)rYwyLAw9h=~aF;9biIba&JK zoY6=<-1_AsYna!Vzqf9xTF{Typa>L#Rst0koSnID!;EJy^z@gcz8H4uaHy8sfAUOA zT23R1yae8G3p1U${v)%B^N(c*m{ZJiPp@9RA7y-ZxUVWdh76<){_Irj zj3I||EQ4_}eJ=HS+{ zn@@H2rYP0A5Rjn6{=E~Ij>&SCE^eK^ZOqQU1an8kB%ybCz z#$2v4q3-#$>LEo?J~^rp02ny zt7P`GACNQ{F3bxF=;w*RPUZL@3DQ5jUy}=;69QWh3S_x1?ujDW70;MxhGPm>6=#tN z6H~0H4rM2$<#R*1WwcOdZy8v^*<3Hv=pix=wi+DM20x+1vn|mse*q$C3@_4mi@zA) zrDAxo0e}IR{{6B*r6!ZnUxsxCFwwjCi(R=fvP9yVn%{n zaay@rC{?MX@npCmCE0>Fh%`qljG2I=F*wt!^Hh!JLaS zjx~e%1_@!-&>=n>8$q@v){&E$BJ@1-F^XlT@lnN(e~)3jOPQ%8<)0^oIWDNB zFsTS8ea!9n4^zRzaSBcDqIN;f%=4du;eX$vqYcb{X8%NVlH2YD5%ZoUufwqKpG*Nq z!t7*E%wRo^WZ)x?o++rp*1p*HCAFG5NjQj1hMYfiH|k}cJn;x)O6E1BOePnYq987VvRwxS6cD)Z#j zqDchl8}Qwx&T%(DK7uo$duJ|yh~G7sXs1}LxMvo5e<*$B&h_u^UweG?q3SSgL{x$Z zNiELlspB7iyxHZ7BmaHwn|Hq7-j|j}J^NUZI$Yo7jr0E5;k^3NsTU)`{1XKLu>>fD z&y}#QldfdK=@>dM37jFa1OS>)CG-c&ANvfUGyNy*=JXh`K(Ehg&K$FE^^{SbWYqBN zD~R`(e-DsoW4STOD4(l|DVehG;J`?0FXly_?>1Q34WQW;%nLu+B|vu9y{-9)ujJ}o zVr+&3kjd9a_Lc0o`JY#MD`1Q2L?IGe!gPvAi-7ss%FAdLFT}fE2yyi z=;Q282R_hug#v_G+(y*7D9i#gZOV8ORF`c zWIu$ZyaM8y1>ALzbKmFrvBlK8ocNGDaMFO^%U~LVkjb5LbU2F6s`P#oAD63o`svzd zf4isFrX|NHoh2HL(V89g1y$xf+!t#$7~=A#ko83wt1r&Uv!ukhbkn6$d)tlNBAoGO zG6mF!z|nM~n5esV5~gH>Z!Atcy*|KSp8p6>=|9Zp2ad@6XZGyD$KCTN_I!*t{0{D2 zRrS~RE}wikQAF{$R9ND?lvfYs6`D0Ve_K@7o0$`4PJ3+N(#leS*u|feoS2H_CCz!s zqsyk`JDL9^+0vR{JagQcyz>?s6~DD^)vMWDUa(Rf!sQip^_`U&Q)OZ;pF?qjgv4Zl}sMGhwq{)9UqiKu+$vYb&}ky25JEt|=bXFEbDO`gp@twf$$ zz@%mBWf5T-wLD3LB-{<&DGTze8?uiv7ivHOafB5<3gzvXn3P<>L`}463E1fA1cw12*vq>FEgx8R<(@_8ey9NNZ#;KSmuDCqgpr 
z20W0FkdV&kyxQti0I$v%hZ?t=wW=^f7XZTF0-=rEjQ}nFXV8Y(K85oEpmB*A8HuoV z-_^b2YwbF9q_|NRVO87%y^$SGNX%fggROUN=j!ziA-{&M*-Wl`BvHqLe@BEEB9~Cu zs|WJ}OBQyStS{&DrL5#RhYq~Z&|3wE=N^YJCKaETga+x1Xv5g)Id|?%iM1J2S zcf&wVbxJf#bt-#TAP7Ol4W-51I)yOAoylZK({;)aPNb>Y)lo*0^0WfRUY080f9lK?Zdav6WiMsY zM>y0h6HQ@AmPmfl9A->to;1I-bX?jZW^T1s0RpQG*NaD^HC@(hSCJr*DP^S?C^i=Q z?Ag&~4EqDEN6da^!@fg7^L~JRI|JV97xfMwEe`9vM_wl?-M6K*Q>zdO z?72+3G}{@%$tj;Ve^(&FXv+%hpmPDNzr0*pow11Ntk6W1=T0q5-eM zW>j^kNbT1EA|HEIz$y$|eB=n6fCk=HDzWXANdi`lB;H=qG54*dZ!KtBe67;ek-KME zgjM%;CD)aH)SH5328Ee>&e@qGW0<3SsQ6xn><#%5; zu~1Ay9CRv%85yph27&kR?3+5IHWqHISkutGs_R&ZJIb3|*kQ3h`kIsxpuc%miX!P_S~`v^O)#Z;hILx5@VbvYpNJ?!ItQ-hDxY4|&DT zp#aNZ8lL5mn{>u`4F%O&&7>`Fyf~+-Qzusnb-JkCQ}SyXnU5`&T?^giPI0&-lsd^= z-8!wrf07axZ!K)vzxX#XA!<~(Zo`<={E3^=Qfs;&RfdW1DVEIl4Vl5Y#GXZnopQ+O zSV&Hu|AP$(7ToNMfsz|HoN>A=u}R;TXRS@x?CG&6<2jd@-xqjK)MdrTO=)sYY9iZp zim64$7Fd@AiT6_52zrRi{-A1TSs*Ikr3(NPe`AA8H9=aY$$cPFSl-8ANoy>BNEZxU45h7Wh8%;2U01pc>wcEfT?BQ*!U?c zm9o;!W+6k#9L}P|=wy^BUjF_KWso2?86_sm^ijrM&}4q|1iD%x7|AhG$&HO1kB?EBa#6)S#-fz%T{TWITMbi{mMVZ|D zEgEoZrTDG3aO&V4ve^3^MHV}SIF|(NfB!q+l)~aPBwGHX#sC4p=MA|yAdKn0*FPRi zs7Q1&UZ)o*a!QM0jmT{`*vBotT~(CIjJBwx*$;2cvsjq-4UxwBmrs<8$pkE;lo5{b z*!K1&wLBVdjEJ4}1oM2aoiZ3?BBgS|tFMk1N0>;1fh&ui=Ns(wU@Hv~;`e~3f8*CU z#j=VwEI7b&;z9llI20Y1TP(W2To?0$ViQpU9TyR-V%EA(j!7@`*mS9JAu)x99Ok(& zohn@E1Po2lrhLYMeu|Hi1Pg?~%Ltul#NdpTtMbeVPDNN&-5&B8ja45S9_l|4r8)sO zx&PY$q+@e(y+$Hz{<`hhpnR%-k%+_{P znP1vU9$2ye@Q%Y^C<%x6{L;Db&%vb!&V3)-BXte|*6eN8_LR zZu8abw;xJ;3O4-v58bb{(YU~OA>}GbdGHE(@K@*xM@)+D=D}i|2cs+a z6D!}B=t_`;3>z8?lKk*t5RI;sV4pCOYNIM4BUiBRe&lz9eCIHf=wI$e1+`@%!9t-~ z5}#S(8QHm#9N&^H5QGZMe~S3b@|4_Z4|6->tDRyZf8qX0e^1MZ5P>j6 zp-?1blzVbJ`cYF&c}Pg8P@xpV?%jEvE16dlQYU1>-hzTM5^Y7ZYNnnrBv@L~^&(+V zP*$R?U=Hk`AP7EDkH6&Q1Veio`)x07BO(Uw{B4pD5vu}iikL<02(&rF=5UD^6clLl zh*)GKj)LuV5H=`oe`r+^?L-S)%ZZu945+h-UZRWb83}i@ppAbUp?@kn!wMrgL?;w2 zF&6sT;Q84^4|~@F*A|#P7k-;zW)+N2XM458D7emJXU&AZMpy~YZzJZwN{#T$gYeV- z%hTEio=+pHS=c=RJhg-iFtC9S*FpqQ4!unWTY& 
z<8q4P4O3LocyD({*Wc{9D`4yr6nigpL}m!WU_BnRHNA_5SDKZg51@S<%F2PE&W`kS zZ4IpdER4UhsAJ_~YCcs3W697~4t6Z+)~4qE10D^6zBd;SEgkl0s0%>c>4<>&e<5zH;@dQp3KlKDn$_eTe?+)rTESEfeHQGsS zgRxmqVyDe7g0>m%EmM8)+$JbyC@d5U6wj?J5gOO$-{+?T^H!5P{|!pSZ=llPUJFG5 z^%UM$lu_WM>Y$`C3DBk}8Ptt1rW4*vrNF!dXxG3zE4f2k$6HGE!FUy8w<>cs7l>m#Tt$HQcKKZxQrYLc0XykV>EMy#e)NsHX`|P!*&f#>?QY zhf)R4lmpIMc#h!6hcOAz6~o;GMGWK8fVjzyMZ-7Z?mVy;ii)R1&=W?fsT9gc*{EzP zm&&Iasb;F38letRPmmpiz0-eF=~QN5w2&L^@Q?bwy6x5J>%5JA-Tq^Q^>zfV$EbIx zpU8#cP%_Hrt{$`-eTaUf)30B3+|>iuV{m=LSeQl3Y33h17w;b4xBOUsBmXJ>p9Q&s zVZoDvZ-nW>Vd0y?iJ<(Tl|gR@y&Wu`b|u2KEclt=Z$nHWT_Hz9MWKK7p&LRkg++&1 z!uE!J94-!T3V%5K(}?7VSrMBfUW{ZSTO*G}zQsxOl<#Xa$?pdVf>tB7POsP5tagi8 zCXE&f1q`_h)GE@42GWIktEshYM!VS-uX4AyyWQ<==vE%Wkvkb!wF`u(=#lf}Y`pLfG}Ngfv%US82n@6L?>``Cy$mI7ldN%`58KIDFq{ zu#y=K(W~e@VvC5ivGgkMS-KPry+f$9_`X5Mkd+FB#&mz>p-MXDdpsQ-$jaS3p7~_< zCNStu*h|2K#L+UD#Uf>G7Bip|TXh7UUQ6$nl~g8q?V852xTs8}Wo8umf!{lEa#m_f=qWLnZxu&QO-eC>=cx8D20>^aLu@ozDHyxLim#wLg^ z%r5JDjXthsoy&6SR@@(c?D*qL%kQx{kF9whf9QWq&NmiD1RY$~vgI3Mtt5^;0SH2< zaM(p;5reLaSuyKi#TI&Oc6bZOV3IxQWmF*!dBLw{seJ;eCe=~~gisX|9baX>qm z^BsQe=sBOuR@6vw8jN>tWVv-`C8&U@a=}#>AIR zZIM74Ex$oDuv-Fwa5|FzOrc{Jvc08tZM=UZ&27%Axb(vL-}FAvl^^A9YjZe z@vNVdh*(W5TR`LzMd&)D0@wr%>{XP8m<{RTH|o>>y7#X4GxJ+7KD73OeNcaZEbicB z0aCFLui^-ZfXF~Di;_l1(BS_Va6E)6@vX2QI)%ZN9~mrjI}LRE5BER!QtSAuTXQ$G zh$ITjv8I)?3VUW~wMywi=FVQLURzX)&%ATr_XpbJ!g#)k&+2s{kxL!~_sxH3Bt41P zT?%AK{{$3dH^c;kMXWZ`YQKN8BTdGy3mi5=o}eQlf*Ng9tQ!{TbYYb}9TIDH?JHlK zwYd`m2A3u-B9tGZ&=nbYL%M{8^Rgb~Vedyr9`9U$aW6`%PsSZ=CC69mg>*XLGugi~w zh>Eu|C1UBKoFjnaihb=OZ{xX9+(8611%lM^=$0`>B#}y;?+bwja(|0oA2gs3^prYG zkp;B?9T5{BMd;f^6dQ1=4*o{18~=E4@7wjQzr8nefnDawRMWf4U83Oi_?tcFd>0%g z2(+QQHvRRe_>5@K71e*mODFN~4qw0@ZOR1yWJttXi7U%|Z+w|66yBQ^bQdX6SGGPLTAZ6~iZ zczZ8Awc^=DC5=_icxgzjI8G9uKKFh413zuhT|o`db^&eT+$@}11wV>}97DI^HvkX$ zy{%uZnAzyl?wNl!mp9L?Z^dt!V~WeY_0t6LD-Xya4E?+?P1Z<@OKZm&kN4yngHo#% zB+62U(WX?qz0Z1!kfI~l*af%+?U`-S$wC>>uaxCPm*5D`4+qRzXfXe7KjfT80PPO9 
zOjJxmbH&ANON=^p-W>xw7Mjo}SaX~0In?Wn2(CDKDByo}guOu1PUQ5j^%AK;yX677 z$?Vf{F_+O|I@!NtQ))@--oM9q|H=<BZ8KS2`E2KqC9#qejCE{ z0d>Y5=(T?a7pt1=@AoKi(&DCsw4ywI#pDZg=}1~O4Uth@J#S(UZ{g&TWwU@>%K-Ch zja@}+Nd{2^)cy()D4a?JdKyTdg*2mZHht4NCO4*^j&5v^n7;K3i*^CsSf$KQMy z?MGQJc2tkd-nYQhG{0*Zzh5E zM##PV(AdkW7<#Ywd3xvGJ=+MEcsu`l;8MnE9dVWrT_L#Yxw8kWB;RNnyCmWYhJsMI%UnGSDtO zfZxS`$0L0HO9$|0_~`LvOCLeysAT*MkvF0Ld*EG78E(&eASD+D5+cNi138GARs!?h zvVahl|IgJO1D-okNr588j%VZYX6HRyt9O4Mru*>N&EhD2VRZNWyd-REmL)M-x{u!H zebTbAfB*f&BKmw^@K}%xaqkj;2EyfK+$D`BK}OG|C2Fx{Ds$KAjr6o!-ocy?O-cA_ ztY?KZp}=(FTHP%C^QD3%4H-#s*-=_;`ozw~8!f$KXOA>qd#)(Yv`G<{7!IK)=fr%+E+-?VShGTyRE95p#m_fg+u6D&b*328+U#Uwl%t9Z;J1fND z`9EGcJ#WK6TtNfBLd#|kdwZW>Ue)>JM|8@Zsz|w3YcwkByxzaO|Fo&#&By4y%bY3% zI7DETPX~MecUA_Eq~pA_KW&%E5#p4BAmTq=mYrIM4JZmWJG@~3d@iw zlhR6oaG2YCcH&2#{X%gt%Pr}a@ zg(R@=Tms^cViJiTf=zf$XN?v(aiboe#@n^HsMuaWSCpDEQOE~bMss04z0;E@jWPWE zsalj(4R9EZHtfEM!%W}gZeD-gu@Zq+OY0MKquxQ9-m$zIbQgsPS*d)vcMa`nDT+@q z5nqMKX&+7y6`bYxrSeP9Jtd_8K!0%KpLT1^1NdWEp}oQ@EambDe6GE!klvc$GS&R_ zC3$TA0VjX?O<+WaT?n-5+76J|ik9C9vK(2E7aK$bwy zJh{21?+miZ3z7>{9UiAhH1`+Rw1lj3>DgF=dF!5q)%_>8 zxAi$v5~5ZA_WF`~l=Ft+hLhY6bB~X}+sG1xQs+ z>4|LxUJCe;fH7J9fyN&X%~S73(5dCy=AD=~a%n@wa_4@PFeJ@_#snc1F3ZeJyS@mN z-Rm8>d{~nWJ6eAQr|))((vl{sH0H28g&kOv>rF@&;il?@MV^?jHaAc?GI+*ta zL53{6taPa4ncjc;-sgJVy{_P}jG|3t{n{9<$!d@%H&yW~CN3?CW|cg)^8SXrL+gLN z=Px7HLKN38ONdYP-n~6K>7m0o${nd|ih zw|F{o=4YuY&aArJ-{3v5{oQM7tyFDgUFfG52kP97vi*PS4y}9jPe}Uhk;5yKqAbn( z)!f@H(9R<8?^8TGz*yk?FIL+uHof(RsEOSDTPu*tr4}jrJX+b{{e8YIdh=#<<=N2{ zWx3W|0WVA>Ptem_A@5t!76W#yL5hqDddqx|X~%1=S%xAz8yqi=l!ioR*iNmO54KT> zOPD_Zrx|}#E>%Hzm8!YG!m&!MRAzCQ<^Fh24m7Y44*!=wXBPVBm^pBaUl#%`9en_~ zp2?KN482+=SYpU^E5A5Bw=BD++MZKmP%i6Xoy&i9 z&%sE=BIcpN%w)T(YgZlLm7+1y#)6Hr?_o1D8#8~5Gh+*uc-CiR9$L8Nt-NJt9vB)t z_)KBO#P{MPo29PI9Vpw)0<4CUTaPk*0uGa8_*D{PrlP|(tH$>_$yyu@f@5r2T| zC@7eNU0y|}gQwBZDtRZZf+|^^{1%10pgo~nT)264;VlQ7z`4f%@<#07NOl!8x8&!y zG+z?OJ}@%6EH_1$j%bl6R?Z4S5QEY@{QiIDe1agq8Rtx9Z7ytE(p8#XXfT9{q(N%2 
zI9VH=J0$;(F0$sEgj@_L2D#tukI+c$2m~PR@FJ81ULkQ97@_o5{?Gm1&88QiU*)uH z==o%0tjdZLlnJS4bCUCLLO}SFV;>Y|D-@a_t(LK@?ZU5Ij8TD5Vj|>uDAIMD+me6j z^5;&(vi6rZ^v>57Tv*X7H+IiHg+A{HT72G&tRBAVX=Y~5v08*$q%@Z#=P zw79!F#ogWAwYUd&*93P6?hY+dio3g8ac_Y3WuE7}XXebDnS98H$YghCGymQDx_<9F z-BOc$n2HhK$^YmaMO9~EwcF?_|I!F2*+5pukf32>2>*m%s8^bBslwWZow<|CED|sJpjqY>zJ84ST|HAM=MEE@E$IY z9NrF#4)MDPe@J>Oao%QBtm%FqyJfA#^tc_?kGYM*lB(9uD9%2#-!QW}+L_+jLdmgz-Bi<#FiXN-#B;D+azBsUrk(@WxY@edsSY45+=Z%zN(`w!jTtQ;$yY2!JvfGA> zB6oY4HGrLdOAtdcV!k;y_4!b#*E;|4V9vjF$jFwg+FfLant}b1uIuv*3roEAQ9ISs zurZQ`gmUtPFZ;*sc-j~fP0*Xyv>teigC#3WeY9fRIgDhM!A~akc_z=LXbO}CLPH^Q z%(<5`E#J7X1G?%O=68o(4SEN9M)ANmQ|X8%1OC8pjL#7&EI=B5wg2}++vp}NveyFlT+haKwQK?-YSxdjI7Z;qkOnS8>zlm}K z00ga`a)z^JNKO*MGBd1rnKR}gpl@n)i9sN$&7r6VbzK|CEt`=th9Bp5VG361YN?03b@EqPN3!WwbS!q&qSObG>GxL)X2~d7sC}^-{;(im0jh zDt!uzPeTKk6!7-x^-(?03pciZ&(d3HZg*A|JNPI&R1q#EBz9?VDJZ-)+jybp&CAHR zf73g84c(r)cmMwK?YoMSbzq*6oG@*}z)awiewm6EFkkOA@V<)Q%4$keT9{3A4saIK zwwy7`5KzaZ+eZ8@xfS>gPoL`xXO7bG=jRRSAC0ho$t$744n(Kg z_ld}>I<`T-0<#;x#s=yBQH4qLln7Vn(zJThdRR!JA;VV;M0=v$i;cXEx-olH5?ZO^QB}tn&FqU-AkFpF$?AQ3z)D z157KcE&D5lkt5|M{ffW-TqSH|oGFfiyG$;MBc!QY%3E}|*-t4*U@DkLTPfaVo%WK~ zw^iUKUnb0;QOY3J30s~-00of1rNNbsi>R1-8t}A-{t_rD+k~-enz(1LU!lEnkV|+G&QW>3ULr2~X>sxpR^5Z8%8(Y}v;-MT$ikU7Q3)O3t)mS@ zmK42)`qGF>;EPc;YpMW0jgpBC6pK{)g^B5vyHfYclvL-eDyO5E*vL62PgY~B{d#?Uf%yFV1D@_scFjgUuP^h z{V@9hJU97)P4XC)n24e`mF6^|tfg((?B7xz5ur6N=c?%MT9zu){IS$>^1l`_ZzYkX?<6B?^C6*+&{G(2H?4D}XNY#qeWA zi7-w=DC~cB=KSk2|M#}Yk{h}gP?Wg%n10e+M7OuvXHo(i!UkiOMD;l-@hOk57cE5l z7j)w)zB{Z4F#+m7#2>wc5p<5!B&rFf&{h$q;su=KqbmC?HIg$f!0j8Dyht%Rn%!xg zVxo}e_vc`tzrXpDXQW(He+h^D7><4z5c(x;M$r#k{Fg8@sgd*c%|?p_O}Q1O`#f!kb3|%FY_$gH|d=}pn zwc;<+U6EfAvRByhW}iam&`&eQVr}UWL!@oEn&wMAcRIgK7#-xJHUIgK`akr>u+uKw z<#6HRq(hY1oipG6mb2xIexz;>cFHzIfM zp}#O2gzcYpzVBgSZ_8dE2y|jSr?~NMCtXiR(cM=1=6y`cfEBBr?Xdd7OcV$A0GKA0 z$DFjsxHbg$cpIbWO#njdX$v;%ojuHzAEYXSU7K`%M%&H3S;BNkbjQYdOBJYrXg%ho z>^(MknM~G=B)kT75{V(5(c*el>I7q%TjqCI$Ld=z7>LEczBaM5*)R|>bk#;M<`{=C 
zGG!I7uhATKrD`x{sb}KBEjF(=f43hChVCXI7g-IRh$fGU%Bd67|(N!*fP! zSR}~mb9WXO8$I_1%h8mT?+JP!L_`pywl-eJp6XVuC6_iNzx?W2rSg__Sh82SecE2k z83PkN{Sc=3!^qjK$>pkzYVVR?_)H2Ft%>Rq6J6AQa$0e*3XXUZW7rd6E7qVVvzR7` zo4T=ZDq=C={dR8%{4z4GxUIG8eBRd5 z%FZ;J62A6Q3ew7T8s+Q1pqX?Lg{Q}OYig@%eLb$iI029QUH9kQ<^DM5p+|}DjB8&N zoi#o3D+0T$LCuw%>*}7Gp9@XV2HuKzYGjn@`3Vwc|UU!`xMlaW(F=*{d_ag{2 z_XK(Bc@~~7_v&GJPPd=IrNnK?*>F2bY$(SbPB`?ZY9`w;TRfe?5_gN>E^Kc=AF=+% z$W^$*Z@`G_ZcbCp?u6xCeN6)|GNg_g`%2me1rj;#Zd98?O%p0@jZ~1LfthUXxOB7B zG-3E~pYyx={CC_5t9o_@gJXQGk#=JK1knmlmJZMQ3u6#RwK&DW>Y0}Oqmg=$wXvr0 zT&+TIXas$Gd%5jd91oMi@s#Wg=CmUsqyC^D4L}VC{g8(z3_srHTGPd4Rc7!fi8r>T zLWzyW(YJ+-ZpG(MYZE$zsb>W}pQ`T1IlgO;7l zzwXSxIFC8%&*SAaPHh^7mgRbJS+&<%tN^S2s{URrrUJeO7e7D0K06cdq|jOyv9vYsb^==6A%zAF-{ox&$hzF69Dk5#ynZ{=o!;JrOsm=%4VEVh zoKv;9ClpUu9Z7}CwRCuDsua4bq;!nS=(4It&HJ=&FwVyXJPw=RYn9n+HE_0(Y~9kZ zV55?+_cUN+X&en9)_2<@anc643Uguv>NYK#pHOU8s>T(MmZTRZGI_MC&-M9g+W=zi z3mysJRtxE5qq!YDuu+`Kc1dWGMqP}ZXgQeI_$>SlFCgo_bRyP6hx3XH$R z`c-7%By$?5F6iSAt_as%&C1431|VTHIAPAXYUwgbwN0G=ep&yc$f#I3Q>DkulKJ@% zr8;K}9u^8sli6Lf>x#@BpKd#e1fe zWi}(d?>6Neh~F2USlGbN1$MlJyUPVV(oP!L3WDKG+>-g&ICGe5x<$@btI*9QP+sQf zmRdBdlc9}wBZ`I>>wYNb8H{5IePXB|qId5BBU5#a%X6)h{)!$?ybP$1aV;wSa-)H! z7v+HnNksfuVn4!T0QET`5?~(a#3YO-(xz+uJ_bqUp*!1>(%`sN`gtd@pK(lrTDTv6 zBzxtDX4xYuL-W{|6UCEgjq-bihs|YWv61+0DL;H7An5{&<+s@2y64M}Pb6&$0Xfya z6Gbw_7wVJMVAFZ-#z66OK#kwy+?Bt5(u(Ett%3|)Q(Du5wb?X$| zWLuoqO7LXVVZ)N+ywfIbCtg5Dzm!IjpWZ!-y7O5`l!;+H#lQJwf4?@XMdSz;&E1?k zq0alk71-;tkAb*wJ%Ht$PDBXt=m6t}J{meXvvDZN%zZS8wo{7fNmSzM zxp=vDWi#A_S}-5Z0r32hgH(AXzt#^)GF)x!@A3;88Vq_yE7_hB5y=gZe!ZG4y<0nU z&sA-^uHWHowfH%UFVe-wi|!01fGdEMO+u)>%k_J~q4>uQi8j8}vnNqvzGb1aFBc{{ zf*CYt9g}Xm_^P6o5ge6^5C6m;D*fq`lr7RKn|RQe969Wo1>li7n;!~^Cs5BOb(Ai? 
zv6Gcmzd=J8f4Drr#&6+bOro{y_mv}i)%4Z4JcV3KiAy2#hek7T#RQ85OOj;<71wq+ zUVqAy*{K0CoR}gOZK%x^%PhgNxPM=$^g!HW_}NxI_4uvf^meNb4JC`$s)?GV`tW2% z_m)S~<_>x34e%#zJrXtN%<0M_dFxRzw_2y0zEH0>XLUxpwMfM7iR)FjV;qLo%02i~ zu(hn>`btgt?}z>MrmLndVP(uvA{QVI8i68B{LO|4Pxl8D=NOWDjC6~cVkBJ0 zmGSY;0XqS8W?hWU-N&U8cQ&&UH|eCgYstm$E(PXHkt{IcT3IY!&QN`TDd|5R4ik<{ zD`3XlEEf7y-b`Iyd3^j#Hc;LC9+}qCEJeLhIoQlr6%;&=8u@WLmX1WJwprm>oRnIq z{(W9;t5xOrGiMxxI-!!w=VSg;$tG53zS+tK5Dd0CTTP}xIWyL*Z1iiXnTW%mI*en= zp2Y2Rp)q2k(U`&OC7-L+2)^mBZoZl2X=_8kW7Z#;ss2)Bs`XLvQ`AgAp(j>aZNltQ zo3F38KyJ`n;+^%WOZ#UzSC?R)W}P?D&o)YaYbWVM6;1VehX$}gT|K>K9rlCD13Z8R z;9v6={ul+~MmqVS2sz7vpx?%|F)1kM)`ES0qCh&IDt=DXAlqv3v1-;u5NEZ;#Y@S| z+#Mw?@kkOA8;oZ-Yq$9;M>5(LLQ|eGi)%bm6vX1ctq+7x#3upOm&kLNafL;gUYpm_ zt-lCKRq)0B`rFj$iBC7K;DWglqjNn3K%jL$4kx0t{{xF0=RsLLp)K&8vdR>$fiM7V z3ap-m5}=psJkF?Xe6SC~BnyvQbkt!$$f;4YKgb#R3#)}FRJ(7mr0l*X5iAWSw%A}? z>#@Aj;}!qvQ5B^)O6+^v@23gYs)}HV(O8}3l$Oqvt|#m0De)YCW6)HrtY3Zx3DPku)a5?G?Xbs4xyM_A z+$96HG~^@Y=u9*CdTKje{FV{`RMQQM_4Tzbni*LPhK$vo>k5>Y9}gFcgFiU)$Gn{q ztk2k5`kl?TI*IK%)n^Q~j_Cx7**Zt1wE?x&wFEOuawDwK?cLhVk$X;aq>vH+LshsR*r#_x3x<|7t>)^50 zq%&XfU9OnyiPn;o^Z_ourh5|Ts;G%def!-{%kYNUA!j!uor` zsye<^bei-33g}9a1n)4st!psyAEW}9A~fl^nK~xMlZtDxqKpA^T!yvj4b9m$kB{J5 z$+p=fqMWcYRfgN32JC`tlxrS@RT`A{FHYjmUt*~ve*R#tZsT5J-Pg^?_>hmp_(k zESl_EU0|XI24@276;R?C$4|1R&FJ^!vcYJ9KH%01F)GM5NvI7)IvNXX}hYsL_c1s*$T2b!F#5lp8cqHVZ33G zU{Uo&p$5F(UogFr_`a^@H<~lU*5u?Sy4|}U4C_-H$<3GViZ|!-;kX)#1=GkA{}Q7+ z>%MJ}?-tEZGSRiErgZ3Cz5NLJJm3A%m+woO`YvZFFT5Grz*Y|koU5BkdZJa zkPm1%``Yf!A}{n6K^_4SLp7~6Mva)93Ylia$ziO79mXcDH9MnrBB{BcOf@WV zt(Sd5G3HkU?i{r?4GDwoJtA&Or4A-MRth}FE=;%?BK0VlKZjc=%^@p||7?m@uNYfy z#z(=$xTE|ul*EsGrnzGWV2{)%Xt4qkX$0^$Ond0HG=BR_8g7~9 zW@MV}yacQ?p>aM70TtRGpJYZO+E3hrV|h=PjnwL$7~w5e6Z%-2s=7$qz(x&TDq_6jIqm zk%5XW0Ig50`u2{IU`n4?`F8-pMr5+T_lBB+5Egi0N{p8aWi_fmbx6#MvV@qnoN|Fc zk=uiEUAOlAb&`9E8;gxLp9ZeCh|(aYecubUs|rjocG(Kw`^y{c zA-o~Y)~v69vUpqD1xW0jNUZb zM@NQ*zKz}Ne+A%vt;f@z>irQk7{m#4ID=nMEC8*i!Jnij~`Y-UiXmTN-na5Rk0L8CI 
zE|`FDNJcyT?6r~~EO5ZR?-fpOW(MeX*b83PB=in&&9ymdLfd~lQeSuB%YB8ZyCA6+)Htro93keVkNB|_D z^&APNbxUYR>5C~qrM7At>t*-oD_PJ z&i!0U?z#YlJ1YyAyH@7GB}wm4oE@RYWi}(QQvceSL&UbJbrTi>4MqH_{?#TX1o{m> zSaH`A=$y9sz($4}=oeo+e0Xf{VDo_?N`S z{}0nt?fqAO2qaVVT81Rla)hJXF+>fSe+ z%)H*pB(_|E>S_+^P)bu40$)VJSR(J#Z!|=+M344*l>|4VKR>gG+z5NGLW#1bxKQzEe60kfzFo zaMS!5)HFhGVq*0!|LGFFdWjVvVWDbt@fh33af{iNW>IK(5g#9f^o^VXedOmr1pgs! zH1xij90^hk71D?8m4p$0!0?TdR^<_KG@bxB zb5eajgrEIxDh}Vdn4b`gEk6DdF37VBhl_kydFaV#nb88|*3`6wFK{q0;-g^>kz=Eg z>h?ObhbeB|G=zmI$yDNDlElCLU_ejt1fFtIS8=6K>jI@T^05(Oy~7HC{HKpG;o*lw zlpM_Hkl_LIZ|v0QQPJ-3h!vXqDO7+Qo`_td+gcI1FBa))c5)mawOid zL*n1~3%GQ-8Is_B4)YGXG&SmyP|Z*sQqodWA56#aUr~O%>^a3TX1J4I6v*@KD+yiT z@R+oc%KFIgH;rseC)JPnhAcVMIGmwf$N5X4xdX&(>^@1m+4lk z9~~b9g03e!c_sgvF8$mYd`QVG5B4~wXi;HHPsn#N4}jJX4Pcg|9|f@p=1ksgghi&v@s3HKVD#DS;ClP%#Ox7>bHNHhP_? zN7)v63OzGHoT9NS+(`a(S;kLgjY-wdi&Dm$guXDcR`1J=+<;pF-i@gNMSM z1~pI6c$Rm;-`iW`jd21m9@IC_0|L|cz<*O4S8S$$AUy&HNHdRju)O@iOx*~=Gto}x z4Jcd4g{8*eah_!-)?chs2_W8b&vg$|cP8PsEQGp(FOzfmz0m8q<|GQ;(9mZeeDc<9 znO|-0Tht+bEqwonN#$ex6YzBJcwf+ST+OsBz`E{!KXjeP@!qw~mKe~#>6dVtnw{Ig ze{l2>IeotASlR47ED@*#%qH}|UgX>hPD5^9vQ?D+;1iP}I~><*7$G;jSXd6k(gFDH zX@2MUd{2O2h`m=nQmsr2b4HbC_qqsOfn^ID4&o8&{fk02aMnI*L#yK7QA{391CPe| z0PmY{8^@V{ZTJsfvisHXntj@o|9D@@7UnXVIP-n)y?Oe*PLRtg@O#P?K6C2&VL z!W&&ZMn;hP^&hy)-~X;QP8N@mWAU6mDHrLERw#>aG&To$$zffPaK{fal&WOAjSIrS zWGw|}%i<*IC-!at4pT>z2uVu@^d$#uqR9I5L0XvHM!}1Doh<9Fj2v-+OmhTQF_`bxpof&W}CPBH_r!)C>~FmXN7B>t zRO%uOj%1|(`ouW$)C5Y$t2lm6EQWi}RjcXomZ?}9P^(vJf!h?MpH8b(ulaz?@7*C9 z1!Hy_vQd_G)vXgXNrt_~XTF~7=a}F>8C(jd-C^>k{N>jww52k#QK}&5@!%fX)N?d? 
z?K$8aPGVPH{!r2(V{Fw1$+yn|@af`h28RchO}rnHBu+YP-xd{Tq`FwySSvbMyQx@e zgm$k_#GsCY+v)*!sjO?0qhJxNYLzftV@Q9=@%T4r9FXG5vP&rsb~c9PPBW6R1IX4DYfcj zo7Vs&>s?(*ll;=xBNng@l^MTl9BVpiMePB-$j{i(X)k(B9JBvGd+*l{ooP#+FDJ}o zEMhRe;rmfLD-f=4q~FdJ>oH$u&@BE{YyK{@ka~4=xiyk9vbd)3DYq$Sve=E5^}JE0 zY&$`~E~PD|b_>03WrL!*-8ROU1WCD8at8~bI6}$r*r-h~;mhI;5@MOBeRb~Ds9QE5 zU8Zk1Pog{GRo19S-FQtOzG48=a+P=*Y*My>>89?H)F&6e>>r6L%TAWbms<>xHoa77 zpUhVDLQ)uV5bY%eQ%4-r1Lpf%>DFozCLmUnuEIroGz#g$q}*kK+6J;RXLD_3neKq_ z#fw>LK_!-fh}f?*%O{+VJ+e|w{H|^M;otN?n zk;MZJH%L;{+0=&K#`zI6--p!0k-<*LWv|Le6*pR2YL%q8U(q;O!YP9`3@5XJfW6ns z30Bef+r~zAl;G_uF~%c}p-qRcv;?|(C9VDmjToR*c9a%D3Me#oxqIx|YD!?fdaO3C z?q3Y4_0luHNFZAB?lQKHs{&}%?|#+5ZQVc}Kv(aXS-Vb%FpQS)k7n82TYzol)%cKd*ap1Qy z*fQUs&Xrubk&$y8zLa*m?ti7ZdHD?|K@BwUYpDXY~G$gSM z`7<&9XjbbiHI7qj#~(INhAialy?UKV1vEH+UBOx0F!-1PCqQl1(z{&vvt$)tqyir$ zxR?*+HJ-^mkwMWn&QXPG9VPM}m+Dr-c)IuMt3EPK{1ywW+z=^?sn4ph&WRW=jwd8N zPN+0YO^8Fpf2|z|bpdp-S)fHLHK$q)=cE{}UlfNfZ;Mxc`?^rsOoSYm0w0{8!c_R7a6b; zOrI+gq#;E&RA4i#i>pjvY_n`L-55z8d41&~{j1VOf2V!}QJ-_6JBs8Z=^F5TzSrZ5D$+Q*t(1Inv1h-y zdHrM7Aa{@qrwJh2anrI!KTTXN>zWd}@bsCW5|=fgAX19drj;J9L|U^R8UtDnyF9c; z8pz5{-G%mTA-NGXd+g3O9m@Oy|3nB}o(>PHi%d zzh^#6Oxtl=a_YYmsSAq~(U8DP<;w`n81lfrjmgPTe~ZKpjMi0rpiJ%J!gfa30rL<( z%7@o`4U=+K*1gZA%r94ZzP_Y1iGs@LsTC&IYn?x+V=8r9j@b^R+iHtD9Dv%Vq|vF%AaNSMe+rR0@@?DdvGivd zvZS=mjHT!f$}o(|oSdIE9c9x@eAKqq%addFn<9o}9u~yk>S!=if5{*YYvVUiU$)wD zDx{kpI1p#1;xV3ijc2%MGFr*WM=1y6eEITuv0Fl<7%E*or&+F*s7=R4EitvkiqdQMTk06s-ys`#x$+nbY0!5Ru6FQ9d}IolZd{5h8G?{C zqx*~Qr9OnpCS9gCs8@Q6u%oP-%ce`y@-dFn1R#u+-hgiBXHt$T;F~#@Cx0pfEf3gc zQYALjFv-_FXg{~w_Ez=%!9=QJ?diTvJT=^{oBkeeRL%3#nnBf2?e`pg+PRY8KrS?G zDyvyGma*U_XI&LdxkA^dV=Uh#o)^Pq9aN%m#S*nrcLLq9ivBAPr)o?QA0GMda7F7S zKWhB%oIr(j3r1P7rirTYx-7+uITL`r@Or6S)ulrn9~s$=eujx-?(n+8vUL{=CVCxN ztPoCUhHgDl$7{)xo~@(8W#;fgpSj2PY#$*m!tfa?4#7kF9%0;$r)Th%a}a~8V}sO& z0R54lWLgLn4pLkEq=WA0#uc6S)!|kXnRE9PHD5bnGs&9=8iFHR)9=S3`7H~O^Dv%KoZ_B zW6~pck^E!<>pH{;_mYGblG6ES+T3TH#2X%uQ<;&mnbz2#E%ghp?c#g}D{Zo`u)Ayp 
zUtQMPW0sOZ_0?6T0rOc(8b3h0MONhrZ;P|JAG~!=7Q|i`CwnLohqwmkyV0Ed?h$ok9nZ2u}P?cct=-T{wrQ=C`TPz)C^U&tUe{ zS}6>|uWOn5YIUkwEwS9X1RTRWjM?N&ohi~uqU|-`KDb7umUydb$-Hn`yz^XL&*emL zcJG<4RbI{5YxD|Pc>>&nd~@@4D~>31bCYgxlYB;X6dx)wWUTN~+qRgrpVp$=RE%YQ zdngLU$S&V)N6=&(vw-{epwg9{8`tJta<$Jh{i4%^*M3!!XavYHqS^Xq7|^XvwEIgb z*iu`>K4H0nr}Yj?DblBud{OP0&l8M5t?3{dY(Fk!eM%N&L%`wCH*2{Xo@ZMPT75MK z=ld8`hwPkMkk$9n%;9&|RKxt3s8_raxk=Wzdl+}?N%E!YR*kwjCTyA^PPFUn&KEn{ z5|2pbVx2@yK1#Y^>ayvF6u;%_E4hyxYl`*g-b#YQx`jhHD-H?!9Z|q{73?v&{bQ@Z zRi~v`Ur~8C5a2V>={2EFyTHS;ONWd-k78n00akJ;+ri$Yf~*hi*_m$guqH*F_7z!T z1al~`gRU$!-S5ktRPA{yH(`qd_FSuJZmL`zxKWPk8TLT5Tx)TMnW~4gP2`j)Zgc|% z|BmRWeNDpe7-?&9kW!zI?SPtJLjM$F)SGaR)I^RG?TIch>M3A*Z}w z|I!35E6|IkEmkal$bwRyw@)zCtZ1cRpxiJ_HFBJ=60|D7N;Jjw`TfuveX2uof~Nz} z!ajM_0NkO|bJdiGb_!2A2Fim47IAKX5@#P5e)i9OxRM87*032Lu?^AEncX+GNtk0n{|4&K!jU*F-$G<4S{IEp)vwNSQR{ zLc7=4W^ifW1~>culR@}$u}{zl_G5mHTBO{U+lV<@OjhdjMifT z*eX@Q5j)?L9F~#j&FTG@i<-7tjo0PMaF}HE`4c|M z&fNIKam#l(82Z;sHvZcw+O#dE*6||%V7whVWTKs6(^OREZtb5?DvN1Kx#ZE^>**Ws znNx0{&G`6_vVXjk35jD|$}(n9vi#!+nm@GI(Qre~%;1Z~a3-N!y1dNL>ES&FgxL#c zO%U-zvDJ{rJKKt&wZa(H&>X(9=%HqX2$DlvG~zPD2&a5vNP!Yi`$VG#<@F>q2bd5S zvHQgImC6p9Nu0|Ls;&sF7P>}UXckI@{Sy^?D7qOuz8PG85sD`a@hMc_N4h*5$_(K@ zZfY$8@sE)UzIfv`SCD_p&hi74*k^bs%*o&$Ug(lF*B#og*kwNuKf~FlKt+cR4bZy5 z@?$%}%Z8Q?aO~>2k?@l_LD7W(a0cXet=y1Tk^fgXRdV~kxas|X=?`8vsQ)i(y7Ir6 z=_j9E!(v#4V$|_sXvSjX>SB0@J$F3};$7rb z#N1tlRa8SWCa3V4U58b4FEtFbSw!VVn4Drb19Q5Iu&r7605i6W@OQUQ>w83<`1Rqx z_uxCRKB!Z`f2~vWL5kDxV187X|5Z*c+0k*>ku=?4lLombe+=#Jx#4_lJtV%N(%rca z36@zHoeZPn2hMKn4Ye<)Fq}(d<>12Z*$t{MtuRUf+Dz!l;Qj8O8#XuM-cKfoY+(dL zio4}EoW26R&?ZO?VY)+(yKOguz9PK1;O(@s!jv=Al{2SIzoNcu0 zP(bAK;Op-D$L1#wL=;8+5}G{tYxls7AP`y<=|gfHvfXXD5qJ@PBMyY>M>Gv%8`9ma zx#4*c{D2vecEVhTe0F3Vi;DPMysrx5u@uTm)^XOL6}?WDcvj}{tIDB-%b~alcT7a#?#moIauGRlF?(DQ zeOxiWLJ_q>F&1~+JbidJJU09@An8A>CxenDd<$^MkN=D6faWc?QAp(hVJGIyaLbL8 zQN+ovg)qU)(ETlRFDBa{O($;lp1V;f(>J{O5uQ$bwh=`)(hthhBre;WOtBGnqLGBD z@&D+ZY`-2mp&EXCGzK*cPj$j|?DO^_H;i2BML({FB(6pXH4*|08}WShSvM1%I}x7u 
zMK_ZX8c7=l9B&1C$y|RBH^B%sq4YE%1l*F74T@g;8sZNL=Z_fT4=d%5I^YjY=Z}=V zp;tLT{Wu1a_&y}tR9`Mp1K2X~j)j_%g%^RISMp3lFq?sSK`z<$X zFC>Aiq8NqA|H1$rdnh-NFO-2C{jjESY(v=xOgEV?jDb8K&TV|d(8>Yvjdvi`$6=G; zI?%CC`obG1)DN>0dp#(0yY)i$rW?rCkFk^dVc+gY-weHAz0n7wiY58%(cPrKFuZXG z!;8fi4$T~V|8Q{QUufPqgWdg$2Zk z4HX`+-DJNo^%DOh1tk6pecpAw>39)(6Zs4EF9t9uc3^tb@WS^dv>pHJh98bzjsjT| zS6UPGrv^!;?2)Es6?l=mIwVJBst6ejT4Z+M)s~tnjstfs3Ok6lg&4@OlqghbR3|W2 zGFB;6#{sG<9I6Z)INBnrOQkY^1_~^tz6uiZ41_e5xEX00>JAE$_TVEek)^M7MMPj) z2bv|NHnln};ge-wNL8>yNVBG6RokMV zgGgKGx|{$#-BHPAM)aLV_#);}>DwYZUQw7<3;d5R!vqT7hGCZ3bw4W1Rqo#fLRui7GoFX7snPE7In_G&biM$&r#2n&Y91h&PmRV z&V|nd&)F7v&!NvH&MD6=&auxm&ND}?`@>|MVlJA+_+1<(Ah5jM_DgH73K>rB;4F3fGyme_2aS?eD zX%S@+NfAX6SrOIuME_9#RR36ikbk6qrhlS;zJIuXx_`WXu79+DwtupJAtVHn0*QeD zArX)aNCG4e5(Y_w#H}m8=R5|Rl?gygR)CaNEB z7f~eIDQR*mb8#zrc#`F-suowyFPuXA3yOrZmf6m?oUVCdiA1wjoXp>!{`pXTDf(hq zixtl}yYuRcn9dlxbLxv7&wf5&cPHV`S@<*bB^=u$v6Nf4=IP8T7)aOH1JLTqvlkvc zVfw;c%5qOLJ-N0dI_8}p82x#+RDRouI2R?h#$FXowMMqj=X(-tDQ}#Tb!T6dzMes? 
zt27j^%>O=p{IGd{~V>)Fzt2wPXb3AoC`+fTRjQI5O zA@M=^Vd{b9q3!`4!VB?+phHw3tp0RCd_frRx)2kHC?psH1rdh;C?Mt#EQmUU1Mt&; zh)gn9NGJf+f?bk>dx)KRh{I1wlq(W6u4IV}Jex$VNCAyaTG((&Pm)vsK@*df$6P4D z$VtkI2nOqMpnF9a*s2=nP!R>T#^+K53d#(}fXXL1qim@YGEzC*Wd`U8@k(vD6SDND z7rD?TktNz#K5ad1Ic@uQ(8-e3jMa+Og4KrAoYk7ulGV1}tlp~LqTZ(7yxzLrvfkFk zteuOGlaGUstAo9RQwX>L+yDsyg+M*P{K>6Jkx5ff0O%F?3PJ!PfTDrXAX%U+=oj!8 z$aoTGGJR5Wa$%BdvSrd^@_up$NB{s5fU<$vAYGs?Xa%?e;sf%5I)EJ@AD|ED8Tbss z0AhfWfyp3MpekqvI0Iq>vVj_aTa!DW1`vQf0O$&I1ziKLL0^DhK!w0UkSWj@FL#pcUq>FAo#0LG`jFh|sa@nQ&?d7B zukP^FUR+n#rm%}>M~LAl%f3RVMtuU)31g>1eO%44L#KfYM@K}>fm9yQaGd49S7lIM zfROG4H!n@|n}f=beb;*L1OsWuNmfXWOZKb7siAU~Jw zUab+`%1#PExQgn54u=3kv3yD z;R~lEGSMFEH zR|;2*R}NQ1R|Z#tSN>PLA_F30B0oi@M218rM1G0P0NxV=LIY9*Vgo<{kpY^tbzt z5P*(DhoMu^U(hk=kePv-G;}f=zd8v06`g>NK&PYq(9!5%bP_rc9ghx2r_J<-JFK{~ zkgfj^OI2JOc2@p;Bv0VWl|S|zcDP7!lF2@ z=MStXTru!8FtACb91mEA2`z?5z6UP|tV=vlz1@97M)|kFxQaQxk?0OLu=hsAgga#K(Evg)#0jqEp%>R z=m+@IO(jrLyp+SDVZgXSC^;-%`m*8omqF+sm$U%^lXq;_FyhApcN~^mcdF9`jA#Lw zPsd}J+}A#2wFhkHp20%GN6#>=`tIilQd?<&=DZN9&teXY&YwgbQG z$UbDQ`#h!f3d4vSZ8pP1diIzsD&bD6>X+>;_KaG>QnSo5T<-k*VbZNfjw*iUG(gzD zrhg62F3C_53H*R~eL_VOcjr!s-gvf)$SwiW+iw5=2kx$)?K1n^k|FH8WdH3^W=XW^ z!hL`z%rD9>ER^{83X>TS;d><_`2Xxu-c7@Iqw1t>$%ZeGG}3NyOL@qW^E5`OP*C8o zi3hM{a&*w_nr7=szb!Rs4GCw>cz_iW@(oL~GeYIydgJVA3Rp?z%WdQC`GS}p)S3q}T$+5-y)@z&0pztya-F)^64YDa+)lqR9npphsB@w1G6}@U* zB)-EdTlaSH`vs~QLBp(S#x4pfbs|DLkFCx!`{$q}a7rV>eZsau28T}=xlbsfUwg1_@#<^C+*#o!Xpx>d z_&PpBuPk<$!R>j-@w6_)rekI~ZtC#|eWN zWEyIZ8r)1LXNW(su!Wk-z{)^O0}1=hj1|P{t$o|M34Gfd3VE`QA4}yaj_| zr@lb_vnZ*#?+Xn=M`=RNpVlY#4nIzrW<1XEkOx4^&-gk)-;bshD%C+d44JZ6LQ)Y{ zN`hE|2Ti1<`w5yFQP$<1!?~kQqq*wBq$;wyzj;TD=OnnF{o#JWBEpg*_CoM;TiD4^ zN61!bFm)`2|CeQ~kMel$mtTlJyuAJJ_(!_DLu;!^u(Xe;GXIO(B}^LO$kYJ3EX-(3 z%u#<$Ammq@4A-a(w!fA?*HaHSPx-FQPIO#1J2j4+$2&K+Rk@3&xafNMRho4jxxcE% ze}cJG$v%_XtEGM4p#Dy^nDilOiLX5-EtP&TR;-sIZJ$|tE0%nBWt+nED966keEqAO zLKkjF?OMDWqz6o>s$L$Te)xSeEsB~_lE1A#8{p*Pd&57-{v_94GYaP7XCO1#vD%RI 
z<5u42Z|)M$Wo^-L^s9N?x0El+KMsVvs51ZUev!x_r$CjhM#ux^bQnMGbiucI}+osB)&m+NLvS$OF+}|QpiH;+Q)JdKV5)dmTZ$ar3x0o5?_FgrW zi~(6~F*^*1_pe{Mn_9^j>(qgNL;~)0*2?Ay;3C(LIoVkP`C?BwxY=@k@(sQb7ku&J z4QEdlA79rT`%ruS*pufxTqQLEZGs|W6niOj+ce8D4~3pQb@|v8P=RVF$(d}>Ja9Wd ztZ5NT{QfPZCYbW0+oz8oIWFpd*L5bcE2t^~N@QW7#fN1zsv*P=2#21@+fs5TX3$S?4hc5nHqsZ?bEpRe}r!IU~ukBrnkCu{(FeMz)spVTo z`8n`#60Yh-t;v%pAI7gOGtzCOtiFq;15q=_F$!>1j`2PoS&m(g**XBx@4Q>0+}))B zU|V#CG`j}t^!uq>n&k9)Z}n(m5%8nGY4Ljxlb9c+tnJtwxja)V`2L7Cj?YZf)YipN zM!`r!X#{7&koPKu@ENCia0*Cs_2aWgu+8-En?fePJ1Mpp4B{BR>&FN=cn!Z`PPACq zEcg@l-B*=)kZ+=6^J1Cp{A5z*t}m028MtGkJ@_8LS8aoMu~AkKx2O`@$^N(V$%{7L zR~4+{5|48Qo=VK{PfE@#Gm18G4h`0d@V#u~<`#Rx*~`H-QaQve%<-7Z`X@Wv6u*bZ zTPwGOgFiW_q2(4-Nt;q)Pq4%X{!yrkckB%|HpgWmF1yG35dJpkC#p;H0VStM_2&Q~ zXEn}6XJU2&HFYgcxB9tSPiTMGPYJwkRfLhqs|JxQvHrQvPYNEF0XUH;3|J`eL0{Y4 z%h=T1OFyp}kV$oofCN^ec<2Nd^7=^hMiUeNi_rJG2Ui*ZYQC@qp0?f-c*>craq z0<_;cWkR`9{z)Nd^|fLap69CG(>Qm7Ys*$nZpabOz#o@hUBN zat)q5bLJ^LDHlPb+41pK<2c+l0&sES+!XM;6=-zu0@gh`x>nKb9p<_p2Y{Xk2lcZT z++n?^e@-I%`UXJ<%xUWfP@^QRWZR>#qLV+$VTV69h{|vh@M%A-y~B~;U}K9@>rst- z@F(vPVvmmDd(s*+Idu|)VRY24t|ll&J7IOpviG^u-(cc1u-f^NvwFW0g34Gq*(iZ| zRkD3yW^N!RjzZUP6(KnnLk%!czch<+RX+?X$soABEN3+v;d&mcC0@=dd-FVoYwi>K z8d_2D_XPxk0P%Bmb@OsP<{A-|d@Asob4;{tV5-(j!QqF&XAcuga|^cyYZ;A@^OH7K z+E0kBPE4Nnn3uV|K1=fntD9`i1tTRY6Hz5$0Y~$tN3~5&YwE(dotXO{^7i%_t zHu@{hYh8UTtS8vw$NV9``bV&c=0>$B%_u3_`y5f&S|gtl*gf0=6F(+gUDSJ;e+}gD zSgVPM0ct}Ke)Sv#A(ZDJg)pK}AhlqZ^nM|i|5lmF@?Hl-v zjQiv-De-4D6_Q*f-hLP0lpych3Bg&>i}OF6O6hcOp1cw17VO{}hL4{t7Q{(`T|NhP zJ{cEh;cn-h9&N8}V14?u8`(YBDJ=SOgujogysX7jCNC6Fu=(O7^(EBFQh6#MRVfr^ zq9PSB7`FNywLG?Hai_6SaJ1H0bot)zwrF1BXlaX|a8~B$%@b|{fIVGesoe1CSKL9> z2RTa4PCt;fx}Rk}e$baGa5<934DT+ z$o46;7#5Z2s%amV0^l_c%9(#++fY9z{d;(L5}vjMVvJ?n66C6Snd9}*Qp)g0v8#@H z6OaKhQm|>tcUu1w0MxW)2n=F?jg;QYwPq&0-P)S!%uUadNDquH<62qvYWMdy>~CAq zjFs&dFaN^VRe$v{BJ#A4Gx+3>gENr>=^w&@syOr;PraWnXaL^C)sC@mj~04Fn!jAj zePTWct7sUyaJ&m%@pl4;mObH4Sf#oPLnW~4rJijLm$9zBy_f1`leN4GHDq)1OCuZuss 
zB_kJce_CQn{-A~y+fsL$6`>s8h01L4r@NQAJ#A_IoNcsbbkM) z!PC9EKkp1emI}l(B_|xflIbZ^s8^R$nDhk9V!-+Zt}2?237wsSLIONItd^z4nw^%^Z1h{u)p8L5oB}mu{}9ZI28~Tp4jk(OeU(o|K#Z+;nII# z;4@u%SbyN(Aumokr@PZr2Z#$JifH~N(KX*!g z?xOdh7;2W@V1i~?+Ls3;9WJ`NpV0EwoRvc-3PV8@9d>^XyhsAGp@(x8JK++}1h>dp~r%I^i^)ZkXiiSs(Czp?~G+ zJaD7aT0Q}-KE67f=_<+GC$-~ZZ2EbAC48cKWMS%0^k*t9e2+Wp4FzUOQtG?ZIX zx9EM7yJm$oS{cUJ^W6j07^N=pkl-DWM^N+ZT`&*PJZS%>&!t(jP|LA;!r%38vh@^o z-OV=hMf30VqUJ;G$c+fQxzmoM2t>Bm7*MG;(`~r!t7W*bGPv??uP>3BHZ7e}QpfGX zA5Se1=cBrle9~i55y}4P!f}_Uc(X9i-0=c6glw;d&1 zb}e4PWIdO@d6^%!H>f#N*Vu!~ua@l9uhOu~mzkjuL91pKzP zX1awSe2 zJ`8ihx73)i1Op()z>+3^fcKb%3JCDCC&%paS0cy#DNXz>%R7*O1;pkn_wpf|pZQAy zL!zht&y1hfkgEQ=3DS;ID2CznWQvE_ykuJSrK^BN1+i1)@Nk;1Hzo=;c;2OsVuh|#YYPaZm?;(l!o>}p0 zZhdG^#faVAN?=csik00;XHS!gz0rzmAARe_B1bNZCE6p48KUupPdkH9a1&>yrK*I< zhU`sCL_dmKgr zXa?73|NS9s4;kJ~67LM&6aQa}NP7WB>|#H&B@qk1+m{%4Z7h$7WsNzB{g+;1MFd5^ zBvfdB^oVF>5$6#Zr7>&jgIHs}RGi|#*niBh5q@?n)px>CiY~Ol`|JS$n zB|x^`o>YQC)5fYteEaLicPn;s0e%-Go!I5B&ZY zcltN*c+sl{zd@2 z^v*YE3314KAuNU!*=BYqB8CZR(>fFY#IOh3oDRhrFnC%2BV@mbbUeWfR+8bjv~==Tq*xR4Qz*L_k+oJ1df zZE@lcNY!$Zt^G>M!Jz)hsvV}H@$9+mS`OBLO#G(DXY9I%a{;yr?<|DsaOXm7WvhR^ z8A{va`7E65zb}3M6FqdZiFv{Tcs%$&6tj33NILRo_t45FK?OOt4CH6_km-ql2cCFn zLdM3>@h0vGs|NwMJnt{vq5Mt33gX6yc`3V})%RQqQw3vV=)CN~FS4N^3tatRogc`} zClz>w@;gJ_o0JtG?+~Gk@J;>-0=R;~(8eZF1+{l@lML(^e3PL9VDb)i$v7A~x8Rr# zbos?R1gfCjlSL0>ekQ;9M*0IWG`LB7!gWP*6LBjQG>q8fJrREWCE};qrpRkQ^`Bas z01@zi$ygI z`w^at>G#bfytvKV9)Qh~op)m&N{Kxn>ykax5F>uGvgC#X3Wk<&xwP+3VtbnM?uL7p z@Se5*Iz+k&fbQ)w%#K8CT5m-#@RwN_!({C_N!yo1up+yoMFF1b?ul77!uZSV^kEpj~D)33!5VCfzwo#>G-jSJ@a6*Zn#JeA zVCV6nR|AG*7;|gna`)lN1;Iax9kQH#X!<|wapHZaIljQXX7wSsUnZ^S9_C*NHV`*Q zEVS4y1J#E!7fcO||Cd5`!!-)lT|!%Xb;-aN0?sZci+hFd)(j!?KRge$z8-< zOVAPGg7;cj;!EVR*@cLNA7okU0uYe!A6$025Nq<~rI2Eg#-WrEX`}S}CdbP6_M3{p zvfOl3RVI@q7RC*@umRb?3w-Rh5@`MAsM~d$o@l)9z4Zbju z*d=ouI%UIeTn2)tl5?iQw z>=s+lAC_9S$ko`SOkb)PR!vnErDxBctYv=r!y3Qp;fP%uJu7GX^qsj-ZQ!hm?UU-* zH$7>qJdVOivipzEW6(V}tC%Q&#r@kK_TwI}GG8%O(BsIM?w*xZg0iR=tdN-O9#fQn 
z`&Th=Lexgj@hUEg)jjwHJ8!IRPyVW4Sy+SmJd0gSbc*jELl&GIqrL9 zooM9hld`V`>^nW(tCVHwUMfOS;j8>*!8T6~dNx*x%HqA0n^yR3Fyn$6C#q*75U8Y8ZCyZ!=wdTl>jkZ(X z>6PIN+HSSQ&!3;uSBL#?7RKOjV}E0Jj$?iea#nGc z-j=&M;vXqvFzDR}UlejgIHc4J%+l;T?GxE@owJ`)p9@xpqrcAl_3qzWY+7mBaB-}! znVTh>1=;deN6ri~Es{D)ItJAsY7jsztOi!QP_s}gS<^m?n1#(Q%s$-bbPap+H*Hbf zF}r4TmTv!rYs8yD=pqlwH%M&x6~rxK!`+Y8Jvh-lP}e=&b$R9xe(B=nd2iDaebC{$ zUVjL^wDodr!e59z=saI9z8|T)FMYfK9}LMT<_@y;phMY9lLmgt)cf&j6@a?w&WF2h zh`T}NL(kW}-C*;P%$=}-$o1~Si9@qXhlarO4eSdkiEm`VPqRmE{?p|m^&4Z zUwOS??uIO`T;fRFw^NRz-fOm7eIGu_Ktvy`8?vHxp)C;(+2FrWl?WYN&psBtCYDGX zTsgi>^1_7m&>nSq5y1L@#G^GYT*=Vl)#44iMY~nIWxMtKi>Yhw>%wdJ_1-npb=o!b zdhVL>7IbTLD{<>{i+L-3``Cv^G7>S!yU4uGyu`f5yu!SJSfoXTUT|DyUvyrNUh`j< zUsK$Q-n!r7-pb#y`#ihjyCb}Nen;ON2BZQ0Kr9dnqyRy%zd&<;b9eI~&;s-nFY2Q5 zFOb)Y*YrNz%@KFs?*d?hK%d>BFbZ*$axrjCbL(_VT12f3VoR z(!Ak{s=t`KCc6gt@HR&-3^Fg0p(Ig37l;eQCF}xrxp1*?DS6R;jktzgFI+#oyMxztxfh5nj(s;2i}1`GJuKiON~ct#57+Y$Of8xDSv7B=y4d8 zA&zdVnmw%;4x|-)NaKYAYlS>wH{=eZ>3T5nz$_4O_#%HuixnXCSm^eDkYD%-JEh0+VF?G&rdUI%R@2p2rca$ppRx=TH^V+LGH+*;3fjE3bClFt(6lJwGJlcJZ9@4PR_K{TL;AZidLh&G*^hA=dM zBQQIlvu(7EzpcEDVo7w#eF=9-eu>>A_pt0RL%-^$a8uOdY2a$$ zXyEl?@!~8X*CEUgK(;Bi(VG;Os9)$^$m#I{Ue9gFg$NM@DFFj*G)qoPL?-!%S%>k5 zl_hEy3X?ouT)X5=lufiQg!KV)ZDegAlj0J{BrhX5DWN1`PyiwT5eN%_1ug_E1WE?9 zw;|eKZ3}G=mvZb?MR-!l)d{l$M%(C?3hdQIcn!&ULZkv%DN2>()K}%DspOU8e{!M)3DNBRzBBJ!!vjX2aP zwK}w4sj1TPcc{Js|Dnka6p6nQ0N^v>~A#9G%%6oy()k{LAIb6i1>*?nk&s@<;3rxff*@As2ZU$u+88P%l|8 zEibT_ftR8oPZL)YM-#6Piw|cJxo${)2Xa|)ncktWM*UjvTF#Ky=eaGpa0nterDI^3 z=E&)Y$RYnC>mvT5vPSJ%VU`E*;o2u}rfjBl4XN*#TP9luITY7GW_dy6WFe9vK^=$= zL?^5R*16EJ&?(u`zKmFgEiWuTJj!uX72`=KhlFHzj4sn16}YL3@fwlysz`mIh$)d{ zQ(tD6reIf&VVBorS96S+R)S}_XnN-QkG-|sD#42l)(fF$hjND}bSX_J znPoY|$fFXmGNqm=5hloFYCKaVOk~YG(#)h{mj}PnETH00i(1aYdGx`I*QO#SwbT!V2{(y(>9=Uhn7d zC?OSua!TgF2+fw$7LjHCN!CgHNo9rFmBKWSH`iWNQ*2YBi%Naw+z8nS$g;QsGR?~r zMXDmH5|jxbG7(v@Ojy=J=0cWaX8QaTgB zQ9ON8U-@E6Rn67cKSd2uN(^)IUrFhYdE5B z(>XbMdr@qc`tD_7^6AfE9FSP8Rc%o<^+f(vRZ;NYgzSD%6tSr8-`L}|Bu76(PTJm1 
zMF2*h)e{~AQjVeUJQF|yRR2mA3nnN*Of-a3NB`$p~ zF)yVrAG;TcLDG47qnHzzqnYEGW0?~XQM7%bs~l_DtDUG(6hEpQMR6&5>3)fODSyfC zo_k$(9dey_o!p@61ND*h(eeTN82A8+Mm)`2%^c0V01Lob9HrZrzluaDqUhZV8`N+0 zZsm-4f#-Hn!hML|l+^(g&85>Nk$e7i)^+@KWrNzS!aNVabr97Yd%th3uYPq7MTP>o z7dJrWd6}cg`Xu{;RuQX+HP|X_ZDDm`O>(szg+Rej3#f;eIbNy~Jcy`!Ct=yEqbRz| z0xxw5URV_Gb16SU%@P?Z^(AWQht$f^)beks)f_aZIpOgxDxO*XBX74dKi_-ugvQ&d zxMl^7Xl-Sk=oD#QaeBx5r~n^)`84Pq9 zbiAlwoe;V#!@@9~F3t&n(=6U0ME*z@D^7}Ci9kmtPJ>;QK$kV{NF|dZOOET6N&!W- zs>X6W4*T1r9JXh@eQay`AIkXTlkyC+RkbwZwAs~^b@}2{*}=U!zqv&Fh}m^}wNUX% zDt?+dRL?q9f;IEvo~@|_@_}=;i*#)@Y_)AQZFTbEruw-13j5$d-(DY6Us@luZ?2DV z4YX#oCb8zThPfua_V_~)A0(;xg$76m^cM6Eqy^GV*PwYG8pjcz9oN}6+Q;8l-bb+} zy5_!yyC%QJ{vj7th6+LDp_0o~-J$NX?pp3(cLR6D_eBl)4cQIFUKw6F1sXcf^W%_x zihcAS3d__l^)3Oq_r+d$HX1_D5zkWM2Ks2$oYsgwVA#NdFGOoSve(~A2(D!gH$5BonlRNjyKZB3UKy)t-+K@suPqWi!{kAe{IP0BSIUh zt&3E1))m)=Ai<|P-r6uEgel9YFhZw0XClW8>5wOXs*61)C8$KABRi%cs7j*yWbDW) zlRir!A9!U|K%cFyv4X@Ae4Cc@G`Dw~?Nr~n?6rJao>8{Cme!cIpqiR4|Cp*Ect9sR zU-bUC2i*a!V`P$*pMDNaZl_hSeqKWEnpNOyaG`dwuAPRRww~@uGAMNM82X=r-MHfrq+yv5^L^m6SjINQr`g`ih`5rJ!=GpuCo#n$yU% z1-#Nl-?Pwv{OwMr^LU*Fw9;1JwGapz*V@TC9WNTWvhc3-(FfqZLK+O2B;#K#WGhXQ z%Y_UXeuR%13g`}~IZcQOKnB3a<6Z)=0SH4Dv@m?U%VNU9tkNM_{&)hbLW*07Xk4a3 zgIkqog0K;bL;kxqAHX0{d97uEj#bu=jFv)uIUE~f%C_TCTxdn$81Mz$MY(t zrn#pJr{U9k(@fK8)6nU;X~sR!p3$Dfp3ffUp7h>hc##k!rI>pNG!7aDAwi>{iS!{F ztI!IL%Iu2H>CtKa>GElSVo!9>eGhj}evciVdscQ9a+Y_NT&3y>^_2D0@&tPtcq$qc zHRd;FHx_$mc;^%j=~(4gAg2|l>EVS{>S#T*oI$a79(+j13SpU2F)&TD=d?!z&p*pL zi$ANZQbQ}u6nW?G4K$dK)rnCl%r87+Ks+(auUcEx?krsC`#eOu3pv%jQ=csvKTatpFU((^p>-o3jcw9qSH9w>>H zD2JC9Hb+^d;5V3rp6F`9`=tbI;%{T};2d?or)|Li6zSr5W?PHg*Skr{blE7S<-KjZ zTJAj7X3o08LU!jiDV-|kH)|t)(`$MsSjnBb=eBc>;JnnGGy${0aQtN@YC8P{S3B&sCs5lgA|#r+{CDt zM8mw}?w`IR&N{<|axAgaY^EI2D!Wb8hf7q}B_d&Co8ku2|JgbBHhZnE&irn)3<|Bg zv%D2S^LlL=NjGU(J#?o+EP_;}VS3=SX9&|jwMg(Gza?d(=vvz!Cde{Hx5pgf1fPZ-6U z%cB=k#tA$k(if0D2ViR~@$O{1M(|Kg+T7!a;R~jIN#&KI=QC+b#NnPfYF*R&oap?? 
z_@8Hy@2c|68lA2UKh(2|RRZ~C02DWPIw;=;VOW`7+;?U;-9JhfKInrh}x?!_2#NN_!V8RmQsx#tDyJ9Ua@Uw{)w*-Av=C~uHbfM|p~# ze$ELQ(FT;(`)p^+HkrU*=lz#|T`UKfJ_a}>(!B*+F;||Ul-eCj%jxUMkTyG3FY333 zNwgObvsP;`dbsOzvU+q}nWE{$HNEK5?`gBjH_FB zRP7oJ##>z9>OL8N^J9<4t&%4y>#RSDyIp?=?%MP9ATqpP@T(v&&f{7T9Z?t$Gw2$& zFwrzaPL}8tb@I?+0rc3P{akX@Cc@9G3n-M#qL$>NpdOY@Y7?t?qmN{8%N}WN@COQ% zQCWQoLQWO$J3oU}t*z zMb%kiVVbFR;M6qmw5sZRp}47a^wcyhK9|AKcGEf8b{D<7siLg$ z_{y;Y7uWg({lV=wXQtNRpTD!lgZ{-y)+bE+ZjWPD;!jnMeJx2yMuI+0XfpiOx6<)s z6L+fIQ#rUrR=u%?Zh7xL-?`j$&jG2OBHxXl>s#)8-3DIdjpqL5X3)1a3lM3Z|`^NqrOxG61e&&3u9PKdkFp_5WI2b zxG^XNycw$AK0q6J4pg-~BEj$glWg9P`!MCa|1R8 zX?EswS(tvI7K@U=FD!>d_iWQGcDr{r6`sY*Nx<4b$n4!pJvEr=#07U^5lxMqa&o`1 zcyzurLwHfz+e+bEF;IN<7rjCuE(Yz7=Sb$z>W=Y?s6mnc25HXJMs5i`+ew>@W@V~r&UJc zIF;y=#Iu8)^{G{qlR$WI;V0N=J+sBcuj-DdJ#ey0;XWLz$mt%X>Ex{n3E=&e8koYsYC!YF*<^-Og?$0GF#PJ=*N* z7#|JFuqA7@y<;BVK9H?%@>wyP3|IDUyxG2OLV#}Q^~~-MHF7munGf%^_}#r}Y8ROs zk00)Q2Ck9-+HbD9KBT6>OUqv*P7PMg!ywo^<#1UFi%&CGr-F^>x|^H2;qmk0nQXf{ zJ z8}nQx$h{5kEPG{w%1%My{UHo}Cl%L(gHQ)~&jHgqO-ONbo6~~vrv<&it1Fb%>QnK? 
z%llOMm0hqS@JYDLoVR*&t5Db8h^Fg(EMT2JFN8M>h7^yCHhE0LAo$x`#^OwQ*?&D= z%RrzfV`>obld=4~x(QtUja!CfbTz|UP>VtweC0?y{JOOQmbr z{$0)XFoVTMoKCKz6`CaXo@|MFH04J#%X#wOCmz_Y25Ca>B^O3}A~SBt{>{Hk!P!TV zdCA%H1s_Z>(B|U{hkAL%KO9u!Z~tJz4KTor`to zSk>)uf4Klp=Gl&Rv+RndRdJ>VQ@C|*WB9hX!2a(ulf1%Tj@g!i(l^73`XL0(^&`d4Tz-Ip!5$ANcDwBWdL}uLOhrhht1B#6yd#RE(Oo z_n7v9$5@MmpYWv0*YR3De8jgAe5>{GfmHd%1F4-C(mOBiCt-}pGEe3X;0j$`Wfrr^ zdV~CtC18{>L;RyzM+OXL_Ws=x&Qm+e4$+Tj*ebrV(s%3{qF94~wK1T~s$eaVs6zuG z8lyrOwEf3z<#+;r5i;Vb^E5Iw9Ys=6hx^8$!MxC1uPpt!M_!p&xMeo@tUP4z{aF=B zYW11Z?%sVfJ-9rL`05O7mG7kO?M@FCxE7XwnAjv;X={b>a7zo7#y_cTy@}TcZ?o@MH?uqDPBsgh-lZ$; zolCzKssYr3w*kWg4}+vUg9NM0I&Eh=fxHi@yuTB z;$KG?GaHIK2IK;_aw~}fw=yfzJ1L|EXE*-)vC5}9^mnG+5^ZOaT?vp9VIK9;;V=B3 z5vD`efCkK9k|s*s^pHz$PSZ7KDtb-KAM!y5^6+ysXX6G#O6)9#94soKzCTrQH5)5EZW`ylP8a}FjOak&`7_I$zeSceJMeSfsU z&dzpnZIg?}-s>yPTw7dcn~e1;Si!`&YX@ADz zX7;e05qat?d$0`McMsdX@~~*MQxquH0lgo*`aRya-{t9CnRWjV@!WZ#^mzQ2T0ws$ zLbc|^9ebu6=NC_JaRL~@^9X9)d#2qYNU{$*C&1kK0|;(R*D-2;@-6|vO`*0ITkb&0 z5sIE3h|dYpj>XYL6snDAyy+g-wDMRO7>8Ns1e#e_ zk1hP&8fz&~AH5Wa5S~cA1O;r-z z3~>s)szV5&!@sejGnh)2k~<1Vi!uwje%jsXU%&Uzlx|Bl1fS`7ow5Lo?$tR-nC&DT zn>hK_!6i4^KjMaa`%m2qag6;ZX`e|cYKlsze%)c)^1KWJw^v1;L$SHlN zPl%FAyg><5I7!j?@t_1`50>i_y}giI)kNm`y~sdyPZxeLnhyP##iKDdt^(C7+(6tI z7u@deO;NYF06sg_p__HKV^tB)o zviBzzq2spS3LIP_X(Pef&8idcfd$OZbGq92!wnN#!0T~|V&N7%8)IRVp(}K6{PDt~ z3s#P}x*d^bW3Bq>x=W65wRK}{V-59dX4mk)U32DHOG1c1dC6ImdZF6Nh|+{^jpXvWiUDJQdzlY1J|9XNZ;X@)~-&O zLDJtl2}Brb&8h5bTL+Z*$f!A|)EE>#Y`szz5QKndHpUmO3N4S%%B`oy2jg;nC{0$I z>})8EMX~N8FpCf*Zx<$|Wv_$kxV6wix!&m5K!@=T7yOF3a+~wy-?I>2?}%Tw*=#j< z`+1Jou1FygG4|oobad`s5&Z6t(#`p}Er}TLYPjC-RluzpootRLtzO{*?#Z+d>=f#| zKAe2A%U@6hl%-G4_v+7#is?J0dq+-h&sHhP3W2BzR^q7xTO@B%3kp5sZyh;DB+M4h1*A7&ki>HytZC8B%&eSQJJi=GPB34(T&Sk}JdumD7OQ;uiaTmPf5OI0}y0enbnTH7KK&a0JfmrQu@h z0=C$td`svZ!^wK2Z+<{;Mr0J$6=5W{-7xOOH=0E*bqpUftA)d}O^zar zn@(=yunUeFg`YRPx!8|58nIuDm@M)hvq4#B_)v~;WVm;04myW3;V1s?UZamYX8X&s6xJ$AK-zunH*hU9E<-f8i47B#R zZX#)b4WTm67)aL1XL^(d45)yryFx|Y%fpjHa8E8BEn+a%2!Bb|1$ 
zS#F5UuYKf`J>*^1mPIjrVsi9#?`hadwQ8=9AKWKFOk26qVsBBWVz??kKH9w^fxU9!n?TT5Lv_+jiCXy;jk6{bPn zEyP;U6InCX5UAfZF9N^RyiwncZ(P}al=ML_es^y=ffspjH-AVLaRdVo)OQM(3I`iB zX*cf}uGvG9dCEzE9h$T8&;5zp=XG{YH5_V*GbB5t&m+!$TRL)#x{A)m{_daicpbz& zhy9y($sYg+}xMkfR{(?(w#yz!BbIeh)H~(awd_Co)R`eVd za1Yz+{-^F5LA(C{2+_Q{$^0iI_CYT+R|J?&`Q1CJD!KAcox$xbimDSvH_*Ljr_VLI z;r-yRm2n}z?kl3<&j}aldFB=>&!kb|KkS58H+BEAX)w{V({!j1R*S9v?lbeRF6Lhq zto^6WD)jfle`Pu+*}?v=OsL1BT#WbHe8@^KG6mOyJf!p#3%@WR<8Fq_1l&< zC!y}nFEK{(nOu;`ypl#Pp|S&Ucg=&6uWriQi}a7z68mkBtohgO4vy_q@Agab$Gt9B z$_yB7|3i!2YG(W=2kdM;tor;v2I@ig?IN{dmnJ*;*&Tp@@OyWe=oVf&o|PG-{->hE z;0f;k;CJ@JhwnX_-FX+e5d6HmtGyLO?4aKA&V3Pp++ri>qH%;&;cy2m5_*0rlXk;4 z8uqBS6ejJTXJ|MyHbiB*rUL0de}NmzW^U844lORUoRye-G!Lcdc_3b5g0c^#v?%d6 z{xtd0pj@87{PUB^mp^Jd2?T1uC2wrLO9RtCarpSCY`&Y%E#uISJ)dM4U$d&#@C(bs zWq9gSE#l`*lFz*By4HOO@%|XVH~EsTv6EoznN?Kj=%TR*4r$=&vA02(l>Zc7Q2ST+ zc5W4xUwy4tJ>A^e8Ar^>A!?yNA8g-Z(M6Mu4}PNBgk-iKOTVu*r`ebeVjK_y-z%6cs&|fqT`CG1)c+ z8(Q>kHMXFbr!{ZjwP2m66>x*SWK0e?$E73fWo zjQ@pkkx26W0p%Ug8+|K_;NY7(1b_+=zpLhGDFsdciMC#QLYt*sUW>z~MS*`PswUHN z_dthD#;*kk4v6^f0UsU^(KqmZCsAude+fIsRqH^1!8^D&gN!|FVp;K)raWp|RBeCm zi+i`KboXO0P?c76I`-p*VL^6Srp^s)E#hpOPl5qx;MF7e|0sL&c&Og@57<&9p%SuJ z_K58JR>>AZwqz-?OR_IB$0URhh3xy5$udG&r({c(GWIM{8C%Fc49__;KHv0tzTfA0 z{raQsbDiscU+-(X&(b+_{ze!(bYQ!JW{&Z1FPCT!Y}bvH{Y1+WEo@wbmF6fqhIVhv z2yhj>qnrp{I{QzgwtBehMB2d65}laH`DYv07hF033WoB&DCDJ5=W8$>OJ+?s$r5*- zapA+1lv`I&VmkC({G}-^DFuK3zZC2lAN2n8>iV=f-`V1EP^pyWc4W}h; zg~NJHj9t3`R5R&+vjj`~Y|t*b@j&C*!ka0NH$sk$F`dIITYKRJmBh!G#K6L1t`p%t z2ZsCj6znWmXix8$o;P4$(4mL*zwmzWny+)|ZB)8g9E(%fa1L!Dn4hQbwlAf{g7JO~ zDuX#~n}B8gY6EKm_yBHzHDJ5caNW%oEc83z7;L9mXH)#sKl%=tw`nc~gNGP;7{{m8 z0KtX6-`R0EY3xRnSCML6X!7F#ceiULQK3Z+Q{P^f?kd1qoMb{IF_3$hB2Jl!?;p zfgZ7c;w1fdaGqTERhEI$U8fFZoA7$eKVATr z|3$O(U)#42&;}`Cj~t-HE{&weJ|g6sFG=Z#gI@Bt6-ULp+>)M*X_u$y{}dK z5c)b8>g!*RqxNpT_m7421Bfn54P_#bN8m~%1+GM_bs`EZ2RMU1afE@3 z@O#F5@QC=HG?d{nIbmg@y(2eBDA(gqgxErx+z;LRHV=q^fcH=R4ai0S0NWNq0Q1Us 
z=WiFD9dse$wobDl!9qjyj3eRTQ~*`dH;L3}@%vy7Zi6|f_*aZ>xqwTZU7d&v0IqdX zzE*EY{&qq59oQhZTejbWE7`XPO)t?5gnsD0Q}c!3!`vDeqot@#-av8C2@wUbix13z zEH0w|AVAyG)_?o3VMEvk2zgn$94g7w?FFq(ejL3kgvHMiI>Lq$-YNRW3#Na?ArkUx z(~%HpFiaxp!eP z`;gaE2fS2rHML)2h8CxcZZTLFA>S*|L3{HY>7h9AKy;2`F|ua@yfndzPr*)rZT0xw z_PKMSREv=p$CyM`SAItIrpz`3@g?QOqYbswi;*uLZ>Zr|jFsgj$Bu`kfdVb7FrK?sv~FPI4~jt|d#WA}a71UUi+N#-0N`C$SU>;3^;N$wENeD5qZ zXq-7<@X={wekwv9WS`93CwM?uH#E4z&wb7XS0*SXg!%7%tv-M&5d`qB7>R|ycm({% zLvTG$(gN4>b5AzZz{B-9fzWs@;yI13joc_93&)_9BTfy~?)e2gSVIhN{T-9spaU@g z0R_&2rE{WX`yANDCxSQB&OiQ>h3~kId+wKkmG4$7$ah?^tVs>+pG``cGk)4ZR>V@i zUb@Evp4X_|Yn*5F0WZWPyIB-Y?uY;FHp={yWBQJBx}-1nzJm|O?^eHQ^!r`v*FO=v zpZT`SiKBP!?VH%=5qmgv?V{fxVO3Xu0l+Z?*IJ=Pw~gNRtl5?Q^rK!tzNFhh*BxN+ z+D`uM>UXt&Z8IF$HnTG=eKtU-FnI8cG$H?RZSde1Rd+ez8Te~0_5c(Pr8B(1U-2OKT{x+ z-pBt~fJy|YJ&+P8qxDeDp<*1twrV47lGC7-8&2WH?Qr#mu|cPVyPA*}JD1=*jsKJ} z8}HvHqW@)r?_Upou27Z!#rd9m@o7G>4~x zWy3ZFa|g@F9gKsaEa-$x%l0H#r;}ir4J3$z&2LjD(go(KYXauZeo&$2w6RG^F=6E$ zeevrTbe7;j2h~^!(?AMP8{rJDyd|Z#*QUT=PlD|(4z3{LzE)q)bb&mhmm6fjP(Unb z{;i;KP(ixs!5(LT6)CU!XD^Gs-ahc2e!aU7*DvT#yIqBtIF;t(lT2q+mLAa@x4zAE zgyYu3TJR6xY`r&PA8>x!8Wzrl^|QM3#+K{Dk-njw=2Uf@`}5gryH~q%5WCMXA52AcpD+pR z&WoJQ*x=y}=lbqv^&S{Ylz9l6_`cHcLBjo!!lL#jXj;8zrQuQFSmFzw=%>>#sy#oe zNr=D;!6AjebUs{a7rPQla14mtc~KTMw4 zUY&^hA;m@QHjwGv$s>MaiAx}n=uFT8s8nvib`~_f3lcf8T1Vj6A8|6k*d)$leP1nn zEzb2g>ka!ACXX>}tIkXDEmm1K3Y?w+y?HYrRc?{HSrm)w@tQ()R?rNvg(jG={jXJneJg>lBuw z1`~UV!k~0suV-vWJ+)_Pse{j5w$rW`!c(8ZmS6-?fai54f9566r3dJvXe|{sKV0r+ zKr!k;=H0BsM1LJ)($@;B5eB6l=Mj@{%0C*owY>~74z)Cz)iIUqdD&AOY(id2g``1J ze^^61wQW>o3T3L4@~lZAr5r9`y>iD8s9>`CbWCj-E^{qY`l&zNN0y_jS0`xRv682d zrI5SF11OvC0o!FQnXM{r=hjG(0uJ=p&RnHOl9bJoR|6Oa)XNewFJxB7;ZFCnyk=OH znt1hYgjAkHp7gVisrl9z6_VY-9J`RG6cT;>6qY?tDcFmwIlfy?8%E16%imM01tXoK z7-sPIS-Ll&h^R-v5ZKS2xl@aswu-6&DvLa809!xnYo=8tgP8J$%w~Lf1}3q5Hxr*p z_Y|ikb=;F=Rds^=T^LCisXB>hSJ}JY_0BUwstBbUv#f&`d$M}Mv|-d~iu^t5S};o1 z!*VBa4n>Q(6-N)_OqWC_rV#XqXNc4IL7A;F3l{~f43<6@&YN;XjugjTv}pHCl}|MT 
zIJZ{savpmM+nd`EqeGcS0i)RTTrxw;qW#c^(MiASw_F&p*RU0NmKr9YhaPzi8J*;5 z)UXCjX_g@M(bW(GWwe+I(7)&(U(kFyJgKW{VzyH+UD2GG)KifQm+zqHe3;I) zR%w2zr6I zF7l*tz}6m-PK9?VlAmYmsmNMP@HeZX^psxp$g_<7fk>x@m12rfD=Hh8lsR`4mSE_} zpe>b8k%NbS*Goq7iOrK?nEy_{A#6a-)kve8y!Q&np-ob&3B;H(DnjW_}+;J9*OqE8pSuSAkRc5tHqt2|2 z0|r5cr!CU6_IWr)ia)cNHsM3QczBHN!>5F+MVkQyWwok{58*Kw`?3JlMFczpGhMc= zx){70gkdV9ROJlXeToquYRaTj1rF`pUTO`}Bgr7U)6mHgzu1{|g7)ZYYaWMmrEa@*Y^1l3 zk-}JUwFculIU=X9i`p=eZIV?OmQxGH8TIjcCLpvHxK4!yT6>fa?;YClm`FsIA_`wP zoQi3zehQ<-F|Uf`He|r4aEDh*bKx}GEUSXpaSc4lHsdNQc3cy0x2}lw=>bUgICl6Z zM9`O2Fz)l}TVo?Lv?Pad-3E=3cru*$s#PAGbCfw#Wlv|8u6$e-2)5*2Yu))$)7C4? z!d87XS_^oZ(=b)|Q^rYJcusxylsdjmU-^N>Y`0&dap#h~ZMCRyzFI178y%H@Kn{)De;iAk-+34Y@4qR+wD>xq$Q}+a;JXuID3w~W& zwg_k;Ak;m?G(xgj9ZSvGad%8O8w0;Okk$t6e8{%Ot}fZCU+ZqB{5wNtRy`UQYm$gN z3*y`81P?r!V}a3n-bk#g!gxU@$G&^z-FrYpNNbxzNFU1n<3;DULQdIP^)ZdvyAZ>8 z#xc`b252-JCmru^M4aE5XZZ|r0{CxU=ofboy@(VeO_rXyg&*GUiMq(V~ zHS1~ALHYQgi0w|>Sv%*4Axe~2)Fc+>zI3H(llE9u~)FH#pcLN_|T8R`mdDp0j6x20f zNB<`UsbB(yEz_cI0)?eb-y#tr1ttC2Bq9R9r>z+FU$Uvh#6PEgp@k5Jz)(ygl&-mm zzyyp5~6ygxP>5I+q0|VKyK>eLPlY*8+(o1oJEjg_H@o1tx{+0mg@|k4=4{ z1c3#cv?WpfL$#&zj|yfLnkjAO2YV(2ex(Yh z8sj&+L1PT4Y+Cx0PYVwfXHLy``MllJkVUuOhPhw^{jY*pXCp$0=kA^amC7T!y5qo( z0zivi?cK3Bb7DR=z~cvmOZMYHO)!xW8pDax9TUU0#-(gEn*sUnye;AJWX_5hibf4$ zPp$EGC*t&WPGYxuz{Uc#;?JnVTJ<~KG52ou#|)nZ8w)Ip#OAf*_0)0=?!bhJhQ&0Y zF5!6?9bzy|v`3+eIe#761e@fL?5upWQ*@5^6bgPU{s$qLa1~-xEvjxt8I*n`boh>4 z(ZF|zw+_4*GY1(5I~)wip9Wtf|A-UCJS7&UYLRpnkOVG{KHBw%LA}LLcqwKC%Fr5b zqtz_C4A>J#pF;#dE~?SE=$z!K!qGQK&dV0W6+-#k8V4A*R-daoqD31J<(JyKk1&)( z2taH0^8be5B0>PlB5R@$hP2s^T9F>HJ$6x;f_)65S@FLycBPK*m(}UyCPHEETp+ep zgVofch9aSkg|`9>y1y-62Iz2<1~E{A{v8NHLPHbW0YXOiZp;coD}Wn*bK1bFgBVQh zA~%T6tj=yFrW&I2-!Z)J6CqS*ScUk797J?#MuLCZR#3oC*nFGy}mqX?q~@F z=;ri$`$LZfBxWcKmD9frq!nhJ#9SYDEc_{fxt9#)cEhodQxX$%lc7jC7o4(`mT#;x z8{=3=FL@&|c}Gcf^*J=JzNmeX{&F`#oC(2sWgF8y{)f-($Bl+ukj!;ur?tG@WN<>0 zPx8&nMU8oq566zzKq%=%6aa$ZJO~r2b^S7|AS=1|daWZ^?y!6!&V{_)_|g$8dlNB|Rq 
zqIM}$024clXIWtz&+f^3%bfL#PKkWGa`j{R>-_f$k8!Wi{yA8(efX8ZDQTYq-jJ$kCQh&``L z`lc_}6Sal-)s?Zw(^0fD7)$NFUs1-EF{LWt4%fK~-X{55sEKx)Emm!A94Fij$SPtBB7c@>tG#6eK$T`!l8?e%SJtrKBQa zcmtfNR)5s*d?jaYl{A@u--3{nvX-mcjg|BLTNrrU@Yv(;fKgTw2A(iH2EH(zr^DdG z(wXrEyW;ZlgJ5sGN&3&XKd{2aNhNLm7%u0}b@jZXqjUOtuBu$7&n~c<+9cCdX2!k} zPfJ|?OoL?a9JCn27uV*`A?maN~I!|eP^sRiY*+MUbnAl&)g!_hn4$eq05OlRJZpKvF#!ePEM zcPdd^eC}5A1^dsNDVnV!iFO@tSN+UZ{gOO)9ysQwXjcp%LoxZFR>i=rWIwylGI5&Q zDca)Qw~}?hjab5zXK$ZdG5$y-j32(0sO8 zTm&Qi@A5`2uK{h79SGSVHinG6nYB}dun zWJ^U^zLj8Y&znkAYZZBRb>dd??eAaa&p0bpI^F=CQ57Ctt|;8?&OITT_-pg6XEo53 zjZ>Ijs3?r<%8ix1)XH!0Zd)s9 zSmNbX>}fUZj@{=ike1jbJlCb|c(qDL-lzvBW;ss`mkPb47oUk{$l5)Th5wi>ck$V; zOT-;W#pW&bLg0!+BdSZVOZgH2PUUjy580PK|CG%l`0RJY^}WYEYl&(DR=ht5Ao_xl zPA{%~6a+1aJsFl}@nM{dcqzIJY=xFdly0C|O55L2axY z$ZMd_(h^UxyAWAtqYdM`1QSmANxMJBgX3e&H<+&$g0r1E^0(b7i~>%~%Aa`o0W||p zac6P|$FogCp30q>vx1kv*?5_hU-`^8WYwSIzQwo(vjI6etkT= z=fzW?^EP2f4>zd75S7unE!1~|DOa^8{ z+7$7ytM1?_v%EfwVMVt<7oQf4PfI*`Ucsq%@B|pYJTonCrOKj*PauoOxN4sc-@vPZ z0e5`THKynK5}S?j$@XmRZU`Nguv7=-VFoEq_+u~2AUTjzsN>~BVL?fNU#Tc6>B^it z0~(ME8NMNH-{W;0G?1pb5Ej{O1$yDhQOPNy1VG9)VYY^y$*Kc4L9s*__&lv$PbDL$ zY;no;C6ze#W+cd_BvYy{klFJU#Ae`nic`(8n*MC($UXUMQhg@O+u?@eK={49^=eE* zg3MYPHXO5CzKMg}NY`2bKV8?&4+@ho!si>5azL&Dr#nRP_?!lM#f;(TMgWvB8P*~R zgAhy!c99d*j>nYT41`^+xAA7LbWBK7#f!|sctpcS(}V|cGpIFrwf-&o3U`tCS4TEHt2*cn% z!^B}d{gf79{PbFyDMe*Qj#N~m=&XFXi&NtjR~7)m6(ywPmn$Uxfysx6k3hzYi}1@7 zRZmMQz^Q}&PJr%0ZRDP|i3@dQW4@GyJ)}-wsI!q0;+fW5xcEUgOKQ}O@rxHD=#YHy z&*LnWjzW+_3;9*5t^f-iWNKPJ#c4i7f};i07tSW=`<@2%1vCGQbKoi3Yv5P;_&@`q zv(<0C3U%<@KOHe{9;};N0W51HUt8e2=~wYm)o(Kz3qHem5}jU$NGN?!i25b?_J;F? 
z+yMPgBL#d#qfOo8BL&liqpjT;I6!y)t`ptTNLnU4WUjc2rp45Iz5-75Z1HEV#Oh6R#r*nl z*i-2!Ht>7zR$I<;Wsp)uP?x$=T8sjV6*V^SRL}H}-svBoH*cy;=ls26tF2%qcG@-a zfzUqE&o2-Xe`DZBkr?85AN8##X<7`KobHyV(r^9^GC5A*uxIM;nZWOU89E^9x^%|6 zboaK49Oqx+XhVp_V#N$df@ljS1;N0#xbQnfA?Mt*-hXJx?HzDr#p(}VaEC+!!;Z$n zF{fTQLyxJk3gC4c&RFT#c+^2iFt_HS@+H^d(n~BIM~j3%6>I|>3npiy?uJO*``L?E zj#)2$+Jd^oBE2$Tn9}ka(sKOzY5bZiO9!0!Tp3{!rJgn&f8$UFxc6Nf!to%&*GtTAiK z{-lP~)^nvs_f{Mlu0vo(yITVWDbX;9*Nx8UaTQm&bx0{XbNvD)-^L2m`DO|>j&cl9 z=NVWidkRDy-GX&4O#ZF~q{HTfAm!Jd zpM;4Z=+b@W%JDp@(OO7;OKpL!NN1x23PDW4dI2UsfDoq?P`Ia1D^9F47z3vs` z`#;+L>jBDspUKI~sFJdz+!*+*q zbkFkB9(yf%x500m>X8xO2~%stVaCBd_X_+tT)E~`ypY9hM#SNDd%+bZc)rV*iX%d0 z=x8ZHp!H+LQDpX>MADbakGmC4{``aMpIzV8+_OgHr5~QFL9ka@TS%9o$*6@ct}!C# zC>Tv0Z|2DdGI9zJyC93}3UH3N4H-lp#NBBut?$qd~HIPv)q&S6GVF(HP2 zFOO3$+gKu|LH9#1U=&SjPQ*LR_`;ts7b$VUbbtm#h`U`>R#L0>t}5mp5p+!m+%aFa zk7tc2swR-r>V4=n3Q=TEI67Ao!0?xa|8D~TXdyu5|7amb;Qtn+4aLJFN;O2MFclEx zCSt^_5y0X6F#>r(o&C7knx03_aTt^>JmynE-Ka~ z)s~(+1>hb!)t;{@bs1$A*BR@QZcEMG@l?nVCDi_aJ5q!3nM;vv2eQ^XjVc`!dt4g-kZ!wftV^P8fwp(Pt-W@YIm&})L~v*;ZsFPdx%S^YwKq_g z+JzHc8f{H9y`yXgG`&?&tUR@5D07}#3zR%hEuf7G;I7p}QE~@ppwzhouA%t21GG@~ z+yS~MChh=DlnHl$4oaFkKpW-59iWGz2YsgLy2&2YoXk@w{=mR+}oNcD{gu2 zp+)!5(i0I$yh*}IXOjfXOvThup6wsCj4j1v&pF2P6rVTqYIo8yHb>RO^(7*+Y+E&e z{wpZExb|heXHyxxt;#ylKr-8&gzT>4gPq>*n+3!j{pw=>lKf9q;*Mdq_Ww-&|4}8_ zbK9Xtaz`ajRy|zgAg7)$H9HU6*vx1!+{wwqQ!4JJ<*YF&^OY$lOZ%V>w6@2|h5~v2 z`6cOS;73Y2noLk}TX0-3LE9f}bjHEYcFS%Yf`WtI3_o8@i@OH@_dW&wXQOWXA6J46 zx&K+@|GmbQURsZ+1J#6PPVk2s!h3MPkIT>);-Zk}NG$t=W7;8J7F&trmE+uk5f#T1 zg}9TR3h9Vxi(2l^rJletGyUP6=-SYlNr4E8qq2jNQk(&vzg0Chtb?zTJJb zTVculs7y^3Rsnwqe+f@WF956xnXo3a19R0_%C+q^^|gXErnP}JpEZ%y#Z{|A^r82! 
z_`xFK*$vvt=HtayxlFSrGnA|5YqhJCtF5c{tBI@9s~f8#Yl~}EhnXDTGcc^a!fE0h zak4l+Tr>E|-9GSRyRNuVTrh45SApxmrQo)~kLS|i&Sp^==0-+j1Nns_$&Zb*gR#80 z$EtT8o#xAbY<36Hc2czb5|Tl_dFD>FjMewZj%La11cx)p5&hXYSYcemJMafgMe-wt z?qszwYJYIQlO|)9dUpLW^#`8GqwcQ?FR(<|Wk+GT{U5{cJU-3C``GbLblY*+J4L1f z(M$!8#hc&W$#`>Po@pQ)kjsvB5FU=`$}Yf)ghqV6ll?;YXN1~06E;EP@U_Vs?g?%5 zOc7-6d2cK+46TnDv(vEW{KHA_yg1Fp8_s+uz3tSA@@q&&)(^t&RfB@UCqpLByQ|dk zTOKccoY*`!DY=~BYgi{Zc7pTq#@;IKTHyJHQ&x}NJ`79-ET?U1?(hR}rbgzk)}i+v zJpn%;{66&V4XN|YeNSFq)yWCfZ!L7iNY65lJ}J4XmlO8673;bxjbjdef@pEENf7OR z;VdMWd$hfQ_)K%2O* zn6QMf=N{0`s(2L2eqg&zE3KZA^U3|EsYitDQ0hHhar-3k&bL3=JP@+upCmm# zy%jUm&BJ`@PaR;AgLDoa&JYaN2lUXKj5dOM}{B5 zuiP(o>&e#Zt-7t$t+=i1t(RL7TcKMuTWMRbwsN+Dw<5PHwo?4s?%2iNHGPi~>U|(I za^5D-{Ku;Y#zw^vMolm8^;J2Uts!fSFBc=cioGJe!n|s|61{qA25MUU(KX|^eq0Og z9c~Oaglm_x#XB*WHHIzRmjO~ayqkW!zuUyn#PHteeSA~pd&wrRCa?E2@7J1IAF+m{ zgr+<)4>J!L2^k5M50!sZ`$#^lHmo)T{6s5$M)IP<83k?y83hFeA%*h_VhU0Uyb7`k z0ty!tBo)LJxD@yl@yxxH6@)JV6-wkr_Gzf=5b=qqCta4*F>*>&W$ z{HXT=qqj$G<><$I_SKd0!|e0I-uAVLqrJbJH_9mo@eA+P&jh?s#BkWeRJD#w&1y!Wf)&({xb z%DJXV>FO1P^|sC|toq<&gHsV**XSLG;?W{UpGMVn${r;?n)LIN*H0Ph)UB>_ zd`;~S80FvUMtdMD0F2o;As=H|!L-UQ7W;aSx{AJ`$@%+L@_e#+#gEsNUKsUfBkQUnX?lh2)+!TC z$roJvuW!HTEJ!loN?vBWES}nu#ruiY%SPAXq{rnXA;s6p@l+OF$&c16^0S;xhkDkK zm#`;{1y)ZMpTq%=jCz@iOA5-9{U1s8CYJxiIT@SZxMR^Qj!f~5@nYgcaZK9x_wu*; zGqany*|5ynd@pNg(L`prbIiHbeWGA?t2o;-fDBotaZZwoGIJC+Eq?tL={5Y;MP-;3 znP3)9BjH^B&W>FQojF*9Frm+Ie%i>i=e zbu+T2N3srHnWL5xa4Bp5_R>Y`4RZ?1ulD?q?XsC~?k-n1dHRKS#GEq3J0o-8FVTC-MN*`x_U;*jU(kr! 
zsLruK^7yTEHX2m#4SnSK)Lzg0K+-s^FH^81CBPq09ZvDkpE&jT#h1iaNhf zM%j6~R%SXEGgz3onws}VSK70is`rQcd9(;41?O67fC>&I-(0hQg%VO^&gc0eJKrMR zcNY>_<=Mb9_n-b&h?@%D%aeR`%j+u7%wxHEA#@B+Ckd!AW`LYw?>G{wB9tZ9 zL5-a^z-meS$CX{@hNEw8LRKV!z0EY|Jw)RA*eh}ltJ#qY>{fd*<8Qxl`Z_L-UY*D{ z7{52OX{UUT=%De06y#v_wmT)kK|PZ6LMQ^7@UNN#hn^t{_(6~1FFAc;i@Wl^<0U3$ zAO|zPwTibxA%HV3xj+!z%TxCr^u2u!)Gd;x7lRnS>6{yJgeeLWThJ7%iB=dP%#Fu= zVH8?ozwR9OyJ&!{ z)Ud&)eR(e4{{A4{^G&`nk~;mZbe>i>z##oyBd(JN=U7eRX4GKPzC5zyD^EaKBpEA$ zoKsaEQzJN6i`XDISIW7_MaVZEO$ikQ&=W&P^!D}y7erlowzZXz@LicDD#+W6)hR;) zRe)vl2}HFUw=4^}Uk51D1*$@sV8U;nB$&0LXvl`l7Q+Lh2=11Wj}wxoqufB`QH@xH zc@L1d=L*{wzZqcLw1Q8;?cM|WkTSw$Fdm(@fZhi%ko4b zbUOqdocK+80uO*qS}SBG=d0x3LD`=)Z!|8Mi8ap2Sa>_T7qRM1kosdFnEKg#aEU{ z5Y^)c!#0RoB4rr_(e8NT6#1q;u(`1)v%AZ#%(Aze<_23bVV3sg4{}tYb|AN~X112K zK5Wfn&0sBQO=Eole9~9Zn%tVp+Q!<@+QVAg+S%INIskn3*WTL1+Q(YY+RfU^8kkU- zu%5U+aeqQ%!ePR6!gu1zgzdzQ3D1dZ6D||CCITleO`My!V=Z-&>GWY?Qh5^owNARbMe6lZGMvPsVwJ&bP&Y4 z<|;j{JnTwdpN0v$o_{^l=PB-f|B2V+tGWi9A&u@@M^}XnIJE;v4*LVr;1idls^GJg zXV==1d=7Nj`dp=`^g!17~lavM^i=aQ##8{(oU*(qTUF#~1c zne>@@akypA@0ltY=YX}Se;)r0Aw2#YimC2B5kdw_!06dlsM@P$aIHNXy454wGpz|@ zVt3^4VbZ`Kt2*vUx_YcAqmvO~r_%i#$00|4J=0zjCbIvmFZKSjz9R3(8+Z@J{%KcI{fl;wj?T_-uz$=MX^{m!%x$Os;&AjVpmkle z@OjkKSc#T*`K=alnHME%ZI)Xk~&eq8cXf+g=`@8F!0YV6gy zn-YnIH%B5@v)^8c53$T;>g0KOZT(!mgRuVH2&Ne|kMQ$Nbs8ec+BasttNH7v8=Vh` zwFIZ^)Dw1olP-HfDnRT}HxSPgZ}R3VxYN1z{SYX&sAlj6XL{-wn=YvNe_k^_ zI}$neJ_u0n|FYECy=zeW*_gOMMI8e1`67@Bg*eTens5Q++fhlv77J6p!WqC<@-v zKDC0=T-NeiuJK)t@Lj&?yL{ev8RwH{&^3px{&I_|N6XzDaoC&Nybx2ByQg^q*jsw` zWG}N|>+?=n2J4Fr8c$QNlL+vGJxt~oOXhbZhNaTAVGV1wm<6j~> z*S2!)p?mD?uEq!>+j!HvdjMTxiBa>OP6^VwL%sTNF?h=xx~nw{-PJN0#gthaLATMj z8(rVM1aGc`H^m_hM!;0;8vbR;#-qb->bdTdK*mO_kwBIb> zl>R+U5(%up)AvMH@NarzB6r>l#tgwft=V8=w0C@woQd1ZGRh^tr&yFrR;K)w@2yUO zCrxWp-pc6{tH|M<)Y(+uRR13T9@a$veyS-WOqrS~1gK6K)|vn1Egg`n_}8KCd@lI?p+OIr`@d-5SI3qG?Z$ z8ab}iZ(Tz6`Sc#yUA*==9{C=j9z`DE9u*$XRvxdEt-M(|Z6jSb@N>xn@D^2&G*5Xq 
zI9B<@!l_4KQLSLZ6TIA9z|=A1O3gb>0?Me1B26xq;j(oU9)5}mC9wA&AAwJBWbqEz)XO5(x~uf>)!X zF4n`rE>DM&X{5GMXMg%iVLsV5eFFMi-n>ObUDUJZ>I{_n7?>I*)o5!Y)YWp6VNj-+ zfz{2+h924Ml3n>o2i0HE-ft|`EEld*scX}V^ceJ(bPyemk%Rpi#uk^aY$0hWDvvry#S3bup}@P79l7b^3Q z(RVurA5=YLZ#704xk#Ef^`pGzeXG)4tyzb7tCC!An-57;nY2`gXV{bZ1_!Xi6#r%)SwxeOtN78AreJSa>NF{VX8WODvJgxUnK0WUpnCruTBo z3|)~|+{M?o!!~h`Fbn)yHj|p!K!eVepa9|?@S5?9(eyJXIO+Wi0-V*6It_w~2*por z0tydWo1j}qV#-f2lt?O)SOP$n1cSDTFj^BvL>S2>7J{WlM&C3d9-YD&!YpYLx90?U zDM8Xi;0tJT;%upj1>$VVrG4*?)M*n)EK-bd5oT|YEj}a6-bT=AK(v6ZV02h6!8`Ec zkuZIie@T>J9AMjgL@=&Q6{tnP$)Hdo;&hW`5OJmvA_SbRNOU3*=I(?b5$61oBoT&v zGn4?+NEJw=GfvS!gjpm5#)&u^2u>nS7&?ZC171TCaRipc2{?Ofn;`@ofNFFc_z zqWBA{i)k{44}^dz&^WXZ24BfSr=ZAr+#^qW1H~(~7QIRwB$6gX)k5XqCu_Y$6q%41 zKOu^Etbgo_NR9WkB(2woq5+bAU|;Qm^}@c4w0~bs(Hc(_Ns~zJYjI78x*e=llSe3 zuzD2f`w=ii)DmgHS(f(eE`wA-oJh!?v^yHY{Ro?zLR7tLI)0tJG>lxGM4e2XRGqw)q?N3dw3VEX1iUjz%12&6QUH(@ zkQR{Jlh~8lliHI{lT4FMlTMQ}kuZ^o2So-Yp_|b{;H?767t~Kka?q#mQ{5L>GMx~c zcrbBv!W|utHbVEICDD~=FZ2{*4#674ibx4cL6`@bBSwNo5b{AR!3!&AxZk#TwtMk% zOSesZwRMaOD`J?Yt>D%x0K!*Lz^8;d1^kUv6WfyS@iXl1mf(4Qom{PKn#oSw>uR~=k5|qj%fB}{fZu-(Jmh?S zi+{L6hO;SITPb>7tc$NKAx`dI@sBxOyQL4KBn-Y68VkKyScT*c%6SQ|pEwbEqPsr>3*26kQt#l8b zZ*R;Ll%2YQNu}rE&kYDlCWDv)2F29dTK}I9Db)`>K03XTi362`3Y~5bl+9 z^Q{QpW*dQ;QzrV~PYrMi$*o>!es!ub-}hwmhCuq&<+~Bn*V!&FD=1rD1KdwFckK99 za=I-|xx(v4V(4(o?#fDYt%>-}e9rNL!T8An_|9;QGKUaOGGo&1R8xHiqA0c z#pJb!4Z~EuWyXj|0ZUClRLfnt`3L;gWp~czs8f3RZ_*}pBlrw&U0r?>tfoA!;~v8) zlIeS)S)1Q~vGIM(0B)H#qKSV@wVbIrK%gpT^I|i_sf=9HkjWKzV0^aD@&Y)EL4W7z zr1L4|0$-+PHvx{!P4VVS5o)Kpu@TZ`bS3#}pWPwP`BBNhZWrdz#Eu^N88R!-4O&!2Co z5PwHD!d^djWiMdtT{h=pmhV|G)YLn^Jk1wQl@`2-o4jzULVzO|`@>He0tBAlQI60v zoYz=x28sGE0>Eg@B5v6xqL?2()4C||(_kll^6QwITI%GL2**=l2A%@zxu%Fa<`I4T zQrV_qckV_+(K5pH22}Mul&jWk>?5WP=4V=W;cT<5pW&(4R`CB;=33P(ZO6ttdK?&U zDr`LQh($M1)mQ_YxT(iD-;RkLyija4(6_Z~9Q}Odr|>wr`h&oP3jn9zA5KgfP*ZkfismKLo!< z=9Z2g;a-ZSsxv2BOhZ$0FJ(~$-XP=5Ko@W?B~b-hl36`Rdz{>iJJfIvQK~t?dUCV> z5d03AvLqrS2@M)vDRO$7@G^eR@ewWr-@;7i9-R2&hp(R{!sL6rAA@DQ$yA@8j%6e; 
z@YBoJ!ZJg8g2s;v{$#yqJ!$>zddT|S!t&LhS;kpaS+rSQS?O6yS-V-PSy@@1vqZDX zv)r?mvx>5qvxc(#vi={|{xT}AF8CV-0|a-6;K72s2X_dX1a}YaPM?I}?hcK6(BLk? zHMqMs7Tl+K=KsDkv+msc;ja6o`|PS+^{ZXGVV!+WNnj2{mF73iZ)R6LD}5_TD;H=% zh_}M!OU-#L-pvLYY(>d)O#Ap(rQSSk%?6T{C3CY!`)*g>-hOR4t8$`%j6*r!WctDA z6@j;6TjHvWLs`Sr(h=FEvbSYh`0Cfw!kY=7!?w#+Z;CeXsv$)-<}~7gcw%@$jI;V31ki-0B`=EXWiOwL=GaC4=RutX%uFy%!~ z&RSXi<`X2+k2sNG%d6z*Br-wc2_o)^#O~8F&EQx~G5W-9@y;azxRVL0o)3Aa7`fA@ zU>aGyI&Wml%}bmfY1ij&8tmlnlEFqINdlh@33sPY`bi-Y`tu; z{Ed|Rg!6>=gu{T#fNzz3m3vh|i0K&e+vTr&(XqS`o1Nj}pKr;9`bq9~i*DcMipb#asvQk!>eaI#x2)ipOFJ6b8ok=Ja7p*_Z<_iz^Kp3DZZ~x=oiHsq zB{>b=BF8KBF5#~H&i}6FF6}PzuI(=VuI#R*K=+%*yz{Rb4ZyrC?dw#j=}d!;u3dS( zhEQ4CjFNX!qTP;GlSZ$OHJJ@dZIUiiIr^-OO?=(LqKRE?y{=F>ZaMC+%-O+NhS`!? z)+5&=w4+Z)%ty914R!s3b3?V)I(xc=S`X!@zrxFXk8qCE%Q_iI;YyFx)fRzIuyEK zEwBz)8?0-nyQVFw)1lQ7=%s}k-0iIu@T&@!6_=Hw0FR0Xso?4Dso`nw>2TY6cDI4r4Lo!^1g)WTkb&qQ?QfsX5H~P; zC;~+>0PJVMNB!IWGdA8@$$Demdeh|E;U#=Xm^*Dt>dIex(}o721!2glyQojLa4)%e zbRGSC{PDMITc0T5xh%_Z4@QB+Oh)CeMQNXVN)D!-t%ck&5XLx^KNkf~Eq*(7brclL z%otPl0E&{Q=7`tP++_q!GtS3oEK9kTQuml$IWui7Lu*F_#^(39Z}@M7qWe~kA?{8s%`3|%_9_j@OH0RF*KQ9j zaLYeVC_So#`ZJHlT=rT}m(e_8(+^nPYnMs+fVs#lI%#e^<0J5hVS zajY>)VAgBa>(l(jvG$8IP3dsk^)pJ=7$51F0hT;3`3Z^TvtSV}rDt6` zUyJ*Wrx8KG8%*``>jeA^t;el#YA90R>zhfwTj@1*h2ueBm>gaCgWk80c5UuVW#MaV zIkw_0-3F%nNXKu+M@81ibCJEZ_Ho0{MPZyL!M_f`|Img3o1pREFUOX=FY)TiKn($K z-=I)_tH9izi{62_5BhXrIv;KRq&4<(=PD`)!UOghAebDhE<-Qk9#2-!O?w=tEx9P| z_IH!lrl|p7%)75`AKF3BcF{90F-05wzeR0D+cVE7>h~V0)-FZ2)smi$&O=Wv;Zuv| zezppnBJBQ}^RelSm@Ckv+p$muvQ`bHxF)7-@}l{=9u}J(R*qJ5S6Wtrmn~P4SNK+L zR#aa4g(_Sz8ylK#8ZleOn<^U}nkX8<9PuE9M$IO^#)g)g=IC|YN3}!|V;&nhuxe;s zm)2i6FjsHea@%a%g7Ch`y2y#har=JzN&6890y+R4J?}ps5e86n}f!v z_bp3!K=6TnWLwxcWf4Hm6~CEF9$`MkgI^MP?3P!{s8#6Uk*l_peXNo9Yc`uhFj8j! 
z(4K0glGT!D<*7W$hi8Sl!n|(fNSo|N#-fR@w)x~&d6o|%Y73&a-;MojXIu2b30!&3 z1D4IU$Pnp`lY7Jijmt`rw&{)bY_DHdK?_?=;mL4$>H{OwRy#13*5Xh@aI#ch{=jN; z&_1ryL{Z~>(qGP_&tkVcuI+WBI6M24_SLbYfN9%DWAZ7s&I7PWTsG83yy4SWU@;+^ z5-5u!H_6=R*C=pV5?C?O|Jze;(>&T-AXM(tJnTIoTwy7plDPISm(W|7J@P(I`l-M0 zXbRfxJ?LIW;1IyXXJ7o1JK2bm3AT2nFBO;lO|U|BIALWNEpvjvjl!3UUB?j_rSvV| zMmbMIaW<^3u*LL$#}P3+uVm%g`Mgl%-yU>$ZrYVRY|tOMvt8K{iq_Q^#XUudV%InL z+uguif2~>jqr!PyX|@y$#u2QVZqR>xJJzLe4G2BE4(FiM>pyM}Y$#l}+?GU8C)z%o z;bO`r-`9tO>G(AULH*!qK7%gk$e&Mb5JV_yY+n;+M*;F)npR(PKaZa*WLh}A{oHTC zIPZRr+hzeB33N6o)cf7af4NZfTl%BorBiIyv8AG_lWzna(d(3(b%cW-i zs4^I==aZ|ieq9vJk5Pqdp=aHDU3}Y?TfX%Ms4<&stfJzB^_VZa*YXAJ$D3}TJ%)wr zyEuK-!=+WkR@Fg};dXyGzO4|2XK370JDygy*-~JfzQ*BFJE>=2+|%l5KNB<;g>ijG z;rij-dL7gwnPziPw_?6=P_0?xgC3=z`0d1=;ZSLO&-6IEHP!_(sD(m55ECIc`k+?b zuKH7$)iSe=Q@B;=)?vzB8tP-xbyL2E62sW~p9@oAoJctShV zrhRzIpa5%D9TR?MV_Y~*{?5jvjSSP$Z#F59po? z53WLgIl!Ao=)!>Q{%EM_xa&nwq}qQ+yYNLV=%m$6xgPm!9Eo%i^9y-aaY~)&n=DY6lMKT-jl5yLxaz3AVZ~>7r~3 zj!OvaO28l}T}*MXz6bOh-*yS68M=97S~7$6!_T8&HckH}R?PBjvkoR$CWZxfb5RYo zs^ij5Yv}J(uqr6dZtgK%)UfPrgFcCt>>y|k9vabqd=IkCHd=x{!AgMKaCG)5)URwi zJo`@JEW2|VxzIkWz|N}=*6ZF?*ZJEuz|=$Yuhx#gA1B(j^sGJJMbS1MD|e15T$6OX zj3z!l^W6Y1>B-LyfJ@Zc%dP_GC4K9rqbY3ddABv1;(>9V7fNBedvbW?7QD2qt!f}X zLCx-3!`r)ka(I53_rdinZ*MyVDutuvxYbG$xH02o7{emI<%%c7#aPJaScw5)2*|YD zB_Cp?tp3_sN9Bj2;mgx(bX551cN~7)YX|R%+#Efs?18e54zKjr#_vxGMh$!_>)gvK zZ@>f32x22I2ms9sNy^|O#lRaxlV`r$xjm5L7~?MRQ#9@cz5J!?`la`y6XpMmdmkOn zWdC*iV16NaJgV)0!2&Kix+n%;5MCv57uMd)Km9y@2~FL6Rg3{_?-4-x9U?cn&Kk0* zFL1=`{{g;#AbG<9)e!sWPz9l|eNh>~`#(aP-L0)-ZUb=tGx2{}F#8{X_J06S%K?%Q z=sXoO0Io`H_eBhWAM1Y`)R1uY%qH3pL4)}$hR~Ss7vov#nlA=t;?>^ga8``}sLSZj zS50eiwP8>#(c8d_bOr)I`J$`f^6C~{1%=nI2R0Or+c`@Bj$woF)#m?9iG4yB3mAC; zexN!2FVkL~Q#t;WlyMa@(*O9wCH#LZ`~T*@Ixpqr{}=SXC5A#A-u%B6yd?Rd(F7*^ zU^UNFOx;y?Z%R;OHIKCkD@b5rH4jnL-cYA&On723_f^zhRR=5@5=L3f9Tl~g)NAS! 
z@>t9*6}=YJW$F{cSROa~a$pPAz81Ugt`!pwJ!h>Ij@+ ztLg~+VN(+?b!L@t^IR>uQ1dV?1H57G>Q(pYH66p0blPd-vv z>rJa$1SN>Sgac_xL!QLyhk-{|;dionZGql@mklq{qbD?`_Hyz!>elrM9&81sawRwF z{_>V;P-PzR*b39VjR|tOtkw)XEoH?8y|MPWv%u|LT^eU@NDxsnj+A$Zjsn^NgEa$H zesVFOR{JoBMA`PIF(GY5c3QnVIH|ZmD!M)QS$dTGJ+=EK?v}cQETE=XvH&N=Qec6J zcg1F1UlEZetQ7&JN&ZK}uS-Z1{xl6W+0=a6mW+e7z#j za#~-1_>V>KI$dU3-TGSV)n=06;)Sh0jX#R7q@e&LQ0qpsC=U&%ZBdDNeBy|pOGp2Y z5&FtthB6?;oYwW#uMDMEc$HUpJH}Ur0F=@6%3yhA=)R)Y<3J4t>f0jH>{$8=rXdHp zN$Rzh|A8>PiqJyMvVApc>6KRh-%?~>mr?;`e0*h$6Ta%Fw|)R0YYr7JT$|`AjJjh% zX>F>n^N!4>ZDn8Qu@OL#t`UfNmgHYuqbU6po_5TVXTgTM1wgkd{MFYD1H}vWuL=xU zU%~C57$~o@m9K7f&yGlI(|MJ(k%G#$Dawo*&_ivu#e8+sX{=hU@as0UiNC@cLq%m? zMIBxj=pGP}c1$hEz_X+|En2cLU<`F#_^TtHq@Lo_B7t70;ky3`EB(iJq7h3F>h;Vn zhLa+@tQ}>D-9n)@`KUE5B;9sonJ#74@WCVW982w#z-@n=cW(n9huXxqviK&%du^Pz zWB{agg1cp$yUKP5C%TkRy^;HovJW#%P;VMrbjd7KPn!sPOD#7F?VncylAY`5CEibZYmm|Of=I`bN+Rcabokv22Cy976*4BnQAQYHh3DssCj`ekdg z*IFIF2666-?(KegEJz=%&aL}1h>T_OP1k2pf{@YGmO zBB>(aoNz+3=azI|w7U`al85~n;eMRm9`%Wk$m8cTQ-r428qyZ;1Yg&!^*)ep{D^zW zQqxg{-bbq<_}vNF4L`m&hu%o<-?lnCYtAEQ4L{;m9KG5^@}?u6`MEngt5?lpPotSz zA!E2lfYV_7U*bO6G8P`~R;CEWlgATwR&C*{7(K*&pRUGje?hQsy`-YT(|p zw@Y>WYQi`Sy|+WhN&98&uImC9!6SiQX_se~?}unQI+3h+k8>Xesb`B*YeOMV*6YBm zVkI7>ozsx`QW5AjC})Tmttiv%t(6>ch91`*@SI-${%6tB_gpgF^odZb)1c&9FcO3H{v|G$Y3^Z zc@G#>P9w>22X)%~%KpsLKj1GsR3007#Bm3O+x$x8jK39>%~sw+3@=x~qUYrgQ*%%0 zNh*SDgjC*>3c$;0SUBz=xth7@dwxXp7S;-$LzmNlri~-^4IRcM;_5BHz-jB(aQZXx z8GR^I)DUqrM~ER93G1E!9zY&43 zD~@T&WrQf4G5B5h@~<}JRCJv$^0^S$L)q;3w#ldT3>22EaX+P7fTkc0_a3mTrvYti z41OJ-5rLw2j8BCC_CFB)&7kPZ)wrRDm`|!^;$25^IP|53(BPwCeou#7LXAfwIAE~D zDsIK>nN1--Xyip2cnrV1C7d~D^Us5R{Ws=g`9f_1|L9@NV+qY>hGT1KRx*qkHK`eHZxNHU8aBgu2STgiH&JD;GSNU4 z0W0PSkLC^QjpB`9jIU)y%1xg3MhV6kaNeV#{t*x*Pn{64g>05L2zX1~*ZJ{aecDA% z1&YkFlDzbGe~iR)Mh|?vCuwsg_x0iW>EO?M_G{!_`$p(#*m{1%4dVfY`l4udyqR9` z?uYAf{%T;hvyygwt~1Nu0^JVbxAuf>_UI2@Oan;M?YhNMKRloHzR)u_h2Rl zN91Jwq#lyL1x5D|lLm9~VaT#?fK}tMq_odjZ%>aptO{Sgz8qnmFHr!4!IIDJ=9dq9 
zfJX#0D=W^0{XJ$Xfov`KrZu8Fy!g!lE~U5n?zhx_bMkmEiCuC4__I=b{^BfhTTSJR2Jn)kvHWhD*nR1EDt~O~<8m>BX)VM! zOJnt)bLD6)R6a`+?6Cn3VJk~WS_@BJhCQz}MW6b&7GSjm#dP1=ad6XHjZNrfH;KEv z04>qappT#Dref-2+WF-8|GnU^LPUdk%MjPjWT;JJs;gf5sP98yeDl&nK1rZ$q%WJ9 zp_oLZP=3b#%zu>Hqdkr@<~_Au&PO)p&9za^M>6JJ?>L)|YS{+dInwFAvE)SLk7x1c zS}@^{7wAeIAs#bhb)<9o$(1Tp1FIccE}_x$V8&8|aV|DXoi%{ai@FeEE76Kg;fZM{ zQ%?yB5v{>r2(p!Rr-YHC#0h{%`N!%H*weg4mWJjm#`+C_XwU?{2T&u%BGKT4N?=e0 z$69S8;r#4H?Q_hHU=}s_#F{e^-Y)<@^84I zY++TH(rmbWB*7nJ*{G^;huKW~2+eTl6L6Pr+rHE7qu;+_nIN?K3Cb2FdV}CEZp2Om z8{}_n#6JN;4SZ)MLcZ+$!2T`*#j9jC>cU zdXvxNm>z8}T8*8C zz4%9dc;%m5lrS{@wdW_ zO0qxiz8FRjCNo4?0$q*Tl~WL&M@DI)q?o}vC@qhmnASQ_S@PH9!9JQRogivvN5?aAHpg@Hu+~CXBt^QOfYX)m2ev0O)@#}WFN(qwi!h~vO;_= z_ed4fRroN}UaV~*a-VLR==#iFm7z-YCM6S>m`8 zQkTr$7)lc3d4pCc-Y`niY`Nd|X}QTp#n|#vtq9U04a8qtfgSCmb0fBeu1YxMc}%qI z3tx()h41FvS;6Rph=@_-Mo;wb<6bh{qxnS<?qA+8k zC~TroHKIrkqHrgoXyg>w!Vm`IV|Md$;X?ls6SAr(E&6h>7!lj3fc3YV8rm*lL zriYLA?3%jI!;X-s)wu*?EAiO8aq`zuw9vMPI5S`hrY-(BcmKj?p zJhLaR>OzY%$zQ)yy*ga$ojbB(KW7y%tVNd;Xl|-mRW{E95p;(e?y@&&s9KtT%|FL% ziQX~s(h|}5j&Fil6+BN3;kbV56DHh~Wh$-3aShAVWnfY;&jjJQM(_!5>lvzYh6qE` zAxyq%@m#|^g!pw^SLr~wG=lxQ4y!A*;ATVodeW*+wYaX~Y`S!+c^l^up+P;S{8704 zp`ZC9x%k6f_@hyw|0P10HjW+5TZJq9%Qnf*qCDwaxmMvdduGlnJo#FMDt_E_`fRp2 zDz);hBF;g3rYw4jwu*|pp$Nh3M%^8#Nhw5%jIokLNiok(G$Ga`JcpF(n5W(T& z(3Xwvx3J6P9-V}n;f}Jt9PMvCx|27@ZaJ6XJ^rkm?KBFt!y1NGc580goYmb5F2fl{ z`)rapviVZCBj5D-IU4$MwZq@^Je~i#<@FV6hr{fIJE!!eCyzMo_5il1JU_G}u7#iW z+-w=&!h9qZ4Ut`s^~3`qiAKn7$(|8?B#7>b-t0fyxG> zKfCbc%`9juXt||%R(8wtCV^APmX6;wl2fXf?K9Xo@WFgyrXCM%=u9m8Zxo@ z)syEL<|PE^wmH*z~eV$Q0x1L+c+(c}V=+7r&(O#!i!R z$i>W&zNGyf3X%d$-f+YaNukjNVM&G>_c}@h6N&HRP$LVQ;Bll8Nk-82VQ(S(b$?G0 z`$R*B9Tb4gk(wewgiF`^r8|M0dITA@JFe<|A@*#qs|f-}e2Vl4oh_2C2?j@UitGqI zBm_P{ctebt1|YzW>NPPz-jHIZ%?LFQK;2Mi#2!ZWFoCa*(~>Tyo5ivTbqheO&N`#v z#WLuXF~QwXy2WbmwJ<^5kiEs#4YliaH$lV}dy6C9tNcD?UK&Ct5u$2>S)DvD3!(20 zfs^8QiV2~7ANpYv%PA#xKNPoD9-9BaFAYsi!9tdL@07$ZO9RbO zF?%1_D3*^y7|Okg6RT{7l^pu%efR)f1(s&0O|RP~LagL6jXicls8Fx|Cb<)Mfc9UA 
zZ|?UGG~`&Ap|ZWEo5<%0o-!Zl!$M3p;ngHsX%s>=du=w+p`}P<6R>xBeRBPaX-Y$^ zH{s4>Zvm-kth!#y+^AcLXW5(Nr~S$b~F;OIJ6u%^vH2E3~_YC zakK()^ziCGzR)BNxaV^&WPPFfI*4H=TZ6lh3Xv#{Jv7s-Az8?rru2ys9yG|OTS$jU zTF3gy+tpAkfJwzLXb@w64pMEaIui+GOjF6mGy~gm>d~aqQFQ$=rh`t~p6Xz3O8OYw zLHT^DZ&J#1MKOK($O4Jnw4-t^fBk>c{2KU{&m)kTAr5_0A6qL=@;50%a*TE__Cj9B z8RnacU<~S@bUt-cik^gEtfIW7Gx32$bDaIQ^WRK8@d_Hv*s?*MZ5?N_1Mv!a;X(9# z!X_YgS;{+JB%iG*by?y*_IXN;Z+mg3 zexhQFDcP3hjBJ%Ej3?eU=ZqhDUpvUO{q-O4&MUK3d^d(~P& zFf%3UAmX;=>WgN!w4 zk76Ji(O4ccmb=t)>W-MJZOxh=wlp2FO4}YasgL6IF~7`60#(0DFhq1S_`hce%Vh`x zOcWc@8dD@LA{ zvc@r&BUXzqVMyRGmryGB#K1L_QIpNg05Dm@?jlCnsQkbNwJgq*Isw2~+T9H6fm*HHbp~_V9b=uk*y)H>d)=Tw>LB$$w zN7^o*S8X)`b4SsxuwE6EAX=BWC}UUTt_t&o5?|^#-@inE+MY20rQllCr^Ig>1TaPj ziUoc~EJ0$73(Lh&3ofxbM8cQ+qwX}Vc1YxwAt;`izghBm`r9E|q8P2ZWr@od9Jefm zLiJ+QFN|)n3VEz*2VW4}Qit+uifs>3+!C8*^o#6O1*Vk_3Ea|~C07bg)NV@j7^8=Z zP>LlEVXk7lrGyK!R8>kWr(H@z7=c0qRoZFA(gbhmw!&3ahZ2u3#9x)$iWrL(O2bME zMN}zDRG>GfihhNM#cW@MuM(}Z-<3Fg!M)0~E(EEvd|{fAq2|t3dza^wmMx!_`z+g5JlsyVsCRe_pHzTgZpGX5Xf7=d3 zpzoZMXA2)DpAKLy#k{eEQ^+MS3xX$mzL9yq{iPV4f3>f6Y2}S3oJ1}gU3B#g&Kp)Z zHcN`+ugs;HH;Ub-^oh^kJ}*Y_7O_|^p$aTOhnBFd7SWr1RcR=Eb;TPX7y;gWS;c?(|=_i;xmUnRg z+Z+QD6V0cXj6R^h{CrRRl);)Cen9Ao{1nGpU^rQMz;~&8Po^)EJau}2(i~TjTXukX zDSMBrFFE%Yd?0v_s;>e71tj}2_arM)KvDVug)8ccLIVd998p3SQ3e%JTnag1$yDPJN10&au6<5wVqptBt~BDF2y>v1V~g0Y4;pFz$&8L8 zC<2=+HMN_Fi*DpgUjm2e2r+73T(wCd$Lxr!1p-%m>Yot{TOwTx46fwV{t-(^3Vej{ zRwuI=0Y}t`i3RdjFSB_@s(A$J)=(qIFtLXPd~KZepK^;?cAHeU2*lc~b2DCcgAo}E z+^x}D_Vy7A3shjM|CUQP)o#Sy0ui_KEvNX1GF{5TABcrSimC->ZSq1t#IidD?i0UD zOsLWO)DOE@E-9fFQS2O_m~s0_&7(N_Q*lS+>9Pd=WLS)`BY%4Dk|fYi!?BlYMi%DsydKW4_Bocf6_e9)qTT{G1NS_udtzGvCYo}&Rs6&lgsC3gW2n*U4jE&R@4 zM$V3zDm!Aji+qvb)%VddEX8CO{&RP$nL?`Oh|MngMUq#40>{pX5A?WUR+?hH3wIHF z*E`K#H)2^Bb=N)3WtfUk7<<>p&9Rnhu?szM{N}V{?uI2W=OC~|PBLRivLH?}7f7;% z|1WQN9&j(`T*~@t{B;4tR<<5@DfPI!wEECivz}xrbH>Q0N_fGboNg)oxW~HMPus4Z zVhK#viD8CV{d0i|qUug0lrdv0Th$Da(}*Vf6J^m~WxC)5@zenG7|~bZF36Wt+4L$~ z6jk+=BMT+|f712lzm@X{WoCAbnOOqWwb~@JNtr!k=6lr_+CuIyHsgX-s0-5N)GaCc 
z-GbGM+LrFb|K;;RoM$ujyDH2ytIHO6AUf`3M_m<`!VBo-ge|cvz5i9PfrjRF-&a3F zG~7vAGFQ3~s(#pFwZx-acvQD6VDTob^qiRAR6i`RU%w$4BR8X-m*WM(pwW94c?;CG zXe7gZ)v^nw5GUvvBQDZXrMhT=%N90$Aj^EbS{~xXo2q2YR#gI#=8bIYEvzMmnDfSu zn$#{ZLB3wUgVw(4+O6VS(1qAtWB4Su^{-mGEPUhzvlyW+AVMUsu^&?Wx<$;v)#D5A z>r?!?lB(Di)UWX$(g44ncJsArb6bIjtY1b<3yk$?Pn{q$(P|!BmWR||rX5vR5Y74@ zc4i&bN)V6w)Tgfcs$aGw!H(Z01tNL`{HX=P@&tlR1){bDetcfX6QHdiD;lZW`@;XV z0&Xt$sMl?`+0i;fa4zwv&ux#pkz7xug0N`tYWLnzN6%0>{Rrs4+Cy-JYZ6~JvPa@e zAn8Njb8W=VVC9RO+LCd~YvNeevM1%sn%YBaL|&G%$FUqBc8YIe$l$Q-R@(U_z%`ap zyUr}YWdF``RB6w`k*i62*>)_bc4yborO9jAcPvV`bHN1z+RGGsZ-8Yt$x(S(eJoYC zugS3@gQOwDWe?DBWNP}lY;BL;kfhu1<#;5ZVh^|X$8MX~X*C0LZ`f|P-Vu}`+K{-^ zXSe6>2%|?y*n7O~-=x26&l-TbbM5HU9#p-mRhv1Pqyj|01 zdx8QDf$Y=Yvv(9&RyrZ@NN?#`**S5#Y0_hj9^OM~lsJL8iSg+b-pz7UX|h~)fgTrk z4IF8g6)O^a{3G^yyJYYzME9yYS+3Ew2ztiNk= z(8ImSv;lTOjx2gibA8kw*VV|koU+#Cvo^nFZD?n0c^RzZdrBLGI`lJL9KVK)pLTj| z3!P`)v9`Xe9P`*|IbZQYhaTPHPyg6J`{aqV609Z9?LM48-N8U_#IQtDD7rCsg3o)r z$Utu^hoiTzZq**GKxm>#6#dbAS6*-+SkYK8&`a`9=Ftp<;xLka_Sx(6Y6RaNi>Kmh z7`RXOV(O0g@e_zZG`(TN?oQxQ4@52sZrCC`yKaT+h%xLH`WJpj+lt$f;IW&0&f3a! 
z+Q+uHa)-0G%xq}{;PkSEzrQfl%Qk;%E>@=J&-I4hM zZzu*3&xP)cpmhk-F*hXdgdYt+6dhp9EzGlxM{yDV=^s!9#}IQ@4t~g5WO0U9LN3k@Svk!=1|W zxjPbH3?RPa&)V+kna7>aqn}9f_!e++2iqP4bc$|MoJZf$KYo5De#v0l48J4vMSh86 z+c7+^yyJV+eI_&POTIY0Lurqz+$_7pe3X60HSC%D2fh=0Ml~DsyK_tLlQpX(yI8-M|wb@vV>($ zcs5-zs(zrogon5!^x+tuH*%VULr!!y?Ir&2K#&Ar{GDBuhz5-y2uU)~xZ6=&mWXN} zg&JPiIDuVEWFI`{QaYx`$yVR?23Xx~K zU5$g-Kc~=+5ZJ!eHI8D}OrZxx2q6LR{%z}&%y#%H>RPnr1hYssfo}f6RR(8xyhsM!GRE=iEVoGQ-4@2->-4v%x`B4x?#97bly6bQ zyOpW5=4l}W5&^2l(N&uB^bo@C062+8N7WFl_kkZaVjQ(X2%?ZUB+3CSdQ$u-=>^p>Yqh}I%mo}(CBfM=7#&AB63(9V z=#YPug!$M5=Q%5@D*EG|cv@PuRD&XHt0P=7%Dd7(0$hpe;SocOHWN z`AOcj=lHIvwmW=S<4ZUt8T!9P?s++G-;no&;C*yQs28Tp2&|*g0u%{sN~)3&Yz|CV zaA#|bk4v>$U~Kq|kL$Hs5N(>Ok}$_rk;c!kvaUF$RdPTWZXDCJs>u>h?+zPOlf|FN z4;6~ba-Nag(5Jy09yjnH55pXXfOh12i+$Z}2iz!zH&n_nKeN7P`9C+T9lQauzyN58 z@9hh4Oj2Jr2wes6|Iq4Qk|890s6_!&-NYPgXXTE{ayVnCgC_09rH*QHxU9$r(96OY zS~FD5fHITz4Rk8mMuue^`*7ec=qBJnq9TTV8x>w6=S)BSm)e z<;dT%a-)g{5afKd`YeX`9k%p4!#YX;Y>61dM8_$~vt9d!=wr1k))!nryejOaZ*EBR zr4OuW_p-p2^4xI$c>6HHba-m|LM^I^_cCtx>~OmdQ2f1325?`l!B3x~wfn(wAQ~Dn z2EMP|^~B#Py|=J?9~k|YYs9{L6T|w>V4rOX```DzPm%tR`<{weAtuf_K7D!nT6FMt z@_>^4e*?M(fS#lnc+b}~_`fLs`|_EZq(=IGru|2JQ7-HmItG999zuF7*E-3^HZtds zHfrf4dY?i>q9a-Po+B;5+)+#uMts7QE(q769I+pT{?47}AfR<9O6yBTn$4k3MzhB~ z;y$yhei%s-K;+%%wF^mA1BCc|(+!U~x>}#5!kzmFXIPpm4Y1w#(9i8Y8{Air)nCfF zU#6q{iKxJ*-qSgz6-kycMLx)xyHaj2w06iBwNSDBw$d(GS1tJEC=BKF>r9Yjl1^rw zRpX8nI{EBz-ot^KaO+CLJEme~jOtJ0KF2qn-N*Q2W>Yve^*dX!NP;p}wQcz}VPiP>1Aq{nJjb$D+wm4zZ#DKVXl^(0E2F=BhnwHdT`yKzrXtZljK z7VX+RR4=88QwrFekn7}@dO=K1Yao4J>jxfv+8TDW^ovSGeQZ_O97Dei*Z;~V2lB{m^0sRGqfexlq(wEf zZmg_SYm+LypJI{Qw`<$|=2KU!y_MuR*+o0rE-oe>uK-Q`hQQ3vmCGOt_q$rYpKR95 zkxgIgzYR@ttsO?opV;D3qxSTMUh6*pXd!ipU)`a&C|aVhKKVX(-$;JX?pdSjRJycG zBY%(-f2iJS|7oi&=c_iGV)`B$IKi!Qbd>+aICh3KI+tp)ZDv;_A`{|wv3vX*-uN`w zb)0APN*7o*_1W2ILZy$K)43VrU)OEn?^z-n-^ZvPy}X*8nN_9u_4Fw9Ah zGw1Zyet7rO#O#8H!@xhkSvRtu_q=VBdab};&j_?--LyQnT=y_j?bQ^dB_?biN#1_a zxw-@2;#ENb+VjEcFD@P4OQy0>T7UXQym*Z!W_tkYQcVo<`I(xv1AJ;(q^%Es3{dIx 
z-nDO@66#Ff)ikf}=G5MR+h2C5{w*;$Wdig}>T*~qC2`4q)_)hn7Lb~KU+wX&guU*YTN zG2IpR&ogx4Fk8~n^q@lvJ>(&I&l&1P9bI5}ubUe+M!WT7W@$5=zUb>|`_)xhmKUgk zAd73o=@0MM{ExPU2CfaAEp7IZDtyuHSoM?d8IJpA(YfXhAyI{VpbK;^PJJhO*{jXc znJN1iUSl=TuFoue(1Pj$IVduSNd(;32LW{G0W+5NArFj_8owe zmhi{PL5}OsebJMJ-?qL+)A>z?C84BOV)a@mMq zH+42AKkytH-DHaxSuV;hIyyvzeH@RYhonvYbP_x~d^is5{^l+&RULT3w;+N~bm*K-V;m zfSGY3A}5r>JL%3m-`4zZ_ae~8oqwg>+KSJ0qbDXs__S4zP$$QFaac!iUileeps~R( za(0$fh?w)e z+r~4^A`Q0|ixY!fei?X4e6}@%K-S=1mfctO&~cWzJsJRixNG3=a0hqzH|-Gwm%IZe+w0t z`UOP=?fAV9*vBo?5R*O~94@K<3UNBBO>UVZ8=ZOga&_^(Ba9zav0y=G1<- z#Ob1LE4_!Q@?h(o{BbL^tu*g0JP;=8VbOP7H=+Jwq>@w8eI6y;yD>$;r^AH>-aMV) z(({aA`b^#}{xqqouL7>H^sa5Gz0B!;GcER<;ZH`YLt2T6qr>njpT(G*Mn;9V^S?b| z(~P;oD?@Gf%H#8k8J*Cw-+<|y;S#>8;f^4Co`A`ht%%V zN5(=L@WQoo;Z2{21N=(bA~`WwcBu%nU1N@X*^tA>;AGMB>rKv1)dljOnW9s`df?i-p@ip#cA1YfTTiY%+}ihZOuU%M6jB}qFpFo;jw>eHJo!^O{pmn}THlYoW>!io!>h!r zoh3zgjAv*O;mf0HadQ%;sqozUKjb#W4iV9w`$lU|le0gZs0|?izscI;Om}z{WoAU| ziMCsty7J}6iC#9P(GZa0{uqffWm(Q`U6P7EA~=?W4bU+q(y}jZDBNP`__P!yI+B`6 z@B28c38}hL0J2*5ZyC}X<`fL2$DKuEg{NpMKK_{)o0=DdmaV%?QFP%99|uJv1lfO@ zpAr*+K>xm!VnytbqYIa8(n@10Yg*9OhS{9&8ySRo__$_RgeF;>s}!qGx;KNaNdB^j zv_72R@SVOjICVU7w3rvG3>tBAy6>}uOB-BAF62p;$KCdj?Ba+7Zmmap$R2~<^b5#*5G$ALb|R?FVaU#;d3 z!LZ&un?ZIf>|p}K?XA%$MD*bAKx?IQZ859I5;C{ewj2i%OvMEwt8P2Xhia#w{c`pN zcO!2%T`8}n&HDlJ;~sPh;qm!YJYM#o!bx6~(p}`uD z@AWS0)-*{U>FQr;Y2&7`q{LE+sS^YAGwZPib%x31A68VBX-Lb`o()rdV#vsc5C@L^ z+{KLb^yrlv3|qUWMZ0%Ra9e&n^qB%}O?1x_SaWxe#T@d4zG2jbqgkWyJ6eP-4i_hY z(L2$L%WFRKf#25OIfWhA;2E}fybHP0-g!lB4oc(J2+1O8*Eg5(j9JGDl52krc;9xV zSP)1DNEHzs!HHa4BfpS#I=oLpmLB!$`8>;uPSRFV z{?mG%`A9Na=~E9vX5;UBzpR}1%{lcev#7oM(peXe({W%g$V^E|-)<(C9PJp)s8}<33Y)v< zM)vq8?HMqXbzC$3tI!-2{z%sxEj`_GUbecT{LHI9_bY!R&*XQ5cFfBC*}vJ@jZ{OF zI(6RNw@I_C>_XgQc?zS>km=ej0HB0R9+SU?T>QEc`_`m==@|X($RhC0f^dR_x9QKj z_K=WA2a#Z;6YefwSbv)1T@XK36^4ecN@@trZi!}hSl@e78<)2W6NuuzPuSZ+Xm=Wv z1B+Bh>gLguc|F+qTVchQ-%K}Pdj9azDUOWBf>fuF=1(3k_@>;ki_9QMfEc?}(xmI# zC$BF%NY{#r@3PoR?0$?Q$|E4P`B2;}q#b8wyOwQnct>q&!ToHnv?WwlS6iNA@N~|o 
z`Rdj=fsE@PJiFk4rd=tJ8+K%tz}0G)6K1kzZjVp|pN_mou6?8b?%llRotb~LZo$;@ zDOeb%aNif?R%iNf6E5u9512JfZ?9-pFxf>8kP3A_|G9L}?5G>L%e_Cyl${E&DVcGi z$AHXlkRMkawYs0Te$E`eFo}##MLW{0d0$EA>udH>M=vB}>nOkuJx~Y?xMu831!S@j z24g3|L3_hILxgX3qQ(lYX~X1_ao7Hy^(FfZN;6 ze+BK8UZORho?NR5{Nr?vOM8w8pK zs7zWa2C>{`@?8+EQIEfL;*G-`fSKt#*tDA@Ua$&Xb{8L%pQ899Y8znalL8CIC|Y8# zA!~NCEQu?+@&7NPz9~A;E{1k$w^K~psokk<+qP}n{ie2U+qP}nc00A(`(K}jvywcX zbsm!J>>aQ89N>C=QO%{*mz$FP*A=Vg2A~ws{As@j!TTy$0>1w^Kk53gnMBFjc8*;Q zKbwK&R&?|B$V(f8W+=wz^sK&dGPqGA`nGpHm}G=}xXNBtqRCrW@)?;-F!HgX;)Pp3 z-1vRo_^IjcrZ?0P>9hN|qRLfc`eEKe`Q3lsMP`bq12oFTl)>tnRWQ#mPVfUvd@V$s z=ewy3U%!H2awmqUZe73FWVBMEr`TXURo68i2@Z9IYf9~(xe5@m>@+UD7uU%x=Y8+e z6`1Rv)_0wl&~m0`PW6);=1jjO-#hHCp6y~ZjLNy8b%&3jcvpwXONy4CAJ#w1xzP;w-739 zWbG6p7Ye74tYgy3Ch^>^_OV_(%C=qdNv7YmfuCDX)`N*_R}BTs&*~n$RKAM`R~MTj z5Oh#c=h?d+D&&8v?U#C~M#OefReiY+pI#z(9oT+NCm;P}Rwh(pL55=oxuh}@+niX>Uw=%o&x&Lt z1AqSOuUrB72rsn%zx$G9{qp~uL$d#GKH;yWp8fCU&HoO*yDInHqt|nIbTlk?0NJ_deavmN)j3{?QM*@#Pq@j)KE-@J_fph$ z|B;g1wozVyzhFyKoHE#GoyFBGc5?hZN+#$sES zt3qnSPx4(UPMMg#Ou4^W*7H`lWYOk%>sYI`bBV&nXwm&AqjJ1cHw2`(p_?%=2s_WD zb{ZU*oq0btOwqEmKJDi&sZuS`G?CNoDCi!@Qr9jvU1x7a$Fu`IzRfMF`FfJVeo;W} zL|d`Ig)F3N3_WLIoRib#qw9dET`i^%;=^+f7>+2)WO?4X@lx*u*~PZpx`q3lg>lkQ zB(~FJ_w%iWL%{L4egvX=quY>am}b+&EG7s( zJtsl*W@~~iQH!ZyZjqXoNmZ8`CU7ObVbivr#pjL>@6NsI zn-_hKp!RNa!?@h&?O<&mp*hRdT32tlIVW|SSIw??YVch5=9jZue<;MCBHc2hUXy~K zf=`pqBID(DSdP`?;>EcQFc+4SH4EpBd&@(-S-+q}q(|J$05TL6>%eVYoSYa<4LK}U zNjKCVe6Cd|;Cf*OFRW(Ub%SllUE{eWqqWQSAhYr9qT!XPero2mt)pxDvdpXNSS!rP z)aSb-V10cE>5p%&hxUkik?-SN(oMsOthsi#o9{)hoQUNKtp-IbTL3J+$F(%9L!}0H z|MhhVXA`D(#a%{zV4FoF$r?`ri9~;?v=4HS)TnJKYXgR%t|B9NgsmO6Jm|ruKF&J_#?#0gIM* zdR$DTb{wGynJ0fIA=8nsS$i2MQhF2b+S4PZzKP;!Uq@Ogcg5uH%>=cZ|QxiWUyzHZDvcz#6vc(-7U(RjlvBdagb>F#jq?tMJ1 zzYw3NX;Ccvj`kS{xS8s9+OFyvf5A%vR#sxtUkGMiBcAT9A{OeWaJ-gWwQL>_0==39 zmygw&5W$%goI>Vj|4UtDA7yuFPiuVgxEtIix`s``AlO!>p3hBvl-169+*Zfu)FN1? 
z;4W*go(?~1T7t28vMUx-WwvdF;L-3X|87gQ0Y2b zIF6f3Rbc|gihjxFyzJT4{A=9)_j;nZqYg!r=}ESo3;U}Wa$_c|;kmEiAcz$MHYVDB zd(wXl=9nED`p)?t!YoqFKnqkMK@>>P3 zk#6AmdfF1W=-pCJxxUicaK!{@SD9%iJdHdjj~C$7RFPwz^1v0!S$4x!^e=fg2~H-= z*qVmPhq97=xfQeVuC1nqFSU3L1ATKSn+69zHV4NK-|hHzoICS4$xk-?5A>df(9a9c zl8|6_Z2vj`>2_8fnwpT(ZbHK4^b|XLDE<;zZrPzz=UF~jb~Uj9GxY}`ZF%TA`|NCx zI*H8;mw=*TwUF&DH4nCEU&)cpE8HB^Ba`;W^smnu3F3>C<(9}zCIt(Nl(5FMrw91} z7pF>>Hqc6_Ei5+2&+9_efV*Is`WUP&+g!`pu89iJqRBnv-f?`W5~ zoTO>V^6OQB*rg;I%gH4;WrgiN^DI*JZ8Y$2%*H>ZZm`c~ZkzVx@4CBP{TUcY3-Z!q zo)Jz6s1j0gHy(61s@vVt(BgvylwmIf%GbJpuiFprKI;bZt}Os+%WZ7bsX_zGd-R?E z<57Y_E}Ihv-pUesl?zZ$IWK47DG2EbOXYqvu>SVN*sg%MR@=fdeBi0@8l9Mswmm-~ zl@de=fL#VxgiT-C;#tYm?U=2jShQcNzaQC#%$i}gv+lWb&zg%_K|I)iYT~@0IYo=J z%41q{8E*tp@B)atMS?Y#P)KLOw|#GVD6J%$45z~C2d)?HaUQuhYD?!MqsEtXq_5W-Mw?Df zr?pvKY(r?;+N|EnGhhLpP2f3M)x}Rw@`tDua~JvNCxBOHnO$xBRcQ<5&cND}qH=7t z!YS#={eoDg=NCGkLEZe-SE({swejNWK!`HAYjLO&a zfaVUu^#;pxrqMm8RZ59axRe{8weeHr1I*WDV!86-{#;-3d_%(quZ_#vB_c(d72ow4 zu10&`1&}1Or&e?T@<#0R znP)pWy7WuqYDT-8P~yOS%R%Q}t%M>+vgcUmG1a$2PA-{LsI925oa>K7#MnB!%X6>xSlGlgsMXuXNBblwo1gXbV8n=@6} zf%aT9dfmT16^3Oy^y|JYpn3^1i>n-G3pl^c3gw09!cm78az;79RKc=)k86T)4YKR_ zVbvtCu=tsPg!rFGQT3A3RCEPH!X!7hVA8fVvE3D{U5P6Xi-plc!aJe}tpC85mwTk* z9581Y^!cfLAy(C`bN}JFFccj7P>;CM?(4ir!@}p}@IFz!CUtlI#$~4VMB({1pL-W1 zDf3#M)iJ4dT0UY?|{qn>pWA z+W(7@s77dIk?bMpY2v(9JqjtnyXn~Y0^AdDvRi!)F4id0$Agh3dy**3u3%V}m9~BE zzm0Ba4Y|VVKYwJT{r)y>-mqo*U#DSXa&mI9`G4M#Hm0VNlNnP|iQ;po5*~4#FUlF3 zn<7h}XNl81O%uEkzXBwKiGqx+WYwi5$RaXrp)4fk;hb??0Zq zk1RGvaWCihn|rME_W1`oGRf~*cT75azjm*B3>l-3_98k?6EQ2rKjoEw+tz(L8(qpF zjqBLPTu+bEVUV<@#?6-NZuF-SSnwwtUkT)`E)X8DYPki-3r}xfjihkP_4|&v?0eCP zR(utqC@j$YQ@0gmQI6%^0G5+8_L+3L#0n8A}?_k6?4xN1Y_#P_xu_lvjfNH8U&K6Rke8`g# zrF*77Af_ad4L1H=MfdpuB2A8({f8bAJ|C9lI#yX$qEedhq!tDY+Xo2_w?*Cp=0QDd zP|s7uKdNp)>jC;~> zWSyrw1l5Bn(NW9jt%aF`13m;NQAwgh(9F8~{P1!KvBtq@fkC3bsAylLtEj=LL@(qd zpdutlP?d%5Hr(g^ZJbI7o@=IU>Z%p3!_w@2(yT(%gRaYWt@wmotgR%>hbHAvXif)-AOo 
z;U7WmKMo8>`2P;W;a%pT7zJSmyq95d8NP0tV#R@*f6-tErwGs}w-GENwl-T#uGb-E zK6v0sh2R7&EjZLfq;QC$Dl+U{AbsWGi{lz!pPTUD0=YOC{ka0>>%f}Nq_@*DV%7W% zE8J;Cr=lj`j4!B|3(QcI-1$x!eL21J*BGom2W*w~X90y8d%jkQ=~$T8;0Z$F={zl$}DMfm9wXTRI5EbQ9; zLsjul?0)s+lgJa|@joEcN_TGvuf#ndTCCDST`ujIaU$Q3roh5~(W2Y;u z-UB*=xobH?PeOeRcdkBjA5I=lHsMWQoj@_yp{ABMwWZdb3Atu9 zXA|J|Z<5Q%&(s7`51Y-^?4FjBKj}NJg#plsb^RGKLLQQSou^QU>e2kCtXz?o6r33q zpggOJqCzy50G+61@2JJKzfb;I?ZTXnOc=qHEK(f>48Kq58 zFv8x*2E&n_V-dvF4Y?-}SCUou>nK&z0Av(O2azB&PkC9u;|HCE^EoC?c>=_;P$+J9 z#`%SXYa-&9nm^w7xQtzk4xfHh{46bamT z#G@9aC}q!kh>FjFvW=w^uz^UaSXyyj6_64U-@Xxb@(JsB_mMn_`*8M0KX5E3r9yMft~?sRA}Se7*t*+<}nm=QB3E(oLOLAgmjWKKvbUWs1xO#7r6=Npx z-Fve?7!4Br{-9{J0)I%_YQ)qivGEy6x6LXZmS6a&9+^9_Zi2(b;+K;-sRv7rI*h1$ zMn11#M=1KE6{k{eStwlelA(8gfGK=k7aBp(PsR+owRP!(I>F3H_8n*|h7gJ<@fjbx zTlp#QK#N3%ipTvT-#JNsGrr7B<@7o0OSJ(MKr~uGQUr6`0FsW8`L5f<73bPD{n?4q zDY0&~Ra#iD_&D|?v_VtJZ5_x{Z`>ZEY{IZ!*JX~y&x|=(+NT=fNZI82>9-T|sgkOS zn6W=(Y7IITiI0~=5!A`u@0$qPR@)obbP1st3Q{`!HU37nf_ zBLBiNAu=F60!&AE=2LkXaA*TD{rUX|A2MK?xw7$VW3}rw;7x6ZWEa#p^zhbSqe4Kh zo&W(!>91;n*16mTx+ufqQraVP1+tNk+?qBu1(~DyGdeEE#EsSuu_fU!*oqSR$`xlMV2_?muA)0zmz?Wr$uL|YS;zEp=bh1(7^PKfB%D~N*9RHuD;|BgL#P6wP zntpmIKv~g{LF9#I<%N{}SNeqn5L>z~Y9SIv@3wB9^g*yjLK66V`038pIBZ6zQAMGz z$BJaNKWZk=ADs&G9>a3mE{;&;x5M%}9~#O8Ye`lCiscBcApSM(ZC0#Uh9hxz16glP zg!(6u#C%9kT>i8FkS}CfqUE|TIlpNBVVnL5aOo>G9UvTy1;77~rzk`>zdr{))$r*D z>AoMEEB*014ql&Sow!Qlb-;QPMIwwuetCG1bV940t~W0<_E$gNNv~u{+)wX5DJh0H zk=*o7?3C%W-$nw~fgMhB8U5MU7w$PK_Nq2r3V~^=++sbLC%G)I^`=J0rEdfKDCNEZ zfCA(|RxwKoBejoza9{LWW%rl&jRFH1sr!}Y*X0`+HT(FI)TiZrybQ%&(D2u9P z|2RaIth9|FA8;>zFEf+ zPtn@BQmPp&YlY>Z@a^2(B-fR#Z93meYlATh9P8r`B9|;EzjL%3v-`- z*2bXL3$oD3h6lz+SWH@F&G}7tWK^Qlje!Q+W$S!QlJ@I`5Ve7I(ynO9PUA9i41&v2loS;C75!N+v`jYU6~fMaKa?GcImw0F zIJ)&doJf2+ZwON}EF?Tvc^?#M`4POl_4|v8C87w{R}E;B{0hgOj?c%uEV8I@DiYJm@!nGV*~Iks-gqc!!g}6n>N&A0NFo%5}Ds@4h9jl?Z$dGrn6mMtz6z%J(q;VJBoj-yNjex z{%u0o+Sp{JF2`JJWQ;kv>drzUlSRw^^TbW5xNTy`9W#K$%b=53G6uS$?NId%cq*1^ 
z;w5u`C_4`Mf@~A}$6>wgCzk2(E`D}Xc0}_&%`BXC;MzV9Y=Oe40oOeBy!2hfqtm#V zbj@FSDeT`*p{SYP%=?FY*NJ#GZzJJ-#b3XEVo)%C|4I8N#d>|$1We;4eF#X`Z!(s+=-jS z!0aF|f}I9>FY(;wttBkKqgq~Mxi9i~F5)$b7rHdnlq=|8skenPF_Jh~Ro>OKt-Kl* zV@~cXF6mcX0CH^sx6VCq)%peLJLmpxU(v&Ps9$$qyAtxn$SpUUeaU^(wteJJxd%v3 z$i}1>Bt7+E;UWL5v7?56MSZ|_Ww$IFhaQguEYPV6inS$?RhpL{nV%@0a8D2G#pTxg zlPjjXKMNT@;X8ik+k!&CEY>Nje_}azFeW1|#Rp5P0ODgivzKjkXnGj_u-QS!lam_@ z60;|PZ|oYySG?)6)Vgbib+%7@=0+<~%+`_7YH3U()LROFI#86L%z6#P`wtOR>PxNc zGCgq<4WZ-~#<8k3`S(YlSyV#}muFFW?j6>;-7WSbT2b6sGMq>LIz+Soq^n<3(iP(J zsYr^40Th>ec%&wc()$!BHVia`xDp6Y1aar@@?^*aM#jnFqBUS0%G-G1F~-IM zNA>Z~>hYGA%~5j0W&aZ9i)dP6OKT8tqI{{>s_R2tjo*AzFbugoWf^W}jfLV@>I z_RaaR@)ZZ;YSy&@e)`$wf+x9~95_y1ei3pX3A`B4H6emPhW%KiVIs`7rEB6cu?Sr@ z!diiMIG(Vx;TG}{iXPZbpCOUXg`o2v!UpF{4g?SDz`Au8DmNEFln7)0m&-2LYQzjD z4Mn9cxV|;lx?Sfo|5u%HkxV>@Z#&@TJ#CB2R!>&Xld0n1Jx_bx1aoAvHD>f6QXUfc z_n*+PFa>ghNOqHXHq+AOt$$Lv8_!(8&qbfee6gQpv$B#SNZ+;#GMMNLp(P_o#Bk*W8hscH>Q+7VSWariv!k$C)vSvIDFCL53 zH7domv{%g^K}c$73@sdyf3_@XP}V9Q^4HzlTI8L;5yB zRZ$4rm%%E)_KvhxF^@bugKfQULJq|wp^g^~WFpl#SkeuUO_dJ{!acF!+ zXZmJgGpf1%F7Ssd|b~TPua#h&2aYX)a|ys^JZTwpTZ2 zyM9uY-rei~f1p|`Etb{Vme3ATiGR#;^in=;{+3z;6$|HTIqBAcM8qswkoTZA^9-6Z z^mM6)PD&8JWA$%U-h2F~QF){{zw7T|k{-@{+ZK4z^a!7fQgRAK? zB{A1j4GlZQ=+9^_XhifIv}|h%9xM!&QRU;&+oV$*b!@GQl(eMa0Y(u{xtrtui32`zZN_Ut;oOSWc}K5u>;gwNKengCBX;<=~6_DRLUo+aYvj26c_L_o)niO4;Lb0(X^pr8z8 zko~Cc1IS36F!YTf_w9)e&Hp@LdUvvCFlM3lxHCudGzD#%i#YlXuarK6aMOf&aCIX% zA?BQJ^&!eM(731u<%$Psiw+Jn5;r-6Ht7?Z8_K7Mh6>%&%La5$Vo~V(FbvOfp=I-i z{leso8I5!}SC;zMDTit_HZ*))0GaP~OTZbWEK?D&neX7~yenAsv zE8TTs(AS0HqvkiFGS0-Ii(kZ?iqGAwsOE6LZGUqKMn2sHxfHM0t8PLsoFQGkK*uRX z*bb)kg#bFuOEeyuJ(Q%+>+P>BCMhTrug zbP$Unspl*rYINEd3g4xSYzuro-G*3B&sZjlm+r3F1{)(fL2VYc`N(=Y!^9@3t?@tV z`Q|S6!*VJFFW)H+Edea?c^L$zf~FQsBAX&WC}$855qNMk2~lf6T0T@`4lQTr? zllq-fq646?An+p3_03bN0-*l<2WYn-i^$VwPzOQvJXmon?6CDHE1A=pcA~FB9&&Q? 
zzFG%BYa85Ac9FtWwp6OgQj-W^m(O51G-zf$o*oOu3wI+y!N?)x6qA~Bj}9|~bZk~^ zjr@(%BcWH*4Qs_5wjwCeUVFFmgB`?(>j9%$Ld-h;EpOox-NED~`UD0Bc+CpI6vnwOgGq;Ce^kFl#yVw*QFssb1yuJfk4N;wFopXgADUg7U5!X zkUFoXktc2(xo&>KVU<*>;KfOPdh+!Z`y<`D?OIBHwP}6nW!Apvxyg#p09Tn96zF#n zrkH)+l@;vAGFs<9SF!p5nJ;9cL2*q5|HwWARfwT8ab|XslKGB&xIYB_t1_dja9_xS z+LIYfwTkylPLunE^7KC$2dVUpo7G1z-MYugjJ(gfhMz4teofUZSknFnA7E=T;&*}L zLd(b(9(3OMhOI!J5x+;p8FF+Ev^%RN?S zlF>O2s3t3hmB&vXG`DWV(lSFwAWMRXL>7~d!0P)VMj~yMEVW4N7FB!L;Y>JbRBnq} zJx{41ME!r0zj*(xL7auT&(I`h{Mp;m%q&AocM)Ji_g!phg%NFoGyhfVww_RO6SZo# z+=}Sx%{vZ@=H+G&K-DsNLZgfrvbI%GIwO^VI7;Ff1D&KiM5pox%`wpnU*eK^ML2cy zJz#-)yJo$n7nq!BvTR8&kYf1(@NDAKh*YtUL-?i4x>EH*@HNsmn`!ptf?7+KDEqeU zoyxU29Eesl!st!X+v+RJ-MIb6!G=F;Ff%~e@b;P;kpqkYs>yWxIjCuw{SI-dpe9zo zbdxh?BqtwF_2N#PT&Lm;nGh7^B7N=D9DN(d`tNwKK}-?1+?}iIr@JHY+>~vilA!?C z949&Hg#aOZDBmC#xpYUCwGOC#E~E^+biOU5DR{{ugFPfsNhhzMhbLsV%_r1)X1FhH z^eM{*N2X~Y{u@`Jx{->RqGkUqG^A>He!h!!`F z7RaZ@C{CtfBI2q43X@%1@KZ@L0hMD4hQJjYMgW#iqifd;4O3c`kInLy+(B@p|7GO} z`bZi;%dta4sI6_r2&`X#3gDs>s7xv8(&qiXOXNiJMng7~j(N@4(v$_bb0T(Cl3?Vt|C2~;{BHS0SL4mo!9fM) zL5C1KxuWpLfbi(^2+JJuUo|lAySEOj`A~uu&?5i$29AGiI`{7%h}PAt3uHOP)*FV6 zKkPbS?tL8Fcha4vL`;gmK#)ya%$x$ej(HtK1wmhkwTa~Xbm8)v(KFEGq;OCxX3^0B z6g1}s>-%Aujyzj>SFetne13V3EacbIS}L9cC`sX>^37ok zIH4%NNP9EmG#;vci0>eqyg5>}+ClFE>IgXj`VL+9=kdD{KawAa#Z$RhsRgGS`PCR! 
zTM@gIRfNqaKss6UD64z6D}o{z#U;tmRGe&d6g54YCGgoTdSXaHytrI$)kc@0rfqlX zm*NQ}c0_Ugy>h3@QX18r3 z7`zk^%A!17!7~c_V)(=->{zVPIiTfiqgMV}0tS4pTm@VBHC`CBJ=sxxf!TFig5IAN zzZIx;g+zI$90G`hqiU4Gz*cb=pws;se_HbJuu%M|2)?GLpTxfol*hd@iu^9eVoZOh zO*=v6yNwVVlre+Q^58kr(*P?6qSYS?wy6f)cN*CzmH4J6NGdn{mOMZ;-b@^yf*l%!e1r`EcD-Ur>qOaiP}j{M+(hVY z9~!VzIET})!4tZR3Q%SVThT6pW9=QE=JpThq`ARo5D0rQf*fE4I$a|Gq*J1FAcW$Q-qaPNsjE>HG{#Y@HO6S!>M@Kh&IE zr=e%g#s~)~Cy>?20=>qh4F&L+Mtui=bO@amJdWB8Ff9;;JLq4;$ryEZ8NGB?19wc4 z0hg@pKmOR;!4+ zQDvjNvK3(Wzv#^YTdDgdG3}AOjcLkga55^!abtgkF-;0ZLMivgSGfZ`cNw`E8})G&7HQ>2fUyOktV8k|p= ziG)H2{uG2=i;2e5TS$8r24^nQNh=6BoM-k)|5kr+9(a%<8^K^2xcu29saZ*b{CI!P zbhD*jz4uF(!6_y-U0QpF#gjb98Fp9BN5GunSjMRAU0H~qJ1hc|yvTqaBP3EmXFwL( zk74fY@VlLWBvVVy^#xA}f@V|W4h+0d)gq}Fs1hGQAd-7R0FqBk-13KAvqfhi!Cw0) z`3+~Eu{3M4pu|lrblF>cYR2k|H4zhr_Ki&O*=y?+TaURD!)ii>Wa)`dlBNwaZby^o z%`ZMELHCCWtYO4$UXjz3{1e7zv#Hpx9I zI6<%7ETwPmNGn!fuV3+LtlrySwD>`Nab!EI2`-|Mzw5WJW(HrY*1P4o*4jz9RyHqZ z6Bv-*A$T$CJZm#?K#7h4vfdgf#zH0MQeoQsR4V#)wp^zFUMwE!`Z4F>&{;ue37Xw} z#Y~d0f&-h@^RGLb#M&X2k0vq{q(5CG3{!87p`5W5#_BeiC*iZzDS=^8#xe4tlZ^X- zO5h3fE*)=w`#i(mIhQyD_69z#cnPY2XVfOxNO;V4#o|)en2ZpBk)D=phNo+nWX#Yd zctx5JK{qW5BGsMJJ;RRKm4u~l7!gdB^uD4l!Ie)Rcp2mWye;3rnHlKP2DL%|6|_OO zxy?B1i4jTjOC*4gMKsmAFrsYCc&Fd$jmO*1iVAh30=jTIALjHUwC@@j2>7idFKJw5p|t)d5tpIz=2|1DA)C_C(a?xU6CN1Ae{`@x%& zNm?lS)%r?}=)|d)sY-)7FYVvqfYavGAJDkxhU%ef^%{~3m-p6yW;r+fEzXmk2gV`A zvQHxDZbH3;5jK_+PpU>25y}&2A|~>hj91=) zLGey9TzF|fTv_o4;8+-dAN~)0a9F~;Hsx59J&a$6-G;Rs)!da%B}Z>AhkGNwv>b`N z9-p|5!g{vu7DKX5W<`r!y(#`e)SE+Evf}nzF$!QA0RjHH3hjp6G`s)!dap#-5%Qx1wr>13dkW4gIPbXUCA{l7*Ti@?^ z_TSpm|JFy|d3d3{5jESS0?>Qh?)BNXcRfZIFqWCs-xlX+}x>E ziZ7Q34AlHoxZi-b^lcxXa^A?qNfOrgK@0{7amu9hre`Ygvfgh=h+M6l>am*hrLl3_0E-)6+oU4OBoA6k4Cf!^2zsMKCAC zN+PATf-XkR=RsQG0GPL|BB_-s^V%LtdKf%PS8~14W7=mXPP>o%+#JfOL-E?8Pilc{qJe_MEXGlT zh2+Db2*QGkuU9&Q$ik_Cir{2Q647Bmyd8)L%Y^yuJ6z4mL71-kGMl6eQElL#P(Di+ z72Y3V^y1k=GCyFX=^6Y$uFHj>^y1_nDA$Cc$M0c2Hq+%A@u{O)?i%+`(4-lmTkY!d zF{({OVVG<6AFi7PLr~a}=RqXzT>nhN(%BOd^`Y(`&JmK7 
zTi@O&8ebfp%g_4VJ1NZhYBv50n4`|jFCF>)@^fxalU8A>z;(V0%9_lW4-HiM#KAW6F_?U zM=>s6RSMaEV>$Z@eb_9Z--G>yh>BnuGj{Gzr_jX$l9RD1@PEs+vt{t5F?~ltdTk8X z(lhYCpZ?m^Yv}dcbeCcPj-S6wDCytwp8Eq%TLx*y*J(WFbzzPad(aJbr(F}azOBzk zTH4ZdoXeHlyPJnO9{APJxhC0yA~w-w&NAqF&Lo09CK&l4S;g5)F_dSbpd&}So0&G! zwJkgQ{QstSrh{V^J*CF1CM`;_IkKjFVZeWT+il5BTP!}()xnein}TPO(^4i5f7pw7 z6u7zz%5aGIFI^EUrZhXj%*6tRI)j%#R7)VtmXXSMg^bN+KTr!b4^;!4`1of|>bAsn zjKR4`-9=H5w*T)0uKY;M=;4a;2r#3*#-%+k+d_331JW zx(PLhmC=U*qX<%#&4*gbSZ_+yo`?rM8Y^Z7q{vH6AgW3ougZ|g(FOPXdN&!eP+N6nTuRCFQbdr9k_sZyD6Os2uFi=jd zW~(&|+(m|>E@v(MG@sK)!|#3gc4=D&TLkY;F2&f7k@v%vw#Gxv+;Y?_`dB)&w*k+J zqLu`vD0Ih8rF#iuXYQ0F*Y-5OMh#Iz~ z?^U&H^`PrX?FsLFo^M>JrML|K7gFLSU2I}LSe>6ipg+LG{k;>@L--+gVE5I=nY%+6 z?Dj1VxLg56Z$XrejrCaRawzo<+})vFcz60IhQdaE;b3I#;sZ#|_a0JHuS82sDiyF8 zd6G-(_!5*A{Y2(>cls4Cgk6x5u%lRGtOz`)H}`fM9;9=OQB90#GcaX5H+mb~VjQw+ zyb4)Zu2N}8p6O)W7#ikwNN8Vt4GbQq9QVn9l{plAlJGp9^UCxO{j>z5jfF-}#T7!D zzmJhv?r1n~VgeD|{|#OHhDxyxE~vu$L-5p>Xk-`4{$>?GNrM)H%=hLqOR68vc*A{|31h4ub4rkZ!f9+`HIgwfE<}T& zOpHDjS3gCpGRRE`;KG5UST?wnKKlc*RjxnCW`gT_`llFjnhM#_iJ)vr{Oxf zDl}#Hw-+0f&fOtZ&?YLy7ZumUJIE$8#O94|pEjJ+EYtgZh+fc_e8|IKwA;uDP*6zg zX$tMJ=#mAF?)YFMo@pBl8tY*1;24zqpY|;edz9E!5c#iMiLz{nK~#E2n2p1u;)W~P zOL#Jv(v%uah!6yMu+b7TufRJ@Fa-k_WKdAGS(m;3U~{nRe{31yIA2cuVG=eL^%_`; zR(HYN?j@+VgA~>Rr&3}ixfpWn+bkGZX*m6O^Ms|z1%yMbbB|iP+s#YVP>>%O^ z2frR8I^jpc>Wj3L7C0|)Q&jc=PIQI>JJutZ$UpG}wT3dbE-3yePr?)i0G!Ug+6kkURtT~xS)Mv?zAJi_duT~%;K z;=37CN4gvGKQwj_-qQCu6qeWbi*pqML==pho5CU@DljZRlR^?`kw0k?Vph~J8*x(8blcHX*16TM%C#~f>Y`3K!2Ux1s|$Us&}568 zQXB$=Bog=>n5xGgAuSsNXoX&3jp#P^C-I<%hY*T|0yN)P za$-)#ZL9LBa@=}!K0>Car?KgI=0LI$Xv!wg{s`yjYh^*KOVKz3*M@R)-XYKeO_WvG zI1PkG2^zUKYRu+k09_Kki{WoJ6kBo9S@~ULZ z2toJA0`lO-Jm4z8WKl8YxQj2_@^{K7KVD|IGj3{R(=1{L4020q;AwwALr@6rh}Ud# zuFCp}MrN4gZsqrWG|DL=hWI9JhFEN7`C$}YMWJ;*JHMZp8$~HsJR7;fblOOu-nK;$ zh@O+>?2@rDXpybH;0878d-Cv|ubL@#OWGpoZ&N6(si1+zHhFT+YY7eX)p(Ewu^VJ1 zG(782v7gFR=~vgQL3ovd$SundOjzOK&x0q`ZBZgd(QgT7vR^&JI=}v(83zgImrfUN6|UzkQgr z&(+<~@bq3da)cL 
z2nZtiU(L~rXc$-_^tcjxoNhV!(mj+e#xk;#1J=8ot0zBKZm( zmos5OJkx(g-A~|wLezxW7n@T$wGn#EMbL#`H9=up9g1Nbi9Yg(Tbp)aDtfFKX}`MA zx#Ck>V@)K~HXxB}arH5;;353}{tk*=v7w(eKk1VP4k^g`AKsw39w268mjWX{_w#=?NcE6TT zesdX-it~GUJMA-Sgz|f4BWB_OA*kWN|1i+ZG84#IYr2PvhN^h;^bbA>DrPlfGkEp@XF z4K07W9nGHu$gIxp@(nkn+M72i-FL}aa;YCW@PoL>eFq=?ZJ?gP@(XS2{$yjW$_cj* zLp;qz(DZCPy!;B5>1Mi<6LhCEU2Q2wZ1TlOl-Y`=^&-pCvyEn>&en8w-E9()C>{(w z3yfo)?f}SlhphZ_Xh(uW*Av%C#NPb~)}((Im{7`2U;tuQ1tk7DBO(MldC^aw9>IXD zCdsJfe*HtDPHG(~3e*%iscc(afq!Zgmv&F-ylBour(1oVRgzRy;0P!_SVQF_%MM7z z+dNr|k$SEHsoN`JJJD;c+QUc-Hi#3WPr?z*BOdIQx5Y~637ih9zVW$$hLj#y_Rt+J z@qtC~aeO6@RLcGbQUYs!#uJbqB+P$au_6hc)UK(V&D9x<-ti@ceY)a%)5I*zxrn`6 zZWYZ5bs9X?=pax{(eLT6-KwITyt|+d+JeA8@1Fw(0#Xme1jGOZ!H<*V9Dt=pL~t%J zwJo%&VV}~&VI@30;+sSNNLb`arR)Q~hDFOYPlxBzXCO$TT`Q87n9FQ=O;vxG-=xuP3@VK$XANUe1@QmvVhi0U*Ycn{x`~*&g zf6g`fY!uU?vb<>KT+3n1NI))8Fux+-nGS3WSpw1k=_9=kg7izsZ1&}ub$NbBHd}xVMk!u) zFnHYhVx_3tqv%E;pHTB~#oG_!7YsCWq%>8er-!VjN&6l)%u`pK=l0)@+cRw>_fWRU*ybi(`|rI0K^{3D3w{i$!hfu?sRVQ4-?W$_ow<3;DyNW z2G@8HoF;d~VqxLWUp*Fk1zjO3h$Pw`SZc_!&}r!XP@z{-IUcXzSST-BdX@^YQGk=O zGx|t>9noBAm&je}ql4`x_=Q?-2z95>L*~|E#8Hrd_uEh=8)5Qc0tU{M{2-9XIx9S( z9%sXHB9l>i&n?d|{BYIOQuaQ%fGOG^Mal=nZcJ?+KR8a;xqbZ&!*)W}%@9zaAe^BV z_IM*X9NG;nbRI*XKLlUJAPn$3D+^AzvF+4OH#5P#?_ZbET%?(a} z8cWyZOcy$={6z_08R*4Qe(RHZB$vH+IK9Z}uz&vGXgo^c)EiWjJ60jUZMV{K`?`0N z$ob;ftor&u71z79%uHop`)88ik*EY1_UQ&U@Is!b zCl+Y8mCWk7=GwvP4AUpUC&}4CCpEs>=lK4e+fIw&YEXN<6}|+v{yzE`3DMxYedH3X@K4j%^=6C|g6z3vf;9 zC)mMg1?(qxyz4aVp?E03JDh%hqG0)_P?;avpgm*B@NBK33~Bqp(vTI&0M~~5@wq}p zMQC||d&@gr8tLqm6yrop$bP4~-4&q<&_D@wg-Q8m+?= zUVbe}WN(doz`h!wDoE&z@69WnTk~`Gr|t>sy_kR1JACh14Q7+=#Q8XX4SUJt8DB&1 zHNqkY@afTXVU;av{x9!75;0V2y5g#QV7rzmfxmS*;d7l*8$51QeihdpN>AI>l}{cz zV}Gj7*h?f#(|m+Nkg%8_G+EKGnLNZ2qKVwp?1e_=G)AnwYuIi;`sOpu?8@3K4ziNF!DXP)p_{>eY$KN zCb)8My_b4dHUUNUYYlhw2AxKyb6d%}mQR{Z!xxo@yU4Ty}X zcCD5T8r|YJh`(m?pXcx=kJH(5qnCBXte0qUWI0!}{cca%;LvyICW=lPh=Oj?MdQI#uZYc(ejQC6k4Wrm2=&GfGd?6YLesrc;T2v 
z1c*Kvgd&YV?|O2R;z2Uer;0n#geTKLiIHS6`kqVmP`#sn4so5Fmpl|b^D~#!bO5GJEzW3&f94gHz`5ORvqSKX(ZyWQ+$%N}J z6QM`#oCrPE;OTL_1vUqZ$9n~>yG&Oy-)&H}P?wd+(^Y0?n=aN*Yk@&>ck`boPu9N8 z|AU>simjvx!vsw;Geeom%*@Qp%sysjW~MSTGc(&|W|!?UGafTDv*+*bxtLw;N~_jN zJ1dox&uuC)Ga};+WyY7d$4Z0ydrYCm`vLV%R%TjXOWBDLg+NJ2(`d}Z>CW03MZ{Ep zbY!KLR2Ig0G`95l{aU)jBOT0nG;V~7u0MKI2-|h3>%lcGx!j?|k}IlvZs^po zH(~nl*R5hsB!@G^Z~9twNR=xvUzUPEYI&d-S@8a2f!91a#I1TYGk*5XlA5aXpMe&> zd%C%YuXB0E-6DOD(zC()wTE*<(qIyQXI+;Y+$0XjsY`;;ySo#l!}(@r1*0!8ndj~7 z-U%5~QE-xlO_P4mVv!oOG-w$8r=H@- zIOS}wBd9{BP2p*)=iU9dosO8WEN$CN_9spyN>+F=hD_%nLPCexCPdXreKsxNsDj^p zNUM*!YyaX4CZxVAjYRV+E9NbK+oABPigui8{aa|1&r>i6l~hW&^M~FBS(7R5`E*aD zmJ9rrxVYM;Gigwhvw!!o!JGM%aD9aq;&qRkf9gIT9u7~a@~Kx; zy+QonI>K0KB7dQ;m{TeYx4t(-!EJv^G;D{CpFWv?DsyAb*(2-pCC4Ivh)l$P{xOYi zl4Tfnxr=%z8M5!Z!>g%{?bw|+>yJYS>~Q-K<2mOD8(n>})*R15Wwl5EZbvO`{W8(^ zoSe*fn1hBGiyLI@zgf^ycO!R%2PgJt zcH_Zq)~H2!Uz>(Di}PzhMPl_IhFR^v)ui4uMMH&QdRVWn?A-BxeUK{$ftIeSMWjR% zhxl0AXuEk+o)DKFA2rk$-%nN~bqRU!;k)Vv47EdRu!8L6@IUW9FVnviUd*O&uk7;- zxlBwp-&?Y^5jTHJ|Hu)`QSyeUV(g79@_QbDIZQ6}*JRO6Iyqyv{e9Tx8X+hy8?lgT z?56$N)t~rZ&oFs^AOEQQoy{qqV_haN)zs1 z0K3mTNKU1hw7D+zeRudUr5lJDTLc{vSVW5w*{!Un2XAW}I2?3J`X$?ocus-nwAs#A z$E)Ji6La^DdnSXRFIrTXGGSn?KPg6?AR^R>5m;FK3ciMaNR0AeGx*in@}RZDkHh8k zgp~ZMi+6rbQx*19p;Z6RM_$L)dMAtg@gaLQL|mk(lV8?T6YSGD|G>F0;V{ z{gMzs?xfI>6F_#gY5}z?Q@O|_--)q~gb63Z7Rxszq}aDKe`qcmL=BA!>iMGz@k zRgDWX)Fg2(l6|o>oZ#4P_8ZUr{l>~yQSueU6J?OYZYircEHCo>bB^lZEzd>Hd? 
zdz3_8`iw05LQh8F|5gZO24{s(uXCA4!q7^MUX$)_(kEUd3uW5B@y?9xG=kH zRG;2%=o-8Z2p8nzi+;ESdd#ePy4F1Yj)l9u#b=l0jP|3zTn*|7~3jUof~YHLb0x#YzNKKNCNAty+& zx5W7-A5|iQu1&$~Ij`0gFjB0)xEaxBlTs*;Kl+PBKfMoI2W4&cl$o5c7_F5-Z@Y+p z{OI$gSJ(>ax3dM^<9Eq4y>N)Kib_P%h@khw26=9R6+@mLsJY^S?bg|`uqCOo-|-Be z6ig{7&FmnYpX$*B3&?#%r~?u%Puo)>Ob=JSQ|vf2{%LdTYU=gvQL}Cvf`{w0`W)$x zWrf${g$7x+_h6pzf$@x{mzq+zn5Zg$cSw=x+~`QP+T!!wmNQrRaBBulcKYGfD-ez`Fsaew!wR6zDMWWXS$4z^H z8Xl=f67uBBbF@7#c^?#BpR1UQ;5~04YK=+EPwV%!jcVpztFddI{C*?b$?pcsUXAKj zqXwFvj#I`f1AHMo6Esl}iF%ZO$nNdd?x!FW4eE!j;#k0|?mv*B)~>H9RX_Egb_nvB zwum~RGyGrqPoDeu8lIQ8XshjeXXMqO6{bn z%HA(sqHKg+*bPCrD4&1uPd`W7H#1_F?(;mr482|v6zC31)s!;e&|A@eJMDl_Ge>wf z-k)YMn=Xu%%R~vf)-c|^BRRBHjyj3PjK;3YVwf3~+WLYpqwRvFe6J762H)4)weBAm zK!Z1968|q)18Msa!#)hLO^?y8cv2DAapZnA!$@WldC65V9njgZrI#5x&p}s%CPt?H zKmFZ)jNhkj-!~t=tqb0NSM@>ghQ3pD?^N|ADsB@`rEYy%d#yyPc=cO^W|E+wKl%>? ziZV7?22KmnrFzCcY?8aZFFU^Ak4H^N_L z;zHuWUV(_l;HXsbLf3}j@-4@){js*erw{wQ?}}Sq5t=^~)|PmGx2SpZTp*M{6Xq9* zJk_84r)s|M2o&@^+GQ?4_5z2$u&J-^N!ziYu2>H!v2b;JOHJ4VAlXhGUHmQtPf>Q>>gu1^Yyb~-#^1K1YYs$Mf`;w<~ zOYpJ0=L@V}plx`6$ekmmggl6Xzi^QmJvo%%kaKR&g4Fp1%^zmcv%}pZ*@}x?P}*UY zXQubQ(>?-7LD485?o-mfqjpa5McVIQiUY}QAp+h>-C>t~5AEw`v6G3Dm@g@>koioq z*Y&-~+f?%Ji2j?}099B`$r;YD>JvM|JW2+n#^u7~D2G9RjRVgEjKuz^E8|$D=aFHH zS3PO5C3;)5jYk4BpZCLzYNr>Z5sB8DMiB@%EVm{zQ;fYA;AMO0Szx0pIE=vrQvjOe zjzqG(5+U(Iw4c2a!xBZ0k(U7VvAZg}zWTox@2dwbnypA<*_7l&$>)NzT5A!POnph% zr>~b6^F{D~y=V^#Fw)SfPt45tKN)zi#H(Z~#IQC9ND=4DY?Fne+LXB4b z>(Jg8PskvG>o3|ZEXHG~sSzq>bxzUuwyIQjB7jDRx>1KBGXrszrG1)ALXGj!N1Cl3 z5evetE|%^G;_95x3RxVg4;5Sj%bMVwH2Bp@;fn`U+K=nSPk? 
z?gqz~CG0I*r`jMj#y9_HV|8Z(+GYRh5(ee7WT~J6ncKaMzF=Fbb$<%1D2!h&j4Yrw zW{WF-K2xd7|E0EcMAL7I1uts2!F~N48DL(s^L)t2j->@VAT8;nih7}jsG!bCvkpN4 z*74mR=ZlC*=d~73<5BPT_a!*Ye1}Ln;HM&BaK?tTFCh^)V@6?I&cBO6^wJ-D$SV+# zBtssR_p~J&XgK>^C;N#QOLSj;ic%hvsbZgMb@*8^s3MHP=$6b$A}%#(5dVB4^N zc`-smwCCiQ0MYQTZldg;&a)2jv#E2rxx;xt|5N8m3-cMIQRC~Hm@W%wa=|clVJ{LI zFpb5*XfY*w3^VFs%Ze1H5wuR6onzS-$S}1PKVg+bqCYo-qN_S+r+KsEr z>cl(^+$q;d)SxIsk%Z0~wU9iB4iX1{dp3erul)d}OHLf|X=oi${asd&!#6QAs<5cd zEtNi~4S^>X5*i3v41#}K<6m9o_+R`DjQOxbZY!0tXpzT$xboI<41CCpW2>o^(5`d+U>W_8?}7z+BO6V zvsy{BGP@msy|KO5z+^d#EA5+Zn+VCcM>z$>jishBkwPI1s6SN1HITzPXdU)BadA0U z*Vlr;l@-qQm5uj^udgrck`&Uo{S#WKmxE+Q`|0CyqFRW7V~5qJ_}t%rw!D<_u{YZ{ z05yNGUHSr9$QzjoIV`u!u_zT>SCj(v(0wl3^#|t8m3qpGrK(D`w-GstPtk=I|8}#< zBgd&gMZq4su`x=DZ`skkarrHM1lN}2tjzu&g7U7rY`Y(9t$TEsL#De~_!V0n z=oTi7b?19rNGdSa?D;+4;wjb>Q)TbnN%Si>bc_$L`(f zyq5r`m2>WAf?XIfS(Ue?P?z*a_Epic3xT&io;G#J99W=6mXx@da;R0OtNWoAFm=DG z+6M=hdtz5K0terxo)uw;px+Gi&^j>O3dSy@h}GOF{W4j$UFhc;iau#5#D=iiL$d&z8nTF(IyiI`v5a%iChsNE?9uR~w%^fW_?N=BJg%_yMm$|negxKs>} z0tPl5R)kJRLQ1Y=Ob;h1#Q>{aO0ssN?s)K1#wD3`M<^NQbNg{TC1{)iXM{uB)ATpozC!>8 zEX&Z1MYk7(X@R;})oOKCtBIj3!VD!`O*8aaVKZVx zV^W94o1vbSrugK?pTnBu@dx?5K@5yeEkX$dZMR^Wai=41jj&*aE(YU;voXoL6k0(p zoT`m~#Ew<3x7;M2h;!YhGgp|<7qCBVNIC|*qHm@Lgeg-O)8jWz1Q}K@Rd`||EU2ZK zO>rVx@+u6l#D>i>-IhjaR3TN4n5l?|0E#vtUet3!W)ghQ0vZ@{M0AnTt4UI$BQzOI z@FXgGdgy^Izw9RSqSl^lAyP`hqJNfX(1yr=^vwjxkPL$4T9cow1-qgb6pa+kzbxK+ zn|p^FH%8iREGT2h*AyaUy=K_uGLmF8?m-kr)T#{)NAE4e&=4tCqZzcqUdNFv_PhV} zM8JY~1Gja${BQbBnIKRh6Kjx~-LN)fAech@&&z&%HWH_FRobdy2ewJfs7 z*&;Osg8|vm@ZHwdR(&n5<8vNA_6AF4dTK(<2n)spCQOKIZMq$)MY&_ZC&tsN?n#GF zCA6s*pl+--CHNIG9(@crTK{^$;~da^eAT5`i;F^>D#LGKJ@Tp*{p=f7p`i?a!-ODK zoXR2@zbo)Z^)HnY4v)vq@R0W z-;=C4K@z4C<+!6EW6%t6K&Rc^YI6)e{www#RMf5f(9jeO#OGWI;hhjuXd$loQSchpTD94ubbPrey6KgF11UjX|I z1#f=O#Qr@f53ib+i`TTqss9LHJ_Ad=5@<2o&w#R+$=jU?coQt5>UYXni;Ih6L_)me zS*YnL!-QCEwwQr`-Dvx!2jG(T^&}Us36vSWu}6ixH-5mOFL=fEaLcux0dzEG=jLv& z^FJ?oB$tgRjbiuvgTRY@+^_R8I~)Nk-IpvxBJaGCv#Q6q)Eb$Lsmg%i{(eN@$3$_w 
zK%)k)4HSgu*XwZv(MLM;rK$;~10$slHO)ZSZ?Dl*X?mD{HK)lZqK{+EzPHwRW6?WQ z$$)@w&bsZ6+YLbL;O{5In8UZ5;6Po6wXfmN=dEX=kDiz@LZ1)a=gaNl>$eAshxWev zT%vB5z0hv7_bg2~$DPrjdU922O!367_pf)NzN0Lg#njd3Z%{s2HW|*rT#^6H#Q6XF znwC`mmztJ;|D$yv-bdy&|3z7%|FfoLx^|42IPO@0y4&<`o9D}3zpnD~>`VasWsI#b zOeB0{2!tanbGIei00ieSee@`{5Gh1RG@djQUMRG53@KdXc!kMWBs^5g7Qp<+$<^th zJMeR}Su+*+xhb-r>ZX6_BiY4yx^nT=|B=43ekU@2?&Nd(Re5=`S%tL%|9KR{juni^opyMy-8RhXc^O#f>mP*&?G0c!TmX(A zCTC9`r_hXSaO&%PzXpK1UuAI>cf6JWxl`$XNi%jMXjBK#);g`ob zvAo_0Cm`sxh5JY#^S251xVv#7&I9fwNYeYWWS>UNSL^{T9Ras_t>CtD(+!nsQ~6bT z&rP?LcWX;)+AXT`#(e?er?t`6VPfPt4Zz~$9b$)$!yzy zwN`a4jzae{xW3qCsd==1+SbqO*%OC~&BC6x4ZMpE&()yv5HM)FRXd*$qZ9Vq3+K zizY5rb=dXfE<1K7mXp9@NDh-`mBQ>Kirx;I{5fhfF?}gIFrba7)Ai>xPrc1=n zrAnz@k}2d#P9Le=QG8Y7Lu0fBDZ%%Jf+U=CA= zQWbg5ZA({!_S`AvAMf84kxZ+?|E8t}VC6DX5?6xUG)`5qW0lUB*WnRcJP*7AG&5Kd z0Yqx#`^5_UY?p;8^_w~*v7ots$P_TlQuig^yB5bhqT@UAR`W`!DSk0o1&)KsVVT%P zr+|^bbB*8?C!>=kf}W~m=9M}xM*SH)=%*Fo^8i)hPO;AZlUnV1X2*f0A?-_)3#mmw zBGpe(U?PWAm<*CTe8MJPju(@=#<9O$E8rL|0F`^iW+n5Zv_x4BPcnml4HQ8Iw!~G) zvtke8P+k`|b3K}qCq!w4BmwsT#rwvVr2!YjpmOl#LZjqd*>JOTwXFSp+AEuqQEps8ULQ6}z==S>tj;+br>&DBsLaOf4H!tr(8L5oNGs3fHXN?#_MC zdET!HKVFNqB}Lr$w!HHTwoAlN-JvttpZVs2aW{ z{1SDX{q|yeztZoXrShSfe7D%1!)5-Lv)7FYzExhLy&(AZg_g_?DWlLI!LT-{gTy7` zGTfoA;RSp%K4=$zMw-l4LjRrK(6HYe)V?=T2`u}GDJuThibDeoiQY7{Cq{$!eN(rN zeP!#3QX?Or!q_JKy$2xdc!`aHC!I#a`C%14jTf$?eb5X@`H%#Q4&n?`@+4)yGhyWl zWBGg(*MWckvnP<3Z9JbqCoL5c`4(L`XC4ncp7aoqd-KJAdQ{OLrr!VS!(E24Cl3~z z0!0m}5~vc022+VLh#QVq6F2t>MFF2M&GcOI>rR4N!$K+UaCX~|?!yHVVC3-wN9M`|}JrTsJY_xHT+%fK(EiuCi5E5GQ2Wb6j3 zF{@;32Oj_bh7-!e&+Z%%YHq3{i*qrO9%Z=wZ0gw9IfrF!&kyOQ2Kh!DQdea3cBb|6 zgcXQffho6G6^^vQZP$ByLSJU6V@E`juE{82#0=N9&wm$`mv%Ere zTq3{QU{7OrI*?d5JeWARM+YRIo1a{BYh994#_mtJ%2sZuJHovu^l2xLc(FH!^U|B@ zk)S=d8*^-nOv~kc*Uw30WwZP#DGb#Pq8YT4k|@GO-!*Qs&$(CzC$%Ba+7m2{J0Uns zPR9{{Igs3LI6EJ*L2J`u2}Z`Izp(;5x!)roeODT5aEHYymY2+jBt8Z9Ryk7+oXRf? 
z*c!tM7KramBxo2^q&v?EI?w4-Y#jB#!6s7qicO-I%6a&HbAA?IyC~w^PQmw0UfCd< zd^eDM{pG*tdJK6=l1R?26kZ;E^=_m}Q6No!!ESE6FlkDuq*v!Nar}|)-ZQ>{{F*{3 zfm4Zft0z2rp)_GdWAgD<&@f`Klv0C%dZpt!R&%Fs-lY6qh9dVF z@d@m&;fSBG@%bxpmW*+;j~9s+em$LkU!--O!PfU$rsjRNaLCKVVRzQ}v402<_`USN zhs{j6e-QtPm=yB<2`wuQj74nk)9E1PDOw5nLM&BIy`o_yxBGtxeoX($vi|eWKmY%T zVdtNJ{`u#hfByNu6Mou%{`u#hfByOBpMU=O=bwN6`RD%?sf&O9`RAX1{`u#BpMU=E zM1$?0|8Fwk1OfSfyYYbOfB2sp4>ZAEBS1i4{^$SwmfKsIHVHq}F~`1-pG>^Xz0EBw zINm1&#=+Uf!`1YNwK9?d8^tPARE5!*HG-mFK#LC}h_zC%CMYKdp|NC?rC@2o@cKjH z3WU+55E6%L4tlcX%{?fy{ifxA6rWw6d&ZD|sVQfea`FyUm3LP4{k;_Eb9u}0xe5sU z_E7zP;s4}(r(}2ncXr{(U$6aE1Pk5ht!Z{S!t?xXD&E}re2jP&Mn8ULK`e+H@=$N) zSX2ah9;*`(qdJJNu@$^xg2S{0G>7ldhf^%1dTv8*6kFQrK$_q< zeyFF~2pfQh0>MP9YptOM1#9$d+dS-r!~(nX)SZFY_QGZ&X~H>?KXuBCabem6B#*y( zsO3FusIS;u)_arIJNv?a+2}=nLzL7|9mexh06+E=Q7?VN_ylE1MZFaKnH1@5WEZ<* z$_)pa2`Kos%vcSxItPZ@iVR0k{o;Vjc?F4^!4uMdZQO=zm|45M6fN9% zPVv8(HMhxYRxYst>4LiP6G}s^8Wl0B`+{9E{Ng&-aj+dJ50RzX51R#*S%0V<&e z2Izj6AbWy5fxr0Pj)(DMbpaQ|#3GU8L~ z2q2)uz|aa+P8{c*xt&A#^cGgbhwgKMmZR!7%Bz4aEzhkWT<*`m@?s+HqAr4> zk8bV7wWQ#G^zbj`pxp$Hm>@q9V_t;ZeFc5y44Vv|yPqm+*P;>nKaLC|QMBLQttfu-t>F1oA+? 
z3~VT`^hXc;y*|j70?7hf!V?+<(OZKXa}akrFz15Hp{ytZWj`UX*!seM^NH_|{IQVY zhg0Z(Ohb{05zs>d%llgoCrp=(H=tM}U5{bD)@UMR$X^Hxq_V$X4K)PVTXsbT0>~61 z9vDJT*xzAt=%L_$XU7~iq%b2W6B`n8{+v08vEE5C%6sudOLQ#W&RDZ`)$5H+LZ$M8 zgNf)jK>Yy)QpQq<5#$bugc!2rU!j6ZM~=ZN1??&tfk)SW4sUpuy9n;qe!Z^U$lxk)PozUT99)fV ze0x@;)p8W9?3}0DGoq*T>}Tgss>X%DkQed*9_V)hQwT+nGA%}kB)-pi#?gDCu9nGv zd!3<-!{K$4U;UfOQ8|dl|DIM5#(9*K&l#>nH_$#fl0*$R2S0>&?N#lpP1Iv*@l_i% z^fsmrtnZhfgAN?NWG7+DXYsMpbVv-9l$26ssL|q&q#}UqQp?=EzmNYM+64m^kD?9E zuYi|4fy{Wjkv}j6_^L{fM1>{}vcQjj>aS-aA#bO7in^VCs%u_=5MVv6S5}1o^pa z`#h2vN4(|SNG-`Qe8ee6l?1haSsXMxbpW@!nO;01-1HC!0$2?A;ZN6+(M}nMI!IhQ zqaLhCc;Y#zRQ_zFxL%S)tp|Dl`Eyj0iwxx?=C75TE=&g>7z0>6hLG_I4MY%-;ox&} z5LhFGeg=>(iGY@ih_scbeM?PWf@~-$P~m!?PS*lYkFz-b0N-cyw!Zd%)U3)MUEBp^ zq9db5>grM7rkV9b6Wo*&hIfh&L|A`>5ufGL6tOsaS6=U1=MStrmox6BYN9RPm^IS^ zYTga5^e4ZnIeXal!M`TkFWvKdzecLQ1swr%vX|+?%3a6)O%y9W~(}?D`s<$=1K?o2D@hNJ$FiO z1N$GD;{StxI72=4H3hwtw(8 z+Lnu893Ok6ykAlE-JdtNU&cY8}KdoeHeRV_%#{u$`~A=vi*K?=Yf5G zj#M*#7y!FDVJPH#(pdgtVPa(Z5-n17M6V7k-d@S{p2-C}cxA1yu6&rm^!@EBL!IR* z%bW}ez+qy8p9>(;qkN5TGf~! 
z_{^r?uO?(3;}v4dnBzT7{cf4evm2&r8q6mLr#?}O4s>KXas-&j|@=Q-S7PS0aE4>_8zSh!+SMTqcNGXFE|OncR6;5p0Y#?YS|lr$A1>bzfL$Uu$^{_YyJUfM*hptp7RSc>>~zll}cu-`;>AhU#k&&+jyzx+S9Gj7;ZM4;ft1~fIOs}~vJ z+uk@Dg8GCmstx5+O5BwClq{Y&<@suiDmyBxN%4L)AHG_69nM8WjiHw;SY^4NjwU7L zVbb0hGv~ZP;Ze#PCt`3$)-(9!37Vh1T6CLvQ+NagRZx-xcI+|x)JCoB$T`d`E~1hX z=3xAPTSeTA66TMlFgWc+d~)Uz1kRS*u#b*r`&dQX?RfW@k6%z-XaquF1LQ#CY3pxclc{ zA@{x0-^--ZN`M;b;Pm6;{p9h(LkE6w;KlQQVs-t_6dYy58fVlV>?iP;_c*sZv2813 z&;8O_Z;W5ViqE~y^HUhjV|kRN4dwA+!i%5CEjKP#9i}04*JD<58@FAXJ}Q;9#3LX_ z`^AzpsXPy*IpNq)k^*D)-wT@HRNkDH2-U*S5z<7g_cm+`c>RWKW%sj=d=6%l_~l4{ zRk3iN7bwEccJs{CX-<#t|6$Um@5SC`rfh37N%#3jQql*XvL$of`yxvxXOu_HFR5@F zOdOV+WL)t5hw~JTf~DPBiJy1Ax+#E#8 z@aFTyD$XVGt}0P^5-#LfQo@q&7Q(%M_W~_cB}Uov!U^iSH)8(n(NlCttCB3SO+y79 zd){++m#jibTvc-_*^_i)Lrky`&r3GlL|Igs8+S7=@$-puu>fPP6pFtoC%pXm%vr<35F$q69R_O z@-jkTupf~#+y_jyur=PDe{jlHTg>CicwXCL{%zZ%xQJP%zV={aph>3-SV(qmJtw2g z5pMijWcFKc%5|M7Rq7MP#h$M}DAI>;3C-T3_@5VZRRt-`TLQFv~R^#)sN9)S@FD4tqa@%j9D#=#dB^J>7WLGp0_;Fb^X8K zohQ@oFXk+oCOsg0ro4k`^ZgR_TnXz82WZo7!tYLkKre#)%}W+?8lJBV7jDqMgcqNm zH|f2Do8lG7&=Fn*bp9~uFaK~Qc6UStDN_Nwm!=z!?7tL~vJv>uHTyrpk1_?uh)K_6 zqOUvd-FClCBQv$^;o7o)%8@%*`kjKMsR>B8j7r2H6uaPF*oN!l;d5VC4E4eKra2Xu zTQ+x$WGE<7w{RGN_9~Xt#xhT2b0}{&LDIp~W|o}O8XCvg^?hLWtQcSue$P!SQ1l$? zonc=qJj=H??^gTFQRbWMvo;v+tIXr~4i%pc_4)*$zL4yT$v<>w zrm5_XF5$#@=_mAQ8h2#Ub?U(ae?e!n;nnyA4s=}^k92K=w;{?T>VIKuICLkU3VuMj zNxqQ9+}q=^Q>YXEgYKDvgtw*!JDgB!Fc`f*VEQDEMU7PRsYYgE_BU z?9Y%>{qHzDY6`huVhW%P<~bDrU_hV0RVd^%m^69#ge%4+kqrdlAz5PBzy5gUNKGnK zyoEFQsI}RS5d7U?<4|h6|>+ibvwt^ z>Wm3zRT#=*247kWe?s!F2ACc!mgD0wf+Fk-A;CA!v1jedVO94c6|>K`Q9}m^yq(#A z?2gtVHzif7RoZSy0NqwVnQmKDrQNQBv!{$>;wL0eXA{~G{PJeXYB&BsTqBO`BdkZa zGWHIar5l#@X_>aPhUHwN(x+oB8dE^95S9kJ2{0`RMq1lLf0=;HF42T=0{`Nm&+uZS z#uD3R(+hquAASkssAX|$euf|9taZH7sZFZUnsv*iaYy`++St^mLO5E@nPIKK$W)2? 
zvg1E+<_}bmVsLDYpDt+M%znE1UF?%}9UIAC8t>^&m)$25k>JKJ2za60%3+a0 z3^sw0>zq4gY9Pc|#}3NDnd)h{Z}{n3jVCjnIeXpi^b=3j^6u8@{8EZOd%CoJd-7O& z#Ww^mIF~gfdphp zhmDi=cD6n4H+mtl8UMGPO`X%99<#rA!?)b{1*S&9o4ubP4P z^SVwee-Q>J#E>cwV|PEJNu79K+1Ktn1jq9uG3~Y2K!4f4<}d(j?kAmuM*V;mT%X!c z9yhpqqHZD|Nx=l&rW8#D!H|hE`CXNpRti+Q#>J~H61+1fS@OHde2VYQdjQt`BstoE zicQytF;uj;1Qbin*Mo%D|^{g(dG{E<3L#p zoal)RCaTlmU)GNgcs9tb4A&pVwY6+SJ_v`g**My*c;ITR+Hf;`_L!C7IwFfj<#VuvlOQU`C!7wvK7L2SaMDDX7kRreke%q)jw zURYhmVJnq@2GY(X#-c^T8sN=cd1e36e~6BYv(*ta*gT_rbt>Ftuu4jX{g+h01*%gA z)=)Ky&8F|cY}x-Cx}%Z}vvzHH@!@153jG=XT9{&*?}cg9^YB*kAhFIz^4BpDM@*s1 z7oSdONfUroihH>`qE*pm7QUtXo?x}L^(FAgBZFYZuP@rY8X&__H#U0h8;=P5e-vJY z!KJ2>{zb{ls{mndniKha*~8d3V>83Q;i~le!}PC9d0J4~8mm>a?L!-(kP*nf7eF2( zpZymsWxklI()XCToaE2hKoYj$>Xbi_@5cA>0fRe3@kL3nF)0P?=lI!q{?2y?BZ1~Q zk;%y1If7-K2T}T1>NL+(zx=vZbGSob*bu~v-bTwt`GZQkL5H2X7>;I|tVeQ?R=TL| z|1KugOH(BOb+^spBO)OmMS((tDsj{Dwpe-PSO`yxA{ zI-uNMy~4lB)@Zl8tE8t0{=#vw9KoS0P{r>I#2XW)!Gx+N84iU*;k^Rw*>4QGY4%g; zsQ2EE2!!`SMFJrXhp!2+Ma(=V=(efRyY7K-CUZ3GTSeUP$2V;i>!@J4f~!D3mXK=tE56o62Z>Bz_g z$BL6U&}!eI89_P-2xHLG$3R!BM2(i}nHq(Ynnlo?^`MG)iC|3jfA;w;-fQ5Qq=EZ= z2;K!ih3?Q;OwH;1Z!RU_1OL}ja%vSkMNhl0;r6)i8KB;Z>6Q~ydrd%Qve9O@k>^$0 zXt(h+YztA#RxJztE@5MDKRV~z*UEtfH^W_S*%;Yi^Dg7v-D4T+LE?Z+IsIa}-nb`$ z=KWJV>REaexRaGBe*_W02g~XMA~e)8Wm)YTXwQyv8r!f?Yne;#{0sKdc~EMW!!wGn zbQ)Sk;25CF_vu!!8;^K$63@1R#1cEu1yL5>wBtR|O)2fRoP2}$nri2*@_9wHu?=Z`oRop(neKQ?a0h;C%G_Ve+%wA&UlMH;>x8vPgKAe-2U+>r&OVHf!Wxsj0PN3!Xu6 zOU~@gQ0O@t*7H+w1r__Lw8UC)2j0KFXo-Fv5qd%0gBcPxTt6xW+E7(D0QBYz4o2%6 zSimvNxB+Tue=JQNuC~DX%ED4R2B2vn`z3P9x^MV!-2rE*?GNfuj?sPrV#Fak zdEQLQk8Ym?Rvo>CJkgX~(soO?1DOU6Un5Zf4U0wx|2mp}!uuw>F7R4Ct^kzev6f6C zxJ3XjGZ6oNa1#QoHTI>~aH2 zCk54Ve{^QoY!^KyVEcRMAYsenAp+mlCmSY#klO%mWzu!JEFSyPDIW37AR(SeYrJWY zuigVtRSl>>?IHJp3W5Fr?P)dnD+%)%Zpm(86pzdb^JS}k!*qrMvmTb<>EhX__aO5$ zR`o4E`et25ly)L90_DZ_d(WLXAQ6)tyFZi7e<49+G2IOkt%&-oV98-M`fwe{e5Rw& z8^{i~KQwLIOVE5&e1b0`Z6vPu8Lra{qv5)MfZoPqxtLHnW9bt&c9pvORNI 
zf6GB?#Y{p^(6%9Rh6~+flC_R%HdPGh&^5{u*0RwBI^ziwtJ15P66*`cj+}p3s9Qus zO~!JnA{%}F51R?i9xTroAoUNmwrV-=1Ay!pY%U!Of`QAoVHZ)J=`lQq)9SBmZ4X9TSY+TKrgH5s_->e~#zWg>Z&cFgcu8bYExr2dhj*Ws@1 zKfge9CUQ@sFcQf;QQw)a)>j^AMK^fn|KSQa+%SFIkKEinpacr9&b@N(V5B7wfA%hS z&SlUVw#!TY)>n%{;Qj{LLG=$dQ#rp=Rdcbir3tu!9$|}?JH#~vu{MQ;u+U3Tnfd9$JkK}{nT@(^;1`JcGPjXxw2}2`h8VnvGRBR` z{wVAmn^N$(nCETMZ#Qh+*fu!Je@l(A{b@OZTDMa9{zzJYNK{cWB{Z=d@c4Kx# zlr=f@?qhnoPgmDmv0Q5heF(yh4e@BpT z9Xt%uKnnt{g@Z8X(T-ldt{T|an9_t9e~A%y?Y&LG7^k^bt=Z|%yEF8Se?R*~-B;3X z_xDwKrhMgf=y~{2b}kKi0^A7{5=RIe2?fxbi*JO;hu4v8R>10ehii4ZIhATqlci4x z{bYuk^nOL>FRkS>-)}U5=+LDu8s5o{Uyy=00h!5Zivr((b%AQ>3(H!5i$1a3jpCVI zx>zc>!I|-~UCiKRTdYL-f3%M1c@+STS9i36W7Dcy@_E{~+NxWeBZC4xtdFxthh{9A z5|K7jXeMX+^Zuv7T^pD310hN?1dF;2<}u~gZQK&OKLI72vKhG|m``(pY2Rvw;eL(l zz9s8t7H6rm8U4ulPnzF1Q9i%KR}1+1gKb&a0>n3YmO}wo+Q}6?e~>SMdyV9a~i2Q4}u4wTi{MLM2q0u657Yt z&5ZWX(8CtnPY}VLe;|t1N{zfr3_}$h-U){7w;Yx)tkM)dGaxxT*e!88L<9&vwT z-t=OY_Z^o@eWQE)>GgX_b+TbBE5R|D$LR}ny8kcaz&fL}m~bOTrh*&-z(Nj0$>LCX zgUH@VD=~18FKaoJd6$TqLg8vFc`hI_UY{MZ^UCOtp#N?)aB0847OurBzUgvR3Yc|6 z4ENwj_ABE^f1WkfA++2ywL21hRV!?FC=Wcp#elaV$oL%xwo3pHhlSr@2+n_sUaT-4s8; zio%8cpgA?-=Ac*(?eQ9%1+8s>2i0JRRhW{%Mil*Rf0j{TfuH>K;pU{li|82z6t-E3 zYy%Y{o<4jB({kaYQ{W!jZsp)Q?W>t9ZqZyOGnj*DTykx!(n%Gp3F`2$d`f0DgwYis zL`z9C)`Lu>_&?aY%h*bSu0hOZUo*p)*}i6GW`=9#F*DPcnVDh6FlL4^Gcz+YGmqKM z`|a+JeKMf`OzWPJxtuW2^W>Vfuj|Ki1{@K&X*2B>UrgNXr09 zUyd4PQ*=oZDetK<7GpC~oDmIUNcE?S)|NmOsfQ!V9sQJngV6lTomlZH1J=_+ zf0E5~(l{%Tmbwm+b|~fH<=tn*#5rs?TAaUGUJIT_h1@kPvfF$6MyX7$PKLUdeFkT$ zmUfCm?NKf?N_K4>I?%OTh?fVfP7XX4RVnyHzc(kU-ix|eEaImo`NgV`{YMA8eGRod z&JoN34ep!CcM~f=Q;xJSwzKMfHYP&Xe`c1;AM4xRf{u^~t#;J00NwFvU;}S>$ZAk{ zs+c#6wSn}Ugg5q@QeLupsW``2m9Q1DNB}A-pe>S*dV-KGat=BuhrabHVTH+)U_~>F zNH@L6?Bn#+rr=_Ma8t0I7}=-axKc*fo`9X2k;ZjpUE-yOlQ-I#ET!Za8UK>Df8jW9 zgJMiEF9jx&6rOZBcGNt=1a`0-5yz6)8-iAhih3xP9~9kE=CwKf4Vqng93Q`q z3`^Zi{g+vA-nbym$zKl?Hjqsae}Ej;_H6}^Jj1JMySprNCM z$Unuj+s7FlTX4*TWE@T4l_m+aik9njGP3|$+VbuOb=pXQgrTpBkcXF$51AH 
zDAO^xblHI6Y^Kp^NG(hHB|qY357}u#Yc|v(@EiS}KiCJ|i-2j8U*rE|e*ljJ```aH z8O-#K(OdNWuWiwXCpI!y!#zAMHXprWm)oNw?Q99e9Q8osXlPCmo=~vRc%!VP5M`<5 z?~Bsvy1*z>EeR_YQp!|ywKIf zMYU7iZS_1K-lV1LwZQiDf3@!wAZ_QzUZg*oY#CLR`t`xVLYQdX3T=^=VDdN@tPx2% zgD$6C){rIHZcJliQ;zemfyf_ZEO(FDd>@0=42(~G)VV&aM{m7e^Q;b2ie?_a2=R3I zI0QBgXwoix>cqRWpn94ZWM3eh#z|6T=T>I&SThez?5e6DFE1~{e>G~Ze7Y(s&M$|C zh9b8SdOk~X?7tBbwr(=Vc=PgVY_7~~7(b#QJ0Faw{K43_Yz|4E{yjz+a!uaJDzZUp zYdOTMP>4!F7=B2XHb1{9=IzZFm(0Dnw$>(u9sQ-Nt9w362jRH0YVq?`@o`f~P_PSS zUhl0a7XMVg9oxe=f9A8*V+K!xPFo&2?KR62rK5XGf@_ zSp6K+tQwGHSn9i1jI&c1h$a3!jfUQavfBvX;EBrk5s5oZe}Xo1z=^#cYA{I0#m@fje7?l=WoBVg8A6KCjh!)5VCKa&?I^*jM1T^gP!#BJ zYdUNVjpiph>yvc_*Ed^rCYX27l2XG>^2g50km)ESe@MU-D<}1h9qexD;yO5x+jlJ& z`xJDmjPDsM*PSBP#vyqg2!3KPDOrbNBw&H45NN&<1#no>a8;R9Y5x1>=FV&nC=cXC zq{8^QCD(gkqSgVX6I55)Bota24PYb{V&daXP<~Y&g=A1a)i)|jg+=o_qWR)Xg0KHUWCv6o zXUb0aYB02tu$%k;r3GN+YZxjxy)g#GE+?}xh~uKmREYW)H`zcz6g8%SP6okOnpkw- ze+G^lkVwr1z@flfJYDtKwi?%OIUT5Tv_%lXX)7fRP3=B1+i9_OWk{J7Y@EP;!xtH4 zEauH{;2@Be6J@3&h?4wg7zXUD3FS2y;ft74Iss9OAPPY^oPjwA@>rsFpOl9mc9jX8ViU7~e%e@KDHq#F<`EFpFpHJC0%sy^;SGXd~eqJTbT zHMSR@mQo6A5G|1WoK`^T{pI+S#x0oR{AS5c#n~vO1Ga)|@ zV36WejaR`N_a}CcBI^kF(P!yvR|CfszGx|@ELs+ z@=N@EKqLV@Tbi-*3&&9gm8L@$f4V_Zwm^%v^+f`lTc(7hNLUw}D1T||5yEBaF^e4od{I;#};z9JibI%i<8ZB0zGFtqgGXuk9>!3O*sdE@AltxTc zLL<*%kmLe=h6^f0w&RQl^jj5PB15)HL0rs?`|98p>rk)qBI3Me@2s?68#wD zshTy7xpsmyh?PDCzQE~wj%6p9G(8QlkAVy^d2HFfz;V2@R0{$Pii4cq@|P>?2Z(eyN)VWx%*jaGaTcyWye|jT$hPK`U;pk54=h`qx7wz)>!ws)EMui|VM6 zD@nd+mbNlgL%^S?lE}oQf55a}n|?gMtEFxDT-+ zIJ-so-7=O&Dzv}*ifZG$J8p3<>f4Y148lrRx4mB;GLOiq!fei01YOspMTqBfr}H?A z9&kjsk;BFo5MoW(KQ)-y*kbm*XI*IzJF>`xCHB;9sIT1kXxttGe?So+A_9xvBQsLh zhy8^G%>MpdbQsc8wKG=}6O(NTERY+HO@DKwbS5NI3y}!11%pU! 
z+S_v4G40Z!zH2X4jI(8$Kumrwk4%(}C{0n(DF_7vq7}o*CjNK<&MAE$lJkYh7Usja zZ=LY)#Z3gsLBhaJJN}&jlLg}8;|oogveqzt>$%!&zdv<7e}lgCJU|C3hG3_Pf~Sbe zX>XFrG9xt*_Vd3Wmu-IvxNoa85K8^zyf408)1xRC%ef9E5Oj?&E{mAUs?acO`~vkI zCxL^ZM-C)IWFv&`xcY`EQMrsTN1U}71&|hePlQ>MAuX1a>3F6?|GFobd&&>vk z5e$3s-*kLKLR_@?;r|0uEEpZaNE^_f@#G4@Y^TN_e|b9!m(=~jhaPSq83L{&xwy`* zu;uy2+1j(&-Yi}Qab%j!byfkffIai>atX#_x^6KdX?Z<+WQL-aHEFr$Te zZu!&{97`3)67U2+j=rGyf(}csoWkcVW$G>D;Trg~w z-4myAhSt~Dh`#c@x|n;qeBTr~7;JpBJc;aVmK?{d4o-SV(yV@?CJu&zZHuYNmZsVO zFSo+m?QquEd!Lx)llQMX$4}#hjQd|7P3z}zf5Uv9o7V05{070nbaYtbt%?kIJlqqo zbgPdajeBkmFpJId`YWjq)RjC}&$XKb7HC1zIEvFGP! z(qSh*cOD%^gdOiEpXo-sx0tNFb7kY>&P z%qh5<1=Gvs)2`cJNpJA&XxgMWB8XgWr(eH&*<7o2Bm{FyK(|C&^%m3DjGDt{kZK$s zro8EI@tLDedxY6OGYh?$`%+F@-jHZNe`xwE?L>-Fe0bRh>d3E2pbro{hlV7#*%4p% z2Q9TQdpw>lZbi4h{&OL^IgT$72-D5_;@j+?Ce zCi|+Rrf1Xc*IU^i{*YkLQ~Dl{Mf0b`g~@T$Y9pr8RB?D#ed)PWFK2AHf${jXe~JYD z>5-8vg(+HTgJY?e{-0G2m%J@2kFiD1C4OI@A0_UWYlm?;_^qw^qJI?0Q_J9VV1#-m z;$}4;xY>B)l%t}KohCV!-=BM7iJ#Ytv~>T74}Uyuf6f2>Ph%LE(Jg@2W!t@!%@HZ$ z4kdp>;;w5|XZ`A~wkI4e-^=1|f30#ig|wTM7NdR?DJjWx-{;xzr(|j8KbO`Wf8@Ro z-4s)`?uMOz`A+M5d{{X+)aaCR9D*P&R_eqaA0GHuww69!xP*c&25*d!2j3C@EA)Mx z;=5W|UtibN8{>f1-1+e^D^<0->k^;M?{*e{|H9AbarM5K?Xh*+v)I}Be^O*{9t8$Q z!0p(>{r;EF|I*vwuIugZM{RridMEn%JJS;UsY~BlR@?t%9`*lyu>j`(U9o`w>K)iR zmn|X~7$We$CwP|`-j!zR|4Hy{sy=w%I<8u;Jx62MdU^oRu^CBgOG4t9qU2?=ET1Y_ z(#d!->mfpGg`^w`>q0t}e{1lCP0--PK!y|@IeGlbd`<|msD8xE58kZ@7=?^{c zs?WM<<`I{u464Zw%5&42B8I}e@-w`yP~*TPee832*Bq2E^fDo^PB$)@%lHa%4Te_q&q_q&2p+`_o{ zsjv5J(+%=;$f8i}7C3oWFX$dizSttO2lfo=+Iafe__B$W zf}&Lma~H!O>6(L>@5BDANQ87nWM?bTcI~0{){3h2(Y~fV#ASECn&hqJv52fw2w5aN;`H`3cDl zKRql|Y~Q9C>>?jOF^ML+pR?|B!0g@0nE2t#rtX zU}+1VA-5$;re+!CHd?{2_c9)MFwXA0?ts9$f>8~KWY$KJe}@qzNeNFNEwyefR$e<6{Ef z{a()?-=@fVf097S1LC1sCM{i5-oot5hBytJ9P5#Wqbk6)JN#%owBQ}LK^fRL-qAel zl4W>4%roHA$}JD~s|rOlC$R*H_g9rRyV~t*=DbdQgP7X)*yA<2Cs*v8%|j(o<6)*m zJ0y#VPaqn*9kCN9FEi~v)wX`kO+FniSa_~W|L59ibI%0HJ2oCmc~A|sX0*+Rcelq?Zxx7gqMo|}MK 
zkd{u!kqXIIiyZDiC2uq)|7s$^6m6q9OdP-iVj(zeYQoU!mq_!T*iO{)4cro9qg+Vx zB1`tSe+U(~{|Ur#Eo;R4(P;H64^FXDo~cY&aaYkWWId*qCPWo-H|52M%FPTk1FW{$ zQoSz`LOyR+Z4=d_4b`SkBAQb*Qyc}Wg!vgfDiV-@6;d&NA{{;Xc48be%OhDL-jC*) z#Ol4}xA963fR&Q&2rJ@jkDj^%u8sc;i3K;Re~83U+=XHYDNie-q$tJ9r%xMwmC#m_ zh+#mFiBn<4SUTC(>F)OW>WkfXld{lLIpFc+7skR__R*&Xms$#NxT=Y70Vu>qzEv>P}{-TP;VVL9d4Z51Ek9n(DC>e%|6|6QC ze`zC~LqcVxDPTCDc;y)?#JKq;X>lMC#=1(n8>-p=Z0~e%-zzYG{}|8H6@EvcCnVe^ zC<;J@DB};gLn{Y)$@R3{^PdmL{pa?MqqCe~x4Kvf-}XV^-W`9O$#MpWac<=M?$PMd zQQy!7)e@nqL3S|rz=;wt(JabIS1x9dfAt@HlRRf#lL(|k>^FK{shwHoRuN*NO=Y1S zRjKZ~#UL1#;6*RP9J_bysEzq^(^Ce@=Ihy7vX8P2L4`P6M7yGJn9k1;|4ufF`5nGP zCpkNOOT=ZENKxRCqH~{+_P3aClV~H6iq(?SJN%qe-O_h>{W!QEHuRa`!|Iw|f7(8+ zZM&CCljy!x%%MMvcsz?|J>#aFU3185^JG~;<>-a3sESC3T#5FOyK1u9m>9K7@|z8jOf8$q8S>HWV=@JZe+3AIP(Wx9dDb08%VRVAa@fJv;P^Q-O)+h)^etcU;D*4NXv;5t;f<{8A-e~Z=7b$7rp;(IyW zV16SEr9z|&=$EKY2r~vKx#6XZMjl>e#MuG|p2=3(uFRylbmZc+3oQMxX(T^w>Z+XEVz zuZC$f(?aQ?e?{R6Q30nN35sL6WWP2QsyGyQ@owJ}k8y3QF*Ka@QjPAxPe?hh$wvTU za=dX`h716KV5~wd8A6cdY1=u7Z|pgZq6 z1O)i>H#h_){X4LmT*#PZ6VK0=^m&$y?Bl5vKrjMG6yk}brG*Q+-{OwZQ7~XOGitR> z+^6{s-0*(;^pM(4y6j>x;+Ldxza(>JH-hs`*`S9cxnip;gYh9OL8`8OecW5HY zV~i<(bnU_>7&Rio;l`)kw0Ss9SK=aJvpy5J=ig@^a}P&ddfgSFx|)N!jz~;w1~_na zzaau&#Z(r~MF~56vIg%__Bh;)qcf&X6$%5qf5uBdD7bFJt~u91qmKHh<+c+kc5v+Ty9#K#+FLA9IF4!MFEjd_AmhG%#J&hHxD#~X{)SNe*n!Lc z3<&M4$IVWT!}v3iB>8RsHx<#*$%eAl75df_)hG1PcbNaZ_E!R*q6=EjHZM^EcXloU2PVn>FH=8B`M#IPk2%3JJT4$pZ(Z) zlnfvCn6WUo7?S72Evz5)@mu|8(_RfH9kZ}C-N8sZX?#M8KkoIZUe~B!E+rL`e};I< z*QxMMT|XXHGIdUXq=;1C=GSq3KQNjHxcK2;CwcKxh0YP|yP_@UuKf9?F3~DDc%!0C zasBH$a|AYYwaIl1jUESdl92b?x8-jfUbR1)?e~LLfyXe*Seup!+szH)Hr8Q6_Ur>b ziR+enzJz z+8!dBp2*B}*-UTj61=!wQ8d4BN}_IRkd(5R;_?1i*4h60`s#9sF2AdA{pL5w2%9JL z^1Ae;4vmy3PgFUxu}pr@fBeD&%~BeEH+{(nj^A_QjX)DKW%m_HmR+Z~B}M6noSUA! 
zg^UP>9Wg$-b;cbHW~{9nA-z0|`$lv%;}{^GT1Ace&Q9tr^n8rE36Y>TY=USihMUv* z`PmeJh`Cz%w+(hdJw63&dr}dulnFeu4+Gm!EM`3;!u}VxL?wHpe}Ffxrp!#j7L?8y zac^T{Dbi5hKI57$SZ>gU@nWpV+^66B4wq=7-@)c(bO83w86}B1G375`=n*3fOl3^z z3PGET6PCvUD)b_Ja8PwEZOn4w5h4YHay+}uiSy4|R|q}3H+3$UWE8%v$DEse3K82e zK2tpj)cv~0QzT(pe`%92=B}tX??Y4YNjAMk-)=P?={g;$ayjLc0~+kpT&a5&cH|%! z0CWrdTo*C6VQ|B6QynSxpI)L-sqnt<%9`@D*E=IX5*^fKtV#pOgI@JIjCSBVz|b@T zHw7Z=a+5+^4Cr1_FQ_W$S>p3OG_((^o*_z3vdcE zW$kk#vn?^Xe^k5wIDudlyv2OM!X=dWGrzoVSuuVreQs|#6?D6EF-gw4gSe2yNd*w( z%hO4kxrDV;UZATs7`$3T;aoMfbg}aoQwqWcpJkEp2_5^)WXd`nm8yeg^*Mbidb_x` z^s9c)a-vjlaFZ7WmqB2A1UuUpim=CJvChUyzNS`We@WcbOe((k9w5$=h{A1{2W8*j zV21b&KTwgpi(IBivau2#uVC%Z$f zF#4-^aQU-zPUtUnwZhLxnFOdi0`?+6@^42ae?MJw;R)wC%`(tid zyYPKfzAkXg{+3-w#gJU5v$fw2sUYRU(|3mDeB$6TkqiZ22VOhGfG5=iPNK|YhIt`a zDJaTNW?{qbO-zz=YC)>}BB2V(YvwBXe`!8DXnBqPpqVIoo9Wr4#bEGfbPaZPIR0m( zR9Eod1Rcm%o-7NqfS;v>Q;pGh1g^Fcl`>B?z6Z8atbhvh~e zB~kMyIz3Tf>hW(6IC;=j8tzPhe~f1BDlKmG5E8p}z-)NL5Xx^zz&UEkN@QldmJM@a zKLu7k0DW6Zw9AC~XTV&t5Q+DPlyr`j65rbq;0c}IO?%yY;nY@}St)`#c5P#4gE2C4})RFFXc)n9J?~e))Pq;K& z@ZDIssK3bhfbzJ6FbH{f_I%4DVKKe`zRau|9Phll_rzi6Cnxr3e+caR)@Boked$EXBjJv4Tts z?-FWzof2tFsotGw_PbCOCp*+bdP3}&u`CUtyNosRiO8_%!Up-~h1?w7XUy+k)FJmF z@0fvdQ1j%NA3qxvD=dJG*mw)MfKx?Lm_O-rMOv%h!4YZ4cb|qZXCrJWekSAk z#fUY4PzWx`gBVtHg#>k}-KV1lWhf-#@(P`QK;d+#&c^1Qe{$;07+t=l;{$yIza{ph zVETxOrw#~q!NgJgJ&$6PW+)zR^8UkCdgCLhg_=_5qs69#m_s$WUBL!L7KdRf-JbLr zJ%ZK?DG}7Afl4P$Kq}ZX#_Lzq^OVo!Gu?UO`v%Aq=o{$uRCUl#O(=Ils|HEvsN(l! 
z74cP8p12*Me+7p?u>I%t+72l26u%c<;Kkbe3zU3M&f$4t#+mx4TmR`hwfV7=vT&sA zkNs}OelC_{cLB<5fyRn~lrhGIzPlH=i(ZWuym%Yl!T*xvY$1siz`Z0_0Ub%=4&eL@ zoy14)SVv@0wh{#~$*FyS7vE&qMzo*zx3rcdtYMz9e?fKc_R|hGmVT;xRllvD(n7YTS+erIESsik3(2!Smsv2Ji~o&O?oB-(>`D z=M1U+e_oU(uLMeMg7EVsRDwtc%ku8^CGY>fd3K%4>0EY@y#Rg-FJAp>asbBQC2&*e zaZBF%e+4{Li|Wydyb=s`_P0HCQC2DHVIn#m8;D**_}RVa2qOE1SX~*E+y+it;KXsE zdmq)a@~NhE$cKdxU^u(hO`nYAd`+z>ef~zAe{!W%*xozwfVZ^svk8knQ_hueQM#S; z0KDMxzB!cNXzi*z4zk2_;zl9XA#47WkcvKecGm8b(;2zgSKgM=%6SJleP)*Ejc>+Q zkhn)+iWLe`bm!A~UM4b0+ceR=NU%>{xA1ldkbpaJm@>y3@<#jL18Ctt!}zywa38~! zf4gP%JWsPaR;}?f{SthB+*rKYns~e~1lJR@4x+Nk0)X~6k8@t@BYYaM?kBXn+MLTpuovH+ke~PH< zcEL&}j~awE)Gzs;?oEb!p9ExPdlpD@JOK+Oq~|8I?b25-6ise`p$ch17$iu-pYt)S z13DB4A?!R$#6pbUOCXp!ok`2cO^0-DP9vQW0e_U~q z!dfCJ)G5mBn@ny@3Rx98p8B;6e@TMrGAdEZ3`ia$+!AO>T`W{q9Vxj`ZYi~4LX~6W zRJLI<+R5Q>dC)1vpZEh;@UpVsGvrH1KHdt^{q?AbJT<&(e&g9AA5@0R3&yOz8NIh! z^h(gA*7N?AW(%2KiY7G}E3#9sZ0s{4&>bO_Ps8(;ZK>$YgRAKV85%@uAa61s6gCIihujFFz= zg9afi*fzE6^mhwl2>f28!;W=uS+xz!&FFU5!`3&`A6F`6<3{D<@m9`0iZ=S9W=uoy zRD6QNdyy6tUfU|)1nywge|TzoJ1WPR%$hvYmbN&u-a01uBMo^Fw)E%tFVXJcoa@V5 zDk#^(bnTT_IS^+INpO1=iRncy0SVI8*6n_~^4M(%1tb)%2U{lrKi#~oBRYwl=FM&$ z?78F_BmGTwAqPjVB<9Y7#2ZzMx7%$|*bc6!Atl1X5NTCQO*J_Lf4V95KSPS!3Qv-h zHw;`jz30Pf{fI>4R(*e9EL^(N1jM1TkySHd^>0P)U8FjcjQ^cIxCyM$B+7bGz*z28 z`%ZwH+wlIE>?a+gAtNnTJ=$40RuHZU-&skw z{-?vS6WPwB@9bMccR&WND2-jLO$T0>dQDa~9b0=3U)w;SIOgUHAz0F%F%aS#=02Kp zRa_a&?aN{H!WPo{dA>3yFAXV3$#cS&AyS2vbi4^ zk)F#apv-5^fEba=0xf+C!>*9VE3Z*$k!T+vBJXkGs;BB;?Cmm?y4+;8s?cS&t(EHkzf+-fGyf9O$sTC4XDgin5^lU5A8K^!qb z(XAyF1k6jgLKcKGSracYaalD(kfyhRt*<<2dlypa8Y%&>NBdmQr2M|;&CRfSaEUDX zn@g~2aW4w(-dzEa=uF*mTYqf`-$NKmopAQ*BmU8o7-zE%WBt>rvOvi}X1?tQ{Bo(N zg)@FCf4p?cJI>kCzzV}#0iN)OWj(%=3lwx|kXt9H%lfcQ?FSQ?RydZx2k9NqIB^G`o{=HxOD-BhqsbQ|w43wOi3a?I2O`ZWd)d<*S+ ze@|Do2}_b;ytt-zKv&@+IZ%#_V!LxeY}g=NYG{)WEj+)6R0$p!n`NL29b)j8<8XcwLgNn(nf8i2r)5*TU0RI6fq4Ge|Si_ zHH$2Eu#t$|NVRT-v(%c2&~U!t+M`g(s3yzBcbOI!x|0rL3TB2#=PKWan?uh97u(x- 
z$(RCaIh~MDnBXnK6t!spg{X+bGb-}^#cF?7i?`+MHY5=G`?*S@ED2v|_T?M^#*PX( z&SsU=HErVkG;7O#qlfd`c%s}*e=}4Reh;;PgEmgZNVs&b*CJHnJNMStLNb?pih0H8 zC-~h5LM{Kb1te<#8UzEBEWj9C2k@}L@{;*m9>?0OE@IyDrl+ZDp`63H)wV|P@L{rU zE)SD7N?9euzLA@#;gpadYb&Nrl1b}E&DRvp-T{p%xoFe&)06}f7YGKff3t7hxw2f$ zRG~%&lB7t14{%15YCsKyXz+CMco*u_=0ntOu|zx0{(|pvxo)PBx($*iYaJ0M<&9D{ z5?-)GO)+2Hwq{9@=S=$WMc7QOz&=bBT#GPH0pS)I5JZAB+GovXX$@EEb!6l$>z&#Q z3ErMr=rAy0qW2}_X%uPaPy$37mM*F_()_5S?3HYgb>rSXz^B&@LL63x2nLfdAGX@h+lZ*7UPm%*U2&zV0nX`xG5EnC9jq#9Jg z6J~Y#I0ZMhYf11!M}KdQ>)9pGwD)|ai9ZYWUV-|`u`9}z511N!ll$x0Olot^j-t>;=-I(~&;z>0pXQ*&~#5ocpmN&We zea@Qqqq!*u#S9f+j0B4AXO$37k=jdpTwt6XX$XluyRwn=*@7xi&wm*$Q{m@LSa>2F zaRX`fvK2A6@MAb1jnF%DtbRMop==vh?w5d2X3I%LioZ_yx^0zH-i&yMCx3RQJ5@5V z9h1IDCE{qW0Votxgu%Rg>uA-xOT=b*{{hEKx{Xn#e(+*~NJk`s9@8;dCgk#aA%!(( zX{tFX$>M<9R)4hPumN;6z~AaVUH@ali;;fRAMa1A$R&mA4e zr6dt%VN#ORyle!Lj6wtS8Ys$rYgUwvNBu#c@CFcr+*3?lty1v{GNB;-;t7w zT62C4n`ed?t>^JTIJS`vU*OZFn(=#JS;Oc0^VoT9V%$qs=?cS@KK$3`!Z>ZDK2v~Q0$-sN zX>!Hc)!KKqBb)6xg^f=4rDGF|7_rmVXUAN5d_h)QCjT#?zTyX+}{6 zCRV7mSYKvYXrdr@JbEh$2pno*|7IbCqz;({kfVa!^)nin=gwqv!$1vkLe{*o}`LgtibKKYI_-- zePH+A{!|=$+Kk^^mfEoVPMK`VWj}Drl%GoVD}VF%=LFH&+Y{*PyxZunH%jgM^;cw| z%ilV$mu#PQ4q3V|X^)cumTdR%QWa{(Nzqo(^4XmZtnq^S^jJM1_mdm3qD)rYU$m3bvo;G8?h6yM5Z}jmTx&qUEOGl4pvo}6wg82mm#aUNLEt^#) zsoFr2xfx{oWsaX?so9!UhEG@e?e5#0p2jfKPHpfKvJW!$U(kWdPR>{zO*?H34dAn} z8+}gFxg5MgEZnXUr+nAq#dD=z*&Cir$A2l)TS59KFLk&_6v>#)dcscQ(3p&!Ar^u+ zI1O=vVRjIG$n+a72=Upt#f_#=Lgo1t91j!hWnS5u*-c#^ZJGu*)m~+;HU*1EMQU!B z9BaQ=$R>4CZ;o-8okAu|9mB(qsh5a+V|Jdo`C8m13T<3nS`T9@DBJKJZ&U3%9Jj>>bGyVR;@O&A!vb$nK zJbhT(C;}gtg3gkjg-|7f1Mj+2Rk9=$f)yHp6OiOt1=~j>iV*0kAHxDzr>b5CP*WdM z>qHK8gfBS7%<9i#Dk|ouihpWh^ImBfvf{vSYxXC85fr#>15S5UytaOPi%bk%Kp{Nb z(dR3L$V_+WW5O&|lCK534HFTXO;;=$p~5ir$!)k!{Ob#fqzIJ^HdDGX-5Js1qOQ*s zQb~Xy527Ihh?$bjZYly*s<5*Qq>iay<dWmus3wZ5HT$N&{)QY0B%Q2taew`UCUcezj*e0ojRW(pm!fkXIksakrwp$6FL94-(XB>Ro=N+J zygX~24Ofvyu;q*MS{WuUV5C9!SnkN$cG_g^kFfM>Jh9m-2A{OXn0QWj;`E|i5!zlU 
zL{<@N;XGg%+$14K>aw>=TvZi}yR-8_&xb{4u+%&{QrBJr7=N0q$mYEN=5#L*Qcn#g zO-0d}Mzw62bOoG&LMG-OsZ;DpxpwN9{cig-FHgWDD+8b{qoBk}1{5FxfxhcWEz=gD zJ#Nmqr_ofon2UGPK@%w!(R>2f5MlQi9@~Jm0SZE(BfO_FJY8mCqX!e4hrd~l--(^W zAh9+Cgn8GeTYucgxB?iWo-?py>#rwkNp@VhCcwNHsH8v|En)C?>Rjvg&%E>+XVEQO z`OwT#n0b|Cfd=Ir|9{Rwy(~sZFBx^*TOpXyM0A!jK~fb}g}YGipZ|m+>NVs9IOmAt z)<;O)SW-q)SLfu(({q^_qAQ$j^Q5$zxIgwe0K>hfmw(`PG`{CV!l2cz?18n$Ld_bg zMrX>wWdj-OLHLW*aT$BT@WJc$!1uKKIsudtJ@a=%hhmj#vShf||2(6tZ;jPDMa<4Q z^L4NeUlv$T*CEDve=BI7c4Q2K^1+%xLkyGOX|ZgBGi~K=OG~u)eH~+R>Lu$`W%fvV z#!O2EUVj19S1rlCHrt3jH|<+8r=M>;w}yuODRlhzOU-Eh7v%Il6{{58FM7`DdQhTsJgyOQ9^K0V zvegv|hF=zi=Y)w6g_uIK^sBMjbAJ-zf`2sc?}!*>&#i}QR9psvFF9~W{}{%As>0Ud z?0>yL9Hgr;-8>eG&K?Du*=Ua!=>9S=zy6>yRvca(1ZTB~P2#o_pxVVqkxe1?3mIbb zMvH_h-|RcTws_l$&Z$iBPk;7z zpQId3{Rmh6!sA(&-B8nM9@tH|2y?j%#Q+VL$Y@m5VE_>Lt38r~_yzStQ$Nz4*R$*K?0K^`(;|+gL zLvZwp9;m`oWR)}d5_C7XUr+gaO+97`2!!m9{9FT8HlGS?gE zBMh2^u96yS#A{@x5c&fs!N?B+Gg_Axk~tnhEAIZwL5m_3B>nuOzh5iI12is6a-`D& zkrGJfH?O$_pVMt}R|LqrPI#6oa&{`LA>?{gn?UX9=%NyRvEs8l_Hjvyji zacJSOW(XWOOKF=ufQ8lK;gL*(KYw|@zantlLJEwFf1 zS7vm=b+n+mcOo}RBUtop__>vU&KsWo1Zsrls!7E!1yvHwsXkN%o-V!rDsw#D7N0O| z;1Q;Gf?X2dlnSz%Ew_+^Ap9CL+P_vTB7WV8ALqM2a$d9&MAo!o>`)@abcTBW+C~b8 zxtts@#IAn8O}mU;+kYz1?ssU09DJq{m^wj0V~!?VUsJCe>YQ=c13i>Fcze<)ir~`v zn+1c3VFwX%PWMFK<@XT4Mj+L zr2-PXlx^L>M&u|m1;H1m6k~@C7)3;2r7exL7Ch(~t_N_^6jSQXB@`iVFMz7HVh6w)2)_HFzwa`TrR*=}s~`-$mnxA=#~YVuPnQ8nsUlnm(& zREwxPn;|Kn@PB_}@1A2k3*JV-x9#p}+nTm*8`HKmZQHhO+qP}Hr)_`Redl-cX0yB5 zO*U^f*}KUk=kwP&sj8$Zb?P}eRZr1xjVX@jeUb!U{MbTy4SVfr=sVc;Hev^%JS-*1j-x?sgu<5Szx-Q^Y~$WAYXo*c1CJ6+ZL7c%kpFIKS6VigAZM&W$880P%KT?6fr!uwnf7> ztqS@cs(;9R!-EF|IO->mh0L@kJEwNXgijL;)ZpgTGgNqv`brXX@BK!I^#OtgCx8A% zK}F$*bD{WzdjrNsZbCuIc@JXsy9pd}7Hjbha8~Pykvei2z*oPTK4;G4AHQz@macrX zEJ!8SG`&58YKde=0_z$$r=+d@`=0YcEWrP3gHDY8Q$wtES3<=%;;XTa#RYWNb6saj z-W$y8elTW*P>9xVq->G^0r5b;9)Aq-Agd_;q*rT9059p}E27+Lr*H1`1j;k-&&ZnR z7s12O<#)cu2eM(R_;)BqzB%v*1JfAJe3u}0Fo`I~fj`CzQiXfd!G>~G=dqcS*QTc` z;!Gl*7{jafRiYZ$Ded@4SUqU5^0$bKST`U*G2osA%8fZ$k;t@ 
zW%dIW9{-K3Wz0_OW@O0`bk5G3#dFLKNPg5Z0SJ*50pW`VsT@Oa*F1jU&w%Lst9%V` z+A-KmMC^Nq+$H>_@%7vV!}rFbPQ@DopX0&%sYAVZCRsF}zMzOBys)!;Q*>nzEe>GE zC>3B2)H#h@cHUP$IW{+jd4J+W)SG4GPuVTq+KrH-a`tg%WxYI_O&^6K2ch{n9yX7J zIc`WHT}(M}(5BJ6ZiGYYqQ@ybJ}?CkWn}^*@4L?zZ+Lhb3D50j)0-)^^2y|(H&Eh3 z9({rV=T>AMvtZL#Gf$8X65Ljl458B#h13&luD)_`lD?srZ`0o|rGN3lkrm>|?A^=P zo42b=?dQAF)D&o7r0!6;zlS=p4fY^rz@A>sO2Kqs$wS<5rA~;OrQ_!b0`n&YLLe95 zN)sKhqU5*wUop1B=Q=E6pax+3IovT1VC&mJ-B^{>A<8PCpDTxOgf=j`nL z2It@O>`ZZnl+&{|igr{|yux3yA$s9Zi^>7UZ(xhE9<}x%Nn#5qKlH_TL-J?`Bg?}^ z{eYVQ3*hH$T{;4!XSEB{w;hYYcZ)7_9gNPhzSH;$hf3G8<$w0;Qp&OXns)3*b1{IO z_0!5_UpJcLq28FxG<8NW-GwQ(u)E@Eol$^tK!Fx)L0L=M{qxd`^RZe3i<-KPH{-e6 z>1|#MaYghw;BMOUX5n#!CG+DE{Gjb{MYqK%w=5Q!d+dUx6gT!TEX3fFScV7g_Yrtl zA24GDz3{Tp8hpd*q#s50^KNQ0AUH z(JwNt?!yFPw&~K)B!#q*AFX-8&JqT=LXbu%u(4QN zK84G%XgWa-q!^aizf~r?<Vp_&)xhPiEAx9c~VFv+S1JMhjj3T6N|UJQwSKfYf+n}4s3HR;hu^gTnT>i)nk7&j3DOvwp) zNSpN`7r73F2qE)(lF8V1Pc?jW-p%6yG6n(L1bEisdF``u6Hx2*6f_xa1JBa+FF(U~$^VE?eW5zCG*f^`HDjgLo&6-|m< zWi2s(+=7}I?B9?GA+YejYHV(LHiwT^JD*ndOFDs@+)%CX4Bt;nj}?<b5yVwAD?Dd!xsr3#n3~)Ejv1zX|PKG49U!k(PVIjYA*I(@DR1 zNz#tRF6XOXovaf+>|zT2Ab%pulQ1r)UI;kh=+p-ewr!V4^nA_Yn20ci7_3rB@6w3^ zNDKLL+78$(c)_!I13%1;vqhCx@4-9|3~j4AcoXR58*bF}CTK z=f7kwOb9at*mEJKP3!-h?8;R?w=F{5Yy`d1Z;1eUuzA4u@4C{x9)Al7hTqW`pJ{=# zc5b7Z@Y`ofJlRBraUOD9tZCKo;Oql`(-Y-J9)LJJ?yol7cTXx>Ad$%%qN$2z`(Yod zJEDQtO0Wr0>Oq+zIFZz0rA_B>c3@WD_7hmFgMU059@3#SHA|DDAYOwg#3xhGfM_ zU|4aIM1ffw42eqGkAGMB{JAPoU_n{EmoUeS;yOBx7-H&)noJ&LEH=Q2D8;Q>|01-Bm)V zz`<<{wE@tpl=nJ7Z}2Ga37+#ny5fy~<2pazQ*(64uMg2j)#lADwRE)m$~4&IMzx)| zk;v_Mu*z@$hJPn4o0TI3W3eHY<|euzBo6Zs5srx?N+>J0erJ!CMjDnlCG~iW$62ne zspli&{=+3zfE@&S@fg1Qs$6|jdo~2owCP71*c3=rt)D3S8nTQg2s@U?WRUP-EDTW- z+y*C1kO@_>Efh9NPC0vwdcinKM_80LWOM|jJ2|!i$bW(r4kQ&$;A68Y3uX&1)3epG;V!ZOxQcLuXhYC2ROBa|Gl=KuD>Zd+Vs!IR znn=!laDRQu+fl_3^g_*S@Jh|3KX-xG&(y4~Y%-w(;@>wEzMH(at+t8;`Nm+YVv$&A z$}-x7)XxqU#^MZ$$ky?MjE}qt9;B4yRKg~pXsbkE48r7e(L80w-k3sBpdE2Fhm)R( z`8x(wVZMigX1Z}1t1!K_FVyAua4it 
z^pE+L{eU?U!s~*1LP?o21fpe>1C#;|v4WsI`26J=$Zq2_N-2h6FlFq?Y{6dlV78vu zpOP(GB2UY?KOx?%=mlEzttPqRB00z|NO=a zzZT~y^`;Q`VZOfo+m`sSGn{3y4d@c z+>0x7_>nLKf*jYoavWWZ^deslI$gY*XBFFO+6tN1-MNU;L4W2(Ly8Gd z!+#r!{~^B~=$!vWdUv1^(BV_ z%qll@&arB{D9Kh>IK;eI!+-77_cM(DV~p2l)s>p531Nc2L%!Ug^ty`#dm9jY=O2z( z1S!sU$mKIpjOW!Ge)Cv&MBH*=cLkw%ry=7Ep|u`1G}tKd@!;m8OR7oaZ5nLdE9^mX zaK32pge>%jA^*aD8*TN$Jc+g&N(h*di~-gBiwuJ0!hqTl@T2Bs&41SUN$p$0lum?t z6JLQD{@KGW*HT3JX*bexzs9=$dGahi-u2B#)5U@!R$<<7LR6b^m<4BTK-aoFgi2OF zL2-!GNQeyrxB-8IOTZkT+=ML#4w*_8t zHDlZt9QLj<52Fxk&3|xBV@=)Hmj`|OIEAJ+jxM%m_{nSCgtusOP*g{z-w;MeMrm~D zA&p}MGcxShsB6-pGy%ek9!F#pxhdf1uxpRRc=rA@F7OkTz5Qlxe%}6?8qnokN1-ku zrcAM~UVhv*2BI4O(RneTh(_4fr8{3ymf5RUypGC{RZma^qJN$#trw!mOCX!VIA#y* ziM2?xORwTgP0tA-JG$&`rq5i=%2lz{>uku6Jx%GwMcdk!8yg|l>FTgrqx%hFm9Crk zkv9S~k%*k+MwX~mR*LQkDKJH`nsE3u4$szGqM+&%l>E_q0T56@80;jjeRdNj_JpYO zt81e<0mrM+t$*DXgMNiC*ne0|o(mUtg-{fLr)>3=LHd;oJ*(BCM{FqAhow@snbEl@ zx52$^vE2He`jXSlurw{U%aq7bULFxI!L2lLJbW%^{f!@Gb)>gFzPR@d`X+t{Rhh){ z_}~02hWKAUv^8f~S!<6vkNPqLyj@c#$=Zuv;wUu`rGM=|<4ibPj@F`432n00oKabz zHcDjmS!DT(z&8-#LD0Z6e5fKoz%@w)6WI_q{gg?~@k*-~N1WrhQgAoxjmJ|cxyeP9 zydtEpANb7P_UXpSib-X<@b&w)kG4H$eP`0YUvrshnpQr#?oJD1QNW0f?t#jc+-f3| z5`u?Ci+|$KV890$2xCyWRnH_E*Lk#cD)FNd+h~!Fq>2|XhU;llCsjx7(c!4n-rnEo zlcl=NcxQ8+bK!gk9MuU```SE+!5 zhoAmi%9r_S&3GZvkZz9A z(+!kg)d6MIhC@V5mib!nPBoCugZ2r$kdnzlWYG4w`3p&TKaj+cV{8!RdpKX`9nEIF zo3!cpca<++9rVf{+l)4?!stL6Y$Ah$hkv!`}+sGTLcAgMVG8t3n-0?l?;w1 zU=+~|F-NW%Q_XB$bpZSRU;gyho2>xFQTu=zpp2}Hx3TfW+3#+asC+~BIEVVs?tkbX z2&!i5orcV(?dsl~mK_qJBURPpECcBKIe|^T+e1fQ44PQ%fbv;G4E8+uf`xLl>0(dZ zp@5g0RZY!I({ejys&xlhWGbwd|0E_Rj+T~`@Y#KTw)11*;%0eF`sunKZRxRURp3) z*u47AJNK&xWmTs&ws_b?*YgRtw)>e{uu>8UEX(8Tbys5XusxK(=f!r1DUU@~r&ryO z_V@kQIay3{WSg+b*pVQpIT8pJnG#Hpg>VlBEo3@%97+-p3y41wRtcQuvwtp?Xac2- zbc2<-Uw^(Q)^2O;xBEF2Iy!pTpR`FwM_wIhP9l*knErhH(87U{C!RVR23fW$R1hjs zb%cV3D8Qkb1f*+WEG`8;ci8j2UqLsB4WZ$m_w@_7lQ-3Oz_r0wNoHD>7wX(vN_7W# z*yTFPHXHsTh((nikPuQaH$D;Mkw?+4V2ddBSz`GQF5EU?8?8KN 
zNWYfSjoK#N?@+N;K{Ck&JOL#s1HyUDT5&in_JNf;gHKzWwr7X9p?@^9oWb0jJ}wu; z<8A_x*ce+cy}I9BYh6cN3@jl@EZEVxm5VM#^fH5SB_{q#>mD|kJ9oxJ4c`8j>Sk=u z;33c<{^~j-8D8*(ioq^n5its=t5B{il*OH?>WeHt%0UdyqGh|IE7%0X5s++jNy6h5 z!|>OIFJ=J91l^I^dVg3HF>@`=`%Xc7)Q!&Q4&_quAxk>I#+V^!Tw3K%6u*Po)|>*@ zjv1T8n$Dpn$$CC@L|S)4d70#9me+>@vOjADp)L?vi}E{*kBSU1p$=#%e&>_e z!;7JXQ3{0oT-q6Av29OvyFs>|6k)M~;)jgs4qhB;KCOlqrGJTD!CrOl6yHqGx*-Jb zo(_0YU&PKw>u~kkoB6|nuq#IT(|AtA5|*cST5A;~uhpw;9Oe4$Y5Et<)vqxNDsb$0 zGT!QSlq8~crES|N=ECywdqlPM+ z2C;QACOwS^ElR34NF#-6(5`L)pJMY!Kyd(J4yuQYXs!;BTAuRkA1S zq|l)mAT?phH8dz$IDyBVw8vXulZJ?H=}KsU+vfKq+=p^uoEyg9R-I-g&2XCbOj$e~<*?f`W#4mzg zdBwP+>wk^$zSNC!QfH$bsjQ`gX2y>{IZHV1ySxvQK z^{?=PupL8tCBl-9B~p93F|ouk77bS11ePHcD)ZnG^ApnT@*OTG?Ev#;iO>YIT{fB;J1PQ{<0mbh*QlXOBTAlaU{+^Hi6D;?lNj+vr4Y#UFNNYEgwRE7kwVwRAV zRezZp@z?Kc!R+%YI%ygC5&NLaVkIWD&2g?ithMQWhbu@V!fm z%BL|KGe?eQc$*b4g7V!Q0yfSN{E~CjqDD5sBw)m{an%rKx4IdvJ((*x7xPj?*sQnO zc3IUH5*Jqify)!rj*h9437Cs4%X4#kl7EXVouHJtlRA`WFc{~GPF=dV`pYy7m`!t% zFmjj0x*HL-Y^D}OVqjvrm#J@cbrz>GrNAJwkfO`=ZCKjOEYiFy*_||uBP;+a zUP3orjEQ9Am;J)|4tt+)G#?e#a1oa{gEy%UUf#GVMw^8 zp|3lKCIN&Z65&F|)+x?xNCuHkPW;t~J^?G+1On2i0q9VIXn!F*pO1iB?a=vo&or7TvpCRrT+~d7|PzkEk0?LmnBq4D{k*p!L>EGFWA5FT0O-kp?k_kK=R(ZKc zo;!ogyI&ADcPw{0y<$%zonZ%CwJKKL` zbU32`y?^qId=IDvp7^Af1mo&8@Ro)LAgCBKM%WM!53q;#uhN|%F5Vk}Pp8TJjm;-% zls{&qEWnsC%&X=s{G1AO-)4pLw9_{m)05JW471q=`cE+P-H6Cn+#u%1re^L(Qx+-- z`kV+-U*6i(p~dTL}ZUL%33yavF~~<@^9G1d!UG3E$Xx_$H&KZTyg{*oL>a|qy?f?+J9Ge z|INt#|N9KtEdRR<+5c%BkWTN_-{;^L{%en1x@T3fn#PE)RrO87%}LAbRQsb$DYLSv zCf6FZ=;2Cv2_+eJ(40B#S`ZL&zjRdid{|-7kUlMCu!LkFIZ{3nyb_^hel$33YeWkk zm(z8tnK?+3gqX1km71$v?T^qa@MPNSmNdB~tO&$htmH(i&e>c=kh z#d5B44#q*2^Nd&t!>Ftxk72_h|BB4rTI8x5hxBi)@4-IrhTU99bPn3u89bD*JjzuW8qp683qzNfFMCOZD(p=q>+b7z~hGX$WH zJJ{?7)jE_g+&A|DUm>QWHGlTUHJ$jXxsaqBqrYl|{6HD>A=-@7%NpN|8Kb_5WEbgc zdGWJfZ#^~IGfQi?PIg9WUbnS(GwutLDIN#uJ^9C#Q8d2?1}-ul)V>R0Hh)xs;m#bl z5rg3E%LLG%IHklRI3Adf-KPU9QPq5{Tb18<5$*Xm=0S3x{;>uYB7ZK5jZ@816Xoc8 z?st-0DWsiUppebUnGPBE6+PhsTz`Xpz2B35pH0h5UmeFBJKd(Fr;o7uSS?i4ebH|x 
zJKkuwRXct`LRD{TW!K(p0m3J?n%NBo9DcDI+&h>?@^M|{FQ)NGhONBQ=3p{(mQHW= z7^bIjeF7cxO{JG=1uYQud zm{qi_uQmr|Th&}qdrOlXh{K&EOY2(ecIRV$Ezc7{>LV}xm;d4;{`2_OoTw8Pe_GJi@SyKj>}@k-@cmY})= z5v|-V+OXVLMJ-|vxuw?k?t?tb&q!p+;06Rg-FX}QS+YH}H~h3j3W_#uW@Lo_;uwPo z0}6r^LRmVxNIW3Z3$*pJi=3PHG@o+Yxe^KnIT093nEM5PRb~i7^h3!VEtpImt`p&! z1UC~h(mCH3C^Pc*e1; zMPzB;~c<3yA6UF^!@}n6VZrQyLGXE`Bfp zosk&(-Da?CI5utY&fw~lbT@`HrVy_}0i!;^E`Pb=3%JP&=om5}WD()k);IDAHaSHc z8Hr;)tCcrHs9@u~B*Rcs3Rl1gy+ZLI)u@C`*OQN)!&$*p&F8@i?Oxqz06GS5SI_ZU z$2LC|K9iWytypHzju4$9Sh3bXPFO%fkzkC&9!JgPWb=zm<|Y}?&rywJt~(+uJ^2T0 zlYf(u4;te31SBMjStYGTc>Z>E zEF{5Lk)(zlRuNd?Uy+m&+ZNPER$G&V`&-0vQufF4o#X~= znNrF<@|a@mm}1yuMZKhi*Ebg(rmWa{YMzu2brB=$@SzNj_Qb?%7>isyCXYO!hz7i! zzK#Z-)Xe}VL^n|&x&aFtF-#wv0?4}e#<~K^dre2ASyv^CM&~6 z`XI_H7iPfpI>vp$Wy@RHaHk%pAk6>Qh%EwqHhd*yXtDk*w&I(yx+c zlp$Ez!(WzWj9*jdAzzsU9DH7v2&N!zdfn7M-RpL`mEhImaU1HvnvFsVm463n7j6dd zN3q_$hb?0|OjVmzOJSHWyxOCOj0Q%yA0xvz+*%t=SOM2@h$) zi-b0LZ}r5O^jE(ET&I&99)FQ8A29g{qIVDsJ$Cr?WH731HQiV!6~>G)+O>6N<*lw5jIaPOpU1tCm>>v-66#XnZW3GLHQiV#82s}Xc z)m;nr4sdfZoKn?QC#YV#r$%EOh~EuMVdox%5h8H}j9nUf>g7k}K!2L?B{niaeY!M% z(Rt(SdEM&!8i?D@{4?fs2#H1b+;p)j8XL^FN-2(&L}{kO~3WcKP4-0e?StL%LpDU#1N_kbO1-3&HT{ zr6g=d$pVC}$0sUT%3H`Su&>^pdVWEO-{DS=_D6;-*ytA{4h zv9EYNV(dH&dYWDM0chz|ju)1uFxtg9Z$U(JG=iwh*ndD|^8Mk6(8#6dhKZ@mkk~e$ zROw^4g_JC!8tt~7n>Nfi?oP4r-zSidpJnEAaKt@Otl+pN#}44+)z4WMl39Don?JW0 zj%{=4#?53X=@}w1s<8N1OwJ=fw9p<`S^MYWiuYcjtHa|g8wH>Qm+(+*jGH#3n@%>t zqM?H-2!BO@2F$EY{Vy_zMhSQ)%SY8t=|nHJ&G+8_b}o{0C|bDKz<4@!t>?Plf;YK4 zpV4@?7yAh6ftpi+StKdZnORmZ&~^;Ck@@UWTAIxtaA5a@1#5Zz=={btqoYS`Q6@q- z_z9e=rz}zG;lmj0Y8Q#Fc)Rm(=b7xVunww0iGRgdkrZREy<^1NVt(I{zvT<*afvxx zG_z!*(mTcw;v0H+%V5GZ-(+yXuIftKSR(qU6P-D)t6$}lMzvP5kXyoeLQj@*>%*!5 zDW!1RUeF4bnMc3sGyO`5h#ofK%P9FMUD;|!f4IZxc~~O9ZBpx(5B2tnnt^>Kd7#^Mev~BT7)wV5D+$Skh`DXoV?CL%yJll2vXI@IFTPP7GV@LiV}CC_sXVMlC1dw8)RClh$@oDOt+P5!4{6EP z8Nn0v+8<5XZuhjYXkgBf>sJ;p1nvv<^cIO|u~@>8Z=g ze8=g9!~lgrdcVjlzTBSP#iJ&IIq0Cy?Ag-M5<&7wzliaiF 
z%x-_~l40q_vdFl?N{Z~ctPOwR0v^Ody!m?%BGB}gAJ<;}v^QEWl>}X-KDzj&JX&6d zHZPFm9E4~Ghn!rUrmqJzl>>p%E{T?nV%R2CS+Q%5pUGg&5bD$I6f-4ev9;T5ywk0K z-b7KN7MJ>2tQ|+Zy(?E`pjZ&EOQavo0u+CbeOT}oGG-Pp%ge==eUB68D&9=ur%C%_ zk8=fwdU~&P4m(dDd~%AjC;GX(!9AbJJk(%`AQ_?sz5eyKyuP}*bsBG$wU};|k#~uK zl;7QIz3&J4m!ofBH?MudzC$9O85Y?K+%FWgzJDA3uGQ7XIU{qxaUa2dbo!jQ`JaC{ ziXZudKE@DifuDKq|9EVQz`s=H|NQgM{~s}R{PWL0|NQgMKmRL{xccXxfByOBpMU=O z=bwN6`RAX1{;z2E{`1d2|NQgMKmYvmzY<%KfBxST&4DV6!P&5KFEgB$E;;Xo` z=a0&dD3E&gFmttV;G3!h3}$i8-hAeMKV+TYy*#}w;xB#g9&mqM`yHMx zYN`UNQuei*WRhO$;p68~?q>)l zPr=hoe6hy}VxzNY{)aQQ&=K*GR-Y95uw5reoe-boO=}GK${0K>@hrDGOS{4Ns zSitOS`gJPs^b;1mtme|R3Y(+Dmhk|CrWFFGII*2o8X@ni8MdRUC+CU*X1{+i0TkfU zg{ky%F}@a-ie>HdDeNH$-1eCq`558PtiV_CdXKK-eLRQZt1mzAeSAZ$ym`|$?XZ5y>iS9)nnUB82! zRplloMYnZ2YiUBeG75h$&q+;p-IVwCAqH4s9k_bM<{~wG`-WMmR|F#g*4fu5&H=g$ z{Y4%~ynjWXit;eer7cfOoL^Y!D{3h+94t6Bdi9%9$wz@o7fka(N9BWn-E-pZT>;qm z=Pi>JcLh-MegPV01b*+6eu5ak)CbAUN};gtxmc~9si*TTQyhPNKnvjy3B#nHVa1XG z+CQ6T-VnSmuJgRk`3nmGp7l%#W|7 z31ZABYA2r0H?wx^knjq|1d{}oNTWN#4%u(mlJQzyY}yZ>z+L*H)z4t85w$T)7F$or z*BVVpLa9cffBHlvdLvb{-7dv%?FHAhQOrk61xOT8r z`;%3IE~Gr@b6Jf*3z?grw!d(%_asY}8kgS(12?$Wq27POl+`Rq6@NCSzzG~Bc8bd; zmRbado!6dS!^^u+zr4g-=|mg}riHjc$iOeF&Mh=PPm%G$lKMYnu?W|_huU=@I9t#J5-zi3{%Z47dW$*twq z>-!z*nSy^HfHtcj?|&du8i1(xmQV;AOhcvGwR;JKRp+idG1rG_Ml168S! 
zh}o@38K`RgzORN~w$#$c2uRC^> zPg8=R?WM6J?i08BN0=_RDU_c}|CGQ5w2H4wvQ2-?7An8X6C_4ANs%El$@e9MPujO1XT9RY{z(!_v==V|f#6z|C{k=!gs zH4Oo0xsXh@g15_ivYKY<_m%Vv)d@nbU7f4WJ~d4yN$v%@GH+zlv)~cwO>wCm8_iqZ zrvHB9l0t>ItTF@s5^7qNu&}IDl5f0(kB}uV=&OO@qKe|88app4Z5c?+ zQ@IOuse(6m*=H=5F4r7A&5SmX{(+!Ind$Z$ktgf?yBphb$R znu}i+V+6pF}ToT*qv5g0EH%D{0R>n%U92d%;*^Y-nKxMGCIBG|%XPW>E`O^r!LO+@Nt4uP{f6CErhIoO@YZS zbGF_s^bVH=3Q}_^Ad?XML1KR8#iW0W<)pFzi+Tur2H;xCyXSJR8-+U~*R;fEKX71& zRg;@#-CN|!R;Om(w(_FMz_4_|DZwEbp>tu}K*`NbAooqUU6jxU0Z%q5Pl=y|csb*RO%4JD29@r$c4lR-4f?6H_N!vRc2&4GF}^ z`=f}fIY}ds5?gqY16$y@5=EDZ3)n}X$-8aSx3IqF-tP!5b&^=zw4W9LUqkgR+J=wE zyHc@Q@cbQKnOD<0M>#zPpxb{xhGc{}g737_SOi5BPz*KlEUET;%02b~U!q4HskVAu zQnzl*M`JavjIM0}ksjr?gVArBEyvn$0705+KbVxezB^_0CaYhOdAQ;-`n(-)MAd_> zZrC0-2vDnC4&!lu5#;yF>EfP||bDzC}R6eN?IOCam^jv?{x6~^MV7vXr z%>NIjqh8MdQ@k&mki9Ny5}WV1%&A&gOaMf&j(~y%Saew1yz`iPGCl3I!KvGXly8SL z#nVHQ#Z;wl>h4$SGz%pkl{kfB0`Z0@RiG4n33_75ktdCskFD`Kh=ToGwS_e~rXKq@ zb3LGLN=#@?L_@9<1O|Uq22^SH!|4p0coeiR0R+64bbhRBQ%y#Xw~u_kl}^9tr5vN9iPqUJC}_dn3r(1YOmOw3&b2gp5+%ip_00vSm5I zfif}F0EPPg45{`A?#Xh%Pllh=l-*_Wr60daSpzX5sUHanfJq$82C`fDeq%xWRuxU( zQ@V@s`gPwi>SQt(BPPr1(CvPllJ>FUJFmsx@(=_hGVOh^u4=M{VklVV&FC?k0L(DR zZG-*JV+t@MZM1(E$b3tmy5z&NksD0kBHx+9GQ1gf(g;%x5sHwD$rJ0bCo%e5O)LGn zwNTyY!WfSZyXF&NYE)C?5&B-ySZt(gXaIi=-bFnLOWp{Tz#$73G&ECh;%+hqN>Vbc zTXg!6pkK0Z)r+OmAFMMaZJ&E_rGFMvYsMvYv>&QQ1k z*?Z_F3mAC?D(+-XVgm_q{rKkCVs2L+W@OSLr|Y~cDVuhIm%zer0OC5;7MyQ_2-Np74r@9CxjUtPnnMhe&N7vdah*c0i?^tf*}!sGA>|yeq0%>59@9;2{9m?Ve9+Ux5`d1q8TB zmE1UIhmhQ00$o9Gi>q4RXvVci91&q{-6L#(0M)z~IWRSYm;dY|L4fz$Nm3|9DuJf zz>{=DTk3i%NYpxPJHBEIKQe!RHaa?To|F*85kBinzz z&(lEpJ70e<*OPSAXxWpp32Q6daxa&M5%@muk008;b-Cd?zuQ<}*IDD#p#v9g_xe?VFOaQG5<*-xt zs(&A7x0@Dcj{Z`V|}UI1;W057%jDKpNfBRfY}5A zDn^wQGe3?kYEYp~U94-K9?j0TGTLpWW*Zcyw)cDMmdRFR1>o{CA++0sA8$0wCMWif z2=M^}G4&7kc>vTwnUi_B9ku7p;-J+Y-MS~iPFPjv#k25$%t)z91HKxySTGSa)SO^? 
z|2|QN_Gs18o0WaZ!Eaqi_E&#GU;6Y|-#&J!s~lxZ3(ZV1GLzr5l$}_!$}smzVg1H8 z#VE&&`f=%@2j|Kg&1`vSn!{y;q;$zp52$FP;#4VIcFcqMX6v)81l*|R7{N+ep4i$g;;{m}+d>p4^ zyFl{Hphdk;+ZxNfpYPWAIHI9M_6lNTFyi6&ui^G+aWlX2k`BnGw0Fl`5iurjew&vO zWc)mub}nA9Sys}83_3W8<(bqY@p#YBIZl$wt))emUxYBEVid|F(f|hY+h*1W@6wJIVmdti`$#(es6jwS^1)a!xX<%9&%!$pF5|Qf2@rjthSlusW zF5-!;x|y%DWBmDkIruX8KZtvWXkmD;N$}dXZQHhO+qP}nHr{L7wr$&1e}DHZ=FGA; zv&!yCRUVwA7Ag3pjg?RvixRI_9IWbUjpO$C);JhnAsxrJ;BJ3BV0_=YCK3p~O-3%q z)&(xqPB^gY5>aRdsUeexg$5u-PZDmpMdyJRgj!%G#6aDr-oI=-^Y9915~vEL$S9G_NgX&!k)rQpp2x2}!WOdhnWUP@L|ev2 zX$8ed7n~TB{qleNtn3Ze6ZdHOTHlDZqF3WP=^Ng}K9AY)b#ACyXeBvHJ6P+7o@t^s#CUw^sxcszt9P2rw z06@ktj)el9#lfd-`I1LjtnZP42tjK?=~F3e_GNGUwdVV$*_7SQcn6*sO4F*B(5{sJ zFJ#kS0WE*C>3vC6|Ejo}l-u=pvtGH#m^6?7alYR;Ckhj{U%)I1a2_$SafoR`ut#9={a!LnLKRleeOmX zT|^%V8I?4H&d7vyNu}8P&2?S8-?k;DrhfCDGZcT_srMb8-1)^Hmd(6gl^zu!SS<~L z;^X@LfvZ1Dyq!PDfU2k;sT|*5T2D;d5#Yf?L=if!5>J<6YPE$L*~2@!7k!7l;H%A|Xv~uAJ{`cARsF|Yz79xy7RmAV@>ydMK&OnviA{#FWr7h}? z{?*!+ZTz}>)HBvk56Jgrzov>&(@4h9%OHP75R!%XT(y8+~)H)hSH`Tpr6?kCe0I;2A9YtLW#bkG4p?( z<_9G?c^TYzMGJJHj zjQfO!w(A01S8}!ebk1`JG#SY*!WQk2QyFAQR?dtX_UdZ4p`<16O>KMUf46LjE zug@P&4Y!$JrRz2@VZzuOpR`DYUy*;|R2egAa($;H701=%iJZBB=w@fJe{R~P1LKq6 zkKi~)EDz)`!zhm1T~_$}`eBT0SEvGXQBc}E6OuepzjV`cq%dcvjKQjAKxtZE^8i%qZ zxq8?FG%k9;Af++yAKx)T8MDt-O~{XLdyA_@FznY&?^~|O2RzdX&=3mPJ$AOm-*~vC zMn}*Y{{tnMT^Dku^ZkEGaY13>Jx}JCoe@b7Sl^;3IANK_@G*J9k#R{hD3_CJ&TrZY z{Hm!^IZ*L5!0n1S9u+*?kT6q|7Gqxg0@#s3PhD}u^|qUJ$OX*bEj$FjX>3W9s1+RF zz3OBK=Flv=?pV{^7mDzk&RBmpG7(+}W}ZLfDvRD+ zAFtMwz?EFdHU=Wd`0&*QgMp+pwfIfL)=l);3@<8%S(t=oayqzaBatc08_>{IVIsMg+_UGq`X`?h`k#kaom z+#La1eSzd6ugZTZwe6V`HvWP94(zbVa*0n`%bY6p>)u^f^O;}0^U^7T!6{6EEYnhT za}pBLLICS<$v%e+ITl_+=nS4K)u_D{S=(7200hC`wsZG;6N4L^s-BZ z@!Bd(-!Bth*DsgWYfoI<@j7)~Uj{kfgXv-_+*K{IO3zPtNrl>h~bBuPmiY`or(n9OMS`-TyAj6)2qho^BUsw_;6%FE=EL?1^Kp4dvVxCv4 zwzBgnn6YWcXky2^T+^RlJny2k(RMck{EQMeQ70_h4#1)2`NhpAq++YPQdbk?Mpf07 zJjVuNdHUhmQjF{{@*Y(!FWLQOW(bwdZmbG8c%pxtNI2-z$G~7~r03T}SQrM_dcM3D 
zah|P@(3Fv4Vh~@S8)74J+(x=~&AKT-7vM0re8WBJDhJy+K0o~gJAJ>Pg=+1YE{lM- zAc7G`95B01)LPcqqOs)vT4gf^5-8KFsHX<1L^3235m8WrlnxgptWDU00jL)pA-udX zyZ(Ow%MgM#hv0)LgdhT?dhPDr`|_n?E`Q#de7hr2EnvCGFldo=bYs_2%czSl9&o&F zeE~43O9HZX(l(4Lmn^|<{Ua7kO!6?WL*v}GbKM(Z5w(;xscv&2m?`b_B4%?ELIfj~ zI%KK}^Mv@|2L%IOP9>af+5c%pj!<>bgZ+OqFUc>u`^3)ps+2DZEnvF9(4OVVrA5~X zQCA0-BE*TOnk$sEiV~|Ah0?>qC2ey=c71WJ7zKNz#3u!HZ0vifEFULz4wQjGx~UI{ z6vr3-P{_qulW%J|34k<%$VB=V4YJOHk>77`3DhIYt9?99-WQOm2IK5~lbUnoT@-&s zD)GVQ?`_`=2|h2>_|w)WIm73T3=6-mMNlFD^iZ;aH3cXg&j)nj?P9k(Sw=GR8DfBe zF)+WkG9a zfdb6FJ5Itzf9CUu-0Be2aM1PZoL_&wy%r6Sv&of4#<}Y=Svh=EoH1PAsX|1NMgaq> zoX8@2V=ZT7Ck^d<)`I-_ZTg(&eJ=0RbFr+`XS4uqtxF;%5wO5sR>HMAaWPst|<39H9*qCA6(+9pe~+;w#D5Mz@aF*BfVH zf+;cQlf(Uk)h$wc~o{i4;?&$4%YsSnmN+4~z$^Y&5 z9q0BHBVXntcB?0CIRv(43uVBQ{x#RQ>P|8!6Tyun3IIb`t8ep+7?)t3^{_|4jcy;-3 zHY>Wif*2FOVaF&D6L5A^s*C~r@D9?H9Gwz}tj@U3-LJt$G>$&b{}Ut24g*1O)rmY! zFw6LKdL${?TkfqJ%Sau{C|jroauRS_%8hJy(L5;uK!^}$^1ixNsneNalSDQ8OFmAk zC9<xWo5EU{?u zg-}YLN&RWpg-f3L--Jqm>*<)hf)?XXgJ33{UEFB8f!KXV9vmdM7%Ke}H z?Y8b1Ud(jm*PLgx@Kabjbs+~F}An zNxB^fWm8pUj8TLR?4MX+elF?q_?~h{6h(}?LI#Xuu+8K>G4KDE`NL}On<;8c-@}Hv z8>Fza6p)Zng_Rr@R`0t|xfld=C;}3L`oCr}EbQXX0N{+b75W8-<3tJpOx6_{$O`}@ z1FL_qd9KzL!>uM@9B_8!iTlm2TXsbpkOwFka|M95}D0CWeeY}%w;?;2mq*g zvQaM$>}sA-8L>!!HBm0UhhdD|bp44@{J($YSqvt{U5%^2)i87|Z9j0FIT+&EU0Zm) zXzsPTx{WrK?i#+hSBQ}MVkcE);%GGb!M*wszCUConNnbgs|Skwwp|;J&+}mn@zi!92rnBXfq`hg`)dIX}8sNuZxoy-baX6!ssI-gR1tp@}+?q=Tp1~eMe%FS&n~q z2EF*gG5pLHaf}@m#Idho0hF;4Cr&gnakZauicSq6!UQlfUu)1{+?3|U2$?Jyad1@b zTM0A0a9^T-g8}tz`48oJW%YKswy~yEpcgEw1wsG_z=cN+>c&HBf%c3~1bha>`!5$; z?@mA04Bx&ri{9cw2-LR1zJ|uYH&TBGVTcfsfc-L|HM(T-V8|n_8gf6V<^6uJz4qjD z6R^|G+LoaN2tnwVmwwLTwN9)BnVtyXH|_xbspa|8bZ$A+R4IDJfe;a~g)5F45d~-t zJn$tseuYt)NP&pTb%<(;T}30$%?PhOU_cl3a@G2Fb~Ze~6p#8L9&d}fAKHJEb{pRv zUdFSFqY|Vb6H(Qy&PF3{Qp&lH4@#Al{8jZ)Od}<4hH>p z^Fe_`DL0Nee&&V`!~?P4?FD~w7B?sNq%p+&LQ%?1TYhWjJ7=(fun{?=6cI;QEM$ol zC_yL*ZTIdZA`$)35+ewO6oAM|O2(Ww&O6dw#2?rL^nSAuz**bwJ 
zjz092WOi7D4Ljn@hNXW&PfbsEe>qwf-Y_vUGyB$0Ps5mwGK?{fI#}LLOWc)bbCL7v zTJ&2ko#pINTgG_BMD-zW`FQok$nz!vmx?E#sU?n>vZM*fBaL=Zkg+Gl(-muMOQgsG zq;XrQsQE7*ES;}LnC(?(Uiu)Wx&Y3Qtz#g-3kZZG921~Jd^mq(blVi?@>l4+=?3Al z17NxrP-t%e02p%;AKgOZoPV&}z$hw}8qk+>IOBTq!iFYzc>*?Q3i32X4Zj*YgKZL0 z2{2YD#?+OX|FNHA{a`WpjlIKVTmlE!O3xXG#2e@ciNEZ)p!}vQZP^hVohp`xEZ5zT z=W2*-cGEXK+E{;mqLknibrMbrcks!)J+rIU{b1BT9sj~*w1YEkE==`qNJ~&pX&&Rj zf2R=3ihJitwdZNR-Kkjr1;uVI{2-(hB;Xhbo={sAeDWFWNq!RVL$|lyc*@NAh1Nc= z^nY=z9OSaC6^lE=y=Sxumi#j zm)f&x{r5-3?E6>V8B~hb2EKzu!3xB7=QucR*Bt^1?}x+d?wM`!h1^&}VA%g#c;@`G z$j?xdcj2hLR1*lrPH34w2Fd!EO|{ZvZ?+#8KeT^-70Di4OSDpJYSXc_R&mNFqu+Gy zsJ&1V7)2-K!aowKi$|{Gd(FP+|2agO|CPnk8W=-JWxw9`=)*?$)4;O1>amUHVr}r) z3&2F8b?TU1frF8Fi~2{E>w6~I*e2Ep4v8xB%&W4~ey`5*Y|JCavV#TEh&%!_ZWO;I zGh%<*9*SSw_cby2?snOgz!2JQ9J$$fzBRlM%_~RZ>p+v6Nl0TOa;?i_XRU<&4jcW6 zCtLf8ZncGo988_QTnma{p4PkOCTA6oEQr5}fa)i#wj)q0?Y+HP(&-jWbKwtWyAfY` zD`XKsSul#;Cv`{Li!Uh@|4{~y9|YYX$!&kxy?gXZFceHogYARI`-pnEgNJbbDGT8oCwQ)D|mss{yvM>hThhqV>q&pNpt;qJ%Ar>1NL}x4lHbbwX-lr zIPN)(?)wBRxFtVvS?;l&x4nYY^mu#5#PO_%)eE8>C`9g6bqCX;?F+M*Gv+hT&X0fK zq7%>#7xYMp*GBF>B5_EzHEV7$@l|Pmq5x@D^(50m{|1;i?tw$^`xLD0E*e_@L*DLE zGd9s5i=sywT<$M&>5X7X)WPJ{_%mWf;!!^Ku-N7!pzz5Y1?OT{I5r`m#`lM6cM!~f zRh5lcO2CjYu~^M`>WViey!RORtiyjTs_==r88Ie#ko4Euc=-LKa~IruO6Bc9B|;(^ z-`1lnb0CU0CPlh+JA_LHoPX!EmiwZsn&}xa+g7L>bKGg|Cir97PtO@<0l5!n{2fEN z+WWYtTX<)+_V1`!th9;Lx!@VHXizJ@xK#TnPGp6zHJUa4L%tdwrfYg%B`kmHs(RYx z3|#fV&lXQyxNX#ya9pqS`8c_G>;dZaj85I%>r~oCEfzkFz*WWdwZZH4bIk)Mb(Q15 z3jQjE*WV)>M*kjtEyJhkKiXeXdne$}$r9O(Ovl*@q|>eWtT$vkl>cCpA*Ty?>; z`hPjteaEnNUR%D~>#646)8KzSo>SgBdzLe6l+~qzv}}CB~4N%gKMGr7Nq(eC-WO zV$V^`y9T7{Q%d18R^!8UbBT+O<{sNvz9e-f;%+@uUAN`=>AHJ={4IBD=oy=}>KN=@ zOc61!?WH~k;iPPTpUhP(_Z+Tm=>mqNL{K)!j3*ph->zqj`is@{Uzzmp&YH}lTA{YH z{pWMLLce&@yuX+`SZ#k)c$(*%p9urX3b?t{GCyH&;Klx(Qt`Tvtf@F2?Ub|@Zye*k zz2m3U{N>Qnd1bd=Y$$1f0ZkHG;+Cp!8TMWi9d|CaSM>5nWNgms8?slX-`;hv@8&Mn z+gQrCKP@QHCcN$gbpA+b`2clyO9ZM}bgSN@unZt^}-skbjW zw?3l>Q^tz4Y+wrwV?Isj-S&$#9b4W^sGBUR?}0Hthcr>vd6y6I=d10kWpB@QS3YZZ 
zH`8MOC~PblnH=~{UX%@2j8iVJ^~=p_KQlJHXIMM02IkbUX9Erq_Ia)#qhM~b&g++- zIS`jmC*4)#*Vum>RmPd>`~z5fHARbZ}*^}P32ZD&TkF^y)UHYOQ% zyjkcCOydvp&o9d8=PCN1XKf)1ga-Xn4mkzQ=txNDY}QyL5)Itmos$>K6pR{+-6Q>5 z>H>bx_|e@$jU!1ADEvp%#M{q=tWJ=$xkD>hCb=vFAPRrFz?fA8SHQINnOgKN&hawa=zDLXL)u)=-fK)&WrQPwEx#U>Sn&)gzKFLsr7AVvvX z?dgi3kwYF!k8nD_jARrW7H(S1SR@La&Ld`&7fBOTxwC}pk&&4HeLh}WiT061XwTc< zUIB{>qH=$FVN+&@G<)}_Y&8xQ!GjY0lT!Z@q1C|7!b|K~h0hqc>ffecJaXZ-&nIlB z@ylIK35Tfz*I?}5q383sSA6zTjbn+-k8;a)KXIRFRWaG(S$OoZeWLMnUJio6d7!?aHGr7wlJFu8io!bEH6q$*)A z-lBiv@Cu?f=$cZtvOVXw?gCMslY$nEG#;pR9vt)?pF#G@T6q_J9}zFZqgQqcH&2{; z`_Ws2W#84EE<(^Mf~bVCRZVtG{-I;Ue@Q`o`8gBX_`Hy3Zm~@OAlZno);(GCeV45U zu}8Bxalm|az}!7(Qk?IFN#nz6ImK+RZHa$gI|~7-vHQ77-Fmo%pXlw&BQwNHUul@ZV#6b>){C{Z5pBe*cRe=1M73~i zqbYV|mYoM%VGGf7KxaVXcKh(PFs06rIOjQsuqe{1h7yb+PcThb zR-DmvF^(x{oKv)0Tl~A;tNQ)Y8RH?TXdtJi?Nq|T4i{lcfh<@U@7ln|1Na^xz6ZCX z9f%_DEATwn(xX;pTvQXWawVA|cpQHOK|P)KgI(-ohcm=PZ7y`dlmVQNP)>lPiX^Nl zUc_*qs1c-=G5=!gIV?5rw3x{U7~>I@f3ZugkC-%5)GCe%Yj=WjhWrlnM{}Jtm$TLJ z81>$%g%mD~SXL%+mN-743Aq@U%C3M^#52vV_y=W%fEa5Z*ETb>7NlbwGLG>WyEB8Z*RT7b%uEEw6;g-6qG|1K_R_UGg0^um5i zu8c~O4sM+YU;)`;u26Hv87ggMVi`q6RzV{m3b+ZvVOq}`PU5O#1jrDIki0zqz){wW zqKmR4%dh1AcZyd+WMPn)lZcz%L0Ob%=eU1tcxOaaaf0*^9F6oWv z1ulrGQW!v$s&Fz}Mdx~>92$t**?#Gz8{=l??e|^hb~jS4l}#?K4T|An01#UPEyvVo z1p`DjgKPqqEW4jXRmK@fNh^HfrSO5L!_*^liS8o}I>j&Od9xJ&HHMK}ApweLRSSS9 z2v9la0u16e#4n}cH=lpaLE5RX3M0;SU5DIhnM*BKUC#h@t=8#{g#fRt*H!MgzkRn^ zi7VEYI-e*86dEx=W)Mr3ZA+wKlXkH9Tt=jH_SWU_04U=Lp{x@$>q=ObnC@{#oq_2e zFbUs-_Rc$ofJ{RrRgW7goFfnov8ZzMzbPmEye4v^_((Z&#QA?`;EQ4ZUL`+(+Vinz zM-t*20STbUVUtkoV{wWg<&YPfYia>^>`( za{ZjnRQJ+ovob5}B z=|N5sjMq->Is?n%#;%PwIbnlrlvo>+%iFPjd z%6kTHdIdE{TfOs_TeoGS-il>;TDJkr^4X~1UhL1gESVZm)pOk(XBKG2DPEB*lpgd#rWj@93_*S`(i^N($6vELj` zv+H?wRA(Xg>&@3TTlo4_vh!a7HI#p-oRHX>YRugITK^Z>j>RTvM@58Q9$*aL)B?co_upo&b+8=g9a2e_%%ip911n`oZ?W+2Dj=f~GdpD7z+lZ2J-3 zjBBPWXddL`0;H67G5{*YTw!z?wIE=KG~%Ukx~YA63gUp?X$s@-$#MGR&0>mXsiLwHw6o;UI=TriE9|TbVs4hfd)MYFmaMpj2 
z&g%KA+_ABoMWLfSb_*|A%{hMDZ1AhI3=#a7ZsCAf!F`O+%TGJjAGc&K5nVE*!r~^)1 zlUv5YNW&Pj@z~J%zcdy7%D6bWe7^^7+wH5TbeaySnoie`$slHdHC^{yfrx+Ji;GOw zQYA;#4joJt!FnGnug)H|>d4)^JSn<->^T#r@TZS&34i}T1*Sv{P^%m>B=5J|6PNk> z@$u7%$cc1Rs8CwF9b6ieC~Grwvvl+*&~lwhg&H+QsfDyT<%PApyW|;@iIYYbclWtx z7k7LMYisU*jt&kE`?WfBZQy?tc}`&4&U3suR9IM8TwN+ueZ|Gj6CVVv@$vC({D0qy zWMt$HIV81riq2Qo^oG zp6C1XUH_5JyS%>MtXVmc?qA9QiHwYtlnZ&i*~xTz+>tT5##sAoMc6;Rt87@c& zd3$qXv)dSKG@8B#;!9;2%9x9v$#5n2HuTlq@dx;l4+oIv`y?He?zC+`jwT)??&;Cc z+&0^W=l%M9ro%I=uLZNe@_Y>9RCZ7HR(VaPeZ-<5Bc^xyy z^ZyQmdwYA2Tj$P{FgM4?)-UK9_)eO=T&`$He-1nq=!ky-LPR2a z*_FXYMUPuSW=as12t+`f#xDm5P$C5RXk+*E^ne7;y{~hC!(E%d&m%xTLA$uRC(4v@ zcs+mYTGw`*LaXclTzc-C(=rnH7*M;M&ll>3$K&*NbAL1T+^bZMe?A0XE0F4ScjsUC z1DG@+fEa)1VU;9ENE6>9+~lgz`xBSOp6$=c5TKvN)DnXfFpxydL4bx4B2@-LeOwxI zqfzUz1h~b<)M`!z!}Azq9IWsXlQK6o&G!1f+1aK7D#4ebk|0r5zUa2+$%n5F-HbTu zS)*>O>`9U(gKhy%0THq=m=lJ2dKzv-bV?s)hD(3)c7jV&s*u4eiNFM$OMHuq1Y}4q zfCe(DXCYF13}Nq$|JOA!HykR}tPD!X60ll=Q^2&^R05%VJed-W4aDO1WPzF$M*+zI zlryXt1Vm)#6Z@SU@Oz@K<@yBPM{N=~yYLe+Vw*9_EVq0a9S?iNVMe#2wMfJ^X(6zi zPtSkP&%;2+9CB0jL&DG~k|hkQ>Xe~%fTwwy2Hjs$th7Q#vuPrxPZd(2Uk&;Rp0A1; zeYkJe&sD&N;;Cjbf$xfzH4!jviy zTI9rQH3Seol_^oCsv|HAMTmefBVA}uB(1j({9ABgh|x_Xx1XFF`QL2~@Ip{3Ot2Lm z4Mb+w&@_Bll~`!{l-Hc^ko+>$drFT zGZ377LZ4%QvcK5=(oQb?924)3vC1i8PfaO<6#*$pDomzGHEM*~cskXZLQt4u`U~`I z-Ll98Xg^?x0+3lF%t$z(z3`DFRBLgK*Xwu%g% zqa|rTn$A1p2w((}!zvCiRwhC$jgo&k)~HRx`=VEsI!>%Dxaf&;9P4?|ak4U#`=FAA zht3#$($Z8~B-;Pgl-A;|oh9nLb^Neqr7^W>_tkv6&4$Ig54>U}g3IfHT_1$|nkh#v zuSu0@h!hbMCPI;8cD(55BXubjulNc6R?TOw-GAXKgGHeDgl5in*dMM?rB$oeIMIKtZ~DMkO)Ey}ue zUmi~s1s#!!VUAF353CrCS!Kjbl$Ap7XyeU?r!LM^J7am{P1`(R?QnlxYvu(h?x<9W zkL*}vnBM!J>zLhaW1$vFq40jmmC~JVpj62KH9?k$4gpqi?>iwZR=8CoTj5FF8ZR<* zflT3wbta&fL$ug@jiq9BaDOANPl#cNC=Fyo2HTT}BqUY8rw#o^IUa$>1E@zQnZjHP zaTTh?rsE0$N`r+pPPBi{8f#v`?dp&RsZ7jev&8?5juh!qDupIm?2-aJ%svjcwFq>2 zJp!fl#fm_PrV0IRfm?HZf{$%ugRKgcGQscIP}}tb^i^NfKfW+P*E+mt33GLG(O@+K zZCnhHsv>m#7Me(;q|SN>0F@zVHhMh~ixhKm!Gu}Dvbdz)-)w&qyC1^WXAG)TQBF9b 
zC;-$4ufuR0;Vl_`LFR!90gL5$98RV>LF$AWxaF6&qTB5h8cKvTQB@6(z$m0dvpLsm zbTyGl8#mp3p2C$w@jt=O-kmZf9Fr$8Q;}pup0plFnMY-|!#htrE*1DRdTqAi{}T&0 zn^U(sA2f#gUr2voj8Y;68(ltnBQPwa;`Ob}w0%jT4uR`d{P^EHza*Xq5F*CiJjJP_ zU0nXsL9%oW(qpNj0$RiK`q0W91W|4}U?L!~qzUQ=lRRR&|I4 z($D*IP>hnHScZ%-uP6V9{V#3hPdtWdByttHiPOKoG8=!qcR-~mhvKs4v8?3+)NwU% zI?YBCwYIXjvSdn}t)k-+3ToqeqInrvDa7*GnIyy$P*RmYf)nap3ELB~`^yNAUM35Z zK%!OuC9UK%)7efwPl-3$GFA)F#+AT1Tn>$S!>0r_V3+KuEVLb<){V@55@JFuw4GrT zR9w+yDFlBq{=o3Q51c6bFPsx;edz>S=uD(8{!%QB^lw6d+JHlW9A(p#%!W9z0)8z!%SEZi)llmW_q)Z$DY(SI0xsC{l za>Z6uez6daFdJGr8l^gHV2j&R=Z<0=@C`4Zu|&=p3v~^Dh|3qZ=WZve=(Cw4kiZ4O zh4yDN-SBmm2&lRGCR!GgPV0v(=V(Rq$(3lF{GRarSX3rdQEPFLmq z`6{pbc8mh@c`8WCQRT|x_A~FB$;M3iHpRT$Lf(5t!Xgh9TgWW z9X)3@DJ_kEPrQi`$f)1836&W=Qm3B5_CDe8FbdS@GON_NHrdzb*WEeCgk@w4)SVq( z=T1>qe-%9Q2J-++8kL^f1H489POnV6E5mv%7QvWD z-~=Y5gZ2H)k?OoslsSLS(9-G$Yq}x^dlDg562XQHVk1QI!-52?jIV|gTs&ORlnJ0v z6vMA?wYcPxyx|9dx63yUIU}3HbBw%Z7+@$>^h;M`1XY|NDFj(H?%ZG#sH4NfmeI4Y z#SS5Vk+p~8(RM*YpT7ZM@R;2zF#Giyr2c2OYI3<(#D^MVC` z+F!9h*RjvnuiKtCKG-;LCLSF6$B%FL#tj^QI}6_}Zp;Z61*$Y5ZG^Ta)n!a1tO5c0 zjQ{Q=arsF-zt|elO&K5(;}!?8bj^$~b`?D0Eqzv{gURwda$KoU#Y)N`nq6)?@+=&B zipM|rXx8X$Y^!o+}s%W(fjmEt zft)^Qs2JA%>9_q}4Gj^4v;K$n-hpU;p6^ZHRf9*D4rRRgB=ZvY_<{I{jOv*cNI(;9 zuvK6j!m$rOb+%|5E0T9Dspqy@W63x}MPvN**QooeaKQV$5d7s1-{@zv1%>q~SH#HB zz~G~el>>1cIk#fyki6yghX+RRjEoFH8hl;dAv0fJfC_>oC4w(5;n80_7|MZvDAnWD za0!PMwMc<92c!Z3D;t;WT6aH>*ZchHhLtr+)AlF*_08A+@ApUE%7G~1;6NmC#_V^< zF(*^1qeQeZcJng!nwKFaB8eCkRRop@(n2l{A&FosE(4SNXNlg*@}2ATn`P4Na5f>z z<*%0euD?c=^RC;u(i^W+k-Nix>15J>d0&K3-mLLA>d6pS%3o6&s6{Dpw*}i{J#?V~ zlExBIgbSakA7j|~nR}W`*>ds0uR{Wl+TDZOO zA~W-E;lG$eLlXk%GJo1v2Zi&jCrp-`t+`XM@;BvLwE+pE7(}tK5jNg`O(+2u7^%_k zovLacb9Ktt8o>@b?RR&bbZa_K&Rj0LjjmH@Xw6g}u6G}(!NGqe0}}#y*TB#2-NK!Y zpZNt4m#%DfJEMuUrrUjg1g$6_B66}K7BN>S88GvWmAb!RQFUJow~ML!YDZMb2o}}; z{71&(9hZ~TGC%w_rzVYmnSaarA+VMXO#hv$$->IY3x1`_rCKSF4T`z`2h(kaj)8~I z;xhse1R}Gb=#HT#U4a)kMc*l~3SH;sc~4qM`vzq|<%FbN&kLH{^?D)kLg~R5levv8dX*FUhpQ0%YnF*s+X^B>roDCvYOP~&C#QO-J;0x{UQYWVm7@1n?9X2 
z=iU@YM2qHJhCCTvs%)uxyl;DvDGip&WY<4`4~+!xe!tH`5!|gDstkHAWbF@C_!Oi( z70S!i=(2lk)Vd}FB+MEz`RemcQ$3r8T5e4<;<056hsXl#%&I%do^5UP}hblH%93KB)C^d<(K#p5t~_S#Cj6ouZb zn=o&Dq*CiY>y-wXvv8~=^xipg6%4He3q{1QSl#|puM@6`>NBF1fBAR%VFY?MuqCzuZvinHX)NUs`%&+lzfPJQ+aw$=4hRbs{i?Gtxo?f{lw`5pOr&5 zVQFb8M-C-w#NS$f@mHKnVlZXoims`e?MbF8n^RxG^Au6c?zhs?fkW#W8S9B_RlmXL zzWOZJ`}WStfy?mc_wJ7mcq%<#jppXA&)U#`(QxB@{Cw7`BSeqXPMv?mDACcBdABVq zim3y{PUB#s{x(a!=P?x@MQE9oBe%XZB?ofTXH_#lH!Ev4CMKp^dZkLmwbR;3yh z$dV8^U#N0Ajn@3mZ)|Jt7q4I%lb}*qV-`fMMoV?4*>Pyh)$)g%b#0`2wPHp0GLUG0 zer?1k(srx$)^AID?~~4!1D%Bf`M;i$G5mVP9ci&v^Ox{GK*Ug8?{FTw zb?-g8xmzv#NC7j4I(!(zKa*JshUZJc!^HhC|H*IQ!|@8CuIoMw$N!xV=YyJd43mU` zhku$XI7l?~oT6>}O;mr|(*N3A`^Hm$*iRSu!?(>KY9=pKefz%@A^-pPdl&eB`Mv)S zIsh?6p?UxSq}>1Kb0sZs4Hfg%U$7Dh<%Xh$H4JSTny%56j7G}NjSLGE8V$xS%_dD* zdN<_aQHlmcYAjSlL=;xBhM+~n)GDNg!U7FJL$leuHy`Gq;q&~N{0!G8hx{{tZ!ojd z-H%z$JI^`q*&ZoX98~cUMMstY?=n{BH!pIMz$gAuBBxHvbXp+Jt| zeMv<)7Q9WAWX=)C!KTRhm-7_wTi6{BuA1lW7VG{STCa5)6w#?tKM_tBwn8-nUzzCd zR`WK?T+LIS3*qF;<=)}sTR`N0W+)uH!S2S&ih5@$xYlPag3h*F@?590cvM!Q=gv*; z*x)3UQTLd=hQv{a(Q6?q!(932b^~0PQ|DC!f-Tj%sS<}{SE^{f^2Jc#y z-KDL#kmdz?CR{Z$1?l8>+;T zR=0Ebx8(c5{j*L4kxGj|_wiB9gKi?@ z48_5v)rvnc@dF=|PmA-$S3Xuc^T6Z4Bf%T9e6zNxnn(NhIPznEpf=VX*TO$HuGeTI zMR(bw_>?8gUP_*6)V*x0;ikfazI-e$%!P9?e72@qECy`zudoPNnx~SbmO7t30v3O= z?ixVkgJCF7Q59`Z5^dg8>NmODK1}&OyLvXw%B4s(ZZ!J>_uDR5jJ1c|U=AZN7 zm+9>hvpL0_wiaLrx8{5`aMgvotfnM`vJ#v7@WV+2LJ1?V9gY}(6b|UW=Sh&PE3^|@`*d7( z$){syPu64R(EGH&nJamkht4#7jih=j`|(v9+Ry;|W<#$F_d#WM^;S>!oz-`y>g^5Z zr&u5_Q95f(EWTh0vs9lM{L%u?D^|IvJ(KhCpfGa9CHUc&$$ zHai*bKv*k(A(^Tisy!FmTq;v0G1FO>;_YKqE&VfCzR zP6BUIEdGqOOb0RuC@>3si4zrx3)+wZY21u!;dH2fo(k5Vxb|kihwJf{RHwnvMIfDv zr!+6;8P(HpXg$|qOt-3P2>ZUmO+?-xcw_$a+N31&l2DF=%^|Xp3njvE#qu6Cy9NRU z1y0GrlpJFzpM7U{soxuZYL9fH>s&v9^XAj$mu7@tfSbT8MQlV2;itI5VgeYPIhJIrkssCz3BYH<F;b1p z$5pko6zUUasZ1vpV$Vz7aQna}0SCx{fE9r*$!2i#07Od9!8?v1_jiti#>4I^xW7P! 
zW^}GLs^|(cvAC?HfAcXcc2?vzzTWJl}0UC?bLkjuEJX;FaRHhtVoe+C*_JeB{^umt8P@Ro$#BAMD z8MAd&17Qq9;B$ae#wHujypp9}+4p@~3gWp;xtd2kK(%6AL~u+_NG%rXHA7E&`ZFFW zL)v51PpMzh9QEC8{ zLyT?g{9Ulg2xYGd&KHSu6nW{KbCEJNZ|S7YDM8@%evGyajM6ZL_p#EXcfi1Z>9Ss< z__aFru3&F;m2)z1a6$hpwW8|`d0NK{8hFW^YnDG>$VN)8gJ%=`nTziY1f{AiNxVHX z6Nx)Am!lBU#^mI_qPq&BFB+A61XKA-xK zWK8Ew2#S|gZp;*N>|-o#_K?=IbKvL2P!(x_#?6l>NCVNHT&5lYwpT}nY-<5d?Pvoi z`gB;fC2DEi3a^E$=Y`aNekOc!%PuC*)jtj?txV?b=BD%yv^Eu^jZLh_jy(3@v{4EW zG{90lWqW!h;FT@WjDB0XxI`lx&NTuZ8|S*ml-}>o=g+^WFN+g0T}|)ylBv4_o?D7Z zAEhqBIyGe427|8NWcm6zGSTbF5a7Se{)^8|?73YofL{Pc0+|ti&0Su(O5fkgTbwGF z8^esO_UZm}E7+%wRQ5vq6C7IVT*PX}L^5s2o5E;#z|alUx!Pn3WVv6yRt+?3Ul{qJ z+xj1Igsf6_qREGzeTeC}aTdpHEcsxlnIbO5lMZp8+8b))L;7KyQBb?+OxXP#FYrHV zXk5VOfOIR~nbL5775IN4ZQZ{qWUC?tWQgrgs0;a$2{L}7lIwpTW^aZC(U zk*EbK&&`VQ&k=3iS*REI^bJ}qVc>-3vqT_=oWzR7P;+I}f4T5=RT5n~&-v{W5t2H4 zf_n_1XBzPlh{f%OqVupOJ^A90zi9`u z|8j3bA@d7@WhW_MzBDHl2HO+hKvO^?-q;PgXfN!=r_j@M^KHn2#W%F?6Nkh3xSY8G zO|<$gnSLtj$tNe6w4$r;Mq>{yN&jRX%!M%rQI{3IokXu(xyo}(5SVK0Q}&4C>&(7q z3%T}{EZ_rwbPn-%z-XO2T~rRjtg4d}=nVA(y|*BHSjq&DnNDy8NnDK`u|J~V9m`RX*XcUo^+l22X=KMMO!EXOJ1b^~((bU` z#mS}j2ALzALeweE2t2BAvAmx@AgF9pNhIg(WqZOeK1Qg zj?mbD^KWynH!4y#+3=@jPCReOA)dDQ&XyTAj{LF|-ay`@%cyECa*K+*>mde=*GNfYzsox*J5&PZe@O&GYSm9e4&U7Q<= z(9v@@%>{lFh^@74=*xjqwk6Zf+mR7XET+ZSI;?*NiQ7HU%#gfj*Kj%TIfn$Y>pdcW zfmPzJosleooKi(UzpKnn z`IXU_-9fQD zC7AdvoeNK3q3%vMLpf7nrxfOY&-B^96f#NN_lff1JVsM*|qT$O_*zn4mTZn4q7s~+CX^Z)d(g5Luxl%K{Zn{P#0I{a_- zz7P!U6$g~tM=T_+cEY*-JkOsebKbFx))yBuPc*_Y6&tzTYrD_ zF{EA!+yD>haWr>}pYA+NFX>rah5XL$bz7smHgsEQ@0Aso5LCm@LrO2Hph{hZ#7YgK zDso5T%NhHQ(ql3RPb14<9_lgMZjY7K8{FzF76MhJoMI;IO@!Iy)>5wE-`^OI!FET5 zQ>ue?2m%p*N9pM6e7(DTw|B44Vk^fYZs zxCit$=mr)r3zA!N8S&FE#3eGX-OI~h(z3P2wlfaNhpr86R&VK8KK&i}N1zv>6p5`+ zJn-09>Dha^`RqlFBXU$EqJ|ZJE}4Qb26nXe-sWb1XdT%MyX@R8ix1t~seOg4IlV-V zH5`LM6{I0g9d5AM_$pRzTz&(16byNOhEPpc8ZMe+i%&3*Bw-iEKlZKOEHD1ahriWi zxO_l>=XUCiif!-z?=-W_#(HxhV4zxx;f4hE>Ki<`a0_e>NR3sRZxmX{KNl;9M0^NT 
z&Hz<^vaA}uy;V%Y_TaWpCrZhGGFVVEmO^w&fJq?u>zsV&$~~ezfUIHa$k^NxuH7QI zfU$xp`jUFD;xUAuR=W0S%Y#vzPu71}*0saoEs*V|4}JJkiqa7eZkk^84y57XLsVBW<7Q zA`*+pygtjoM-@@B>be;3*(cYa2j*mzfo(*4&*hg(orlwT#+ehOr8~1+>R$FsTOEf@ z9?o66rv~#L{^4lHjlF2@{ME(lRJ<79a(~36ZAtb9U*_*6Ha!~>8D%D4CZ6wHJU38( zmM8B;N|(I5Dz}=Jy%KiHn27wn7wcGGFeCg)V$n}A>HkC8O%EXc|Nj^C|NQgMKmTuW z9sTpqKmYvm&p-b&F`D}4pMU=O=bwN6`RAX1{`u#hfBvroZU6JnKmYvm&p-eC^FNbv z?0^2hG<6LC{NI*+>irM@YuV>ssw+Ex0Dw;NzgJggy2ojoI`8-%w?B8iKP`WH#4lK= zXVAc{_bJS{>0>4KQkyVR{~(}@f?>yo)hfzOg`tP&mt&HWPpk=|hYu9O2yHY7gEI^T z#~c2^8dH0*l1yT&q4~Rc;q3EK^Xe_@b-&5jvPeN<)#A}o;y6~Pe9QIv)!Vdx^SIqk zXKLJ?gKk{)qKYeu=cb0EIy{A}TFsvEQ%ffd_E`D#Zos>cFn3?JGEveO+3li(GX>yR z`C#qRr&qHMvf};R&RAPM^^i^R+W4K7%|km4lP5+gvjd%oG8z59vJG{%JOx zvNJPQ)#P`yAvp{F7*Rfj1AsY1M;miC3)rtbwEzre6AkRhT&cS+0qj11+@jH7-K7Ax zCvR1>ug5VJ!b!~AY)(($yI#gT!9P=xm{(US$6~L)Glg2~P!jhFvg(H+l zOf0Qe!BW5bF9T(+d&=#9G~%Xyoeo$ePFbHx0zP@D$d5db=~*k=p$N}(p8y?Yo`nyU z9e^}2w%{pVS&jDXxjz8XY~64}9%BE@<;1EL?v@IWAij=1xANufpNDNk=>b~EPLOG@ zgYR?N--YJ^wGS1Y{CFU^K#pa{{ny2D48PCabipb#??HI$&L1X!0$?MLf`18(Ojag% zZ-!6ls?V#^ftgxDQ+^WxxOuM@%YYYCQ?S!Z%-7Tx4GV8V!_97Y)WttYJm50m5SVzF zR)|hVzRH=s?RnaS-vK~ZPtC3st-E%oJ4_%EyYvmhP>QoQ6mZkNJdxMM6q!yJmF;%0 zmt|B+eGGmzvwrb^;86pTwKGOpl?%-SxCVmdOWzKi>%PfY)9+q+E4;BqubFAlwpo429EYQud!%D@t5H`)SV)f zH`u>0UP^WTuK^)6`a{54L5Sme7ofWJu_YG%ED?msxU(S{WV41o>% zlMBy(BT2*0T5Wr+-Dk%tH-EkJX}^&lTlYNI8O%O1s;1EiG`jZJRe+UU`(8Easqp@p z8bS(2fOW=xB{m=Uw3aP_~-@Aj%jPPEBYn^G{|hS=xljkRBGp#}j63w&A5 zR%{ekf=hFjSOew5vS4c(n|;e0PNlamBk*K@OZQtXM^pZGphWmIy?^2;P+MDVKY7@y zgJQkR(BiUvu3vb8)&>)cWLPi!m}I(Ht9F%!z?-ay*RnxxrA*~+0ri1!U?I-Yq>}Ko zQwIgi3~;A5VUa0?`I|sRr0`mt;pUvB6U6OX6(~{e3&#;amTMb=;OI~e!k(ZBqxuD8fe99<-8ZPU8Lfhr_#R+ef^AmD z2eXUJgYF^nt}AQHk#%M&U?+@HtJ`VS)aN3QKN@;{vy88Tisx_K<7hw|D#6tzs}NqK zL!Job;G=bSL<@rCrB#R>w&-cb$q1Q$1Dch(+Pt4V$Nt+9L`MY> zj=HD{HtI39!PX}>Vi|IY3dnls3@ys(;*Gw>jN$lEIP#F|gD6@$#Rjf=L@+KhSPdo$=F!5c|F~_3Unq2@Z2Q4rj87fHMm)vHP-!tf|$G>tmP3~HMT@|(g 
zE;wPjR#-Tgd;4)Q{MIZcdY=fp(_8n|$s~RQNhZum`A&Cc`iosva$otnlT!pR;H>VO?18*(?44GhMH&Xco?Yca&l4u@5c65liIMt`XBW;T_H z+nfyx%0;%>ZuB{!I2og^Z;^I?2sWr)q7!cDMoyE#85h^P-D_dQ@8Vr!2;lEVj*_Gq z18*aWKZM(!`{*Vv$B0rC?u6&tAUbe;WIau~8=q}*d01qYKvu%gDKL637=_#I6THrA zvC&Vq?prCHXDYq|%yncI9@L^X3o8n=oGtH5V-%_FfsLvRqrnhf)(=a6@*yPd79^{z zV7NuhZa8&ndWH!;&|veG8tlZYm8qBQ4fAWa%jN~QZKta5*QqKfy*(qP&NUh7gfyl? zC(mI`RlSQvl<98mz%<>&>@%QHhMpb52+;|ipV5)XvZ1cr*C-v_G|k3^zk(;CI7CQD zZwB?~!~zXUy6gIS_1Euz$M@@>5(M@(FXy#Y;6NRHEh-@{Df^uUzODN0mNCp*N=|le z{`9K&4lUT|OC%eyerY9dqZ`nYXa5wz5B35k^%)j8uI;tNF<*gihdC5*twjHb^&QR` z)j%(h^h+OeR+|KwcTxq{?$7#5E{T<3#ns>S&cdN|cqu|qMT5eB(yu4`S%r=QO#6y2 zhoQChUJ;fn&v95>`>6PoCrpS+VPnR$omHv{v*{s*%4vM^g;{7P>WaPQf5MLN1A6jj z9~xIwlPnrzOI8DK-wJmSfck_GaIJ!@@E34N4r#dIi2QR?4qm^2R{=wq4OMMB{tsw$ zNuRXp#1kB8h3pc4ka=?clCajnz)f=hO>yFG8D=Q-pJ|)hKul@FO!LpDMkcAL%pUs5 zGlco{gW|)PY;~2Tb&uX)e40+mBr?JiaYNsUA~+$Bcie+9NBP6CDJnj)4c}XCD&0;+ zkLA_x@0{9__3Jh1xJmQ6l?axoXg$oUxO$i z7|bTUk#$F_A6ie2#$XTDQ)o$2f{YNXp!us#LvX!IYbk&@h#$q2x|kM>Gm12QiB_6# z2|NQ7RKm0akO4@2&+8 zVT($3SmjlF=}djNvZmL@rvz1gIat()=-%zao$;AB(@9Ipj>gG10zo5@9J<3#lUT2~=MrQqWPF)jC!dW$&#wr``oys$ZvT{Hff@{}x ztNk&Pq84xlr}B$D!JCC*$hr4kUdwEVetm%T=iF*of%(uJ(6?IoyLzDfBk#-KQ&iUl zrJph3sacmVsi@ahoc1HPx$W4%b@Nc%9zOzqiV^V$yi74yx`t?gD1AWhzagKOo}NT9 za5{1fsSQBfa@)a*mz+37KCGaP8f6%5x%YXLcg6C#seXRCUUAT9YbO7l@V#zZ#F6$L zDJ^V1SVDZ}D5n*Y_GNuPmC9#CU?wup6M&n;YCIjaP;+NsQUD+9Wv7UNa)YS(!HB7U zH%uBnbOfzAI9$o)R!+4s`|clFBx|{zX-w_t`}IU6)$-l#-rv~lk>LX|T0N3<-XNC- zk|mT68HCR)En_0~hH?lNR?E&omXQgEPKrYuN(N(ksCEYLJ;>R}hwZklD@!-TLv0BF zOmc0hiw7F!+)-A_zNgqY7MP-2;4hee2TBQX#)eokNA!@EG?J&c;x}T!Fl58oAai02 z6~V;Hpf*LiTAJ^hzS(w|%7?-y!}EI`7vmo%u$8SoKzeFy%;|PKne0RAPTIE8i&+Dl zKmw)&Hw60u(kK@JR0`5|^hd-{ zv(OTX&@`Ouq&T)nDZ0$T;_sijmYDx8f8bz-2S>{T< z3SzMWIJB8-_5u@v2ywJh_v&+^{6B^Gs=D{ZVrj$4)FMOzr)_Vu8kh=32lBX~>2zL4 zn~p+#D#(s`VmCzirzTugZ4tUU-KbHXR+s6PH%yW zR0piNPOo2wMwUIZH^Ci4di7rg5iLYT zw1FaiK;pCZulKKA9E<9$KVNtAvXbuoI`*0J2#M;jmR#(Y$>3z^FStiHo+jjokQyao 
zMRGD<^5f+{2>;~H0)KeUh??9plpOsiP07)o!q7v1+Ar|;csi+P;i&QKjId!#*Ru9% z`c2{IfecH56P0nKc7AFd2&DS<4#oZiyi=OKo=AcG3 zy~`Q9hn5rfs(WEzCvDZVjb`u_t|{jz)&GE{zmSi^pKda3j{7Q&ylAN9V;Q3?ao&m)1N8gJVvcI`_Ntqj6x_gkVso5>bO5~x-QTWdK6**6>s0=`i8fuaynqVpc?V&k&GEdiU6TFS zoV%Zmx6OLmlg;`JVx=0&L$mCZx7?Vu0oV$Pfig3tF4To^KCP*iN82xh3wsx2Qnm7bQySgJ zJsfa)JFW!j6336H7ZS>0te2@U>!v^w>dQqHK<$M7p9bbA;2xW#(ijuDP`!bq5=1q z2EfnN8&)($MDZk@UCVNAneCYM^x_WiBWli(`SAJ*xgIdJkfwOJ3}5PhK2&1^QLAGz z*c&5wfWMa&Ma=lnvm*fL8G3Jx7MnUza$tMh2bq-}g8&+eV^2hurTQ*Stw!&Xn+u)N zsIwrf+Vw-u*375I-PI*uyMGtqk};2NWc`oK>i>hs_5ROUR`TNi?T3$xUXcIsx*Z7c zD~DAW{eN`e2Z!%@1R#}vNa(uN1lTo=?Obdt2%L4c2&~Y$g%VTZ-+<=q=EL2`d-4S^TW-`N{Dn0F9zB8npn)wLSbnC6PHrl zekT)~BMNQMH7%J3uY|e*5=8<$k3}sLRk24-1qwW%9s6=3!@|6i0}!d)-`9b_M&+jZ zkNa1ku<^}w4_OO;^uuamBDMXkm-qFRBJ6kw!JI{7oL@Z#h#FoWCsM!)mNf*G43=Gf zK(#YcmpJroR5yn@d9y}^f2d=p7!Px)v7k3cHCb8OPW58 zy9{TtiJqO3sp)WM%cTle1@A>ngZ00}hdU96@OXS!L&E!3i})tQ)_NGtr4AYQ-=X-faJ){7l}XKTWFilQK`b3VRZHOZ zMcy@Cf^adI&jEX8llkz2S|9Ix{;cPXSVC^eAZtAF+GHuO-8s`-n2NdFUEInrS37QzS{8;YN0sR7pmm}qrt)$g1yI= z>1*bsao^``aBohd2Afry{?_2OeZ?llre2~Qm8e1V9fS+xNaRx1x9z=G<^GDT?d!2P zb_v6WGj2#1A0Cbm{D&j*$$~?nInN$QhFIc(4i(pJ)+cjkveT3!G`wDL4;UlSOqdsc z78O4%QdnbPDaSmFKqDEM2$RCyHGW|iZtt@H_Wd@$z1ho}V`;B3g?lVp2(|Rc9=ohJ zPNO*+4l%0`6o`gri3loyO3!AW^lQ#68vnk-)98=Cr_^c%>qfW_x8B4ttfV)p*_@cy z6`Sv2go4*XU=<&ph%Aie?50vOx*|n?Ee}TzOfL-e@n2tRJ}3v_ZSqwyNpKKNn}<^+wG;{0UZGj{EFIwzk~}oH*!pUcLwRP~!V6y`c0^uOHNdWamttRg zcXPf2UPCI*+sIJdXt6fx`-(aDKK#!~%}n88seM~RA2xJ+Zf0k^Vg zbV-WAZ*6vaUhbYg)ZgKdZM{tPxW629G4&Rsib?_w1Se!y9~aHYva&amM>@yZ0;+V4 zD2QJ7?cX{cpfC$X5L(He<$;O-9Wi@g_0@DCh%L1Em8(rE*Yp+6x7R+t#l^7 zszjE^347uYp$pPT?%}m!Z;e0)_V;fzpnhs~To!j9)-k@6K=GV6yuOulmByzXAHp>H z1x7OW5{)o|ZPKzz_l_rj`uz}s=B{vl^;tiSb2ijU_SfJhsC-UTG(?R!GS zlD{(UfN7~@KYug|G&*#G*ggv7h*> zbL;wb5*pMcTU$(jLy<&v;((pcLMlBQ4VkxU#CaB{O!e{yWCoUh$VkTvBSj$qZ3hS= zB=g+)>2ci$ni9Ou`)qEj!=mUZc?O)vfrED4QrX)vq8g2~NCfwFNEnm06+CR1C%4%a zZE+y+utVB)_vxNxOiAQ*9faH}p}$*cI>aftt(Ms;gwUF{^2t(h4upxm`(yBRD*i 
zH}&uO2BfV)=6IkvtNF)n?1azdxs>nwx)UNEKO}?_(ZErD)pdsgahEDr!Q#AMtOG#@ zX6FetOU=b9RQ^>~5tGOcv5Ued0%yvKy zdfh;gLHGqGy%rU|4l|^VVySOc(~QRaj9=h?ctBsZ8uaGd#sPQ29%J?SZR3n6{>xlc z&p*vTYL9(lwI}4X(=IuUqh=yX-NeMBVWM}i#*oWhy&k{bj(JAzdAnF9hu0AQ*uG-( zyV!AYP5Ng`a~Z9d?jyN%|9cv;NT%xm@_zf%wf9skp+?5^F-GfW6+fExy&(m+ISE4dPb;zeYAI3&hBA>f=K(!~Wdwm50BG^^f&AWEQMiF>(-R z&ZJWu&mGZ)v}cqvvmlq3sRqcSnDkSB4p-!1XGA)^WH!_Cgw@DFMA4#P>m@>^aDx;C zGo7B~GEwuWA$awAAjK7&ABR1NRqW?dMSHI~Uzy?iEBC@uMwQYiO%MheeOUa*N}*hNq_?|&psJE> zvOaQhjUXT>XtX6wEta3Ed855v*#Hdq3t*H7DT1eZGBSfcnElD$mHqI*G> z@Z8Z74dSY3r98mJx0IE{r^KR(M8n8%Qx&q=kC8DPq+;MvLx^^>VWOIGk#G8u_@GOO zr_logGhGJ8p5bF-cx?K-uAP^E)q3u>gEsDOAdB|ek8>O+oejDavG9K(jaqm|`t9Tz zgbj(Fw(r3vd+qbd9NZ3oww&~WftVeL#wY(p8&skaw_RgZ8`vpI%c&C3|x ztXZDOl#zo40W50Cv)zOxZk9LQm^`~Oy{_kFj@jsZSz3wtb;QVuR*n*XfId^o_$u%i z(D;GqsE6JK)uFlAOs&RDyTAkJ=}a=O7&mCyfRX-n*Vzc% zAG)mj>DZso_`228WztJwJct5eA{(d@=kK`^K6-w?t~>C%t5b&iPpW+;Lo#&5oG&>$ z6u8hLfr4$=^V90Qqg^V0Vh;b!tE5B}m#yErg1QbLfKb2aqOl#y7F{-P)&Tuo*mGiIkzDl_|H$V?0jo zY#7FTS*OfKW|xds>JnoAMIYyC-*QhKp%&wgH2MXOSVPVb5Z1ASVQbw|kQdu+-( ztZUnaJ<#GiEsX)9bqLN#atcdJ&;G*e(;Af*wdu|F%yVe|ko9haLFQg$cql1|YM9?s zdGTuN08K!$zs>}AyxB(=Dt4010r%*oTBxSH$3Emu9%O$}(oYTz9Y}h61Fhr9CE6>L zyP6vZo@+pJKW03|^H##be{8WF#piG6=-Z(}2C?07(PtP-=geK4uLdN*9p(_OUrd7- zSUPjcvg7Vl)gIl~U&ibj$*=Zc(J?LNvZ&BVq7G+2p0U~^jVe=wNEH9;H+n1&Ufl`a z(`CdK{c$Tx?X!QL_Lzs6$2$;@WbMvG70Mg>_3jy-%3u&)uzr{ zbzij{_I%NY8i*;<0H*yKK*d4F@wqTrPlcZX_U;C{JDtHJ4r_7VaJZu2 z5(e37-7(=%OLE(_f1AoQRk#?vN#iz7w{mx#Vu>;Cr^CGdwuHjX9f4mz;la<-Qq=@B zX=SbctYEobusi-boT>D&X5Dm`t%OWewsH;#P;PVJt(PRpZa5q9wq|X!`o1|4&DMJv zMFiQYH_fiKaQFcSvB;b_1^;-SpO8=d$Tf*BntAEP@ zr3mn6(C#aH`}S12m~p`b`X1tE&`SJq47)6~Bd2k4#Wf5}u3{=pH)P*Sqbu{?;CczLBB0IMU9nz9-53FjC4#}>hl5by185Y+#I zq>t=12*Mv_XDaKhXttW2Nm~c5K#F!M&z~QS|CiI9sgFrRZn3@2XC8Z56qY4O_L^k z0pjw$e-Nn_?H!LkHZnMU7=phi89T@nKnm&29&j# z>%Q+&abTUratl8CLur{WuT7FpCUe!8;hxU+OnTCE~{KULce?C8R{e8#bCDWipsRc-e`0QoZzO4x1! 
zNrTDre(R~U_9ez}Re8MCmRCXe^bv*RJo6-nXDVAT=XN68x%ZI|6bUs?D3X_*t-beU ze{$f#@B6+n-3p`6&qq_10LsW~b`YweQWltirI(+lva7F`*9w~r$S{H***2Up8h`Nk zQsiY^G%qOZ)yRN)qJpoLXJ>gCo5mnvX_Rv1+ zLXSPgu}CpmNXwCiV*<0e@^gMq^IDo_e~2#g)VXY0OM!hfJF9xcD)MXvA>02wA@xLQ zCjcnbUoFSe)sif)UM1}I(x)+N)kZ&Ne@O$& zaP5hyOva@7RBd`=(A}oICx{N+A6@ns5fo<{m2R0h|c z|3Y8BL_(98TEyf!?+^tIz!*K$j`XDm=W;dBNGOM15ly_KC=Q4Riy;8Ba$ELXNoKZe z{raL_Zs~zJSpTi2{Pb~g3{S>ofB8yg^Y?ge92z(IBb$y0%vSH(5bWYzoc}xVoLUF# z_N3c+7lFC;t^r4=FjeuW>Udy|>UEZ`>82;-aV4A9 z6X*uO27w3*;NDlK18ZcDn{p;hm*n{jzZi}M2|~7RP@1%#;T`<*JvTrAo0<@Meg;KDE)A)>rsMOm=VlXCEkV0p+ z7WXNO$ww_(&4jF_ntjt_%kkGC2N!&)K1&^ofCdo+nzCSdebKvaL%s8EOz(XCRDNfA z+J4fMBqR7;ujBY_ zcJ0xq?Q$hMX$22r@&rgOgM(h2cHLPCo{E}EJ@W`-f5-q;a(b_6Z+I0lB;>F^eNj4` z3Br61>seJt&v)(?8BO#}K0Jl}s+{`D+^nLfD~9x0+AUgSkRs%Z7I14g(ffYu#Uz0I zGDC0dOQ^-o_EVj%1?LVNaPcNy^vD(ldzSbpGpshFte={T){z*By4_pIyBK&RYMS-Q zqt53if9xN2)Vbt^ulJ`M@6=CT-=Q*OL6lcQ=;6OAXz0sVhbjyve1bZA!YCoO^KHLt z#)V{xPzI~kX=v0ZwzfAS5?LsjaS@(2zX4lwBr@HyoiV59LdTPiuVlL9J0Pv}1!U0E zSyg~k`*@x!3^YBzaQs-JtgO^26j)$E3^IHRf4H{qL-mzf8tkq2TY@z~;UPJ>K~Rl` zvlB5*=?dt*ZP(~+lRcK+{J8A^gWs zf6$BC?VE&uu1|?J_!69za@ny0vnm)rZ25|u29Tu&qb#dU&WHK3@r5u*mgNnO*}4Za zRXa8)4$(-46m%LRf=7)W;*A8;sh)Fb7{v_1k&%McfJ%iyH6_@2j>meD5UjQvQl6z* z2Ci)VkC28#dg3r%)27!kSr71thL#8bf3F}nv$%8(>K(A!077Qfs>|zZh$qJ|{jwSk zB$u^?)@aeJTVm+o)_Q>f^(6#A31^?@boWOjS4dG0!nns2Pl}lV9t1IZ0c7l@k>cDd z&d69c^mqrZPJc&AcfipK{Q!eHYO$VDmU@zSk9z0S92KqcATm`dk10k8V~1^Of2Zd& z1Fakw4!>T|=oxsTAu2OY^guc@sij$=;3Sp>IfAkUKN>iXNO>gczFbCWsf}e1r}Csl zd?zB`-a*jZRCrN zS<+DbZdOw*6X{%jg_wX)Oo@S{jB1WA)*r8y_NeoAkMPbXq2`6(ask3S8dW+AkT+anr9* zQ4$@3v-0WKN}G7&VZ9JgW}qyVrwz9q1MCY&WH~Fqqbi-~#h*WiW@g?mHD3=;x(keI3$;i-~TAt*bc;OBqPN%iJ(_MUSRz_KGh%=e(A^wo#eKZ;)BaC%U*%us0t$2^Q z?jq!S^=?nlTkywn?g-e{9zBSph_k{<{ge&DegmQOWXI11V?+9bBzf|L8@md3q~uStnKjUohb0D*ZZ@ z-S8YO#CzSeoo98W^$$lPn}FS5Zgje^M?=L062pRtQ4LLoP?5>g4;exD>#cm9PS49g z;;@r|UpgKc;oueHKC9vX@hh(7ZnQG*}c`Veg;z}sIP5X*q%I;v^^Ef&`t zcMK}tZ;rPq&PZ{77MEiKG_GM(h;R|2TIbdQeEijCHo?Euf3cI--NCc#*9B@6#W1)^ 
z=$h0DoL04{ZsB1}#c=q9V7U392@IvlT}=k@=6(`B^x+H!2k+|hhY4wQEPvDP?vE4B ztBlG;Vl=tjCcVv7~RS{$1`X}!ka&p2nS?zyC((cA**pRN( zhi1CzIh58%f2|{AAMcYLVFyY`aqolbmORuvCd<~Ib`laVuZEZGf#LIe)=Nx(ye45x z4TtEn?qN(LU97^D(L_oB#D$zj_`C!Fsmuo8&k65Es^T99aZ0)EN7&N_*L>e`TBd1E z2qvnE`#@r=1w5`L=Z#w@lKc0k!cct=&za0?S_>)OHve6&D~GeQ8Wl%S&_a^WFg;5a z*ikLBTej}8mldo;RArRyHpXOjlTQRTy6ZUb1 z^5_qD;`k)n_W`n7IBRjDoLi8D>Gs#CO19PSaot+Bzdvuet!R(4?eU5u!ZT4J4>-9- ze~7zi0(6j)yYDa2JU#2Sl~akCLu%g%fE$hLlX2o%)jQcksl%y|ZpRb4aU`X47p;qS z!%f)uWn^M;$fWyR1coO;h71BHIN7BM!tzLBN5fY-s?%wv3aKnijz9dQzq7vJ=yFq; zW%FIP1em!mLY%P@H@kYVIs|*8lR6DDe{f&Y2Ijt}AtVQk;DGH#XoEH>G@je-*ez#- zc4tu!Z&VSA#(c@-bfgJ`2Z#6SBxHWKUM?zQx>^4Vr|uM72^VgQuGqF$(y?vZwrzK8 zXT|K;PC91CPCB-2t7G5)?z5}T`}rPc&6@Z)M%BQ)R*Ds~*+#oRA-N4atu?Tle{gxQ zN07+=a(M&-tClk|nU(rm9pbH}Ai=E81^05WloT6psDujNFPmtP5evA@kA6!~*9k^{ z1^&r7-tLaA(>WL!j#5*lK?FiWR_ozbhN4-^r0Zb)fUa3GXf(*;4xC2FKN$g%MYx%? z-w<7IPaG!%5OOg7)!8M4_kPl#f4zx{TT8-?obM{eVdUNYu9(VWQ`u?|5Hi2YBt^yz zZWz*W@b7+LG?X`6z@BXun|AC>A)=W33+tPbtIYGL9?iC;m_aK+wX-l`>{?!Z5r$!X zy?rG)R4R5$P0|B8pwhe=ZuX&gM)c`eJh=b!%B z`^6Dgvk?_SdG5`K3lfPdf6!E+5@^d1?Vl@R!>IKH|au%=u6^r8-fwCTh2k~oLZ{eVfo-9@5%)1 zdN;&RaD1R49Ko`<4Pf56=fR-x3mKmW}! 
zGrRb+`LPSQnn+g@yo2P`tQ(+nM_-}RADGIigWvu8L$AhVCOn-v&l%Unmk%wAM+ma* zf{Q`>#dY^U9^#P0Drfkh@aow!z)}Ub$5{0*=$>+XQX)p%`+7}&+XrUux*K%IDk-Nc zjtsv_?K(OT`@7C`e=Sn*F!?8BL94}yxl!<4qcK3cZ~d=^@IHHSGBm446<^%Ja*$~X$5YKTEEGHO>++ytCFUKdp8yH2te~wcJC$rNtB`6m05;L7} z`@0dewMF;njK9!G>JGg7(7IvzYx+`CKB`TNaMRWIPsFhiO3EvjE@ zmko1M@)(B{R#y2&j zlBGAJl1m5^VuKgngH&Q#$&VkJ#I$n(JH;oP_Ziune~(o;{$<&(L&DW)+8gJq^oh*F zcTqftu3iDb*5(;RMqjkqg02XAFOY^(FszRpyfy&SR4L1uyxPNTk?k{&zQFhzJ7T|s zDxWJNBuK3KEZ4fKNyW1G7)?eg5`8It-s379rShB`9cE#u#VP`(-&Uu*=hU7Ulyg_J zJ*GT{e~e1l*1|3e%B-NVe__M(FBg&?-x~?5N%9s{BmqM_U&6x?rfBNYq1CaT?ohp% zY|oIp0cB{e{+bbAC)7Lc?^0=;*>(1S)}wV*b2Fcm!w&LFMOVGH{vHz?NGN&DtdLgX zry3>~TTT!G(kNCllY#8(s@WO8EtW$J4v#_kf68{euSHzTD$rR&#caWn5?$AgzMX?7 z%^4@o|NVAjJ*R4>qrj@)Lm6_EhHlj_UJnL(8@VgoCZdaZ4eXofaJlC5rP}hK)P$1C zO%gTY`WNFw^46?I-QLw&e)oPKwVGpai|Aj;T}}`CbZ7--iF>FIRlc&CDGlN21`}cZ zf8Xuo1bSQqqR|q!pUaINMRO&0ODPTqnHzXT z(>S^G<6#$)7LZ+@(mK5vl=?x}$DP5rf6k4S{}U3~5JTCnmwxB0rj7SwGwz=&bc0e z?}~g$0-3m7Ilk3m9X9+V@>}xnHGECUe@Y?_HO!Zei=tUpsj@?lz2XWrOnPT-e^UHR zONIi3A#$etrHgK6H@DGGf{5pSeh2xYzYzz<9VqY>-@2ZB^pgIKfrftFF}7=Rd68$y zcT)2$l5;MRmJ?`5>*2O^te#XZJ$*lf_r`(68q=f;%ATQv$t(yl=dvp6(cAwI*C2pV zpqr4T9|QwmJ|iTrZ2Ba>Mu0f(f1dZf5+%ABCX;%68cxD@3@~Ks!wCk!46NaY!e%^_ z-o#OJCw4F`{Z_+kV_ATzb|b*Nvo?O86d*3VaKC!wTa6NuP+POY`ue(qe;7s&`1YMd z=V{S86v%5Sx5SzU3vTpkC`BKB7)>*@J9|L97RWifGv`OKljl6g1{0a*e~B*$g3S6K>Tq>{G1bt1Tyyr$L{jQJ-c? 
zs1pAxy2XtyH7Eq_<6$b+s}h}z1p!#Yb=F^n4vrL7)dxCh;5*^+fF&V{FM0-;eZ+|= z%7NdC8>cO~68mdTuj6=)f3xZy^U@kG)a4S-(7tk)RmhIfUR7b{h%o+=#1~!|!p9R6 z3+rQ(_Yu_CF`HM)SI$>+e0xe};3s2X4f(Ua(bj zR8}U1nArq{CM^Fn16EyB*m42UB7~&Ti?BJhLvy&_FH#0!dZnyr{)?$`UCrPuMyF%| zQKRte`TwYMB$@qRo#Psm zGoe=N$lZ7QRw%yA4`?>Aj7*FTrpYl@w*RatJ=61wnJwj@I#VW%LT8@YZRg(cS4|Z& zE(Mj*(hWhG;FbV6ztq6LaJZ=l68U9fxrU#VBIfK%+q#R-f6RpxWIW{SpJQ3yMXR){s6#M|VCuqv1F#?`(klNU zH>j3lG}|NMqN7d_l@=f^{a*=~Inyr9nV}Ci#FbEqkUM4}RAmSQ4;HLgpNbW=q%c1- zTw(@>JHK$Be;O!Uuae_g&g_vFJ#@vo%a^49xr;prj-QmENUEXo*hzhT)RY$WSa@Q0 z101b0*~^9~=^PNy7@dWyp(Z!xM49L@zQ!r4U@VZx{lc-!GejgzKNKP|AIi0``OQU^ zX&3Z`^9*QpW5vY7T__`H)Kaz;B!nZK6z=Lud|S=0f2ivPQ!{`!_f{njFqR*5V|$e- zhJ7nfxetFM(>*|UTHy<%P#T8o1g#Y(_RpsQgZ6KUNR*k535 z-~*uPe*;~R3Z78`&2&7uwVMj5XMGbWe*%!06AY5n#gg^+*`b^2NOuA$gDGzpP#vI| zpbEi|fI8QO0zz86F+$LsUEI*h+?W9W=BHx&sQ{?#$mRKs;QV^N{i{I4*UgPFhpt3h z9+?ujA_n7RRCsGZOR4+(@;4DLbReAh^A#8-e^oI8uP${&ASss@EypphuG$Fz%fqT_ zApzL=7HYn%FOB;qz5!fEL=Od^YQ<$nmNZ@b?CbLP1Q?{u9UUkZX1u~I>HnCjaRNVX zEJx3EgU-Yv`8mi9^$1*sOM=!H0lZMqU2O%Hw!C;7!@X-GqLG9+{v#UWVBG&= z*H`ViTFi>@d$q@C-edfZo#;z|oLW^M`j-OG;uecxxw+Bxaz%&pm>1r)Xt1Ym7oPk& zXf}?)oaia~BQ%>O^LgIqBeP^=Tg-wDe={wV2-2wkzM$?(J8klhWMie1H0{bMb26S5 zpb$bF;V}BVUh9#mCwydZrr@kX$)t<^wN!Z}(@z6)Q4 z2Z^dg6cN4+^AoePSNh$isWRIY=SacYgeHvVb_oo69mVvl+52#QCly+TFP41-f6JJY zuRvtdMElJ5)^S}iPhnKXNV2)4eOUQS5(-2c)A(Nqt&lYm z(-CEOjhkqX*ZxVHR#Pf9StqG@e<}>eLPZRL#YZa4l5Ox==hERgW?EgJLmtd^ffDB= z{W4XT3SPNpws-KiZ{nQxYvG*n!a=vBt`3pq0#?-Rf*D{iTF$*((5&QLGUwoFc}3dO z>`UB0(^cTzFjqg;sfDbfv{;;H;wv!})Pu>|i9p=)gLMCLDq6XW267zqe=eE)VHJ~N zZs-Km?|~uz5Y6LjvDhc9ar&S;BBrhu9VD~$13@a+RHkqg2Z~Dc*3|s^Gw5s<5x@JI zKvtGr?^-elnoHl2H@l~6X+LTTYft=uhkaAI>u&-IHo8o{n_CRtz?CkUa3?YM3W?46 zCVz_*zpHS)0t@0|5%2JEcU~MtnJ64=jiO5J{K4 z1X|~#kjlAZ*>%@%5_l~iUdbon|Jk(#8ZqyJsU-G4nOV4B1Zs2-5=B59M3aiAaD_5p zm?wDPMm4wXA?MBpFuF(i{&fm+#fc8$Mw6f=Vl&rR5MoDEbY0z;f5e!^eViqW{l;C| zltKRSt2Ua+a$ps~y0+JHgQ^6H8eS3zO=(YJrOEMyMUZz^&y`u8bGD?fpv;{rE)_Z` zZL!kkXel}|VdL}yGpwMjn*bjaT+z?mF{QWlAhjs8W8%c3#Iv9@DfJsJWZ(|@N2bB@ 
z>`aMEo#o`qj`BA6e{f5Mt}yVJw2aL`bL9j71A*uX`u>p3ZF7E~ipk z3R`NLrB44`O_^LUM3tF=U=!vDe=Pg2GPj$^XK7%1V_et* zB^wZA2>=ob<{N3i$45(lTgE)02@@q2CC{?>?mFQTM`Dvb_DKKm?Vf_F6cpdKcmd;D z>}4mC5Rqm~f4)Icn1oLp$9p8tY*z?Q^>L${ zCNSwHq3Guv#vI};Flvr`b8b=rpqpVL4;Y$f;;|UPIVSnJaFVZbb7+*rHyYe=<*it8 z{60I<>aBl7yPua4xNG1{J_Ld-Ie0&}jWH_SIh*k5To#pzMVO?qQ=Gg>(6;yLf6G|B z%IWnfe-AwPTAqb4eGOOGFt_ANuZCKKMJb6V$*;0Hvp5TMGD{W{Gp&ta26*Ml=e~jw zxP`ZJ2l-4JjJ&APvWkPRaoK{cp)8||(gvx;# zjOKLx(g1s;z_6ZLj}#@*tBhw(ee$RelC}0wgun9qf%(LF0`Dc_LG)gTyubeU^Fsc3 znw}vI+q77j0i>ByU)iY1pso1O5YZEPf1T4Y6Rv;XCi6}l5+pyNFo!Dhltl2gVBhIx}3TT#~w)^f1I9` zRG(44OOUy%L{6#Jr}!IokK|J)rsPa;)|_<-@&`UM@$w?1h^y$ZiPCRJLGzT%hdd?i zy9pB;R1O+@We->7|2PN3z2IT}kIDGWip`WW*yY*}@A}I$wyO?0C96S>9h0q==d9r2 zUe;E(4jc}a$dey)FUG0ETA#{3f8Wo#Z1=3V0tCsqKo?~fQFi)bN?r0bP3KhtQGpLy zyq^!xq4NL{bOF!B0jnn%+?=Gv{mPehJ)E}kw_Xf3AH-RI~!h%9;Ijrf81 zyzqEFh!NsA87?2g`d91TliC|EySk1U#E%=R5$DDgW}bcKp$w8Ydc7zAH|QOEOZ`Uw zDJO_1FmPKUcW_uaP}HHof58#q6E=;z{T*f3(Y@{dQh|D(Y;wKs05k=A?-}*g*MTpQ zA?!gk4sG(e;qI{7N`-nu6283xjEN8Qx(~g_Bp&k@dx;seA-(neJBd$I+j;&@bQOM|3V{5|A0<-)xC0hReU4kJL8KLJ-JcQPKN&~@t5d)Md9 z*@>QQ)OMv7j8q@=Xl7l6r=S$RTx>jf#_Ylw%L}{DnrprQC*M0%e$Ou592^A>7)M2e ziJ1eKO=MWR28Hj?@Lam1tSxPyMmXsq!9m1OL~M}Nn*<^AfAf!e%mj81-tTpqN*%Q2 zvbV9`u6pk+hohy~iwTIt6`@OX!9#{}Y#GDQk z-+!HE-Ta1w6O|Yqtul28I@XkLF!&CL9Q?^r``^LOHyraWZ!>rMg+yM6)^LlNQ3uLfZYV&~=+o zs4KQvHbI^l*|oKLX<@62VkdF0IbJSk|AmSzLMM#%Fec^u!5JRfkbk0H~)OIBeIrYE_$ zCK%>Ge{1}^ARfAJwy?PHo?R~lFo!SiKrsLOH+F`V^@eaGYvvwIYsg8!Q#5`;Qp zbbpmZ985QtJ|#nOyo(p*2o~Q&9iMSP$>XVA$sf2e1_^s&d%Whu)V79r&DK4aP-kO4 z_Xn3wQZV#f<}b{lhiT*f^sljbAw#hnf2(AJe_ftS2aebKp8*rqy*etQm{<_;J}S-p z%D=bnjSg<*|9f~EeJ?+#dFJgGiIN%XMB@DV6+-vJV)%_Qi4bV_?gm2%9}-nY>;R3~ zgmZo|-kgLtPUAaw=B=}iyvYUo$k1gqyX%eSzT13((hfqM9}^6-3wOQkOEjwphL@ix zfAqIOf1S{6Z@`5Mm$W;;^*>&X(;Tu6zFMT_IzD~iDI*pcuNjG)3veC^VL`PEdk|r^ z%`7xbXxN~go!6TuPQ%Y@>fy7=0XT}ga2U0G-NYlC^w95(K@ul6+;oN z@!hMkPS+1wbadcvv2R-3n}GUG7D89BSjGl+t{qXX&!^uVkMl~y$CFTSE6`l7f1h>vTKlFj~jFQtgt^(5!hRm9fDhN 
zu0T~`c2(o2QE*@9GZ@}Nh_w-=R$|Y-`g?gzVdnyi3k~Oru1j2O}71Ea~5Ew2mVpXmYMn7x1am)@gZ zu3MBz8Z9M8z>qzZ7ow~$e_?HydaP+`UEBas2;~wyVu>P9GaUI;MD$Mz*@+=I2sgA6 z$rM`C1X_!1x?n*I)sizWy}nmk@E_e+x(^3578na`h&DpcuZzVG26Sw;t;k-Dp0^~r zgEy^iQpc@ka#J`jQ)<{;k9pY+sOI139%#Dh=o{%669%XIP{=jJe_Lu1niiJ=Y{|CQ zSnJZ(WJidqm*U4<#%13@x!sr;mWll?=3{5Q3adu)HBBRnBb;P-AYpfG5IN_MX~O^r zq8k#$iro&BK)HV!N|licGl2&B?}dt*6r7S7QQ`YH4Q`qeAYh18|+JW3g|e>o5i9)5RrH)*@5t&lB$ zlGFlYF|+~n+wAT9^!8t_<>7->R?mGev4uU`?pngwpZJJ%3cKQqUY4G4pfNV60adpA z{wI!paS+efdd80%kDci{qc`=fb;$AxE`YUk4&@SKJ(#|yPwc&QRv4l?o;h9-SQG<~C9 zeV%F@Wx2}@=*OT!?L>veQ*Pn;n9ePcwoa4)-C8=~Lp0YzWgY@j$rDb@%d#K{6S$N684+P#j~6 z22GO9QEk~&85pC-n1V!R%cNqw$$t)c#77S=f3vb9RP4ypm$AfQWxR=i?1FHIj<{+#Fjs_fHSRYK#NXSKvHg)EImi4Pg96hv`jdSV#u`Q|;UMcGo~ z`XU@hZzNo@?f+d#T%Vk=ohskgPCQ1KN`X(-a1CcmNiYc=vwc8Spx-XU$f?f}RUBkep=*a=t}|ENqVyMbW^CSi4nsC6_;1JH?eWa$XL$b3#H`f5||=R%0U$ z-d#~7<1`X3BNb7(e+v7aHO<8Y>Z3FvEo`i#DOBs68_mOL{r-xrKZcQ0fyoS75=VuI z7V+Olceieemqf8hvJ661Hlh*Gy~oWr>7d|_forl}8^Y{l@t zyXQgqGI{UdjDxV4uL)NMq3xjcx3$>8lhoOzg-}0NzW6p*+O6kVf9G{@QPT;iwQojb zk^HGF0`yc?5KZoer?I-zm_*P+!}sK9v`kOcDp06yS{NovPp0zYxR0$@pU?O5`j_JI-D$P>_ZgIV&7K+vm3p`BD~$ z4!`Z?ylJpiD2Mp#wzRZ@6jnMMm2{$?sGH?;=@%0LtgIq;dvt{CMleuMM#W%{^5Ds1_sUYcMZQMVPY z|HpQr1u!kIe=0~e!UY@7!L&VUbv?K;!Q@S|tUdMt;ElWnIM@4q`!k^okA(8Cg!X5l zd2#QElYv~otm3(6M#LC8Me)lqY^i{_#*OHv7gy!uQihJ4y<$P|d5$h>XGoDPYO$8e zut};No#RUvmqj`MM@R0M)lPJ~w=+)_+87ozN@X#@buwKV&Lc5~WADy-_9Fd% z$a9eBI}!v`=U*7%wV+J$E^1_Zs zFVV+xA!rc{8}mRQ!5J+p8|2DlmR)n9`uUAY3R2 zQ-wxl@M$1r!xH9G8fn$Td~O3Bc|*r%7<-BUjH7guZw=#_RHI*{o?$1L^WGBQF2Tva z{G6}3cC~?6e`TJ`9UAtF15#aKe-VCbI819Y@x&o&)1a!LZOmZtFxn1faS1bbgsNyD zJ%0jPAE_lK5;hyLAvFIl+9hP}-0uiPMmu6+y(hoE}yD}rOTS^PIO*7{9vfbKY zmv%tq{tdKyOCL80VC=2TpK?UAyyPjgkt)VP4aJ305eHOa6L)7(#)Jmp8V5@vfX`ch z4j@ho!oCm%k~e-%{C9ocf7c*Bx`5P(758 z0~LZrS}|qQ9$Q;#M8azsVqEa>8$#1~Yh619r1JkL2%vcVKZ1bCG${KY3h3cGv5}M9 z5s4|EnVFd~&ap*isDxjm)ZEy_2=V`|csJ37Hxag`tllv{=(|tovM{7SY!-w0e)ODI zB|8U?&II1YJd8*Ef3j`Sba3l1V51o0(9k&7)5|ToTGdSkoHe6y;Kv2mUJ%VZ@)nyF 
zZyDCLt=YDA0HFT_iM=i4f5jqD#FGe>QT3%>bZ9v_IT>AXPamaOC_MUnmYsNOsE)V` z&>)JDOQXIB;+iRIV7bzUW70U_{xxUqF~Q|qvUUHhO-)FLe~A!dIhDgQwJtBKk_LES z#0B)eUOBl49<0t4UqSxaYQ5~V-{=W%{Adj5)q*A$SAlfoG=mc=RI5d1^RZ{uln&%GJHm^M9&fFS$dg9$Ef3R+p-;1CM_W>nX zL_nr5!3jQF;v>?b1^q7i)>UL4Ap;Tsb%jvH{TWHfYJoM$8TK8?hV}iv6G)OF=)C(0xl5dP=RMBOLN?AyTNX7I z!zh~B>NmS38fZf->xqFBWx{S%P*MW)<^=Zz?EFN1Ar4^{?#p%}Ou>bOT#=kKe}z0q z@9*3*)MX0KNiLk{(U1{Q(MamAqRpy*@sATfiM%0;iY>pxCRn`W;^G{-QKx3HwM6p$ z?4j0KoQ%Z44LyneH}oC3VBgZ?yjGg-huSxe2gc0B6}b?%^L_1k9`$Rij=3*w~utj*#N1^D|w8b%tl zr#P#WMD~>D+=_!U&grmn}4_ezBAj$ zx!RH_zkjEhujP%IQ|e69v)%MeytA?$w7t82aBUhnKR;&R8ChDw_oUMQH1pTsB&4x~v#* z-f!xoHQz0nG1yBmM}#D(89v1NoS~# z`GLu@#cqF~*>B(bAPV=5;BPv^q67&WlgS5*$X1A3=D~*aKfQ;YxqmXA<`{#!9zFNn z#@uyqb%C{Q`p_X0@q_X|FUz?DKEz>jpP>T_ETw^vcoU>n?N#vreP9qi^?F~?XJJ4n zxdWtCE2N+YWZfoz@G~RR{xJg1+~Oo=s)cY?#qvN1tKtM~2qgF!_bDloNhJh?x7v4Z z#T#57BI9T@q5__Rqkm}rq`Zk`YB^#(VbvOVU;BIsl)TFrMf$$OjTemITSLMxmAUh? z1?xivdq}CP?(}ad(n+kL*P;|LBFw3zY@W6r%GTzbDo>`VywO=}ShCjFjJCFO>-6@s`?&S5rzu z1<`tE!zRV9RY3wP#(~GjW!X;s1lzZgnPB^DlyjJD3&z}TU@kowqEk6><29XOw_5>xS` zx_G(uN!~?M`hTyG6V@Sb5Q&-IB^5?-m9BuFWA>E_EXvD88SNeZE|=;Pa4_JHkU)|j zNKZX(L-DwZ-$b_0Y8KRZ5n3l{Xc?l9x%ZUOKGj8Rc>CQXn*x~*&(KL#P2V@BgnP|S3{w$xFfW@fV{$I2>E=d;~s4VVi4{A*hR0!AIYL$p`?#}4rlW*uTv&dkrUWh-hSi_3ZgIh&&KsI1SlLbE6w zTX)|D4o9DUyj%#W2UfL$lr|(VmuqFksp6q-ZsIM7}ks6UENh z!A&4Hh7lf_S$&Ze2sg`ytpHHN3#F)Q&2(8+X21*7;z6O=1}$#p(r0N6Gcb!yL4R0= zipOZy`8<*$&$ibAmGPpm6o-!^wgS0jObs>;1A4 zHqc>Tv4kN~H6Th@{2-6|O%)^za0fnYkx%Nw3lr7BWb7G5>@cAq7FoNBTY@8^TH7LL za1eMvl4y?jp!5>`dBXOlKFmc@&VPeITKd41XMxp!MJpv1Ir92k)OnXNADUjm6b34< zz;)WtDy7UaRie`j3K!2oS04{zFIZw;#qQxE?L|PmAf(Urej`ykcB4dPWM2{MXZ`gF zeeGXd)TVvovixRO?)pY6D^3DSU0+=>U-}i8kW?Ph)@JgL17j_- zD~MZ8%}5RqEGz)VoRUo+psb#qA0}+vTANyyg?D1c`AmU3lH?&AD}`&|$>&q&CGhrM z%K(+^!6jnR(qa;la~7MU41a33!=pt#0z85LO#SiO**}jf=KoKBTTQZzzd9I0(X&6Z zANADCWYM6htG=iePDY)B$F#cuq4nGg-DiS`2H+2^!uVP0XsQ}EoOj7Af*9UfAq{P) zR{cxG@_s>gc=|_B5>0D_E9k;aI%)vpnz2stk;*B^!KuaydqUuCzJE9|;`(*k%1E6T 
z1MhPW@rE4Epl92_BQnTp@$pwP%{vD5vRfYGz|gspd4^MU32dL`M3fx)&0T`Pm$m>H zBzOfxJV@gn%(ZH1yD|00QgpBa%?2nqLylfFxCUokskOGf#-55a%5mg4GqZS!Ia+QZ z+XW{l5%imQk(4S6%Ih=Ewo;&jlTUbkNKy z+pOYH7kZ%tD&5e3`HO|Uc(&&BIb%|&Es$t`GnWoN@xxEY{!XSRiEUJ|s}<1l(B?iF zDTE5ZL_LX7M0RH;H!&N7#liNF(L~H6S3zu@j1n+Fbb(B^+<%=zovQm{^YGtuN^cnm-d}wZ<1gM`Pe=amZ!I~_z#v}d%YW0k;O1AGy11--mNSKoRVArm z?k6?zRGt)rhsgBd^gY1Y+^2Q#E#xLqoj4QyEY2S9h*;u$UJ~oi=4QOe%BSAZloY_D z6E2L%g>F5JIj0gPfoS?URS4)R%s%i8B=qs4*WHi`s&YVY}u%XLaZw*Hs?-8ygpp8_Wy!> zd4gf?`Bvz_JR9X4t#Cc97&4wfAl%7n33pb!>VIAJV`hhhQs2wq($dmuQ-Sy&KvlZt zAR|k7CMGM&sR-^Pw8W4iG9XUFL=u$JJ~YBlAVp{9OH3Slc-zMEd?zH%7Ymts41XwW zD_ZS*^{>b<25T5!{MdF5Zm?gu=QkX<`SHYY{9}DvHO~KJBZo5QxPv0T0nsm&=;pYD zE`Pz2;WY#%LKjj)B(_J2iTY>}^xA0#z=c&};Oh;`_n44Gz&?#JBP`X7$WnFGfIMp{ z@~%)t7;8u+UaF|EGoCHPIRaznWg}a;rl!yTx+jd?m_a7lfy`C|YtAIuq)&|!ZvAZ9 zMbZ{ZOl2X}L`S2I%BfasMc3hK()SW|oqyDHV9-h8ZoF-f=@L5FY%lGhh9kCLRuu*|_#v&vRr~tXPCikL5XFcV3(H}kUR^t7iOSP<|3 zfW`Ve$}t_+YuYtvS>qQ=1S0a=EPor#b!-a@LJJ`_Y;j|h;~dK=;-EJHa`fksOfueIE6;+_0DW4kQ#=MQ9=N zB84m6QZ2?-^tR-v1ul+Y72NANqP>~u1xP`1Vi`=&Wo&dYL8jxt+`;%v^nYRJmF~3b zdxfRz1zy=xju=(`7gIDkb977y$U*tcZ)JI9H&adsb+X1SWPh`)XP=LCZ&TFKUAFnU!4 zx{0xypLAB^94Y`X!_)7gg?~%|*%Il7ZIO@65IM!JZGz1b28QmpmtUVk*Ts}2!LYU= z_I?44>nU_JzR7K~c2@%)o)|UbkXYfhIH7m{ghy<$FITajt5kH6Lk~`3o(f5H(PH#& zq{y;p*a|mclY{%q$XT(NbS}7xkk?$ZM z0<(&%3^dM`+902PF*r8r!TFic_*0dv%j37kGyo+HmKxUhMiI`K$u{Kj%)cs$YuRrv zJwt%Hz^DUoL8sH6)PHY-TU914=>eQj!N>>4wzX2ZzvSW*{WX`TBI~f8uBh>6XH8 z&Rf8AMq;jRy`P*+i|gabGMGfH`>Jy5Dp$;*FDCI9ktY4^+IaD8E-g&5h6V;HS^?MS z^t84MlXvdX@eCGb$}VZGzFj@+6sqaj~8EIB;^$3QKrC@i3rhQdo`M~VQ3`H?5}#|ecr!?;5>vEZv5l#vROuJjFK0C^{b|H$t28dN z_h=*ZcJ0|a0j7eJo3}U`ZqWG@iK?)(kw{sJ3hm1@tdl;7V}{kIE$JjXz@}ecGlvLM zszB29X0ZA)>r_ng7F@pt@%Uc0z*xnG*SO9YYE=79aDU?16g95(D$2H9o*$sgZe(PI zA=$oce=OMLe(?_0fbn0!a$4)W?H(U5uPu}--TX4QR+$_gAe)|M3LmmSvgw>vZ9q8~ zLcxtBWSB;{e;tsULn3HtV{C^XUWTsN2yQJ*mm*-5)M3l!0LFt8H^s|G+w<=5|8Bo@ z@AUd;7=I-nO6v%0!3N$C@Om8m(jpu|4n`QNhoH7NlM=J{8+r$4>beDo?k47*aCV%b 
z;O1K7g-<~6LXx73A%giBfkCyNl|r}WQDoI|1-u8fCgVxStLTkm!cJd9?8?~UI`N=0 z)ENSCIf1pQOdjw|IRjoLie5WC{yQIg2?5R6sDFIHj^(gK0v^@YGcCO>_4Ru5Zd(eh zxCI2FLn%3?m|;N6D5&29Ialg2{~vpI8JjuGC<^*5Gcz+YGcz;elwnF!W@cu3%ghW@ z?v$Bv%FOMQxp%&EqHsO zn17i5?$T9^lFhVmZ2f)OWmr2U7{MK5;zG+%X$Lr8Gc+O|f=C=72iValqA%Xv zw|)wt&*G~TB~|367MdW7yRg$3WUq|P;(xTvi$`l2dOwmJQH<$qT>xbAN~gnXvVuTlDXnI<-%p*x3~jy#JE>hRN^b26nUi zs!h1=mdpM&)S%*iua-N)PGYh3 zb4}&y7&TvO;+RRZWIgBf<9}39lilxDU#?b^IF#5R#dQML72oSNjn0SrOM#6m7uuQM z8JY50U$5_e7O*&fI+3}zG*&%63Q($40W-C-m0;J670?Ol z)t8DvbX<3E-{Rq|{gey9_nY3kWBmTOC;NVGHI1b*<;ur~Y{}`@yMNCRnrrIQZ;P~7 zErIc}m&12whpL%&0+=`_(cjB9Y8e{g{I2~(a5t6PE7|Wd|Jb{OOn(Ad?xZYdauuL&Oa>0bVEZEG;?2)rUm!w1@C|Mu%BC|S`wmXI zJ$euv<^5cJe*VfpGlqdrU#9#iD_G&UUNH^cLcYy$@bw`@jobk`l}|_Q@>RpNeq^rX zgupa)x6$DW*Vrgut@{(Trl!X0Dw(^!tJ){Lyyy#+7=u~ufqw{2<6VVi-~aBr;S_OO z^LWr2S9O2?Co*rXVUgEyWif1vUw5&h_-#ENVpoBJ!Z2+ZgOuS_>305!gOGS@9bW4? zye6u3GRW^WUzia>5}s|8M5DHCw`|tNww9Kvhl$>7LEm-vPmQTyq&6v{pH?gyNN|v|=9= z%>hI7ulpwIjTcModzwt0Tr_FIoSDTKU=R}njclm!_?ml z4gE}(Co{Lxa`@gwZA<#jQar#>EygT|(2vGke{|73e;JuK%!7mvHjos#X$ofdD&LF(!R`$EK)sMX) z+VmV$D(=?6u_xFr<)f_h@jeD~3=$&m_bKS@2Y>9nw2}h?!z#sCMi8hsq3q8ZOi#Nh z0c_7&Jt-irvUc1gQ(K$+Drhk(C6Oxpyuhb$b0OFwcWy}ok!U8cLBoB;WJQBWv9-mv zHWt>Dj3-koLEJq7c!zXJ#~jqIt`?TJ9w9Q8(04<8|L))8CcmTI#kx8YM<}6YN@ing zOMeoJhYTBb-}K(`8tq5AJ1PKLjB9}_7$RT@T8QJT=|ksa*J`SJ7N>2{jf=5ld$SUz zc@HJV?*=q5_EMFaD&Dha5;N5wNFALRxe-tz`*A(NqN-Y3Qk@^RUoc6sr)U~pTdQdB z!QO7)`G6Uo?=8Nd2)UOVG%#>*ruuo^jE{E*8jX8akPV1pmL8(uRnWd z;eovFIAMq9Uh{OPh_A^Hu?Q7 zuveQ4TU#ZYwt7A;9mHN!oS>LNY&)%lx3P!~m=~VERnrNQ`Pk+mLIhtJ55lNC-AAQX zG4F=8JnP-jxV(YYpE|02yMX@2<$so6+ zg0uikMq7v0XyYm9SjJeaAJA&I85Q6!?DEO01309n9vzWo=`g-|K7EPW*nR?+rb@A= zkKKvFOHCssO2%;4I7NWBI;j4!2d=)dxc_X7A70`(%Sm5CW!%EQ$BI+6OmaJ@hm6sR zURgtF4VP4YwwYR|ER~I`Z-25me1B4~rDW?~Xch!6CW4vXv>c83%F>|HLoB@h=XpQg zxuqCQU82)8IGBWGme9D`b#W}k7&2&^)3LDvCKXdT$$VUvN%Uk|3MDO&Xqq8_0D%vM zLvO~_@TziU>1uYSla44X7Ot@gk#LpCcd&Ng_He@B$oYdHug*dFhkwvE=`|jNG?%=6 za6^EV_;zx{0+S~qYeTvyGZPoo64w^Evo%I*jzc&TGPt&@j(QaL+mQ0M`^iB` 
zrM@weT7#dofrVnSaD9qi@BA#y)M(g_mQp1Sqv!sx%$UqC%#u5>@AWRTd=Oxo?^p(Q)j(q~s!;clpLp+g4tFn^wXR4R$kET74mDT!&? zJ-J~#80RS{$P;Bq_H_jWm_nTQyj=}sYz0{7rfOtT^uPaJTZ#`B|Aa+IIl!`Jurs6& zng0$KCJ(BIji`_l+-@xj(JaT}p!<3u`I8YS6yGE&siCe9J)SAdEWRIKS^tPJF*V_x z{{AKe!<<%6=zn|967!%S9eU_@TH7JLj?bvq?npuGpnrDuONQk>qbTco5h?n$sx{o~I?>{&6P5h69$<=~7J@vC zE70vrR`|24X=Y_*r!>X(T=%Dl93*qk4+PwS0qh6_(tpHcBr(>*+pjx0hDqrIEZ#E< zedfwcgfgX^;@={sovmu>--H+7Pd_#UET=D?;1Xb%*$yN<*j>hnyA{?Y9@+!35J~a6 zbj&d#BEy@;h$I}Xw4pY+eepfN-f0@W87JOA$pHvtxq`k?7<~BOxwgZvEZZ=4Ln-9a zx-=FQ^M3%D0Nr2riDsbG8J&YX%M_ruA~@X90{9iVtYcbKBy>o-wb9fwWAn1|_Xc8* zCC>G7v^#CLj-Qh>Az~uwDPsx(?%tx1nsv2avE>DC5ToeYuCA`5`FJKLw;|BSBn>Io z4d2dptflNkmIucoNa5xtE~zJ`!ZNdwg1f0^T7P$Z9E9F=egf$~<9_qZeB)!kcsO6_ zoofB#4Z;eu42HxX0pCe;74pGq6*^zy5DaaFuqQr%VpVW|AS-T%)r^G$*nd9ldyFbabxg}$v0Ho1W%f9;|?McRP`d~V4T1%B~ zZ-1(hkO37w-|^ApSsrd~K`NCtR=#8RmL_6s+gyjT!`n+oy9}+>esi4(@J!Bn&#JSM z8fZK>VzpFR8H<5N`JVakbBKaM{kf%&3>`Z_z!t;{Q@zOf6*Xj>(#fQma|&{CiP(l= zx%%g+cf8Q!syFzkI<}pi-RObscFzaKNPir`TPU{KgKn(DZ$$sHBo3PJG6v-t(ivZ2 zXYb`&gS!bSLcbGNpJ@1=Np1Qlc}Z7Sjjs()XCsPCjBM)SO-WdFm0&PIJMc>{!BY(c zDN^{>_p^Q|LXV>MEGURkGi(M125h92GzLAY90B)7EUh}js$HA11P0V9PV!$o=zp+x zE`@y(Yi$B$p5ZFk}jH-|^Mdz~xV&<_53PM)gTQ}GYGJVQMU%^NlU#XV4o<}(V zNtQUqTmBfni}jYOog$*QRW66#eSgNz($=ldop8Z>5s+arc)^$v+siDDwUKzPE{om5 zP!vFKA9wdB96|r@?!6IvhX0p6#{l4#A_^h^Ktnv-`!|UH44UVxDkTc2oyI@?J40+L zt*QV3cu@iX0U-du>wiYlI{^S(nE`;azd7+{003Byzz!AuzX{^z+Aum++3K<(so5?+-6R@N6Bl7)cbzP#I+vT#7;+8|^vdMCv(X7XeeulPE?6 zTq_f6a@ zM?Y`g?T^j3_Z?xxgZ~HN^K|Tw14YNFXL;R`ExOTYw8IAl<3gPLAkgb#0$XS?`Xzk7 zy2OrGp;YM0=DfiMu1}u=(RVo_DS_EpnubF^Er$=+6a|u z4y|&&cwfZTp1u6@hx>uyhh5#y23BW8c5b-WJ=ZYP_xt&Di<*0sOShy!>zD*o5^#@r zXG{Z~+h4lZ8>Mp=3u&*@s#sSwd zU2Q$LQ5KdoEs8t90lwe`T(z_lAJMP(!8pjn^NEd~EzFG`K7V7|?m;IE1*7_?;wp4J zCouw!Wuw?*RLl3nG_a0}l_l+Fz$6^N$TL0lQbv@9jdLz587r`#16r({)1A2XZCiU!;3a zhQo6l1JAZ`O%D=yJ3V-vT=YZ&w^>KybDZo06W}`xet%vKt$k~#4ze+|$kPEF9_KgH zyTAdR_!dM{f)4hRd1Z{Oxm*$=&yI5%E&W~*=LcG|edvgaAoFfisA+oa!Kql{L5OtZ 
z&ucr7j(BG86O&0ZW*VvBKV{J(pIEK4*Ek3l2Kz92y?LzxHg`L#K%70|`MUzZeV=|c%1(-wzFINU>C%Q`}A z9dPUI<_zluH$gfuOzQ$!wCz|@OKI6PR9JA-;a?HscgD}Xs}0;}RGxH0#)I}GYOId? ztZ#7jao~;cgo)2xhlt6L=YHYBT(gM*arW$BIDd;Le3-v+Q$uRsp%iUGxmIkeVJ>a+ zKbn8{D;W1bkM?x&j?A-eIee-iRFr7?O>S#$78^1)xRDqiCgghm(&_&RfwQx`PoB>I zZGWXB-RQwo9FKTDAGzTq)vt%9Lzl-Mw6#wswn@Wd8P}3s6VIiBCOYbeKI;Y6_dNIS z`hV*?^L099uFtl}r@LMpm7m|iI7d2R43%zLwV4B8N2RGlqEJ{NmxMO7iEuR4_ymA9=D%6)0`WOM50dYC;IqjRUX&W3`xx^ z1~+dK+j2xUaz*9FLYDgL0kRcd0*kgu&=tzGFp|m&2~)@z5njF$%$3Sb>Sx^&9#B2te1GKI zzrQ_gELY4;-^34hE=^@0^PCful0+t%uV?PbG9C6tpgk-$;|NqPDW)~L$)=PH>V(N= ziJzj9o|2mJ`S^odJRM|qdkei2*lr6v_Z@Te)o>+#H^cL9{V|3yL(T(A3x*S{j@7j| zlV_o#99Vb5nU8Wz3#4h#h+M+e(|<(Y2VSy~wQ_Mt)#>h?Ea90=XpuFFIfMO%y}&MW zZ&*UMr2Vb|KB%G7jQAoHg<*}E$c~nPB0B_)L1Gpav;OUp6oRI2Js>NjTy4qZ+hs7= z8cOR%=_=2}^6BP}E-gPvTfWE?E6Ikg;<*^F*piUqOcV~b!uG%zHnT-xNq>X{YrA)k zS~?PvM)WcV|MQcDC!g&CqAgs*NxWRo9)zAy=g6~#5T6}mte8-M70ahc+Qo^H@4 zYWqUXYLIvAqXoh5@aX6j(}uC7E3eVm$y1D-UQKj9d&Vvtv;d-k@Wyj|wN<)=v!G*} z+=5rmE;fY2N}E`V!s(v5eEx7J@ZD(vGhjU72SEYre7n*j_AXF_I#O1*ygfJ~U4+uP z48;VbfPuR_8#9hjU4OR2<$1*Y)9k5+ZoBDaE*f{|QG-zkNFFaHNkYDaZp4HsQW;k0 z_gTW`bA`22lyzu9LIFA(YEf^?{o@7ovPHwJ)vzeFP!WV@TJdj%XA^mu*7kw!pO@ z&xsI_UKNN(#YlQ+k%Wkf+bK`+hB9zxw@cUT*>y{G1ng8(+Hi0t9j2!DoIfA+`rbNC ze!$OOvDd|;d6u*giiD;}5a^)3qw73-)tJPoTigqO9Ey^ffn$Rc){#~tqnpO625l8^ z(%MZEU!bRQtA8!hl>Oohc1O&?y2v4#)3KZ1)FvC(g;sGEgZs8E&?WBe8Tz@#@LD#b zq9}_iM`KM!HjY|IE9?;XnTLSQ2pjd&o>_>#U@D=f$ zZ6D(EOn=#?NYrt$>4IwoP4yphD5&mSf;(^+rVmyqL?znuTf5$*adr6{0xBKeX>`Mv z7l`cWIS5ZJU$#iP>d3ys@|-99ka%viPCU5u5&0F2#Ssi6>Jh?)Uc~a@%yVfknncsL z^dK7ARuC|t2XxJ-Bqfp`mExh3O1EaHE1Jg$v40^k%URI4F*A!nrWpk%Q7f#TnqSAA z<_+WX&M5q~e|EMJ)WpqY;LHqJD=Lo>31)e2`xE=v7>l?)vq0!Oi&Ri8ylZq#eh;TD zZ8giz1qa>v-e&JB)sO%i*!z8xw#@qJj9HD@QZ#QjbkL6mVtwuWm4|c<;g8NLCQ*8O z`+v%~*&UYtFp7^Ulzjn(&LZW+!OnbiOFQl2Wpz&x79cYY#{IIl3uhb(N4)4_MQUz{ zWh(e6)gCUbR8yt4B*S`m-252>+tm}!6};o{PW1pzbQ|O~`_`E!iM3o89da$GjV?q} zg2mLv?;H@LMQh4p?CYSt9JnZV&6QL2m4C^AVVr2y!KGr|uj2y;UMD?ioHL4<-3>4(`6iHUFGdWv)K8wQNBWr 
z7Q@nv@pLt4n>tGF>wAD7Xh0&lX)uS?6#-#)?d(#1V7`1~q6-!=h_EoC^MNJr;&46C z0XhW;b@)X1`z-0*TLMU#3N`Q}Sui0y4ijWaa)#)l?=eY=0&Ge>9kcit-- z7G+gnLtUDIa3%o+AM!-@X5Hfyf%+(jxiwzVj%GqbO=IH95}Az{#|;}?;J#*l_YC8)%t z%w4R5ftnzSLmQJIr!abSz?qpxT!n_adR+gRPNb~NVHje%$#sVgjIVyET7Tx1ulU1w z;EO|3oK`+c91AmrLXxOD)uj)O;@G1uU5EUOq-$sboutf)q!^+gO~%_Uj&ntu$1Co0 zjbPj<@F|Wut+*z{qD*GlfW#hObg&!uaV%6J;REBsWOo3R7tNLYmBvxInX7j9>~P>- zX?vpLx8T@v1&WSZ5<9vRE`NmSAc_iZ49XA-ZKR4yNCaweR4|yLN(R*h!!$(PI3KX3 zA5hB~@B{*Q-a%jJB>|Vg6?l34N;HHzyiBZOru?C9E^)vw3R~<14IxWsRxBn?9f|=; zV_t&>B1=;Is|vey$P&6=WJbgg8W49DhfY1EsQ-S*LnX zhzU<9JTRKt;;}73)qKO`Axdgizz62Z_k95Pb06JaY2iC=qlH}rJ*h0ZN@1|dd_6vQ1b_m-};83Vf~*QxE9t59h--9$!;nSz?9h)ZsyPewTsoSKG6B^;aUbDEG0` zbU4Dq6w%hJR}(zZMSpd+o{L*UKc2*8q9|i=A&q?+O9VJdiQGUk5ReRhbV_1h#Ftj>XTG#46BzEr?#OS^)ST89gjI7|C(NS3*xnpN2fc-oou=7 zqwiNxqb-5_LX7(-=+@+Ql|iy}Qo7qca0J~!jmZZzmGkq|`hNfk@7!_tTkL?17FqZ@ zGUdB{&5%tnTR&LoP*esMGKaoqlcR(@P3(G8SuXj$?(UqATIvsyn)+TQ{_dMhRLn1< zfq~PnbH}`B;5C70B7Ke1$ttXz!I&eHrKOtb{XMUpGNs_6O>_^j&vRwP-Biq#C1fTW zct3Oy#7OmnS%34GIG+iSgjxd)`(54 zyqP?s(vXkNvZILV2dEB$_BJEqaF}(M?^#0346P?!SXwaMzgjQpUu&?DDZ#lf<^Sv& z-<9KI*Uinj<0%Rpvi1kTP`mb|S7m{NG`3Ihow+dj@qZjs|JXRo`gr9EyMu6j-0IMq z`*y3ZuZg)=&PHhZuD@o%!V16a(Up^Sx+_+K4o!15rL(9)d^9Ta{9*onto~Yfdugfl z{xOdVheCB^z=pC^JpnOE5m=b!ls&B*-x7;`&F=jXHP+5J4$P+EHIop!dsOydL zxQGsTbAOgv#=ia5Q^ViOH{ebsI4afqfV)*KK!g+DzKrd(su#Fy%h?R@P1I_dzHbZGd4G1%JFf)Z?2Q=7*a?e%xNWs&?CNuWsuJ zP9IDW?U`;rNN=&vU8JQx>r!a5LN=pllQ~LG_6YBw$daaLlb3)&u*3pX)bQ8@BVSe{ z+w1%N`mx6g=Pk-d{V@-jyfUE)g)V~_aC1Yg(BZ6MiB@OJC0d&Y&%DIF$JkD^VjwY1 z>3>oA+>g9!ww6`Sf}UnLzg=5qH#89%Xrc$hc=}H+|GbT1-0=6DRJ!>SJ#EnQVv6qn zi1K-rX*BU$ZVJV6hzu+Q7J>3uT$$A`S9i$Izysf;!59?|;X%So*&*sFIwV>$tUcfp z^NC0!)2k=pt|MQP9?K#xQ^Zm-mr<)=FMmao>)V54NMD0dcE+9EbQdel;M`B84tqE;b#? 
zPh`&>2wBhK8xs1G42c%)*#fM^uRCXgsmfEhF1NX;yp~8>M#F?92`sgPLyq>YQh!wK z46Ywo#YCQ8nsk^z$(_ig2sf=n4&&82Y^3v`1tlqm(l;{zO;cV=0WQ`>?!@1txVpMr z6{?l>+cWA(S}}UaeL)`vBb@#iz`E&nqWA0E%r8SPreV*TwGmxUDz=tSks!AYi6YzM zrkpl0!(TD?3yMhUYpS9YrR4BUTz_^{pUM^-ooV7>Dy4Hz^yeP`+`P!?2_9BWmV821 zaRyW04?@A@;R>sB?r!4{;euov>_oMdl@~1ZFP$;S+b9_&kdvGJE?1~sKpL1ri{8L0 z?k>m)?=~eB2KJR+ayab&>}oIMmpCN2Gk2XWG(3{`(c+`5ms=Qe@(dv<8GjYfJrq-z zp+;#!V*ae7!d9mr+FM^jgOyb60TAQZBh{!MN+p~2c_r7Qno+&T8Oau$SF-6-Zo_z5 zLXW=Yf;NIEb(T!$QwMKgje!d5&+WsFufgwdQ7;j}f>hH9^Df;u|DCQS=J#IFJ8ZP+ z0jGD}W3+n3Eo|148A+2hRl{0 zB9$#<-rxX(a%~zq_gbZQITJ-+%&wiw7;o%^D}l5XUh%#&p6uQ?F!WgnCt6(#g>rp8 zs2F*sMwt+eqMCl>|9^#~I>~2ka%A9VQT&0tHN&q-Z(qL$y2@RIJm?qojcAjY5xeA1 zAbN_mmFX};;}v9g(MUv7gbBCV@gil>6r1#;@;s?VY?QuESze=keFo0NX|;qFWEsNC zAX_f9yj81;db`Y(|Dm_hM4phcfgm;5`;uk-j6ZxEdfLcPVt>1ZWF#^?aY=RwDa&Qt zlC`8`zjy?)>#+g}rWD*$w^iVpKKZ`mTVkTo36gxTK1D$RS#nmAv`m_&w(b>bS?-^$ z1U7d&)>8)(9MlTh>TO#fo#mDg1yL-b*l;VWdP$iOa+7F#`Fm#*q>$$x5lwF~WX3Y= zwx@u<77k%eaeuoFCpHP_JeHcrxQ~+d{jGHYrL9CHO=O#PfWhL7&&W2RY03U?I>W5^{*&38{7BZ7Lh*Wcf}o2qAp1h9Zp#SB zi)uo|N2eZ4eLjCBdGCd(rQvh5fbnsM>mb`z%bYrlt>@n3Gd0J9?l)_HJFxx2=kwZs za!tYm`ns&Q=CkhFwauH0F4J&!9-V5!o3)oH3xB>eor_wUAD^SN~QO-rSNrcP5EJ0?=4l4!WNbG526p*I=QR0} zK1S>_2T)j8Tl1t<1W04 z2&IYP)tD@g`uPO2B+TJ8I7K*>D(13`)YP>_f{NzG1qJLfmFB=jm4Z+)1GUz6=5Lxq z)mt>>G|zZwFa>4?utB5Ho{3wBiH#UjN03IKg;#nLq0SEHEaUXvXSws&s}&llf`0+t zPt-i-DALQzs*ToBE|<&;n**c7n)G(Cuh69_B2YttrG>hG*hXpJV`MNEXu>`5Y`I`_)B#`<#;Kh`DeD{dXdWuS46V9soYJe5?qW1Pw}Iy@Wl9c>*K#)r>lkE#l) z)LB$Hke#rxu+)fj>DO)Os~a`am=vsB)?Kuw9gi=?1RnzpI2D!J_fRJ2Yur?1aA`e` zZM=Azi3egTCDUKL*ZVs z$dFOm=@qME6LeM3!fWi#g^CP;G1#kwS@vAvA4Z}fP1&LRy*|w=*m14cPjAE;p~#)hUKF&N z+~?F`Jo^Ra{DE`!GMTot3}aLzb&ReUHTI&wVR9YO6mj%q;O9}u)B*+c&@crI9UepD zV?c3Bh-`LwISX4r!hhVagJ9u^2x>i(hxrIihsdQ^hN}+WQ7R$cy^Ap*U7c-1^Sn58 zARD59fGS@FwX;~JUZpH}{lRN%gup*9b@!1WTbtjV|xcSc*6Q^wyaMt}a8lAP33Cf7Q1$Zb$i zEFv47@k7EyP?kxX{7f_ye+VX4QPN!}aX9TW?P#%^V!s<`lcWYe5a1|SNj)6+y^1!F zLm54Wc4B7p`Dx>&o}-WH9hCngQNJqW8B=7F?@{nxHrA9i?6cOGJ+KhkZmLCt+f5_6 
zMK0aCjxm|xL4P`nVUNBL|B#q8tDB-Rt*{Mu;gs6tbd_I?Wi>ZHI!Zb#z|qEUV+K)i zHM9w#6%sV4ICajzy1tS2kFN7$jJfjy3!$#dDUCC=tVl`lF&0G!>u0b3Tb0L@SWY9+ z5A4?%saxwEz9i}D5GVM+Cs0%HqC{i09lpi1}xRqul;5u~X<^n}+xFlU=y&Cndy!a;(>Y=LivpcPVOAbfQzz6O~<*4S& z*(>UG1cg*R#XPhNM2|0Ws`*qaTLkk%p%M-zJ>6sfp2tnBOMk&G_#3S};9?te%tBEAW94tVlJdcAheDSEshtF^ZSYX6cv(3ayoOZXM2+NR6d=HS;QX zg#bxFw!c5F3JM%tjG<(g{aEIF)b8z)t6Gs9V%ds*VYU-!bD*6vVCZ-YvxC@94jKCO#gTw%W^?-m(D^ciHcdr_7}AHuFVt)Z;Ow`az&f#kz*-|7%+>K;*Lk{qJl5GG$i|0$+L zXbFGzM@929SN!Iv4i`+HS}amOZPdzZ1_1?LS+Zk`JzU&8kS&bQ{*ugzzK-c{{Uos= zlo-rm7Ce!|&4KZB#Vy{ClB%B7KP@N(S4)F;cT42vwKZ5~x;q#E)-;)NDy64{W-u;lh<)(ibm@MYlZ3W{X3mAvvoIwW)X#6;cAht6v zFD96_{s}vHk8M4uI??3i8b6&#Ca!~}EFPQGzGmV02(K!^?q!fg7kBlVF?JQ;<32go z8SUTxX6P;2wn9sLywa914%zbqF@#L}J?a%E<*JK`m+X?S+(C+Pe^G1UJM9uTc!Fi8B=*R&_ zHrG&-Ut-8l0oMQbRs`NP_f#2Y+VcWE1O^Ncar}O(U=N)Hqjh{ufJ=A?v_OA6!?>!+ zs~sn~KMkYoDhqg#SNB=Eq^VcRPQ;4#aW(P@|8&cobTaWUr1Zw12wL|!f%ONnK$=Pq z-kv+K`xL63K8hPijx2$CT|n!SpRvDFz0_`O*_7XwW>WPUN6ohW`B`Gf{}h^~H-Lhv$G45Lv7pA}V3KzU?L3|vhgP}vU%JvD#$NYeHctRbhH zSBOiXQb5e>z3Lr~BpiiL=X{eR&dsAu-Z5xj8z~l6l67l?UG(jxr@z+k51L;V-{<`R&}VC3TG+>cRQ{}`-*&)J>50np zEqhGG@8N0dwuEGsP!@j$=?S9esia!mE{87{PWL0|NQgMKmYvm&p-eCU%7vW{pX*5{`u#hfByOB zehmK z{mGnfiw65)K%0N3SbqL7CtaG7!O24lC!XaT;eL@x*Q3hI*Pg)-68??fR3vBc=+HGh zJmVX_W7xW?0+rY7<(NDZzktrPkV)VKeN}%tAMyQD@Nfss&N*h!IIGbkq;ZlUsIz9~ z&q(?NqTrn7hRQRqKxBnIePHE)1p3uZQ_L4a*^!cMTY$bpr<78YdkReh6S}jr7#PB0)^MLL*l6qq3dSLTb zVWXl32kJ|+fdJ=+G9J3tXpY5JHkX^wRu1%sa~_!_K^u`c#gn-@XGGk-*VqDMvhTr{ zu?#%VqI!68lE>2X;P9)gpnA;BP3DiH2eAu_d~4@OK+r7oW?$?B(#g(Bg<6guIkYt3htjF@t{61 zW31vfQa8A=?EhDOE>u_P>=@r8Q*o(tWZ{2vpa^^@SC|kJ%Yz&8AqSPGgm}*Xvl{tX zDNr&d8g_^m zKWFS>w}K1bhaAgq%FNwVIw?p!07TDZXr`A&GYN!eW9qG+oP2*{Wn*I-U}b|}m?j6q`7s(PV}WeMzxd((_O(*e zpgPfsCe<;m_P}E|@p!tRq2y{*(3^GyGAsDZh>c_oo+NS&Kkt23Wxdsb?AZMWYMYZr z#g=F}h!+{cHKUmm4?*h*GKTL?bgL_k@hgALcSKAa!5BdwaMz;I8?16E=iVkVhYx(|pVfKPM6M)g+wjzSn+*19KsKCv{4*jaz>^lB} zrg7qQ<~3QxS7yL@B=Boc(8dW39lLN2`2-CX`|Yu(8)Sdlr@+DLx}4%q&F!&S 
zuOm(sf_HOAIb#S_ut;H`=%Rn*IR`t9DUSZjaY|0_(QPdEj@J%X*d3{O1QZSNbO7U` z+WST<6xgr9ow5*g2O>QP7&D=GKNx5^@w{Y9c!xdt z*e@aWKUe8|c^`N?U@_j5wfITxi0H;}Skuy_H511uEjp^z>noh1h*wvo+xPTE#DO_1N&No*-)!7^Nx?4Bg8c+8b;r>}J{+QD7>B-*ly#p^@hj$@W z(;L9;rd^4qRH9nTI9nd52T>FNAp{P#4%EX|6xaaO#Mu`g5OaU9K&K>n8CGAJQGCmu z41(9PFVybT3SSdY)d*Key-cle%U%W+s3=j?i0KC}iq;^+@(_a>KyJfm22JezDi=fc zZ}IuHBpzU6jAC5~$=(}LofjUPR(|zUVse9lGZ#>nfCVAJlh1d;Jj(0BrPb zA3Lnlbe_A<>b!pflRv{5_vq&{4)k3CRRLcEkRmAtz>@|_I@gW8-kBN+@4xy8CTX&I zPGl`&c{Sdyv1L6Gu5`8QbRJ(n*+1)QGIjlOd9SvHEzQZFf>cK!ugd=o#s&|I3{`9l zvfmGjEgZ7V35wlEODvrd#7*rb-Gex=uRN^GL864lib{Wlrsj7$t7A7MRKfWfqvP>_ zR?6>khiM}3sce&qdg(OX~{_P+S(Pbr+xJxfAYVPVAk z0E!**8~dF+jW?p7GRe!?lRM8u>3bL1b|NMQQNx5Z;lzkC^u>@I2;}yn@Yup22WNE} zfpaHduGD`+Cr^a+v%o+%c=M3!p_7jO-v$Cg-A|j78=njI*9JtOj3C?r2ao{l>c-7v zp!YpsssghCLbe{1vMaPW>#n++!BD_Sk{q88rO>0Sil1WPzSPuPTPNXxIN-ue?U7;rNb+9|~6Uveb3yox)BOSF>J+NZG%)ZniPvm>I1vf2t%wO2dc~+c6U;W2MaIlH&67K0>h1?cn>P z*YJB=Y7F|5@-4hAtW_L3uRHIUQun1txr0$h3-?11ItFM_2mUJX1JGLWMp|DF*C3tv zAE6Jl==tSj_dHFk-Q6I)bh(oo;}?&7btiwm&ws;Z6NVXva!P=NRaedvW*5qzNIwJF zr-jA(EVmW~dG92=l=5x;lQ(ZLM^?(154)!V8-fj=4-l5v5eeTBDdJSkF6?NRcVoGF z71+LspC52|TM?el7|OHftR}+G-vqDd1BFEnOcR0N41C=9^tt`S+2J?EP2PT%>`{N- zTONq_Hu|dc`RUX9dz9o(8PEgJ5~bK#RR^(;=m7NzTE#s)jfU5H%xge;?{8%0oIH(sY&*caGF_*e zlb@Rpo+-Eu6vk->eHc1f3A=qazx{u#vAh)XKhJJyJx>Q6PBrkoNLa*W8?>tz{@yQZ zEDWj=p<1kIyV_zJ{x{W=#D2Zq=TQGJiHO>334=8pGsdk$P+en3GE{vK$HhFgvbPLN z;9aZNs9X@8dHTc+Nj9r^5Jo#xlE4H^GnhO=7+l77&%&Xb#8^KQ>ui`q=h=THap^OM zC?$cfS$z?UT0p&{oUQ=;(lW(1is$~}ONG&QPYY}~XE>QlO^KM(7}e@3P!cXCm_4){ z1l#KBm#m-M-!o-8n5v@xG}2H~O7NxzgYtly7K1=1?A;nr>UvMP@}KaB#&(}8F+x!@ z=#e?=?8nYWoWmz~MTUyyBv*e65VnQgUq)(jBSFBHItuN25nt=oHXCDl`4*WbT(JX< zK2BUWyFuC%|K1FI{^|VpJ*M?QX?_ift0oLj3Cgb=2(gF$9Sk|bb{5!Gu< zu{~MMX}8MtU@XY~XxA)S{t;W&#NG)sB09`X6e4V~=K`DC3V{@8EtJK6GM9{vg$z{y z$t+kKydadR$Xz}FeGgr&YN;Yh4|F)uvw#ZuA>H?zkX*@*2>Oli=`6m>Ufj&d@uwx# zPDg(1Z*n{FV!M2#4iJCOe8Oso^uX&t735&{(i=23cc>V$J5i;C=_}Q`KPU|NXZ#+Z 
zv-4mu%}@L|j+@k4*QGpA4G@>YYoVl2ATc0QK^VwTfSOJ59`E?7>o~CC*q@lmR=Gm^ z&ituCa6ZZ@#uh;FhkcM0uw^1!nV&B4&5uFq`M%W7%21?5pk9CF%YEtr!ltklN*g6A zZ^nJ?EGs6c$GTKLGU?;NP{2ZoNLkz9(>F}8TDXU9E;;ng;Xy%SFak)5d}HwM0OgwC zgTT;r_*Q`ys>>WNX5)`21Sy6ncdOuPIo?^5?$ z1bM=sW`uPpE?z$nagaW3#{pvucw)%R9uU)w?5g2i1Caf}|7_lAsP;OaTXGu!0TBhO z%avpDXJ4bQeIdH{$7nm9L&Dnceh*@>KgOUcvj7lNVYz?u+JFjSc=jjbQcU}{&145y zY^oz&g^+kf6)#6wY!TC94o~`>js({p_82(%ZAf15ANk-KwZp;C`O1tS1k5O_xE5Cy zrV5W!78ZiA*mx^8q)uxz4B{n@%#;0BKmG?}-xytq6MXq%b7Es++b_0lO_B*GwmGqF z+fF8Wnb?1}ZQDD)-T#-}Purins_(7tK6UGKozrQrKR|7F)f!`uInP)tKEy?HB)Cd} zwIFKPJUjq64#ELm+no`nn9(ccdH?#_@Zy(X6oOMVrDJl|9h9g{NN2xnVc)rE>A%aT z{W)QmLhTjY4kPRx7w{MaALW1jE{3HC_aYsK$qauz%h~YN+n?0cV=%!#i;M&FkHh}O zxHfH%NHMcqUiVq2W%DF-6kCe#p+e((Ko7mwWxyc66mkhdchA8v5}oM+Z0{ zuFxV1LLKri<4~fIcs&mMJh|~x!1z)EDg1w|IWU#E3r@kL#iBHifr=uhi`}4-CH|0} z`?^mZ``gF-###qmgm8(0F{T+=0tvqv1nddcYAkm{{!P3_Z_E#M_aw?EGtnOVZaopv zb5=rB19L8RsG}gEPMBB~Xqx)!eyax38p*Aay%H6q-k9sLsC24L4Cyfb(k0D?#PxqC z3Vla>29J7SVEPbhMJ54{ha#WmgZe-T$_fb2jO#-YLa|rW6J+5Fb$vt z1&0Ap@;!^K`$p2D1b;v2KbGf!f8Ku$Mw2pgM-@L=&J;6QB~TPy{#DZa_yvx&LJ9}P zKiDjzzUIY?%UCVVx)Sf{7(htmv1_O>>vQ>Q-+GJebd$8CNlw16`_1UlHVn}o+!=0k zz=$x4|EoFITowb+>8}hx6~f&>z%lq;1@RzIXAS1SgU3Yr@kw-Y4y=Y z3m#KH@lDz<+Zt2%KBb9v(!zR5sFvWY4&ocbr+whrmi29P>)sgC0E$5X2lGte3RGGL zVa6PZY@Q~lTC^D&2odkkFmv>UjvEb6TUT5w{}W~4>dcg}fx zd$l*=X2JTP@N4`d0$l*Qsvdude4*BxVu|&$F!nQ|^AN^yn{EXxJjG9({nteKj;*ST zLB8~@jzhk)L@8?Cg1^2~=7IHOAcIgUs8)0VffW?A;dl;S_ElZ`l_VsLVo3#T#hFY{>xV`@^-Jul`wuRvef_K!U2CW*EhmCy@JJy z-b!A>rg0hNYb==E>R6yQ!}#DYkfxz;oUko2J{T)yuhE|l2}(^{?yv#A?5I~5LnGEspmnO) zGZeObHkG(^E7do)Z#PK2R-$D;7!ACR@S(5x)KMIqQ1&G!?JyV9pE~o(b3)4#BA=ai z*Z1c5I3?6x{7wDFf1jdl^G6r2T40A=PQVz&K}X>TK`%r9 zCpLUDB>3e|L}*ftSVh#btFlfSom(VF(VJIkCk|PwM>sg5&3Hb~#l`r>r)onseC3aj z-2^%oCro-~(F}i+czaqQ9Y0agzWNLYo8@li7bwO3 zbfyIP#$E=bDecls9|t!~&%Db1{9?D>ds8hKz(}@JvaF!24lfQ+gX0XOo&_1n5oq-X zI^{tpcp8y3koZ6L{u1H=tcB_z%~F2HQfZ{Y$_uuFJpg~`ancf!fJ=ZwnSFzADE7`# zc0T(VmPf5QMh$uU&IKR=w|@Y_KQqumQ3X3dQoE=YgK2*03AE*@*aK#ciHJtwBS8Jz 
zPos3Y0!J<2>Nf87o>Vhy8} zLM$Ts(bzsFzX8Q2oaA36)C{SO6G zpz(j3U#n}w0~ncvDyBZH;~QhSnjE|mSNh(#-!}VnIPFqA-xPSwFK?yqG*lx?Z3=&v zFQC#12fQv!IJPsAVQL9She{CYAV{e|=mwm@K;e)iR7zHF=rC`rmN1y9sR1_rK)??# zfR9>;3AGSSP-|9}fffWcF~wTCm+13sZt;KYn~f@zqFimSre!2LvdI)?SaFgy^dPOc z)X{Y2ug!2^x|lV%Rhg8uSrT#|*zI&4}l~GfC7sWA|a#Jol@)4p~b*x;k;E889+7lrCkylU6#fpl zzw5}8tbo^x`2(JXAS5}&Oi}2VL9!YwE*m;U>s<>v2qa!bg7=x6TK5*a+e!TSKuSre@4<8A<%l*G;ruZT*|=#=(E@pwMP5&!FQ$e2QqIIyNWi&L_anhL9rUVw8e1 z04szXfG+PWQ{07cn%*9?I7?kt;14?$OBeYf=SoMB78V)NbA5u9iP%XQhm0o|0j_iS; z4fCXqwmJ#iX%1U771i)l4Jf(>m^i9>Hkv4HW1h|jFVjVJ5-m&vczh&54AcmK_GJfe;Gu>0&lAeK z)r~0E9pl5p1Q%|J0+3<;@HQfq0Z~$;se$VJg_YC-3i{w%SztZYjIV!d9&i_qvuCq> zV&~U#!C&=)hF4?X6^MU(&`2{>*E_ua+I;BNAL-`K$&4x#4P|NcBF%uxhMa?yV}?8d zCuEO02E3y-!P3t2!Cu4L#8iA*fnj3&J4f#e1*{RLVF36*e@O7bq%bg>KrtGrDBe7K zX8P$%R8OAOsWML1QH6i-+Fd#WovZl4&D`b-QDVr&_CYwl?bf@FqvE2G28PguwPvSB zFL7Q&^r@+|=fOZoawnXAEA!(EK#ww`y(paM0%?WIZJqArE8oR{Tp<5z27XW zCqC@6iUgtP8<2m<&c2?I-g|FV&hIBwYy3_+CI;h)o7kEmMi|_F9APP~L6!(v86hx3 zsQt^{0i+eU7Rc(qQ=p#VvmkoRun}9j_wq-}y!``z2$5AWu(aS^c(RlU<^K!u12A}2 zJ>UCf5LI(Q!K&f9ry2Ow2f}Sp-XD#9cQ6nC4fOpBd((|u-Rjt5Qt&nTtN~h0Hz58CCVZoasU1_gqxHQEw>tSn5;%t~vJ-p1$AW759iJ6PO0e z;Sxu7oX>v>_`U69kE?@GR#`(d00y5|1`V&<0u-y5;lpPUE6kn(>qF;+epceehEH>& zt6+T%W=HwZ-(Ff7j6e8#(-{OVk=)dTv>vyke5{%NaWHqBf=EmE$P=L;y!eevs~<;d zfn4KHD_LoE?`f|lqz;?J(2=VdOhAsAc6@hrup56M3|FXMt9Pb)pY_N6s)u8u>kF}M zm@~HOeSgiJ#xSjD;#{G%<+Z1}s;BA^)Hi(2uM{Y)R8CF_bCPem?Nk;q<269R9jb|9 zPu!-U(@RAGmt;jpy&s#FMtC33_O-F3d9H#k|W8KK!b!#5_-~FP#rT;KKaC*!Z_aIDx zL)Hz54uI_sWC^Pe!(k2#1CUhYk?4?Y>VK?uulLr8g<_hS3u!N6U)(cnNqJrEf$@Ju zlj;o$yG&toUEc(q|1sy?9?-D<3?JfoAMk5#YukT65x0jb15gzB4;s;j2}gmElLW%T z0>ItH#wsQ4l!Z0&gCZ*Uz7tO`1R1{g*zWEqLZ6FK>fyl5*w-PraJ{|H+JIb*p7?Lc z$Jd|cF6YUrs{CJ#U=dL{NDB2)gVKM5AiyT~c6WPOuNQPp9nJw*&otCkuw10H%ws6m-i#C>5lfyyuN zQB%rL4!Ye)jn}~&v9uXq@*8)*{l3*DrrR46xHoL5AnH2*nFCuBe_c1v`SE``NL<5M z`}e?%T6P*9#~=JhOB^WgR45V@1hfA0h5hMSSUh=6M91sX()obv4U8`x4tr3T#m7S7 z7U_G=GIm||vH;VY;ieqZOJwTIB_M0<)+G=+PM#7K0EVFsqHD{;yYT%0x1-v!P8jR} 
zckTR#LCXb(8Oyt!$%rqU$%cP_kIK%sbnW=B4$LjQUI??%S?WxXFa$sur4T$ql^Qcw zT`hz=_wp;ZW~~z3m6r`UvW%FAQQ6HSM*@L?@oHx>s6Ok#*W z!V2;S1Q=vKxJ%(JSg}a{vv5pm`BeMK+vc}_!Q`e7G%o!;_GFbIa`k_=)3i?l&&={?y2gnqE8P9(XeOBu2>Pc$Uf%eU3-UA{CaYEJ}K7(8>4=YTlmZ%5vG*IRy zq|fW^ySm4BBfT^SEwRq~dvFQed8qK;E{uLZ{#nNBa{+5g+7L$b7wjOjRSa5PL*Bq zSVi){wuNc)94(-dE^quDehmH~jed+)8OuMM(E@W*1-X8E8V#@`e6BjA*oRW#RYHP9 zGD`Fm2M6a@QY$OLpLZpM!=d_F%9Q6~tX=TFYitSMPtuIHUy3qXv*7@8-J5$M=SC3^ zKg3jvP=bR0vGRXP4mALkTu|Ki_FRo zay3!$SzU7-tn+UGhfxYl0|nlM;fR63oMITlsf;+?BvbK$?e(AELs>Zaay37r)(1kV zK&aid_4fK-NLbBLk#tEZ|JxS2Fp_{Y$asuSXd4fdw!eQ1*g2~B*8gYKkr&yLC8zl zq~m~ah!cM+oFN%_fl2pkhFi8}&k>+Z4xJS)-iE642ZL1Wg;9tv=zn>3?7m7a{m*U$ zC6{}R*>{f=53OJK)3BL6VE0}0d1vR}nuwFRYSM(_a0*~x)!>{S;@KkGgcc`?b6s^9 z|Kap?|1}{%?b4O{C&axU@4GyUCD86SRX3SY$BcgfSJn;Q(=c5bzYze%suzaHKC=m& zZX{jjNDJ$qX^gMC_NxY)lzOUiS4S9Ki zA}n+WEcWoc1waq{fXDcCyIoUc&-InuN+$d=@;Tq9YW~GaV+Qs%zmf2q5LnKa{P8AY z!Ql80kQO|)t4s!0=PvV0`c*>H>zv$tW{rOv)G1357JVLal@OkAVGOcD(0#m! zPFJ|7;SEXvN*&=3-N+wh-qYLY=sA9E2`UiVKyNH^G& zIC@qmqX(QV91J7pzPINjX_V?jH|ImD(5bq|_7aRfra3>IGG0Q`MsO__gqeTxt*=Bw zBjYGD4&j27%U>bTT=^&0m$az(Qz%fadKo=wEQSmW;8RLk?U1O6I^4d z^$QX(-;>R3D#?|z;#bwzN1^4A!=LEUxESN+Q_AFBEUv~lY)wz~QItLbH=$eZ8bB}G zX!_hCy0{>$02Ih9k(28Zmn?r41q`fV1XL(T5QJ!6WPEc)YN>BK7tKJrwko1hIR{6@ zBkt7|n{ub370Y;2zQ%4MQqcig%1pBOFk_xX{)&(Z0vf7Bv#OD9fVdpwU|_$6Y20u@ zORN@*h7x%g)Q{;Zgg%#LZZP$(nA|FpH76;;EIx`_0bpg2pAxuoDD-ewuk@{S#s#9c?LqVO8!MWOptfs#k`hMk! zU~(AOpy~;i*j+Uf3xd}-HWsMeUN^Mg+ialxRgofOEM{mMBYJqM_p2>E*@TkF zS`v{24V9FYHUoctdgHx0QiwqZdFH^ZqS1OvJ@t>Efm(GgRL(Bm&~ZN;1AG}jwSVAR z5pvUQq~pmkyh#={ecUfR#r!MB$^kFvY$wBHdb{`L`^BKARsA~>c`-6(%&-C#(IRlL zI#@IdbXfH5ITB=<_gjjJxm2cVS~u* zi8#W5J8|zr@Gl~i_xWq@M~5Y!N4q7v6JKlRV`z360Mwjtf8nacd=P9Ilp!fEAU8$kh1Pxw={K*(RSj6XxO&3zZggyvPn^ip@UN3qiXLb`&rydo&yF?|>D8z(>KtUFX;3cCW%3FX7j_Sx4OIuht8F3`qvdczVuJ+Ij*{@YfF7V_i~ zlZ<~z)d`fUVl1`yX? 
zfcc-_nf`ty1cB;y2#zpS=d_(d{UHcf@Mz?$C1c+J{=T>2%w(rXu%!?Ot>BDwRSe_> zXdUsg*yORHf5jK1<4zahP0$=7>$HVYiEA!G8{UVBQTQq*){zDM}fXk?aP zH~K$(CiQWHpH3MAXS)G^3$&ZdB)59->QtqkmT{FSX3u4Qy*LH=p9SECid%4W?z8#D zjP{a*jzC1Rvc$ujW1RO~bYg!A=yi4dt$_e;dm8<#23WOD1IXt*^iN_&@);iCsH@D$*|IJ`Tm@#&Zs)9_; zC+Ie3F`{yCU~;MsS9NgpW!Uya3qjb6o5e0|x_@40u(bZtcGVi+Su%h7!@{r5F{fTr zm}d1w7zJTD&Xn+`_q*@TalgwNjBk|b9Vz+n9ggb_Bm;W*8#N|kW5YD7%ctn{8rb#n z$!}E3vhtD$Tyf&r_B0;+Nh-Zrc$^I?Dur+*GaO?Qdr8dpKMTQ44Z+~*`OCNFtD-TL z4YwTb{8c$Ma{jB!|4e@zgE30h`KF!x9N6|3S-#;p9J8h|?RmTu7n@a2pv3Ev7q^bB z^E*8DAjO}0UzvgzAd}h9cd`s~yJ4|WxA=wok8sO6%Q$}j%zIgUK1=EWk+9$2j zRWA(Bf6dPSAK&Tzzm3m&0sqtZ+zSQ+-WSDI&> z)v$kToGhO~d(btgqqIFVJsmgY)9mLa;m?2LH_>1=;ZGZ9C*77b8S(TNgZE-JB0@qY zj;9*Qrz|EzHM8RD#po#Wvn$};LB~XIU*Rk0LT;ligTp7s=c4Um%@9t`fm}A{H4$ND zwl*h)vFrKXR>glK;9})1?z9Fj8U{~`fE>6@Rf=P%_dYYbJ|5(+Z7U{zF;e-mFq=|x zbVRJY;mXmedsChI!GE#qDklP;GzOXpS6$W}`S2L(m zGsTX_cQ(Myxiy+f5}ByT;{dabzAxNpawlsS(zJO%rene;1^KYD66L%vIwWO)WfH50 z-`znUAHq%-7Y{d`hhKWpmQ+~|#4IF2f)TP!k2;(5LSMLcvC)pgg{6a~OR$HC$o?t) zThflBwKIRvjuQ+OOf=#zfCGZWJ$9d`5;nkIN2j(O`0lpuo5$dx|FNIC1O8hE zEEuVZHdi4n&+n;*_>1U(4G_BINlo%GR#<0&T{n#DAALqgIN{*a+1V?F?>Aolwbz1D z95}iJ9o~o6`#ho#uXgk$G(cHpFf~%hzj)A;D*S^-`#<-=Q{Hwxh`ntp>DFHB()r2+ zpi6%ZqZ9T6Olh#6h0MKSGLEEbt%E#TRzttw7$QxQ7-)a`l`dFyuAUf?qINPZ%0ViJ z{unUvcl-w334=(Ccqi-z(jt~=+Z(=CEs!}NC;Ow@7!_2_iO}(+%q6OkvT|@=h?(ox z^FM6oQw+Rj2BOFrHAl>~@QPBYJI={gBhL^04j`iJzA-0>= zHcJYm4=~AYO970KF;z8=1Cu^#)-DZ|wcBF3Q#O^9UywLULh<%hR-@}w3heK2|Kv(qD~<03 zPX|QXs~JbnsCC8!*M5hdr1-a? z3-f=z2C)w>x1zr_XdsMFqY7chqTW6{;=N*AXaI=w*vmrvpLlc@1+MCW*(4RwXTvRC^dW#=6CouAx!`}P_}M7&QLqHB zK&bIiKbT1sSg_n9tsG3sb570YgZ=NmR}ROk(+&d%>zv`h#ztkYAH+2%|C?ZpRpFZL z!13co7rm&QkWky+5#1Dg2TaTL3f=_>#|6JXIvTr?u6qp9+^PsU1)frTN>4O7@)askuH$2Ha6&HtY zy-kcx9zC0~(XZAN?JUE)CJxuDU9Vo`jZ*N1B4n3$myL~%YSvuEm~(@0XG1x>L4g5J z&$oI~UgD=A>!P{z@{uk0ug~=2vq??OqaIF9!}k~+f3oczn{R(>6A_#QFZz%I{yPK! 
zMbB43&As@U#iI zVl{f&JYHA%qUKwkm-~X9V+_de_tWR^*e8$OMjMlE0i1tJD!28u-ievI7d@Ye*&Hrz z6-O_G3G7`DQkPSpP!={>NYS7jwG%B86$*4x5`ZZwNrk&meM3rId6RON)xyb3*r$#3 zi?aTMBnWY+Y}0wDgWu}ugXq4#vLKjW9u5b2I7k>4a7Lrqq^+3q2n!ra6%Bo~WMG;* zd5GgCjWB;v!^|Rgl_`pmAYj80KPSLF&}i+%G{4*&>5(TgUVn=mqWAXD?fS&!=mukN zhfbLKyymw>ei3}F7Wna)m#^?d3XwgjU$`E-1JMq9f8*2eL!B1fDA=*$8wtP&cEEa! zxW$GS2ALUvCYPMY+FEtnC--oMKxpn-XNH}oMbY;!op8HSSxv%r@}fI_Yj~L0*aUGC%+N-{CdHUY?vjT>K&M$q6bz9iU6LNchm%p&RDMAPzzQE0cbO)7s_0m4wB+h*Cz?Kaxahbn*P zo6R_X+d=$aFimF^Og-XQ?J!MVCw&@)`;0J59aXO4zx-E>jKClQVSftcRJU%i7th(C zM+)c6T`p}#QA;bi3=eL>(EVwdrE=8D`4$SYd}`&LCpE+B*nnb>D_4%k{x) zl*CH!xfXFXbY#(V{?jy%CXTEMLuOG7AIG4ew@5< zcsmym(|Lx!bu_5-iS5_4_x*n=m%F9&Hg)w>Odpdz2G+t7t1pry6J2&?U4m5Vg%^@x zjXr6_yrml)96uaaObK$-lwKTA)K5*3IhjgBQy`{rQxU1j=d(qMP)nyfG*Oa0<+CZY zU+^Z={JLDszi!u}vR1EGJqPVcH>OlAT>ZPcS~{ujn)Bt zz@;V@ivt!~t5}BhpNLM6kZ$dJOt(aa{0dndAK%+gCjLO=YQj_eT6){)Uh^{hW%<%n z^YqM3S(X4n3+_)CQpbNN%3Q^+?m(~6+#qgJH{$$z8*J!s2eL|R%$3nB;#4_z4wX9h zsW@LA6pqDEbhJL|SafAk-i_WCl_?dS+O76yW0O#{`g6GDDK<;7QT68^8ipg-x0ujD zK#5E@ckH~CRPk!5h+a9?Mbqq%^oztJk7aA<{ZhIt(@tjYyE}i&0i{E*sQu}ye}$;H z65Edjvi3nmf#~O=L~Sj$wfH`@k`iuTkN7b!0W8!C2gJz$>IP#GFm=@8IRElKBqWxm ze{!oC{;i~>tvS(rG_>ah!d43DbA_1SWa{tkoL&dHHjB8pso9z+h9PD0w_ZD$?mEln z=%gT}42aPd-I9Ogl!uab$TjEcwB#jQtxO{lGJFOAL_oX0ei-MzW03NR1^Dpm_vOgK zbn((!U|MM{SV~*G^L7Z+=Em6rQT%zD`(JOh_Oo%TF$Tu!=}2;s;y@1QFh%ZiDgOER zDTP9Zz)Cgv-S4C^g%2qfab`T)r4W{b@5!k*UEMEXWC`vYdB~%GkBAX*-&ul0`w%Zy z!!wt~$Hd1K6cZW88=0nXEI8Zki=}G~N*9OpAi-5y4AqpdG3g4V+Btp};Ac~wldr{{URJxs=bxElu*tpK<%tX#=ehJu_B z6%|K;x#}_bua?x#RnBcJRWWW^*}q-Txba8`0Bh0^Ex=I*qfr(Y1LKq{qs1X9z}Nku zQ+;dnKD95N`%bW=IWw2r_BFn+7>YvAW!IWvCO&onmqvY463hbN%#?#|FqQ)+NlJpl z?HuC$lp_a7P02hvqdLviy{ z*q41Y&qg+_%4oNmwABKu=)8lZFtpJW3h1PVL$BG+v@}dv^cwV+ZSQNvZeY;p5@#FB28>q!Wq*|hd8->(=C>bxIUkXE&$AP$p=o^>^+W@b& zv6L7m=YbFGGy8zo)Vo=#&)|HeI&CwpN_o6En!uGMXQneYw!;Rp ziNm;n~g`h2D4Q%BwC0Sre-aLM%QXbA3joeJWaYo1LA9c$HH?zY%PFiCQ+ivYM6wr2e}5WAYV&R5LA9%<+08cHVq 
zf-P*OebD*kLl*Ocm1N-93^!W?q34gx{8syMK{u`$<(myVI1&YfR+nr5{gE1z5UJB@vF zjis$Y@jOsRsQ!)+9wvCS6VOIxkiQ*4AV_8Apr+;s*N%BbzU-SJfs}el{w$p>XIQQ> zk6ypxTy@AT8g{qzg_yC@{0c zD6A0oq$*U8a&gJT)#mCmF(r*VIui}WCqCHr+LV7Ph_tl9r_pJEioGfh<}XBl2k)C? zF{qs@TPR0DZa|MJ*T_s)z*i>A-ARP4(G(UNj!NOI)Gu#F5O0JWeky%k_|;D zowY)%RMmzCapQII{3Wu-=&g`{I0Tu!jnq`~0%mmP$Z2Y}wWn_<);sM*F4 zec?1s7zBGTAiRjM@%%|Gw_Kya$SpYa1^bL8X?b6+NSH+Zia?0>a@JQpS<`MHqlrmO z^*rmY@h(D_C(~q3JoD_@4`wL?Yz(7>Q^65tNiy|9auNYF@K5JhCTJgjFYy4UhCOL` zfs^w~J%a~V?q{#|%H?%kGe&RQBpri;>jk=A^RP7Oilk+iKFZn5eyhqzb~L zoCpI}K6=gY4pCy(Nxdd5ESN!D@BDms=fxmF@ZmkfciNh2Mvs}sLMmJ0 zul?-Q6r#>YMl5|UL&iXV8%{0lwZSy2q#LO^M=$X_E-povAAx(HFS*M%p?*qDIruuz z_DN`5vZKvz&#;ofYhmQBz3e8&?l21@z%N82EE$NDC35^G^*W@sV>7obTnpP z_ft8hX_7Z>#c=`vIHg-{U&SDk2aEj01onOTak8UA~9GX z$pa*z+_lnlj$yGtYjCSjikWFPC#lv9S!&M{;rl-6Z)Vqd{Br`lADOhdA3G@81o(6< zDrD*fpR{C3l#3F4nE7$ENj)a>`I+6WF7O9;4fTo$q!W2dqB@l+O&UEMctcF!lf_B= zk5EiUtET~1K5LPGS8P4cv6a=FB{G|#9MrlChnRJa5f57FLIv)#`mBgfQg6;1>C*3 zEu2TjpY}k11jr|Z3Y>-L;a~W^;ro3Mi*2Sd(Z}OLmIljTMH#iM1PiVF9y@~Ap5N&p z6zQ1wEES9vP<&6#9Cwy1%_m$a;_6Wd@OB8NGkD^y@&3Rc#=;73wPOmUg@8oiFdGl#*Ga>Vf);WooH_y`DJ!cOIt046FO#&c+O8fk6eO1x z6<*D*wx)T#=<4Y%oO_RNZR&&P@H7c4zsRc14@-sa2C>!L%jyk@KO0|Lo7zGn9(pgI znh!omYyws3Xh96jjSOqXKl$!A5zN!mS>z%|elLz;{5M=hdjAR*nI4+$O~ zmM9RT&|mv3GreDISym^rCf}&QAb#bR5n_+*pUk)@~Srk8n@P9l7Ou2^6S7Pd~w z)b}N(IN7bWOoEW}dH=Tl;?*-Xd$}=BshRLRm;A8Vb(4)3A?OX|_G?v61FxbRc)h8( zbn3_;CTsoKE!nBIDM}?xU0fxPI4WESJ~}%Kw9hJ_Et4UC+J)-S*TXF#P^ox-@fc>S zIN3{jdy1!LP+@}qU4?nYHDh&?H6-|WU9q#c=ilLhqxD*2_~N0^|9LL(^<0^1_VVr4 zHs-Gi4dHExFohKa+zr;OH8qrW>Zs17&y?4v>Tv4S*3>x?SA+8!a*FF^o`6f*>2npm zz@VB%v1xi=d|tg{);!wJ=T4`86TUP0-;9644lOfb^GO4Iq^7i1B4lVZfJZA)dcHhv zpP$5aUtL$p+{ISxy?6<>gYSd#=LS6iw#r7Yx?xfz?p+u!{FZ^YM2e-B$4fr~W{)hy zD0`;Gqwkfqnalq+WtgfW;DmU0jrRl*JYME=vLlj^;wQUUvf~v+MmEuZ_DM0Ai5n8f zcJ1%ll;w0cZH!zV8vi)m?1FhVcubMh_qc5Q{kwL}_59gC=}kB+$WpPh!R?Pa=e}XI zpTgKkbwb1|+t&>-uHQ-I3ra@Z*|IrsEFQ%t?&Zv`$m<;k1G8BIVy_Dh`A_TnTLiO- 
zOqI=~6RAJlX8zYX8?M)X`=0jAJ{6nyiWxN&G$j1l7n+C^b)1T1ass$l?TfW**KV>S zvos7$xgVAKL)*~J19le%sg9o@8Ach`w1HWbRqtIuLYpN?E-Fgoi5Y7Yg|+}~XmSIj z34^Q`O5D^W$%PAB`Hw_7w2i2umCzcQ`FH+_D%mlu-@hnAW9amMnZM3C5(S~&2~o&Z zpDy@@d(3Owp9A*`!o!&a>Nh;RtEs6|R~w`mn;v0ev|Or~l~Wd`5yWU4$W?7#v$u>t z@C~Q@K;QB0_j5l(eEeLHBHh+$WHCP&&~pP>v7EglqHB^-26SO})khN=)&?#lDaKhn zPX?+ydlAySFP+MNYPE!+AmMfP9|i%-+b{UEW{B=S03w+^X4j7 zb|Y^xc`XJ{rR(hOz)H2Scy$f*B}YTNAk(0z`-?dv&{FuPTu9gnFZ{#mMUt;U7?7n_m z)ywm;%gru@%Cpl>R%!iB0ZAnI>K|@|DPmcmLN7kpj#^ z1eDtJ%1VWp?%2we7jZMv*|YDjFZ|76eZe~?=U9RgPEJM{L_!2?GJPZTatk$B%*yL7 zUbQ*xDs?UKF#Xe?K}{S&&26B0?4JJlP+$~;%x|VPp?jC)2xN-IKq^`F=cT$n@s&r-;IbLy>^>v>XVXtwR+Aqes@w+Q>5xj%hjgk_C0G@ zmvnt!K`REyDF%^ejk2nQ{?wiZ|K0rKyFB+?{1pqf;q{1}_XWP-BM8DP>Oa2Nc7FS! zzV&U7V}4!IRH?QqGOJhE*)>vuN|1lcH)qd(3b!tSun1!54(B+PexFC_&-KJRZ!DQ+ z>u-%cpi8cv^wn!-OFz2dLM?-~`88{Bu^%we@CGcwmrBAkMK?t260ce8#hlDS3zy6f z=~K)PAxzU(o$J$!)mp^|_Kp0BKg1aqj`(++mR-Is&Q*{kC@JH74CPfUv|yBg{?NsL z9LQKEUR}>o?%f3mA_Yo7n$URu(yVl0Y4)DK^bA5Ng;fK*jwLa{6BR{f41JcPQe7kI z3N+3q>AE#jSlBs&EZqfrpgyz_I)jglYm!09&nc(eKQ8v$@|AyZ6UoWm}X7AdwT*v0@fe8E8v+A@Ur5;KAs6 z!|)8$n^>HfD*fH_14%BoPbE_(^LR$! 
zdG%ISYk(x>;&)9HpNha^jyYU^^qz>Bj9pEu8lKuqr*jPhhUYRZ9rgit0!xaE2yKQD zPM&pYSAS*q8M9~j27UCgV90tqEE%T`r3Jw|u+PTs%Acs?C}18Fqq@c-pHiK_Vi>?; zRDFCawhV8~gNdSr%VUpY9XDmeu>0a|2)spHbD@@P>R|l&6ah;jT*&u-%gRH6u9}6J zyFW~$3O{$-GS;KtcSGO3d+w5O+t{}JEsnl5`C;%5I?>e2Fo&)4M{$3KT9E;1@OOeA zpAtFcvCxxl%OG(}~NLNyTon7~dNK^H$EoxR5>;-l#3GSt5 zJ-jY8-lX~l=p`o=6`^#e18YjbH7ZQ18lRZCI+9Zk-yu2xafi3Vq-+w=;V)j zW!S~iQjOx$1BYKwgXy}?Y`87!u87xTEm&DC5-2&fvwU@KF{1z*+mwE--z|ot&+3|N?_qS7*|=7 zbHj&;$3vg4=jjbCcD9Xia#jza75oMvZfL90+e=4kE0J%1_Q^nCYSFzC7Xkvu9MROV z4g#LIa4#JIAYZ*;R8k+GDk{o%5;c`(fk5cB06!kTGa_wr4&S~fZ6kAktX$i^_88N+ z8sLH0y-X}!x*1v3w49czghLbw+K*AgUgPoe$YtD|2B`;Aec=L#sl9^)jiykM`+OZ+ z3&`LbpPp@hIJ}QH9j^bzF;(=H$+3%8XFSLft7^vTJcNuvd5mj!u<4QouGv6%#^Ae5(*GJ3+?m>b zf}+ng5wUG9ih;SKC7q%h8-9t|jQ_|4neqWt#ynb*d#KayXS2vmo;vw9TCyQG`>#N)N4PSB}G44t4 zti^rR=YAMQ*mtQGUz8Zc$q_aAVVbk&88-4yjmOun&ffkUkXbSl{BxGLJ zpPtns&c9|@ap?1jCxO~xVa;T4D;?oNaNDzaC?y6`Dx1vUAU+irozv}*Dhjay0XZ&bn$ouWmmY9=r-u|1!PGrlVG_d}+! zZoW$m%bNZB!7|VfsA5TSnnaWObaV;?*d{y)zlf3v~!5u*I-JT zJE)wryq2hju3m4fL_Q*_(9yZKhkkhfl~+$2j+p8i8epZZ;tsHy5h}vA<qYX+k`I;3pgpu(h!0~%?>oBU>cRb-rolB~!Mm%~}Sxua;5tEnA!lYq;{12HF z62dr-yiLwG@}2+l@994(3=FEKx;YCjkK2C{lnEzoTccd-FG-@1E7t|hF*=E=w9X#^ zz_mT6xJSyf*7^OeM~t=l-4{5J3KD0#qxSi&^0jR*Re?nXdFCbGzD<}c>cp}JD%fgz z2mhK0wHP^C6W+{v|8Si3@B1i!EXVmB7mMheJWj0!wGCb-YAG^Vlc_q6@$Qc8o2Nf0 zm62SCo1a(Y1&vEYJ9o4l6ta`+CokOg9K104?dE#;FC^YYko|}n4vEjqmvYDZ%?W=% zI;SB3!C-||dLJwq1`lp3NZfu>G$Rs>(M_9TV2YR8Y~ph#>OwA^cYHE`xLHZpowtzp zSK}wu$Y!0QoL)kPC50qK)H$$Y{x)c()x( z#y7rD`sF~Uzjy(JE5%2DJEM@Al#x9D1WaoFGp-hr!POb0o@5zbMV)!- zy^Tm$z=f|8Y5WOoM&Tn6fbUWr@alM_c$=UEf|dbmTJ~L0}t|LF0kU!|$q6C!wf|de{>y$3* zWE{$FA{&IP>j{HubD=M<0 zx1aYRQjzO@)sWTwXvSp=RtYC0W7X)QQtC3>GI#Kzy3R?Z=2!GL4E|UREBP7b4TLy- z8pYc1OQfcMvg5kkKdMR9(oa*cs$bR>ug?i|+mEzn2WU8Wel5UarV!e6W|%K2-udw< znyVva0S`G+K*Q?p2swqlqKJ$cy^@(NpllJYgnjJeW24I)Hh$M{W4WvFhtSmI$vKB4 z&0t?Fv)-_orxS!>QJtKwN@ye{QiDu};}%V(I20*=D#-yQr9OLtx2h@W(5@7n(geT! 
znzamb8liJN;dvmaN(LFK#szHOd)WnmwzAFV-qo=`=-#i(g4fkA{<|L4Knmx1Qe|@R zfhczHCsplZot+D<}G&2}3I6m4*O0`{{yg6wGfcIxZu7kid%qi3>LL(uCoQ$WEdv5~{EE1;${0t&9c z!)u4$N;S1bg7kG|dALR*bxs2zg+3rey7)qW4iz`d%ZDf4Pgy;Fz_rq{qh>p0Xs+$N z?`64OI_DO&I-{C2GcAR~*Yn%S2Gv89;o+o|KKnSok;8M=_{LLB;yRgBI1NWMAWnXv zY!}B(AhLfoXEQZVuXfiC$`pquXt1z?Od$`xT<5VuKhyWevI{KsCz}t3=$W`32;;AR zzTh?Od<6(QtOEGM%f%olzTX3~RFZbiWKqNL6aRyM5H&nswg!w9m}OH{ zYJyomXjuWi)@vwYz0odHauYP72fSic=3tGJMTfDy%t`#*#LXIB`6X6SZA5{a9i>_Q z47~a}Ta1vkY3Uj!k)g;Ax7G$z%xiytiu)|!DN2?G3ZVE)kA{0%sS;HFj$N#{yxbi9 zD;Es5wf923)bHb*`bTbOc`;nE@rfnyl1*oxKcEiU;NhN&ZirjP3iGe(i0@?~>_E+N z%3C7SpXuPia<;)({qKjL6xuO9z;1JHMHg+0@Do<=^ZD|<-~H-1gBfin#w&?`g^y`~ zryadxAa`JngN2dOp?G(~>v{M`K<`L}>}9-{^(oDxpx z$vb2I6TF4FD1k$KcGmD){UMZpo8#x_HrbW_`TMr4^~sxWKj>udgT-d^XUoR#h4pQ9 zuNO@wW4AplyZvOEKcmUTUtgharHrV_&~ImeOXu{NKgI#0;7VG8!=)tUR# zwY%f@=aYJt2X|}F#Kt3}@tu2GcW)Zs@CW9)r{ma~mNFkm&szrNPM`39jp#J%qD76* zZ~{$g4TSbP2bBig+`LkkNQFOK;806WNmRR{3iTx*>35{)ZkIvE)2g~*_0&+aRxiy! zS3#KTZSI#uwn3_*CURw-N)`%x<82-dT8Ean2OZhU)+uT0XFv8VFva%Q8}WUc-RH#G z0dT9W_tXCyvy#pSpZlkO?d#uM!%`_yb*B2XAH8^a*jR+xF|+r8(~d4J!TSngp{lqx zyOKtscEPl9Umo+%F=gX;Z{NZiTC2MO_KT&yoKEr&TWw(A2RSHczgrcGQnz7TYMAR7 zR?R(&v(PIJAIAk56%4$-&}SaQ8#${6IW8R;^ync7`9wM_5Qo&I5otX)D_tGUJ63h%^;@Bh&_c z(zIrx2irX3!+I{iH{f=Q(Ljt&23;fL%>{o*Diz+}rgxct_4*uat3~gptyu4?&D7_U zspr==%U1IXb?z;;p9K4?yxSp0%(}@fyQqJVM{?+oBlB_r*_%sIs|vRJKH+bqlQ9Ac z{EsJI3?V}A&A2~Wj5s)XVrder5>T#!S68Fpzky1KYdbAy(oA;ZDdT{ioxuw+s3aL7 zwnAhlxCcgmGjuvXMsSgonGL*SwtzQF<7$d0UZt)7}^@uQL^v6ol zeW=2eGpJzAWF7+{PnPPpR{p=~O-a9oJXsAF_xO2F_o7dNUWuIHQiZ&&iNvct`|pR~%3z~yFHS6}C3e_9N&sDqNlh_7L+Qa48O?Y-dU|lz#2$`| ze;a^yO?fQmRM2vimTux^4rwB_6HqiiA4?v8z7Vy4vKU}Bn&hBADR_!0iGIu}ade}D zqB#-Iw!1NKQ_Wy=r45CZi5fBwqoW6vTwhH&5r@*Kxo`8Z#@v_JpD6uL8xBsd1OQ^= zr=l3jo?E73Mcw`HUpT|}XbfC|_NXO`4`yHTHtph^yn_iy0XdaR(o2gJ@i?`KQk@8Y zR>x;nsqpYnr93|3>14PTvFjy2hx(Sb)UWz@E~)osFfA*-J{L(n+ouN3v#~p$Oe(jY zAzAv%A^x!c6`kuUDl1uGtOuWmX}sXA-)l-G)(qC8YbE}Z#_*~|d=2RSUhf@$9xms0 
zs1oYNm8q$3y$Pj9)9w~^w<8D(D{&Y%X^ed3<%mfU2AQC`tpzG=)TW?rLv38$u$&T* z&Zg;M>;-BRlq#8_Xm0o675E(s1LtOMYqK3T!)FTth0s8{#vf*pvvPVMh4uLezI(N| zb=rretyFwfW|wW@SdI|~cBQj_GE4?vJN7YN%a!_9R`sM^7iB3gvv75daWaEOo+=ie+aUVE7i&eI zD>)vGM*A@|n#+5R$0f8qMI!(W8pD4dN%8?k%vibiIcmVvi1AfOA>EDFrLp8GpG@x_ z%bWqY`J;9<;)FDD=4oDnZRTzjsf~H1(Td~vHwi@g1YX}Iuw<2q0&iIc61?asulvLe zxgDq0=QJ3_sysX0>{>j3tj#{%QMa_RC^!vsZ^?!iSnP3hQ{uXy_l@?1X zKX+Y$wyG>&KNzzJX;BO{kzP-3wqGa6zrQj;vmkc5S(2vaPV=S>y($07AKqxE?HUQ4H1VfA~rb=a4qAwN_^<#BoCQ?Pd2R3utoW5-2fw) zX11o`30Z9-UO{W))RY%!C>=l9Zxhw}$SbQ9Qr)S06U|JdBM@5L_dzm67Ew7pOb_l@ zdMa4D#hE>xbA>W>dg~Qbq`A_KU}?B?$8RfFD{?u|{5M!UOt==8Cc`N60xnRIz;Q8B zb{&g;4KX5o(oryftVC!qwu4?OCy+1AXG4gTCbT)HLyVm?BNsJmJAp4B!=wt_^b#qk z8pL)IQ`eN5of5*i@7mDF$cy3pEIe4Ct3-z|xl%_HRds)R*0v)~e2j3TARGor#I`}F z0rHk^7}|cVaBr~mf>k|4*!YEQPNimvG6f(`iXU>;JUa+~ti0~hZKP8n9xmOP-8YNc zS}7_4o0*pLf1RAbv6X4n8^erJsG&mcuv40rjBjv%$0FCyPzfY|yPi|DF-s%7lS$%hQ(Kl77=1HKyKf{I z1>vjrsn?Hxd9K&O!j6rMeEMdz`s0&Wq)>5S|9YroR${P_rC@A));31f4tNoPBj`>M z)OtEtMOKQ2Yw2kfRKfY3AlYkxxY>wNAgE6!0pk;i60b=QDN7D0l*_g+xt+~lyG?ZI z3wQ4n{zGHZCI%ZbqNa1o{p@XFzIoX0KAvt{MfHsc2#Mi|prq!u0Lg%^soW z;O=7WP}y(G-n%VD-j~|Lc`;e9lCqYcx4y!?RkJ}jqf%l+5l~g0B-lAhBQV!x)D#vL zps}T6q_KFM%jpeDG9K^b?fz(bc6Ol_4!<@eAUZHaLn?z38g^;iYHsGUbgeKu`t{O( zLf>;YCPv2aypB1~MQLOAG;kYdo5km*N5%Ai5g}Yvw=`)Wt2nNjy4?n2(VcIyCv~R+ z{E9@g;Y6nCS-?vwUF_0*w?uV4b}D2gV>@uxoQw&gVZxzA4Ax|Hn0TE+cKpA}r~D15 zBv@7>+HUpM;F?`mwHGHA+TZWg9sR(6H2Fm6uGr~m&QM&}$5RD31-kt3b@Ly6I^+k5L-Y& zU8a)oPK?08m9&Ui+Nj30BbQu4NR8$;YQHz1`>n)q6n&YH6|ZFpuSqcSmnA&DUB zgytQun>SJAYo)*KE-3orY+dIm8F<8Al2R3G#S%ZgMMu=tmJ0Aq2ljvSCN7j00wMu( z0OqoiMmzfSj<}atTitU7LU>?(K}oKXZlDx)vY!N%u!`_evGW!ayFB&Z(@vIL=YkER zld9AjgYK+G^kNn~oX`kVp1pg2I~_?#s69laggeR!X2xg%h|Q_1EPV*K zF5#Lck?kP&5kMnTmtOOK6oOQ61Go?OU2CR9Hza!$nJ+)ue*{9BK{(8K4wfhn1H?}t zl_`~D=%MRd3nO;Qpu_coCs8vEN^o7<{Xg;VQC71e&ID*L(of7BBHpv@j{7wGaNsMn z#F0HmhmP6wd>>8>zd*`=H|-PLd5<5P&nAt0za3}7h`jE!!Ws~N)09nO(M@xn)9sN> z){7!ggV?g*!@|R{vU5t>I;32E!df(733P$Q4nb2tBu=RAs5xTDL!y$@a6r!PKHqEu 
zV9Jeo#^x=jDgJ|TwmVv+!g)R~EkLInWZM1Oz~ve;_+jmk7t)5g3PS~c;CW%{nsr(_}$^hz0goIP2L z_YD9(1+IP%Z9vpWJgr?(38Nf8$oo8->stOA9Df>8*{`pEMQgoegD;*+u`Ji49*7Po z2J((D$H0IAPiSblnr*O&gp@4I)fs+?$N67d zFT0@7gdm%Lob8Xwt0!dVB#!f^P4(Y#GdOGgCJT!1g~6k#6SI%+WLohcyno@Lf!8|E zT5RR_MMevJD&uT%OyzXUf5dvwpe>H{l%en#{+7kq*_wogSS@qY-Dkq_a}nvc-^unb z(g;8WUdIOMw<*T9WProyrX>%92D?HkSd`pA1k}@ixb*;acZY4}jOLB}j^gjQLRz;O z$~b@XRN^&{moZ1pqp9nTsBJTLmHXo2oJ^Dp4(ptPhZDXgz=sa82G@Z-y0oM1eM0st z?yZN!uUN@#$6?a`vE8wAzJxWHD9!&s8&21kvGW#Sq4OrzRo37JjIN{R4TSvXY~w<^ zZ%uB0es+$dVN(`0>|)BIN7>tR7-~-@H?*zhOP1JX-NRATYKk~mrmr~b(i2pz9hh=v zoy1cVl5ez}GLp@)8uy`(1Wn{Zw}GJtp^B-P0H))JS$~kuoKSSV=6{>4^B-Cie%|N& z_?g|D`2b3vy*n|y@N*rd9xh7|N%!a(=ba6IHddFcR2(##O$Kw}BIGzV=5v<@bKvpc zBHNF*#IK>*ewUowH)G_}e&3hN660h)Sc*F}a^M(|oN426&W|ga&VklzZ9^-M7+5vT zNPJ3nF#^*nDkcrV@*nMnB1@4#G%_^0#xmh{=~YXKW%~s})%&@wKI_85RSVLwz4qRJ zl$$y@I^jY*Fffdape2k(&chV93$`Bh>kS$q@Wd5_I^N5MKl~lM74Tt0weIMv$0&fTV zxj*_StABEy*Xl-`oKUvTL&7o^u~!s-D@O{4WE?xW1?b86&a$%C<^j25eyg|bE*V*7 zG@_~Li*@Z8*%bMCWex_tIhhy_+NSe7(4YLCwU}fhCVgWgWU9A6K;!*BwPG+ALa5?| zN|K32y?k;Zh%E|xWbzZKQG)5e=>Rgg))urTwh^54RC=&wdiH}&Pu=us0J|EKH9%e()w_dmk z1CZMo%c?*8z;`?0uLDBX=Tt8VZC;h&xv&$1&*;hQ-mTH!Ra0?}LnTg>2qH9ri93lZ zY^~IF+M9OWcPIAb&1qU0AK&bMvZZk2^mw>fD6CS7lo%F7fw0QsA+2p7RcX052EV?I z%9PaqK1Z~>R!SE7<>)-irIRp86W|64g^Q3gJ%1m(`e^82hS8_het;xGRJ7Xi7*nZh zpZ<lhKm@ZpacPPyH4S1*K@-riq6L{T!uyNkIjT)gA z4cJW93tW_5&n9o>k^Tij>=2!vQS^*Lt6EbRDN-5nDHA)N_?$Rh6Jekie~XMjJI;5L zLX+`E3EvZ!ma6LZ&bJ1AD9#uqFX>Lde`YbDUUmTl1e2V*m3~%{(x|EX)DUGToIKH& zUz0%tztPwYqQ%0ft^)plF9Rov(=^Y>&q&vEDJpoojltNb;GZ1%6yG`Yej+MGL;rFL zjuX6XDg=TzCtzNBzptme=BGt7VKVjW9UMD%V*4Mgj~`hZH6d`(iL_~@)I?18d8fWx zw~r|_cRw#Qr&`LgWabexV1kSInq+{)lRSo~{d39u=1%i5lV~b`&jED?Q$?2@u2U;> zrM>R;biGBqYJ~%NXt-%dU5#w1CM&3fm{Q;z9_n}by?H7G`53ljku`LVd`N}v5ljIq zk&gRjVFo{9obr82R4))PVGQ6+m?gR9cwejZ6@$b$H!T@#*@c#}L5S2Ih)C6DKrxL( zJ1?j=@1=$*rCPp!YesTh({Hp~M>GDjU4-9`O`TSXXG*NdJ(?MYHSAsvU_98b{r}kY zej_9w_H}nQFi@~W3Yx=BVUcGS&Y+|Z^rN=bVXFMWXe2Cui=(!>2 
zX-IownVB`&-2Z7!r^zKpmpZnR?M@oy>vpf#{HEkTnbCWGLL~K;cmiXOVJ`^$$zj!u zQ1J!~Qc5Az%~uKU%7h;k4YC&(LLViyS=6~cuZ@T9nj3nYPoDQ_%lQrWfK1%_VvXBk znG{9?cPMXvenZlr)ELp)1<8Y+$;3IoEYUi)re^p0i6h+P>I(KxsYRZuT`1Kl^(>-| z2rvDtRX8YWUtefrDY?a~&Z*TL1XPUXH6cV6`WhTp@L*JO119s}|`+Hkhl=8tJDj zGb1BMr^`0ZdTe}sr+qVVufOh>CqA22?h3wtci;I0od4c4gC-vbpL8q=GMvy&k{P&q zC#5HnzadM7x*rZu%;_8jAJwR7aCrL42koUtY`hU zbgdNRI`r;nrEtHCh?FV7@`R=Aq49f;ooP21CcvCc&vv~TVR_vh?g)Ri%?w!F`uh!k zdz$HC9~2al%ct-P)h2iYVVVA?o`3;!9VKslLA`@7}zk~n51f= zd*>eaC7-BlScF3i`BCIaKy!fUt1I$<(X2`FhcVE1ZwF+=scu(r9S4R6WJF+q)@eQw zI_-Avl6vmi*rho{$tN)l=I%OIt#NMJU!cbRd7d(R$_joQ(}Fj*l>R{Pq6goUY2-b3 z@3V9fOB1Vs|7=jQJHK_Uuq0kqv>cCbJW2H*I4ez;p=(|MaIu~AZG6r8>Z`GT%UCb= z3H;Mr63#w-SQnxCN%tO*kZr46i6h7q$orq|^LWNdJPFtl7ciA<=4x zYCxl-XP5^XW9SL^V;uO$4j~)eIJ2P4@G>p_kVw(EQ(%m!e!~5pxTbX3<$v9x&4qB?g{}dZf%j`RU^)5i+Iv z{WN*xPaYqMLl#MEo*ns$)z#egdmSg@k<2xZzfT-ZhK;PxMhV{SCUJ2$lg+{r8z*P} z@?O&07i#GUMNZ4esut^i=oA}NPf2$iDA_Nc6XF&-Z(T1Q$g*cEdv`jVAz^{P?fdh} z-?;9Zh5x+vd0a)&_V=We8A>$@r%S4Vm5_7gI`_+pp6IZiNoBynoAkuZS8xT;j==h6 zg@X8TkKy3Zj^$RJRB!(rrc3cRT6BuHIwd;SG?3t|zew62CNcVdCJv7Ch55K;RV`r* zmC8Vc!Mld=t6$Tr5j=Ks8vSCH!n+%`dX46#;YdRdthfD|?7t!Y-F9Zr_VtosDCZX>-yPh0s~i!%u5;0c z-GhKt4@I&^u~t>Y%PZmqco?Lf*f>^>vF?7AXRwSPXU?I2Y$U+Mh%#x(@~}*Q_3MxS zxEqM_upLLX=`~azE)%stJctGd*VCs6Tnzk;IqsR!{oS#j$K|}XzuM}y>ZoF%Nx$_a z68nc4k7sUVlC#h}SCWxCt}s^+Q>w{VBvO!evI`F{z-6pF=r%VN zP#I2tPfG#*1K)kRhVL?M-|vJ=?dHu=KkAHocr8W?=ey;vB}c2rWRSlfgxqh6zadhy zhf1&sNG#;Wh#3^nl40S3sb)LzJPwOT3(3iQkJ(wYnlt!(P8~;5Z%=k9dC|kS6IP-eZr z*pOHKw0?|uD(V(tOiR7*BFs+nZ*4YC`$3uti+^I@dR%JSakVmP>V>|SdNk)nL}t|w zcw1JoL>pkB@A zvL?9>VtON&?-D)C8YL(gPs_`Ffj@uPBIH~t_1~A2JCL-UW*aKzYo!^U(^ikWq$oWqezkM5%R1b@us7sivF1_1y5|S|2`5l z-kJNB%Lv9+TTmuObMs`#oJS4>Ls=|;m_H}roA|PApMvXrL4a&mJ9TbWYkurgnuySp z`^a7%8?UT4_&cC|%5aHrmmCq=uz&;7-Eg{pQF)ByMpFRh6=g2pJkZ9qGRlJgM01}xv+E3WV&LlEXy6m zUpU>pI|#`LbTy#7J3lKp;R_+@Y7!oaofem%3cU!(D>W9DHXW@LhSIj|NGc7)DHpko zYHc=%ib)0v#-1m-i67B@E;ajqb#qj{+UKRru=V$5!EwJeixPoGlp6R{dy0=?I-u&1 
zl;cTGz^2RQc;Km1l$O(TmMa|kD75j(=CkArp1}5wN}8hn!+FU`AxLP^K-2z9F#Rca z*et*906;*$zYxv8H#nV858a*sZ)yMaIb@07;sG5p^i`WKz8g;NkIp$(msRbjfBZIT z1BXb>lauI1kOt#=Dgp#uWetNcdw3Lk+13;TieS)^;^7-Vq(!jcC)5;Rj<$&qkAv@G zrsMzzT<|jf_r;Jf(!Jh#FLv+U7q=|bN0o@u-?zHF@r7Xq+Em?7^&Gk zud&zXApFnQ9j2mn0i9IcDItsoe*&2T0Pb+n58vh#MO{-BSuPx@g}$13RAen$L4?Cj&(h|y&eP`@{T|B*Dw!LOtcgcH6nhrMF+J_ zCAHu|f41i!jK|3;EPFL!hi-30Fhr8Plqn+$;nh&Zrz&vWfR(G}f9EWl!R0!;p@}py zI}#HZ)keTuo7$rjjOcbIfAl8$A8bBq*MSz2##J@YnbwwIq84{tuK5S=*!#ggM?xow zN#?`5>|1kvn?P-lRkh6Ss#ovziB!(qiQoxzqala8+tmBSxSh>d8eENRMD{HyPc|1qBuCEqKlWEr&^x*KPj}H>lvS z^V)wK1dYb6eyH3ze|hjFzjKc&wM&1w;fJ%^q)J|%xyoUqlc9-Tpi!t7KWAYQ*vkpw zvmrvEA_qT^?)|$stAFJ`=dHcosY+qc#$j_Owk$GIP_F?78=rp7uP#T1-bXi3wU2zX z*32hS$<~?%rOo{|cWZJQ{|g4|zAJq3>H}>A`W{rKH2wC5Q@W_C~cQ~=oZUx%1rQ!#O zDJ^3|SCW_^fe-mkh+LuT{lkmf;#X85y@cbu*B1(U>&y7K*Oj@sPxPJ5@%OP}B#x+De(J|Ry_J$ zbKCaVe|b}V{BkDMeqF3HvyZZP{;g!gz=k2U ze`=zcGD~yFk0wSibRY&zHxRMD#Q4}gAlL6ra&e$~vTdAt*nyE%M?;aBe7Q0n%)L3a z?~#UD>w84_TeL1AdKdwa{{vk-b0}1aDHH;{dEdu9j;dJA?khomvMb@y{aiUVB%NlixDC!S>31#c3Dy3QW{u;g_Sk_q3C_+FyiTFhy% ztmwil35Cw0?3_R#C^Ii@h*%Xn$pUocDwqveO|L%)1`JFMZ&%r|iRtRMOZdjze+m(< z!FtK69>`#h+U8;{{{!xa)%I4N(`F~W5WjZf#hU7V`Qawyf5(4nh?&@spi@m$NedRG0H_P*0s%Zrf&S!%-CEFD zvi8NM;1G?&b>%2;!oO?B>+#=G)kQIF^ut#jwCm7epd4NH5N)~auvU`8=0__WVw;r3 zsgYmcNPtW-60Mx~6ps+upMieN5hqVXa2^t?yR==}KS6SW3F6wnuJ**Wafs=o(^;S1I^*7o7V%CamS9h96#wj_mk??m^WEGU&_g3WZ88L-e@wL`@c0V7x%b#$o=0|>diqV?;pWUk z)U}`rscAMQ)$&#L0g@Z`z|=-cw6GA z{{-^c%J)&&wBEM#JT=lB#k{fzP82;F%txkfIqO}F)f8^03Z)6#oe}1^>EW7|jtoa0 zK!=@XT3yT+f2;x)fl21k&XmayRw@d^%xBpfz@Fm)TG(&xhG>mmSWQadCjBtoZUoU{ZLSBte@s z!e!Jlf~J%wL}@f=Lflkqp_wYF#De^&jw+*6(W(C1fZHPBtp=2MJcY_i_qjnfZ77;(#ZCe7Rc0y?*zWXCjR2B?s@fNcbYoD{uQA65Rr&1VSVn; z8?(xbQFW1=x_IQPlg~J1m9j#F;(>$qgaD?KY&9Aagbkayr>K*VX-zQ~T2fXcQkbM- z;C<@NwWTz*cxj1MqS&W|-+0$|@R|5Y^V5Z?f9#Ue6TPhAz8JEh6Kfjz@1^H+ z^jrI-2u9#?0Fh_8nS$~?zs-0`-FK{~&SDz8cXy5=hy6%kX{w7>i`IK6j5!8pDk7A4 zpcj+fTG~tHhpaS@LMxmN`N4Fs+62$*79YJ>b5;1DK%`Oa&`|ZchnfS=WC);5)SSj5 
ze|<(XV^Wm4mNmSpa6Q$`31UyWjwsJ>dO|~hpg}t|2wdymXFgp|Hdcf2i|#GPhpa{= zL5jhJ1*xyjMl+nD0%vTc4bXu!ofnEKEi0vjw#p)+7e!&zlV^*x|1v!kq3tgpA@oqW z-JuaQZ&v9`QCW1u$vi1ThlRmtNlRE1f3_BEQOXnqy9r$P16(+Y41~watF4R))cO_H zvj|2q7l+N?A=~y6%#Go{*%HMJ!64h3OUa234+ONrQHjHfFX8;bxEM>Q$UBw|&BUvW zmn0#3BF>PaF#ocSswkDjq@A*RKb?L<*IEU8%vVdQLt8)7BWGuR$Eq8 zvL>sqWj8OXYzbc@C|VK0{2^z3VGQ_uEA&-VozrR0OCEGtwJl96f42Tk%BsxMYKPON zD)yxSk3{ZRJh}k#G`vrk@sE?D?;7ZA}2x zQ6Y%4`gVC2RZOp4W0gQf>j#!Y!tK_7Bwnmkb7#(^t&9bV3AVAlFn>o~L)*9la0s_K zz=U0(1fr?YL8ct1nQ#86e;fNoBPFQTnU^uGw8NPGxYpGO?8zs6M zESB)=qN(asR)+HVJBar>7O19$2VnaFe%aktMvi~6{2~Yse}PR+V@HS2!NvbSKbVWg z{l5?9o8T^Zr=7p|w~w>aNiE$b4fM%!#tN5A?XPK>nVr2pzvf{nnZ$Y)|C9z3N+L7O zwJi=FrhDdoXSDG@e~&rvcpHzua~~J@M9229=Vd%1A|(_RM5aKZeoX#6o)W!ttF5^p zNnjHRErG0}pjCmz9L{XUUl9&zyd+_7Ny~mBM0KSbyZnU1o$ zZW|0~c`%GVIfj%4GohIT00E&TU?B}CDMIxDlkSU~^I?-$fAaoC<4pKlM=VuSXyrq% z?1!pUl4xZx;H!AxN6DX-YI4cy;{GNgaa~-Ifwmf;1t}y91O_CAI9r7mp%Nce3~3oj zOdUlCzl2spYHyz!L${=M=iYM}zQ> z6)rTGT{kJBL0P*Sq30iz0g1ltFh@fYH$rzv;6derraURhHS@&)1*%FeQJ^C$2I*|< zUl4d9-HZ|hZCji`qct+-c+H3%%-%?R1KWjzLNT&d06xUg0;dC0A-%388LpM;i2CP2 zD}VGEe_0~s+!vD`EW;kEm{mbL^#Bb!2~aIe8JAh=}Ed>m5r%8R-rkRq;^Nc&2$PuQuqb9wIxl6 z=z-{laeK(i7UM)LJi4K{lKw~l_+;|hcqx%yPG3YVX^CTCAo8?(Sn)xO9$rU(;Bfy ze?aL9h(K1uwFVezYe{KKs=dHB9@+!>*_^;I9Bhz(IlGuYCEOHzVEW-3kTeIv5@P)n z-ERZ~bv-gjpmXa-&sfH)6fH>j(skI2{~$908#E_F3c@B8k`9{EdsFQJaMa3CPa&*PE6<Ia&Xok&C(fju)lqyLV8&S)TLt(1<2@SKWm_a1~Ap#|vu&{L=n+IZZC?)H20Y~$q zjTUYoV|9BkhK_{zMRBK-%a=h`02y6DAO_<%BlG!Ye@4)W_XZj-g_G#9hC&f+cgvOR z_^JEn1g{N5>+B^TQEw(OXIVQpe}yt{Y9vilWz(0=5q$)*9{Y=f($H{xH4^wz4CdTK z*(gjmr`IkcEx&TZ5N3A4)9)1X+Jw)N@W2d+*_o z=7@j|oWaoFpgjY|5~u=K5~NQfdNawe63eynGkTwQV_yLqKF}ASf~ik$f9PjZwW>Mc z>H=aHmp+I$5>@*1BMc=1Uwl(*LorW7TB-~(4{j))0c#Mn1|T8`62g)wm#Yt6R=2)* zo25CR(!^n$V2+~z+f31E6gG%fr2oUoH?~*81lz{ejwd!Iwzz; zZQHi(Oza)!%sJ2f1-CzUe^qyNtzNb2OHsM)oz%Fc|LX6Hc@Alr_4+h(&i=H+d_m9& zuq0J&5wpWv#khfljOnWhxSbPnnl@cx+c6`1LyhdC2pcHrBhsu(XxSCw7Jd1<*`-%_`S+e z|Dg(osCbbQ9{UEmWQ!D;PCN285=;i*qzGEhLy8JIBn>pO{kjiNZt?SfDoUGkop+Nx 
zWa174x!*N+qAx-I>mwuv`!ui^PHpCJTyz<*7ySqW7WB!Z`O`o^`?HCbwCZLULkSy-;uOMl_pW9gXf_;b!!_gpKlH=ki~nUK0Ys`K)e9NTOT;z2 zd_~3B_2*C2ABBnirEpQ*EN|{jv}Tlj$WTw?8_HST-ro0>(adTCSuK)l&=*(w7-T%T z#IQ0X2Ex!@DM$Bbm~-}jZ>=w>Tz!*)f0s|DMTrHQS^Ya)ir7JLj57mWc#6+=?SV~!5GhxSm_l+OfMA# zBy9K`8PrCi`N8Iwq29_&X^0##!BsY2ZN9m~$9IHWVQyO&UTf5k{(8TmmQ&#qgHzt4 z6Jw}we;bUeuyI^Y0_X#15{Obs1q5G+0jJb@^FBpUpAGYLZbL-V(I=@f{olw3Ld2>Io zvo3ylFHe7bGrE^VLv=H|suwd2`Uwqd^GbwfJ^OL)TS7b{sIT)@-H)(}pZ}q>jiwCA znhU1}0b3?U#>#(9b#?kB9CGn{wAlj<)#aF(kg5*%HdM4%%1OrM=%0&RrEg?B*VOC6 zf7d&RDg^}Rhdebcz*h7@=z{|JP<$v_Ya1hz9on@Ezku4sJyGa?4p@Qs4mc3pXTZXZ zyuE6=RLcG3xCOQ^dQ)wC4dg+!(o6r@IedAjK?r|3cYq9*?X?f=d4tF-+civBL3Z23 zxFa+w~7wC%)z z|4sz8R$Nh+_Roo8)sJ(yGY_lM4H>vYTfGB;13s%7m6k=9dJoRsqDf_96alTAe=kW~ zZ=zxV(XtV74B+OUl$d)F1+F(SyK5PKBD=}2EwZ@v4&ogHNI2U^&G~G;u+X>SrI($8 zlFPDnqDrdypqY?4eNm-bF&GHdJ_#}p$_>viY3?-#!;uc6Sej#dx981ZHVid9an_B! zPpjGaezS+eoJzYkQe&KbjDEdoe|u476t@UG@p!Q(XAx;`n(BO&j1y2Jt6K-<=m!pq zGEH?8AT7?V^^3sS{mQjnz@T(XF9#-oE);~W+7c96`V2vqBo7mTU0-vpvX=hQo%(1R zRuItE5%YZMhC!8v6^5bq!^P6w&{_JYn4FNqTzBeMF9(h|@*aXJyjVpNe>FB5O`s_f z{j{)TGnXIUjKGXhJBM3`$&-R)VDY&ac_weRs?K`eUckbrUQY-`w0ML;A|SE_+$k zBKH2iDWV)5348$=d7%ove}Gf!8isDb@QibM^`y`hb_0rt51}%XqAKsuKf{I&eqZ2M zr^Q^NzT}j+Zuw5^B}^o|aW6!^D9T?HsaO7~(8aX5bvdG9N+(5uMz9jy6uI8hg6fXU}NWSShf&}Qs`bce`;p~d-|t+a_x)p z`jU)IyZ)cyrNLC-x0(EkkGNo?uHZei+Jhu&&P9^|e0HtD^Xc@MPg;qobK+twCkFwx zzrSx)Cc^(4!O)m?wsb$eXpfy@%=!{EZ~jOg6;};CLqm{O#%6Uv3a7}xjb6z}jPcf0 z+c{;`OZO7Wbu$adfBh@HijW`ikEqa4y>yI9o^qbp&6!?=l@W}-l>;$i3XDYum3skX z3!%@%0c%ugQ7{g!>1dNa#G3=k&8l9}-TJ13Ri;BGliBe-(9g&SpWUt0{y}3x4?w zd;2!w;^E^MSHZ3Rk-de`vPZ1?6%NgVX+hm{M&CKW=e=Zp)a4iog8$MpTkp5QS&sa} zgV@KZrXsCYrH6P#QdJOJbdW6&Z?I2=3*HCh3dPYusHljW|n{A zpY=RWPyr+Ve+w7C*BF-sSGb(g2BISXV^K`-##=PO6bX?((Y)e?PXDn8?Po%p2>jn@ z5jELnA4Hu?@@n1Ve^*}XL5m&lU(m1dxvyDZ4K9UQ%6nDuRvkp_l?$jMAA`YkE}2SQ z97#B~;FIS%_9eY&dBw&l4xq}*uox#9U98sTko#1khP#eUWapLtRcwq5c zFNrb$e`{yIu=M2d$1V?NW#HnDL!mwPFz_14_8`v;t%QDuu=WcjX{_k^bL 
z5AVrXkle`nKE1h*#S9kF3J{s2`n~yWnBM-@fB$dAW1@*YAhf{J4*mO|B+-8@g&31+ zGv!~RV)kp8HB7&6rw0gedyq8xwG>(s2Lfs9D)N@v3CNJ7Lm%c)b_z1Wo?peJ401-O(FL ze=O-tPxbiS#2%_1Hn;Apn)m~W3L;6o07{!!^jBAU|0Hc@-(A=cO!q*-s9<{1#eNK1 zV(aj~k4Hj+x_!nq!Fvo{vp5+|UM0Va6pkw0XG#-ws@R>3Qm`a8IxRWO&SK%PSA`}` zQC208P?jRqcx0xyv?21wWxa%(WF2bCf3wd+d5bWJm79k}{#W?MYqr0Ab{fe5-aCYn zF;p~No1pOdk#w{U?+e|RrZL6%V~Y2C9!=PPNEjIC#b zT&Tf*Z;8yl7agjM&HMpXmy%p?J+LWM;kk6lEut8dWkbZ~7JrkdIR5R&{rMxHJSzP);rb* zQ8!@{@a+k!ChoRC!Qs#{i(649UP_ixe6qzq6_Eryj<_FcS>+2gwO1axW@9IrmEN0auD6y>8% z%}(flP->8j+M6(lq)F*ZHgI3=ZH4gd#3c0pB8Ep5GrrF`Okdb8#~ju!e>1chC~ewp zD!?ZSXHZ&2$UvjI5YTZ|=v>+ZVA=8hmZJITm0IH7yI`^y{YN}h-T=w@VVA>P2fqmkK{zxEG3{>IKc>CS82H3$2zMAQ0o3FKi1 z@q8f!F6lnIq|FG{I@HWz-_iVuKgg=?_$D&MBXj?S8MyuVJoqPye_U2{MVVj-MNRWM zNjofF5StspW@tYFEe@-L`*>gQe`5hv>a!psThvGGI%_2qi33Xy2Gf%P_jhd9omc*_ zu%t3{Y0@o^r=iWKG8@wyuv3u>pm96GCT1awBkL=7Fyf*!;M90Ld zYYK1Wj@=JRlLn{;f86~Dm&)~EyI8$5$n@D)2@8w*=kS}y0QKf;8N(X5CF3C*vWr&q zM7+^QczrX(lRg~S$iK{BT#xos{dz1X_qx!gQC;c)iJJ(hfb}1eS{6Q&J){V#zDAnu z*NYdGlhyRzp5fWRT+rPeuf_!`vK=EFUfkUf9Sa}rwE?HQSGugk!Ls_ zue_T1LFMR!!8@AOW$Qrd#A<0Us$6Lr5V01YGgDMfC$Ei^29DVX%(H!X17dyLJI(E= z?g>vZXcL#x0MGdeKm0+g8Ugx39v<^K%^rVSRc-b)>uq^fa5MhS5yZihA&J+m09|8i zL(ZKkT9rQ^f302n-O^0sV&2T~&aGDxqxpT5>EH$I8$`EEJt(*CY8 zPIdJZe|ju~sZ+iPm5x|xzM*7|+*MSAj?R-lof58IQxq4v;z#pnV853Rk_`hyl?j3I zRqAt z8d(}%Wu=LYEPbTii5!ZU?N?uPfLd6b?oGL-=;Ay9@ut6mPD9=-BfvYa6z67nR%om2 ze@YVX--mgdy677$hC=F(PIA%$L^emCcS|HM+bD{H2Y@N^%sS(DHZ{o!Q%;3njD>IV zqr`r>-uq_$5Ul?GP;{0s+%SNf1z3~!%z;Msg9k-fa9z)e^|u>U{~g80{H0OI7ix zjoC>kXXAUxivY|0D1w>lWPK26PpV*dz zI0sf8-&?{ppAi;1g-*;Kuh4;@1ey0w<@Ws*4R&_UYr~h=AbA$5`hQg#EwzRuWUL}* zm^@M}us8wq0k%O~khN3e-J9||@)~W7Ue(T6U9Md=rjAh~hqE<9Cx`(SlM=trfBhb1 z(K`c`X1s#%HBpBqf{l$4M1Qx>zO$`1|D22En0+$IYeFAu5f=_yNgix4p?7fK8rt4{ zG+>AB=~k9n@f9zuU!;n@2Km9vjVPK7O_RA^IBK3;K;bIy`ZI zKD)6y-^UfPzf#CPI`eH}4R3q2Z#O>?4EGPs`@vmhTvs36qYK|JL7q}Ue;^Oa5K$oP zQx6jtGt__;a#E+0?CVpYteKWWA9d;oDm_!(;tfu7sV1~h?wF0!A-c5l>!DUZ_9RWd`K?6yaS{4H 
zD>Qtz6Ha!gkaB_|Pw=7#f2d5KdwwLS>D0uF*M`58h^~~y3+&mCTa-#eVzf>$X>!8p zK(iqFnok{`dSnZQuUMffgkjj1UTLsM^Aj>Fz%Sxssu1X%wOf0`;ejbeHJi=Q=M z2xX^x)b@GqYgDBBLAv?lMIidxr&!`G)9mnUVXE9t!|UjQ&HOV$=432h0Pv6gEa604 zZpOp5r&1IFl~|qS_-t!qnQO>~u@qOc@hI3`C`ayhKqSSmQx% zAbXtDmJBCklTj&u6I%&P3Z@Flc!S->0)FR_EGP8}TQ@naT2I{P{|(J=b2LSM>rmU8 zueBI{;&tpD3+0=boa~It_`l`&jpW@SgZwRZ$GD!YK}e9Yf2`x`L+&Sa#SD=JeWUBI znHLoeq5rZ5bFMD~Q>%m4DvZwH@P**VWp5N-RBTg~D9qvLj3beSCh=tW=JrT1d`H@P zN@(ppj5W(^5bR*BvuuLsA#?SIhW?{Fz%!BFzB{Uaw5x&pj1eFsqTLzJj>zfUKN^}$ zaVWYn&Mv6+e}y?e$?I>FQeXp2;?&ONkJK(>M^E<;dek1Vr?)Y)5#O&N8(#H~{=jEJ z!bQ^!YAWzX_rC;(FeTJ=dmog=jN6T+0ePwO(xt2=7(j$j)6%@>TA#(gKSEBvm@!$o z7Of{CkS3{`4yKJn^Rh6W-e1t!JhrA0TGEUu+pr4!e^c`BcYCVw9DEC1(`J{tTIbrF z?^gNbVQ?^RHG{*SDZKK_0&%cr|AfvFD^*G0QqYQN7}?{62R)(TcQD2_C`Hq}=>8O@ z4B@=X^QMW!sKY38w69EDeij7B-xOT-+;0s3F(ADZ7Lz8*=h{cgm6{SmsCC8Q@f2=; zu6vuve@pX)zros^2DzRVZLU3+Rp{?o)Oh2m;hk&rHU(kuTb>$e2f)g6Vfy=L7>Soh zN)nE8Q zNuE|3himouz09*c5cFo5&2glN5!06qA(d(o{GE2?=%f>)kEo(2k1dQ@0_Ko&xl&9Y z%~BO*j9MZP_%vWtkc<1RVi=^ppG+(vfU%`$^zTTA_^?khxS=tJ&L_h-tm{S ze|L)CK0m!EHc52q|_V0t{D2DKm0Jvi?TxJhMC0VCa21z zKm@@*(BN& zQAnF5h|coH^X6#BDCu8uxe3`-`L(iJQtjth_qYxQw9jUQHFYV~1U&VYdn55he^>w* z4FRK?ImPmY%Iqksy0^{?qs8jBYv&M{Ab%T0XGuJ+exaECtf;T`m?m5<7;Giz0#N{p ztVxr9V{z>Pk5E98!$&=W0Rl6%8|u)VS6qry+$X$MjM$Ro$`kM!-;#x`)ZaGW!$8I| z7jc4TftyG&{uUY6f9xD}3jxeIO#6A5J+uj1L!p|Eg*`CH#UEYx=2`|o zT@AqV zz4eU3%VemW(2=vY#ik1-`@HMjt)Bb1rOIDBPc{>jsD2_|qNOYrewse5}(WpJz6g2VZwm z$09GZi}x$it+cz%g13I}i$kQvSBCo(y~SRA!Ze|!189EF73ubh63@E-@P1C;y!mCY zAN`y(-7ZtQkkh$se?)eO#2e2FO#RUi&shr>tRNn$&~L_xQb7uph}R+Q+WV}=>PRo2 zgS_EJv9JNkhFbCac{?5mVO~gW<#@t4j{=bYHVmd>Q@bYr;3zo+E*a<(2M?kl%i3 z)_ai%77V`T8x>opo9ubR@teNHw$U--r?7#B(u%&&3wVu`-pv29E%aRW_La2bdBhZp z_JfJI>+C|Re=+FrO)M(jvT;+t;8jf_4|6>}ddf~9kfv}gvx?qmeVtGQy|^YQaR zK64$eVIw|*zR=0`_od$(cy+s_Ye*gZ89LT)2AJkHJ*)@6{B4b^fpkH0f?}2n_m7vt zwn|yuKQqyCQH;E4`$qy`jM(?pl8>j6&#p!xOP=iBe^cVXGsA+Tr`n$>EcsnBxYrL! 
zDZ^gp3*n`rmWKdjt7KuQ7fbvj_jRW~tFRm^()Phj;DEn&qqe5r1pLTO&DS3ymR{Rm zG&;iXa^^>pfN5MQbicxL3zAy{mFU24p%O$iK2%U-5&b|l98cRBjejtRUjmC{ba1S% ze7+9Yf6b77@JvY46hcTz#U~(|Ui_jvhWItcLO~KCDuGGw=7HUNbdDAuEjPMrkr;nG zvv8iwgFdO5DDaSbReWHWO_jsknEABJeU|Fg)Ik zC>@~%f#OVjzMW}$&oS_yWI)0%Bc(BKsj&PEvo&r+OF+MpO44v&$ik67G~-wt3ndfX zaBdgsa$Mo%AbV^T=JN7f%8OpR_r8UR%e!`l5P0U>&dW+KV0t(H*Xx1(-5EIaQAm-~ ze-R%EpDg?+wX)F03|U8S6Z85$=c^6jl<^0`6|**$jmT*Cif>pYvq@LTCKPn2krC|W zHp*k&@W%$IZoP7yQ_M=#nD>QuE4pp`;O`X{&kmtfmc(5`?J`xgZOB*@&os667m4Ep zv+V(3fl1RQxt7;Y+2uUUuWcLL&9gRse+K@KiyLcCzHsy7?1yfwM^f@nGN{;}!~+z) zH20d#C#lblO;gKjID!QQJNu*kO6EBkZ0LaQ6@`{KIS(<6cERddUxlXUTM~sg z&A$?@z21PQJF{)@X{V)|L6*D=I06ohd&OZ@D>vB;};F>Mq=OUS>9A^kQ_x;C9B z@+lNc+8Vr#zv`K)$c9XasZ~l~(tF0$*zkLHBkZqd;dDLfX|W@yo9anRYPu+cfVkCHa}#R!FqUm%^>=H5}X}?F#3fNn}{Jk~0e+tke%;-rNYZs`7-=4C{uf4hwFz80^zFn^cd-ek){{|T_LguzvmIq!@c zThEh=_m;v47zDA(ne5d)ZYNh|c^}##xBj}DPPxw9StE#j-@IPS8%SdliCjX~oG{T2 z;zuA`OxH?OBKD&Cl=R(jMgvt-(tF3c1wMMhb>6u)X?i8nC`LZ$; zv9NshEUuo=5ea1YW0agr+>5G65P`xbnKgi-xedm5+r#OBSGA24Hy>_o$QL}J$2{&i zz;oUie<2x3U^~s0Pm0YQ5RzCxD7+GgS0cCk+0ST53X}jC98|8hhT>auVe*F?K=9O) zli8!(o9w=_H&ei3BruXBS~OzPH-JJj$kl@ zZeei~%8(uE?>61ib3O_Q{YEz?r#?wBcn1bte>9%jjFA?&L@^S~)BJzxX=0$<23Dz} z=bkeQZ-K9HoQd7kgO`5$vR=H+wm5>2n}1w!|lt<2*z)=~c0oT@|Ow-osM#$zsjlPZfU@$(2av>uG6U-5u$K?gKk5j-Y ze?dcMMPbj91k@9MlZ>2;nbSq~WRR(N&6PK}KjFVSp;`H+x*eGe9O+M|5#x4Lvg|>oxm9m2*?P87Fcp6 zE*9oHM2Rp2+=gQfisLdNo^+1Lv!%gsf1g06Q9Gpc`|D_4=zdwq&`CO0Ad^TUz(D2nGw4;@ne?s0O zeMdw;hWQyq=UyM3DWMF^l6q~E?%dZEHL*YLgW?TqfVwA9?d2Q9nNoWlG~73WfWu9E zckwS*#6*|5?A{XBrcU;e$`gXFVruJZaSFj<4N)R*%EuK>o=pT>CO}y)j@(SUc|~ zem0m6#HhlP(4N?ra!YqGT8D(`e?yIHm>a>Dil|}{p1r=q>P3p7+uTHQGi}YT*-~Np zoOW20u8>>nLD_<>yutVta+_;T2DJ>2!J4E08*2OL`)MEJ7;U0R6r7f3Kh>l23Fu z*nQX%oZ`p`VDWTVN)URz2~U@}t6&++UIpI(P~&MiP-h^{KZeZco3hO_%}Y%vsy<)9 z^qCLBkt;W{jOIyfbh+n@A~+Q+5+Ouv`vNNeuTPYn<`5?iyI3oHGgD7yB`eBQb}+y# z+&Zy5N9Qw)cZjb~URmz?fBtwuL@u=AVVA7MazZjHUdNu6344N9Q%A`~lgFQK%pj~n z0OPymj2K@~D{eUGy95C^&x3RWCW9A!Sux8YO#gnS{^-@fu<~(3bm|)c`)fqCb4Qda 
zq1WK=RKxkzPMESFt)t^gr#;l(D$e|HytaY;L=ivr8u0k_{Kx(1f8K8=8(3C(6d2Yh znO}2jpf*&o#a`OKpl@@id!s4n-x?jlUImSA1Jm~>#U%7%oMc<6_CU95RK{ApVvpy` zqVOqDIyG}wNZ8CLMXy)PmWM6j(}A zuaVkWNVUYJ7-*#KUP+00`HgifRxh??p%(?$o3F0Kt1&{#EJu(S#_v)XP_<|lFjI0C zKlIyd*rC{p>sYg05&-UBL*r{LbYu3o{zme>V+WogjMc!<96*8TXb44aTsSL_Q3>~R z!mdt?8UQa8fA>H%Kf@5DKRUmRYV=)Pf~Q(dlgO?%Sw&Jj<>;bV=vaT!>m~C+5;gco z+-$`iPxOl8qzbqUr|Mu;2iPHvXyY|IE!L?2qbJsCSQN%P!U^_=Zh0u1#VbW^Qe6E& zjSnjSD>Y!6Di{%9C4J{}UAz&$fj02HZF6}G766C+f6=pf)p`>rRng?u)r%_}z)0H1 z=$|a=R|tr~qtQ%NlHg{ASp?eWSjmIElLM1U=o!_9`BRjB&i40Gd=KYQh1DG-oQ9-W zHN1nwp9*4y6pv{m_&)ncfWyjB$SO}uMH1n##B#O=GLu)y)D=!cpKfO zq~NWZe+!QQHtQl(NY69?zh1$%9=*_J8%e%sn#U*zf3EvMMh3*y$UyvIHIbE2NvO8>u?pj{LwZ!hPE zC%Z8Gk!94-*9YYJpVpj7-8Z1cpo&6UNA78KL^D7TE$`ca87jLJQrzQm@npM>rGydp zf6OK7%c2GeJ@aQ!%Tjinet&F~ikF}>SpY{sxWBBB1nM2*awHVJ&a5@CBih~6l!6_t z>bAOZ1Xs_?S$TZ}t$n$u1^-d_)^)Jusz>^LT8Kc{72zqI@MAFtysZBz7`q}WvotQg zt)|Q=gQ-Z23*$sKCN2ZTO@hTesRjTFh@Yw}$$t!zTSw%!A{32Gf~;e-TBCp=r|itm zc};=MuM|HVRDq6W)x&LivZw^}hlN}LlUIR7U2#XFbzjC$Mj``;W`3{_&&Dy2;O4V@(iNae;u91b_u3Ey3nG;E?+Go|5-A2JiIp0_sC@UP-`E z5r1Hk3TlkRpsK_Y|vO)3g=>92+pe^?(6 z<18uK)XUh_45d3wJu{olv$!nApxYJNsDDoJVV&MxA)%X)WYE@bMYvG=$*yVAK!FI{TPAxpP0DB=&I--Im)jA)=AS%Yd-W$sO5fQz~fua zUBc6c{+DxAwY%-`(dHVaIv{Y)PA_w~uqb8kS6@^a?l5Z&xk^MbAgKelXq#C|ay;t-e zs+FEV5PO%iWrtV$MrO}31lTD(g^`Avua-977w!pr-g3B%f{hh^q<9TBE6-hQqlU-d z2yGm$D~b~magBo?OnsM&&i(?6rhl>IgLI}@Ts}|n=b_kYAvt4g^c5+Q@@|}IxVNBP zX<7^NeMcG;Ay@#S_@QQvFZ%WdjU=~`A`W`b7b7lspP#Uo~wIu#QFr7G4PDRbt4rh|@)Rh=*0VP;QUEbz;>T_5j+;Aq{} zpM_a{UZo5$F|!y<^EcCztg!O)A+^G;ui5b&jGhj@zvxImYI>BeUss_A=6}1b{ihG7 z3?$}NfGWeHO5B>DX@9Rx8h?sha0&OL+;jN=q?1=-k1MlvPLd6(u(WR2Lwzzsos+d;dew|$xA;B|dAV)ywV zWp%tIg&Jw=DA@Lk;r%`AF^gs#fX!dYZeAi4We9{+5TK3$-9lM>?|eqm5j?JY}M`$@fvlp>!boc~xwsN7|Lw`;+Bx4u&MntxCudMr(`sV8G;KpO&KRM3tEl|DBJS3qDY58onkMu`HV=X1Kf zg#XYw-#_^f(6J!LF7c_>WuTO3#4E$Du!AlSk7}e1%K7eg5L<&OmGFyp-y#uTOCrir z2Gcr6KauT09J6L2%7jHW1Fx28EFBh&?K(djYjs_}f1D|J$$!VvU~qN_R7UWMyDw-Q 
zX=Q3kP&_jd2?}D`en%(z8nr2XD0@zg1Fig;!hPsHxVda3)~ZUJWLKejNdzI4aFdVn zidycA@+ST5{M^gS@_ns&mLsW4t8+v^vk_L*2al!H$fhnJh5V9kaaoG$wm2mJJb!<< z>T2#rn%vhuA%F8lV#|?4(Hvzzj+EiK%(QHwd7!>(d*TO8|CQSN(xUdMmip`Iu z+VNo2J#mriC+^zL6ufehbqk=r0w#F5wyB_ufZ31H?sAF2(BFn!lF$*B0)p#UEf;^sqs(LWW78uqxx;PABM0PUFjl_aOAxpc4?#6#Y6OsL)~J9F-@{a7~aJRueGam z+aInUu`snjy zCYWJ=9ngPAJ3E9v;!Ubv6To+KDdYJ^5`tqWl3V^tbi`s^{rsn$6}YG2rf+3V;8|$3 zPmW?UFWNSz0juWK=j>obhbbGF|Mi;lbj43Tc&wgZ{3nx6Krq{`{ZU`<7sn zk)D$Ej=M`Emvhb`)7M34y`lc^ns{>%2$>ecxG3KI;>Y`ReMTVyO}{BVHl?%dq6x_N zCyQidT@e0G6;xHVJ`v4thyG(^QZYu68h>LXiAZksXwg!^u9UFxJ6e)Rn-7Ge({ro1 zPW8e^b*W0-c=9nJk=m1g z#YK`vi*Csj%$aF^$a6FeMS~kAKM0 zYyFDR_Fr=v5B7Q=f6rzo7bTI{LgL{n3uP80K6q_mqN zi930R2pYcT0aYwiYOQNTw64EXgV!?(!2I*QIMrw1KFSW>K#5rM^Y-=m$$utZc2bd& z8>$_y`Oh>_W}qe}5T+y(ttqtAUemquN7M>4i)EBkt9X3hg8wx6cbU^%7rb`bxoPsv z(P`9gMd_2Y^*G!Ti$+B(BelZXlzIGV)|YF=Lk)ku*?G?=O@2Y?2?Tr101g9T-^Jne z{A;^{U+9w_k;~9>=;#c5aa$ms&PO7(b7EE(ZH_pHR3*R`KVBuL~Kj-^7IFR%5}B zrtn{a>^=j8XCGVFmFS9oHk#RM*1p)0>cT87Xw7gha;=Vm5UA~yCS95n_)j_HP1~TI#x^!}NFSV`X z#bzTyDG-RS=JarhWO>YZf;6W53NUqs(V2B!3^22?@ZjmX*_)?@~1O z25<{c%-BVLOIXM%RVmaDUGfPo(y6<`48~BFcdDp*GZ_y{so?`SYQkXA9Rla~7dZ3d zaV^YLdAe>Kv{%0RC_9kDdSc_c0cRw<=uj#etWD6xGr;kQ9i6I=YeeB4-me*9-uAm} zgFRZ@Ke(|~AAd4jB9AvpCu8Ds{oLP;OOyeqx}scurXff$VNO>0nDx;ri{W5G@qeF= z3h+}h+C43fn+fO!dm9#S%qUXFL;N@HWd|sTpjDl099RS+K)oZ%AAyfSRw?FJ#XfHF zQFj*b&2(*+%0L_u5M>!|NlHnVo^Xg2#?a5yjET$b!+$Lw*T6MCckain`(ehTy0re$ z`XVIbh69tH)yLe8JIvDGjqs#D!L^Rd8<<$~@6Kq7#-toT^FR zxfP3I+r?7hljV1QF4xc+L3WFh3pf_gmD%bdN_XdBE(?o0 zbPLB>HmN4$XRgrGZraPiNOzMk_tVtl*h=~0mSu#a4ZU&H?OfU0v3V`N-CiMQ}(sCQ4h!i_oJCFLS z(-acc`)9u(zgVZgj3-lJ=b9(=7tuzq47vHhQ&fQV!9uT04HpVtrqhy~rge|?spu#w z%DGAIF&hJU^0VDfKS*zm-{NRJ?|@A#<9|cPug|RQb!k6g)sbq9NP9ll$DWjew1=bL zCqIz%0cxVWEDPMGqZxF;)DaQJ0q?7tZK1pdq#zjC3l*=3h zEhR;h8;3&=iupGBGZHhOV164-uORaYrlOoL4JX4_ARNk6tLX!U6_D?W3U7>Zoe=D) z9-RXVA*_t@dKFTy@15>o3_m%Uo_`@X3Eh0aI8`=_{UaK;SK{Y>38;rIDZ%f5 zONajk%|Nj`^X&t$yTYe1ny@06>~+A5?RX+C3yMRHI%-))KKdzM!d0I7J%2wzh8wPS 
zHlEGYTEdntS&q^pVwhXS?LQUny8!1z>DA4^A=~G6kKANNJ*tT`Nva47l9I*O1aDc+VAcx4qRg=X+4LO=h853YgR>0kz zUny1if|>(2rI?@CM;C(_&VMO9sA^daQ(POI(3?qdoJD0|-4J6FngNXjI*?(D2!Uu1 z-O=)8xUtA$o)_AMZu2_W#?R5}m~OwCaBlM!>zePDufXK$Bf_sH&$oMRU*qqxZ?qo7 zgfP|;#q=MwBBk1)%Hb}j82FwRGGPLhwt}Y<9vw&uja1nY{rj?e(SHGK0H?YsHCorQ zndWs6MM+bvF@rjBw^+OBn@}AqSbxPWq$mNpKg-J2bw)p*?`5zExw8X`%@B%fEKf z^G|h*Af@`yM3WA$ntv=iza=h9pG@BDAg0!EGEeYeXv!~q?o!(Ua5(1=?3ZY0dJ3~) z$iOACsv;r#>bIj_foDc?>CY4Xs~DE;bitl&+=@Mufk-c@E>*#5eWsa~YUwG|>qk~N z^2Ct(1;GR%jM>Z;C(Y@imzgA^NR%QV=KeihI$|jqWkt@@Pk#mj_|A|S^21X<-H9$o z^TE}SOTD~v(i*wD!CI;~1e>b+CgkGSJ#f%Bg@D4p=$@GyJ?31=3p;oeWLM6p9dn5| zJZ-PqJZ(=tycM1m$7ru%rD1qcy!sJu5Ayn3*bWjDrVq zPD!lbDeG@BtAC6okK~da{Cl*^zB3B0e&G9$bUwbPVJ}$01rz6$&B~|px3HI^u(S11 zVE-?VF4t8fVgt>~P#a%nIjUvnaxFHIfnbSVc6?r-_Fn7nQhlklH{kAocXvXn(bY_s z!2TvmbJZ*pFT?|-=v%uz#j8vk+h#zWnhKZ8`H zE`sa}KOLw#U!Qy<(}+H<8$5__w(+|@gVCjFOYqm~C>crH{=<8Ka<;|2-DoOzn@8SY z6ESb)R)QEcbX0Dnv~vqg6v{V3r~Fi>uK?@N*~7MP0vs=@(Azg!)4X23G@Z9iG2txR z|3AlhXnzj>zi}R!lJ1A2sUeCoW0s?w91ji1m)2J!oRhWR%=M=|&3g#U%|p%2T}@ov z)+DRz^$dqi_cnPA3Y{$V7FoL64T4n@8MS(*sTsUY-g^!vGdQfBY#4gl+A`dTc~<7; zwgC|mkpco8b{$?FYf@bR7d;-#(7ePCUqJwPeSdv@JUctPNP?nUt!laX?D5rCN7#>* zo4Xl3H{gq-n{9D?97*`{;E`uLTp~ZA+1Mh_!FO+ScYR&kK~ZsK_v*_2dVAY>_gW(@a!ojVgr)aBUY(|p;3y^hz0tD+mY_@H&7 z`hTQyDl4mPeQw+Lv41E^2ckU36XNS~=aV?68c@zEF&Dd<^Ky6)cl}?{f^YJz8Xogg-9{6asbBY}~7&U9~To=TY@!wt%k^`j;RO#BB%3XqW?2&ZnZgJiG> zlVT^y1p|XSK<&B&Fetie$=g$JvDih|9&$$S}ke z3Ro@B41k1D?+=M zo{$J^T5;%7UDsIhm?Fg*=&~gr=fK{4`JXVkw{$c>h6Ahd~EE|VprFT z0IuRT9g)e&N!)JZiN~KPF0U+qb#Hg?`vr_IiHa5a`uk8U6d?gVB7b&-nL?l;a$Ty5 zR-h0-E}KJ9l*Vb);mH3LBK60xPhZb3^DM88sn{Mj}++M&0RKTV4CX z-JRVZ?Ca}2KR7t}w11R;9+h)mx!uY=iof*RWIX)vBS#k(FMjX-{=xS-jx(#3qDNj{ zXQ4n>8jHseF)mI6;rSkQ+9j#liG+YA z>L}$LX3g15mWn8ADy+V42r4i`3W;Z_teqY0je&uImwS48{#6w6$=gL0m3Aw)`gr7# zNAF)=Uj8wuS$=oJar&i>M({G?o+Lo0tCDz$E?0~$ujwXh^_l=zlGtw#2QpV=+w%k- zfG@9Bt-=v&RDUaQDrNWrXduVfP7TUwKzR-<0nZM6SYkF6E`$uO^$3-sT!tEC4QyvO 
zoY84G(KrInZN`rVOx1*s2}d-lO|JlmLaolX9d>EV{e9kxV4{a`eknp!=fgL}D?ARiHa0hPIR?91+ojtFYugf{EjBrcS}hbxcl#+-l>s*IxUo1mi!yzP9!qUJxWx$t2P#flOYo>o|0& z+XB+`^b{U=-~qZ(fsVg)4QP8uC!+E)wLynI7&E?m&&8{4*%8{6iM@y2#; zY}>Z&eFY@-A`}&Rs>KiC1PV|^=0tud?;S+!9pOr zuGo%^gQG`#0MU*=0PR!=Mo|u1CTrkpqyHR(NFDh^Mz#WoMLORs z15L1h?%|(^5Z6?7@yY4k_->LXtg7&$67l3*PXr9Iw871Ymd{%zWvIxnfDowmIF#$n zj+nvUh^$E7?`MDOT&y^qjHIEc(Ciulqx>kzcfM54U>Z|z;b z+Vei~wj(cjMbEUwV0n0XcN{LBuR4t+8{e9Un0PsPN%*H)M;w>YB}fgQPa&UPniF4t zthd&JCT6N%=R@>9U;Dwh(u7S~0v7>8Qe`DhnjUP8mOFJP(~kI^LU3J%PcO&au-RDO z+LJQ)56(Gpq#wV30yA0y=N|G@V^K`m#xv2P zi0qO1BcXFDN(p{VtOr1CgquF4gHdfg4Mq!#h7vvOk|hhMMmqxzcbm zvOT7RgsBRL?4&vNa7h7w74imumIJK*k+*KkFm_AUlNh#=OGDR}!VLU^jh}K8w>^+C z(m*&?pA;U17xfVxq&gZ|#HT0U*6G0--h}LNIDoWH3mj^8-P;&dK*MvMvBIXzErsmE z6Q}b*xVHG?qB)IeN4mP@PzS3W9+VW4B~`p>_H(<#)M`V7{B3O^EXr1Ysn0cOEiV>A zs@jsc5x3{IyUw1?=Mjjsp{x)xU`+cU+@tOw0)5=-9}y9;c3o5Rld?*|Tm|{?%xS?{ z9=>zg)k(i>-*BsCClb03bQmFitAFZC!k*#oay(~Cg}rB1K~h`@zz9+~xiiC5|3DWf zJx!%L-5rkS;tq^AwING?6(4du#`*z|0)fiT2%h}S(?m!;F9ekotx*Cx(==bNl!!1g zI#O?QfywtUwMfGDV0QR{3X7YKmjA7%*zsPxTNE~2r!`euIw0B)8%Liz=DE`7(Nm)pNtZ&M;U%`u9et-m;Dc`4nw)Q{+p3B z0*EqfZnD9}wD0`VKH! 
z$08K6?7IDOHaL=oO(mi)tj4F@L7Y`EaBCyy*;hQKy(iUz+2bm zO1qfXQ4YRx{L_-Rj_<7TB6H>IBrrKZ1b9S`4PEu{;KYJ|=7rcgm{#8cX>)b;@BUR8 zTPOOi!8-vCWC75}I2*i0&Q|eSLUmjv|2NYc_5i&}YXG5@3u>nhWwQ5mH2v#rXV}pK zCr4r1j$m5oH>?gGPvPoZmW+Kb6q1Daz%}-o;6Ot1#*OBFP7oLvojl;Lab;pAIK|Rn z;D?VQeC0HM%g<}vk0i~kB@4;%Z;@uk3M*rQ%Zzm zJ|Hz~JtOP+vcHFgCXT>u&x;|1_H57X+w8&Vd{N6tVZ2hE4$@X~PEO9rzvSB}x^8wf z4f9)nFJoppu_ex>A2hHy%K2yy4naQAGVyNB*5Wbj@fb&tRGZD<&63JH{>=S?d1m%n zX=e6KLR&*vo{|!FAUNZu@?`}>Ag+9gxrPf4D(4se{$?y&iBOOkHc$^~kJ!CUNo(%K zW$+wY6TA2{c3C>x=gQ`a%lS5+`d20#V)&*dB`p~~!>X(KjDG=0{6C^Eytfd@1JQGCLoR^p5CsO zmzRHJ4GKsb{>&0mIAD=JjONcAV%0cKBd|yE?Lz|b=5VF7?`P2uEwQ_pNQWJaaIQ?oZIx_BpBrsC>=eG+M7Y_PA1{pETYqFD~Yk z2L3Qq_o0Df(scdx`q!)x-2-)VZ<1O25S|kI_q>9Q552T)%3mS z3;5R*pkx}%7Vc}#1|kkxbLG*RvYCBK8j=8Yc=%lh1!PzB07Ij@QXtg{=nM}0cEQul z-y0A1BWw2wwzk8|o~q0LVeX=jITMg&Acs1geoHh+g!r&9mZBClC9|%@L&@jgT4g0c z1p^YY`#!Lj3rfv0#WShSu-Qa^{yhmtcu(6cFhFh)g^zfhlRV)&j#Qj;lM#;9D7W6a z1ka)?7^*OFd}`udww4?F{!K5zT}l@H`J=fl1qoGiWhz&xicGW&caUfEO&Gk^H$FRF zf8L2C#m9%|-F_6~uS6;vTQCmRBcL9Ns==YF^F^wPav(a(3fjfr!fWGyAwIGHVrsBj zWgx(0DGnYrk7PW$PKYV|cw*8&JWcyO8RN$U1V*MFQ;L=jVjZf3Xg z$k-J=8=oR{C$|gNTk8(Ur75_L^nDdG5Hm+2W%iy~3HaA<_`)1m)igi?H()MHS9GqK z6eUPCPKekrxjOVdowG0o5RMa@-h5ULs2VF>v~Y9KEx0VLESm~{MOt%gZ}J(k15K>G zbv%0Q;{SaagR_7nc-?d>lv?)(QCcLo8b+7CuH2imflBS3LOr5uC7LfsOU2OMFZrID z>In5#Ffl?YDxy>m40+iJ7j|jcD6+ywqzp$kzJVod;0)ct`>5CO7)<+y1IT$uc!2;6SauC z+0hpaEXZQ!_YcF@02GV~tT%U02Kc+b-{R`(MG$9Qi)s+kX?|h{g#;jP`t% zb~IL&w!OSAr7y!m4*jq1>%U4&|AKH5QU@LQJIB(X)Jf(w_sxV(#CyG)JZ&^CQf56< zm8#NZM@J-oOhc1t67SE{oC75KKBy?r8@m%{^9)^1l)+hG623lOF61V$nUJOCsQ9FJ zZcFbQ7DyF~ojsj~2^p#MCO?Xk|BQ?%t=p4iT0qK}l`3jd8j%p4DT|I>SxR&?U@9*^ zVU`)V3T#C4q=3Ol4dvWWze3~4T|JQes4RM~LoY9XZk23kltV)(jhNLcV9HcgM@~na z+A$AbU0-ugg>HWF#i5Roi`ljxsS&Wivi16Siyi=6%W+AM6jz(L9ud(fR8(BBD)-}o ztpCk=)lm8p7(PS$pL}zK#;%X>q=MX#yY6Cc8}y#zC1X48?73zH&unCi$$@bLq{XOc zV;>-IKZpVlhZqJUacu}vv0SK5k3>iBl(uNv_7v z&(E4WMLo*$@(dE8VWtf~8F>%H0kXk)1%Bzt^uuK@NIX2e`bwQ03-}!{tBJaX+?`QR 
znUb9rrOfga-M9CZ30>|Ode%2X>8WHDg^BvsRre??dKmm)y8q^QG3MuwLuJfF3Z>G2 z?;+4hz-L*0L51vY> zHdTUlfA%&0oyFI0v0k$U=#CjgSnssxS|PJwl5!cTI3`NF@}GqHu)A9NOHpcEDFh~t zvm~s~tB5ufx!$86Qyi4-%7JU^h=!1MY2e+@#2S_={sb}0q?$v_;ht^Pf|V#CGdj0sj`oDLc2fl5?90GxVu z&EMtC6EbvzGaLRo5jTKyIyPT|u1*bCy6fod%)X~aeGYf%Y=Qlro;475El03&#Mfr+EDz7=&z=LxWh?~F3gOnT1?P+ z#ajS5s&E|q0@kgjZMZ7z%!)MlJGl)2Ln zdtb918aDwx%nDjscU@zDSwtp*V}g zP&5C*y&2=Lvh96z&||pccb`wtzRb~Mlh< zz~<$;bnjab{1w!*<(&jv@GT9yCdy0ak~Ux_X)wJ|Bfvx}Hksb!=M-?fKXj$^=1&yt z;2-u(wlnJwP6aG~WOLXvj?djf?+0~w$Zby)D7O$ zX3eUaqwj^n!}Xx+>oTGdx&M6l#-=2M!O(GtUH9Gy5MbgZr~4*2jd;k4*cI3vd)^$P zO^|B|%>GUMr8*e5T&rV6KzAaKSXfmyCf=~JeCdImBUMy?-JNrm@k^SaV+nP#gMys~ zd8&47)!DSe-_%EH{`e}p3+Er(sw_|soD{JWoF}X`ctE| z+1d;JnYDC(kL}ia@A2%rI%pKeci%1mlUAOMT`tM#$j!>ivX_AYGeZxh*p1m`VSU|F z+Q>ySbDp_jE)U4_=VKb1rgE@;8zfq;#Qt~_T`TPLN4EiTezI27+Fp_@wbUHwsH*48 z>gw$L&#LS3Qcb7P?8&c6tNGpzn$FB!|7%^neoQ=nrs_$7|LHO9-$Jiy(DfkMAWaGm zT)6|Il?Y<)oArE!C7Zo#(OK9IsHYYV&I#$CH7ynWZiuZfFiD`)Q*$mO5o0-tM}`~w zZze$GI-lJ^C$6B!gB<5Ksy{hqyZgJsEUYA`hXG7Ee-D$RvbRvyW4gpx?a|itGMR3Ou;{`;D&q0t!y5vaLG5Z620QK8UiAx ztG9)gI|m)#HPfBWs}}8NP%en1D)-;9gu=gnG(Da(PB^+gCusG}cvmahKXrL@{15W8 zjUr)b?hJejE3e-7@R@|lHp~V?5#qL;X188Iu>F&&UL(InOdPIURtYtm9gloUOiQLQ zgNq02C%EUCSJ7in^0g&UpTR?$I*siAwK*&fw(huI=t+!#fs2^z^&St?iGhE^rUABp zM*|8l-@Gk$mnZ8m;>RvgrKT2T+X^VE@fLBH!>+qeYPuh3jlmnzkrc9>Y8f@(?t+l? 
zgo)lBbbRUx^YiUZx&`w&~%FLw&is6N7bxqBGetw(P@=Vk^)qWtmZ=W)k^pTad^(TXe+jIPI zZ(8348C-37JC3G@lgIuY)d~@h_vs=(fBiHI9<00y^;yJIKB*Q72SYbyo`P|j5&PGE zEXZyK`(**wo7bE`%PN5)Zpy^zWN&v#|-JFOq6 z0jj2l_PzxoJFT5|Q)^Ld|F|Ik$EK4P`lCF>-yuP+Mwg4!UP$6Du<^*3m!DfC_uq*a zE>nm6-$O}$-upulJEmF7r#Wtan|D|MflxO(&AXoal-hj)88tCXm5JjOpKJqOr>*8y z-^?04)3L4kf<-6k?_tQf6|0asZnDXlMXP zm<0g*f870dasMYC09ZZ<0PKHD{*&c{{vU0AKG^@I|Hts}B~AnYAcy?F_cO0wb$g_n zo~Ukre1vwtWLTX`(T^uGj~L+Lh3|oeLg7O{;2X82ksv4bK^UPzZyrJhP02|PnDk-W zGprf}wGY7>fFTCYPl;N8!$aatax{=vQnEIWhu3nY*=`edB5;sph8D>X%*$INy)6opJ5pz8_t$ z_FmlkmlB|Uk@8bL&vM+wOR+3!$P#R^kTeQo*r;~c3!xRi=^4{X>dY(IpR2EtR~hMB zo|~(@Uxqu`7&;w)Pg$C_`HyaZzFrTd_dWiSZ#j-J0mh4qghbRo_~J_S*{S$MDe4a) zKkUUH1#w@9dLM3gAUm%2qTUej&8?@8b>X+shgBifX0o;)!a+w1!wwJjQ6$%*ZP2 zn8bddvAU#xC3>AZpUr!sn}RfoV*+&l?z*Ca{BdpI^}K$)EDn<`TD2z*Y^@vFuO3du zD6J|>rTD#yiGQ87O7i9Av*~s}@p7o~On6txNZf5YBSh6bUf6`iXFH20p+y20m9+5e z=DKLN&MWvm=FCQp126T?JLU|x1PF#C8T_~w&?4-Ab0vY<4&>ZL*v7fV*jDAz-j|Db zf&4?mgh9!ePRJM+CH@pv+8}5p`&~)&9HX-#bcF)Q{~>tvd5P}v>Ee8UzRS=(GL1xL z8Il7DamVUAu?}fOREI!L^^oGGZF6O3j4JWTNa`}V5PduN%a?U;_%od*NYQ<9k2ix) z;Fz_4aNd>p%whFv&p31tyc=kr&6#R*GdDw)17>XSN5Nt73hjLZZJ{5UpDRfAMd%sE zR$)lthHOmdB+*?cbZ2$vz~^LH!cBkQ29aTO4fFixb3slwhpZbMv&*b;j;%qyM^p2Av0t95~^wuLCA_0?!rI2>AzLMWt;5 zm_k9xNd1T6_vq17^7Yg1P8o*ZRSqwzG|r8+KD0**3!fCMpS(2pT{tW7Eh{;(hz&(# z!XNB{Z>24s?hw%!TotA}V7GLGD%y3lxCmzamX3uNip8c|3yh7YpQtK(e7n`pwvKv# zy_S8KeOKC^Vw>U4EPx%NhUS)%Dvv%xSASFg=ol+)^>>sE@2f zNOFJ{l$;A`#-j_ac81&fj47prhG2Mq!H_24#eUJo$JKVdlrtk9(Ji4nD&MoxzjL1A zHs?S+9J*;8ZDkid=f?EOD+eMkC}V#L^^cQvim0k0eJVZyecXh{+lQ)Bmk!T|m4Y{- zI?*KHZSjWiMoFHi7FDNXa3lBm3BP9*#Qi2JERTn~)8%9(MF4NWs~EE))!O zw=d9%OP#oDuzeS%ySi>2`@dY+o83lXQM?$Kx5Y1QQVF5Z z*O&k*rsQ6+W@g#$nANeCa&C)0F^aHgg5~i+xHIta6_y%{$x85~O?=$8;~Bg!-=ML4 zvl8lH@b9Cm%Q=)<8%azg77d1f=*mBEris5Mk|9gM^&S6u zxMB_kQxDGCrOk#D?u;94aBF#(T{89v5HTohr9B_%>y)A*fHj{Tj)+R42_TeP95Xd&tSGijF z9L}Hv#ygrty^X1%9b&z%`dU$_Mc3wAcRO6z#jBtGQN?VBnfqfwZ6}0A2_sdZBpuIKQE6)7>zu&Yb3?t;cc}!nyWyLt 
zmwDB7IADX7OCNfIVu^ZC_P$@qKDz*O+zlpTedrV7M$uML!m0y(*zLxJ#b4;w54V0Q zTpdL?yDvLdBZm`fqYMsUFiYjbN~Xx}lycg8wt?J0Yq%qHw1*RaK_F{WIVmV7!ba&= z3t=ThJ7}lz&C*1`2#F@HK}#n8)U0r)@5ohdAzZ+*m+8vc(*>Ccomva6TGtM$m6lCV zo`O4mNOUxWRbKPzYW!Km{kYJ(3sd+|-~2KUyjkr{nN5s6L4?{jrs7SsPL4+aF6c=o zQl8QYTgW9!PMG$8&-Y)Z(I|UwUFp}m-ztIZelZgGE+2kwD@4z3S4g%9BOLsc*y2-F zyZ4Wkd%)SQ&0*~=?{`Plau7~Z`p<@ut#C)NlBBYY1RJ-qq%Q!EL%sD_@;ou&Bw5?8 zBFI{1Y!+y4cjMbpUu7t$_+3`|p!jrOdYP<ip%P&Gt6|&b;QNqsX4CTlP3lERPKc8JHCQ7CB#TO5o5`-|#NNjH$c8U7 zNS&N;5&=4kY(1E3R;#OC1J56uPhVVBYr=HcQH_N^)D)3(E7AVjL3{oqM;yU6CSr6Q zY;n^at_M8r_YM9pj1$?75**+kNq(_0Up^TAbRWcPuMImQectzmQNO3p zg5ThO{@Y}P?rWz|v)i6~+@9Vb3D!Zl!%-DHqe@WWe?Q~x;obHtAj8GEd~FE(vHMRd z;?vVvKG%Zp)0|u`|NM%Iq~WCdLlJclS#&W+1Z(TFpMH0pzHO3a;tfa2jn+@PG1Xk- z*hbR@l?AmvbmXoLxjwc=*^ex;-;J~z!?w!59SZzzFYc@??Tc36L}vlD|+I;vLgJFPd0a!VW>C8#DQgnK`5fPAStb{gk->Pxh>A#o~2 zl~sMyYjwb*9;h1%zr3Ov6ORvm&EhV9RDu?qnDbAb4X6EATgljnonIB1}*edFQP-@Sr={}VZrKr~^|!or0n&#tZJ4lW=-Or!LxS}|gTcivlo z$e54hqKMy}VUK=HFC4|HPJA~MoRYA8U{rhFXZDUqnqbEdbbv$!sU!?~OY}VYA*g3P zXIk*x-0ypS^f!djS(OVSeac*Uye8$@6NW|UphKo*vD*A1#TdCeHwn9c2n94wAJzE_ zeS^IBhAP-t>YIJOD`6fyW|W=Z>CrP1Bj3ku?Y0d-@4b9)@nhqQF?kKSpaE0X=xTq- z|I#xv#j`?t@Gm@;jkT@_?=-yIlk7L1T$H)3734v zCV%k#VYWfTw~fdf-2ty zJdBE5+e;jrbb2oe&Vd69O(L|Y#R7d23oC*PyG7g+b!9AuSzX+~40hpCeu%$NO~(_z z(hI~T6$=F7h!Bt^f#)zty+h1kcRdjcm*;z+OBW@fm!kz=lbD1qe;F%2!TjYt8f{jd zUKC*adzTxdjLXXBH2LR-;Y1%&UH_8-?DiVV!Ak#(5rs#TEjiG-VIbw-wc$g?yO)d1 z`HIWZ^$RQp%Y11E@djPvT?6U2n1ZSrT`9>Omlw&w8>SeOW2fX+f_#{~u?Q-KwTx>3!8*ABk7F8-E67rR5q*`A+ohiJN zOGQYt^T;Jg1uj_t3R$7;kPG3PVq}d~sYg5-5U)*tzY?oe%2D-UcNL&hk5L0ln47|# z6XT)F2bC&ANupFOWU}5Q7Y3=TLQgj*u(gKs*2)4qe@N6#kV899@^ofue)8&GW8HB7 z>w!Dpz^An@LtJABjX`W zFt~@XPe0P5p;t$ydPIF(*dxc5#S}`2TL-g@@VyZ_hgWMf8KLIceQ;R+&;?w3eb>)?@5jF26OI7 zOpx~ocEeVovG3WkH0Y5%|Mi-aJohw#W=$SNA%7?!1u12lilzSwFgdX%$|)@?#_9^6 zUTyN^pmB^;bZU0tUmrouAN_q*SV@!m5O@#-k;KX`u_4+pkDT+A0NhiTJe8-@79^xG zfBZ1Y+neE7-Tp>iWgt*I44ro+!UIV!##P5LDU9b4u3+gIxl~RKm{aSHcah$rc3WAk zzX&aCoYhEbkf(RzKIkrN=uC1@+Ij36Xp}o0XVcyKi 
zo;>;ev+ngBfy`NB$_Y>z5!C$qt`;Wre>OG|#WGPK)&{7tOp2ovs1i)J(`Anl=q>tH za{%7@4st;SG#B$1pSp)=6ME%25q+z+kEW!yU=%(jS0UX*{dL!d2u;wYcw5W5S-QDe z?)cweNi(#zSf08dK zKmHEWBCS|1j2E+jS#GRTniP*dMx#iyO>7czE48abl ztJCRru`bCYm7o*zqJzVqV{`3cu3J|hMyX>(LVjlOIGpC|`iPqTHNy*@pS*w2C`(P$ zf083Cw$+79<1T>G$fX`)EaJ+Se~)EF)Z!axRG{DXETx&ixXc`Pu7jVMtUm;IR5j=! zykey~v8~G0s1XxW-tDGq?CMTrEO4SdZ>?%5GdRDDqiVtBH5t5Z?thJ>*s+tmmWeG} zf%w@`%fSN`_Lw@p76sqp+U~mZb-D9`tZI82gD+K5S1wG2Th6Yftz)S)e-ccmsy0tM zVj_-7GJbpKjwv-swZ9rGhf!O5vs-pizGW%fL)v%c-&mvOa^QOI8r)El+M!B-1wqwJ zxfG^;fM4XmK^F@->oFZ==64%iThc)S_?lFDkv;kiO-;6FcPGCeKUUhXAEb^-gFnum zK(nxnbbL-lW|k2M_@_0Zf79u*$7@m<{WtPA@!bHa$=oWhMFPzn3(#E_BN(=YWvp0a zWp6;5dXk+)tf;|)XN#|;qE1y@V3u1zqt2ye*1$?~fhgaFs2u7pAfCWCwmH-7BR>KW z`wnsI#_3y>65g%FXiqt$2@{K(r(6U_f6+218Mk~<$P6wB znP(_MuLg#&!%!GYu@C~40E3etvqXyW{Kn;~eAyiLp(S-E+fr#*Ok273Vr8!*>|?L0 zA5NHOO5k9?@!rDzvjPwFc015%`!2b33H^YKM9kpRC!}mgh^{LLtWqdl)pt&nz(@CM zPxs^%(?&t!!MSEze}{x^n>Sbmx|VFTWZX0GETJSljl3JQ9G(0OmVfRPwj1fU=LK$1 z)ufiJrqO-mS$|;vFGKvK*kVD6Q(+mUs@vT2WU@LmH;%yNjt-C_IECGoJ4E%L=jqGd zFB((Z(>P$_luw3pAN^L(CZSJwC5tP%7PflRr8+LX zeFy8jthu~sDM;-yXI>Ss%zrY@LMQ4t{>lebJ1}E|I}}AxI-j??BPZ8K0i4|-XU!06 zOe7ZK=5Wf67ShN2$u8F_}mF`-Y+(Eka9GnY5^?%3))61(WqvEY6KhQf!MT4&z$nQ5oBc z@Ig%VlW$=q6nYMJf~s_p)WZ3V%y`R8ozFL28^q$5@x%aeSDX(6;)(95OL@2Sl~neX zds_Oiv5}7U@NTcA)hq&ms!Ouu)`yYSlbdLce~OK62SfX?ADQ=-m{Pq>w{8{b9sv^Gs3+kUq`q-a&!ZcW*|$_8gntzy5o?T!QBWWrZZ#q7>3ydA}qY4cwc{ z-^80oEKq0{&jvrCs=6?$m9}0p-`xamh_Zzx$DKADU1SOe72^!y%wWvEbN5n?;^o_> ze}7}U;SffeX&XY7LXyJx^dRf+Z2R2hp%J5$Dt4r41YpF=Si(!YxJcKe1+9-NahazK z8L3E1nTqv|lB+f6N1Sb24{6r^Zcs3GKBLwC2XA}ov5pu|qoGGaJ8U_2=u~LuR8b^} zhMXEmTMS=;Q5EeehGKPGK?bK>UscW%e;uMxGOgyR^;w})of_Cq(*RNJ2FK6K@%+|? 
zY0cg^#}$xohNSX-RCS|oMPmW)VP!fiTGf1oSlu*6Kd zO?IB#X}js#oV}I18!8g5$KGFKa0P9}C!_wTHMwOfD~2!4gKD$ZYxJ+zdW#=Z&WJaNJqF5ASGC>hCM?b& zyOv*n`XX{B;;>Vv8bSKjo#;zTe+NrXuF}LV)?laQ(3SA>SS<2VG`d3qE6&21S{i5+?3Y2u}L!`GFlORI|VJf~poMmd!f7gHi726#T zg*n(FNn%BV+~6%PVlBIr<#SzZo@xN=KvN}jXn2zhXIG8afY^!PlFj$ru+Hi2v#%Vz zH=np`CUr;HyIVPs-D=paulJaADU}DGF4X4E^dZRFTiKt7=JUp87s2Naw9(5lUhf>a zXx6mz$?j02-@fzQGcUcef4vMA?N|5U169r(z1dRf+K2eb`8tN~`58Cax8-bD*^>)L zD30rH7ugjbyUa##hOU6j6tfI*A`Sed2G3I+Au`s7?~a>4zCP%V1U%41iG|bj%Fc|E z=JD-7+RhIalLtC8p8mAVOtrb1&g?A*78S++qK1$n9eYn4ca`Rle?3J)YcR}no})Tb zP&@-qw3#bnS9oHAy>U~iIG65Q!E5S3SIP1A5Q@pzd{>e9V`uhr-Pv|ycYSp0C~jwd z_C!$;FG)YI1+)kPq85Nlp`!l-R^Ga?WFw2v7PGmR2!)bq|H6p1s?D-5e;Rqxz9YaS z3FN^<$P6Yu29cvEe+OeZIseYH9bV@2N3Ty6z%JRC^th2B?k#FJ@%tcciA5T!oY6TI zR5pQ;4ymeob_R}@!BU#!0KpkGe^5z>B2nx~rZo6nO-@!6CM%D17e-{FZZsf z=7uFQgNwlE_vK;vezfk908;9|>K?apcKcUu{Qj)Dd(aEA`AuRJEHr5pJW7%n-+C4A zFErnJ1&PRTuoNgX!jbR3Xa964GQAYZM%>0sZ?-bSF6_N?QbW*ots)J3Kx|gwXwJCX zc)p_E`Y)ApfA6R90jHY=IfLmFg~gWNe*rAFBS9|xcVqzHKGf6>=PNKiDpIZIO1ualVmda$U(6N)5POD+~EE2~DQN#!TIk=|fbMjI!| zvp!2s?GDdv82{Hfl`L2-jnhXz(7SX&p`clMe$f97>+gF{rTl%C(^IEf#lsc00?TN8 zMD}b*e;jdF0F4p(Fa5wqkwFGJKXRKTyFuVyoyDK6#}kJk)~-!p{+CLyq;a`|+xd_c z4PHUe>H>^N46+ms*F=ZVw#~Bzxq=mhyF<})pc{-!^^mXl+kw0!nRcwr)z4^k%)2Cx zzBh}))|%nZcB|tJ9^v7xrEsSEJD4Qu??kTTf7DYIBh-);7I)EVMYWJa3YcmzSR`rA z$96YRYW^{I(1_R(f5cZ@g;C2?Jbrz=i=0mlInxR<#;Tu~ekQr&(eV>Dz~Q)xI7O+d zVFI)YDr7x0brx5=JGvK&JNF;)&&ebH_a7TwW?{PF+03$xq7tJ{x{TkJdzkIGuG3~lzQO(1QTq+~-h~0|kU4)3 zdu#V?w%mg#bPH%yz^?Og4TIW#cfIyK0D)S?4=TvtdH7{BpCbMjv66ykxoqeldC+6a zWWj|Nar?KW@_x2(K95JLjtA1#tL>K$e`gW!bG+BClSG}(I>(qWII&E{Z~r29_WAcW zZB69~KT^-V-+`2SZb|dsBViv_@LyA#5yci$k=0hTNGok)?d_#Sbj3St#r8!7+E~gQ z%c*L{tG>E!()zwBPgs8wdUa7Xn)GU(y@C9)p&zk)IlA=cwI)MW<5FP?+7LDFe^k6C z7q}jw(xcXS1Wwnb;5X7Wy7U>d8`24_0^gC3a@i^?-7mmUw5bo(HC0h4$NO*_IE0>< zE6aC!P(JKV_e%OgfvK?1iccP6`)&RwCd+!Y$7Tfq$djLbH3@ zXY+5kdh}TM5k1G}gfOheU*$^*lLpx}Z)i-(d2$u1Jo=vBoCzjMA*@ypI2OsyYa=-_ 
zTvmYeG8GFw?_s{>e_(yQKj07gf^j>amB!auj6-1>gPJ#e|86HU<#o)#6ijRgSl=ln z;G0)xn6LR7AhlN0minTEta7KYyP)?-6@DEcC{|{+){u7M2t6X6(DTDTU*f(FA^4{H zQ3+itgu04IP^+qUH!&@zIBa*X z2xbe#9)hBkHDl_h^`tV2mQ1Qy9CFXeS7{Qyf3&a;MrR?XF7&w<{|{4lr&_VfI;{U| zQ|8lSt&*%-;JAp8B~!e;0goAw8lH-5-j7tpt;m|LZBN02#UIjg%Z|ND=TAoQVhrpKksBa_bTTJ`kiW;ZY7)u%dFWv6o91_v&>INmtxydZY%!92;h@uc#U z*u5tg>4hiBiP73r?H_t$qCgUcSg3*Kf6%nNpt`=G*Hokva!-g5-x@Zk7n#Ed)}joI zz^b{As8hFu9_Uv3J11fM*@C@IZ4C6*s>{YLRrYJG|J)(F(SIybO0U>X;rcrH32gP5 zX7w0mJ87Hp2u&o8)Ajia!GL@g?!a zx#Vc%84{x+9r7xQF6I$HRE$51UGGX(7<65V-6e^Z^T>x(DS zWMAUiU#gW6U+BLCq9t;hsKz`qoAQD`3SFR6j3S!lal5sTz~8&uPcv@t?yIMJgb z9naDk+kiN4Z~_j3&*eMQ7arKUZ=Ke)uQJ>3s^i9p@`a^?4qQ~Gtme<)jPmC-ZyycKtZW{Les_F9r{{<5|H^=I-r2bWU2s zw0fw)YR53&E3O_T4!`}K8${MfgJ@dPTaGEm*a^z5MOn&7a%Ox&jYv{dnwo0+=Gufc zCJsd_fgzWIVX7{DD}qucbPp*3S`ES!{A+DQ#ILWn#GgX)o%^bre>+Csvm2*3iL|FJ zNa8*}AB5H3&nOR7kC@`Bq2E>^mP;Qwxqoo9{lvXXl)r9s$ePD{&d;EwTa-oclrGNY zwDxk&ahdCVmOh#r&?@=sZ#bZjsEsFv&bI!rw@oGQzh_a$-3(%^=JOynf~lkCAnvpR zW3}Vlj>gxWlqakAe+RXzLHQl4fg+i(s5aa^w$za`yEttB?`}5PWIt{8!*0TFawe0LnaO14%$eu;Fpte*_m2&T ztiH6)i>&;-$wAwfZ99pSc1zkW_lo2yM(7^4(>1&OW?++HH=#3_yH|_Hfd@&WFa4`z zxq=f^=`Ti`e-Lwr+Oln!Dax?#453Ps(jQPhjto@h-Kfv*&?Qdw{ANOH-`DbHmO^y0 zvacJ9UZAPk&5iPm@wlg`9Ow(F!tI*nlcnFGx&PIKz1nnK2pk`W>K!!ACqpEiy+7`) zceGY6RO+6B!)zKC7)hAG)N_krdHi*DmcMAugQka}fB61;O29i0l}g2*@w6Z3YZ^c1 z>z(-{xmmd{%R|jD_m)-DjtW5y6J%8}+uz9bv2@q1--;p8*vu)B#IjE0ItyL}d=bDZo-fCsFK}_A+bi<0eG27WTa_#IKsqMy??3F^uqG#=6P5(I2${)`Xev>Ep{m9) z;G8eE6mZHFfF0K3{u|n}jl9hkzG6Rrz-)&P|7=$HV?(}c&k|M5$F+&un8mHl@lUL_ z_8KFtspDS;?SMhtoLrKzKX;Ay-gJJ$D=+dbe^(rH!lg)2)biH8dC26`sYqFvcg79Y z?sg~50>gdBYOZ88oCxAZAajKukieDQ;TOakOo7@6?|9!e+#vD9F~Pwbo@09+A7ZPd z;O14#GDjHK{q3qwcF;<9QQA|oSuAoF?OPg({D#S~PR`!9@}4w+G7I#6 ze=u)UrdGQ$?ztV)X`I9&w!NN;z%1N*OEiW}lHonO0jixdiMWsOM!bJ?>GnbUt~{FB2z{i-|E5GFFV91%dWG(7 z*B*rQMZu^q@8s0EO-#_%fOG0TM$w{;f0jY}sY6-y*e9RP0o$588G!ee4YRY)dIdlOco?S%(WskI@jWi>E77&=|?z^MY!{Y4qQPKPe_)O 
zxo38wYXNY^`di$)w-WA`Nj$c^dLYcgvmn#S@H2DrRmTMJ==8EeyP)9EmQIHtvFDCwW;MVODxgJNCBCcIzkrgVz@5PQk6RIPBT9+)Gk*uM?)+KE9_ZEPP|c zyc(HV3S*tFxE~m{b2nroe?wbT8S7&{ab?VvkQ(W(QZUqhM_Ca+O+g@QV6fjobUJsD zW+%ODJwno?d%Ztk3&bKT&HO<8GdNn=*9=(1J3@S1bcGses^fT^k1YLw0se)=i(y=} z@8Lrfw`%H&o%i8*n`JK(byd=DQk{_OU37WS9gw_RsH}t;w1asEfyTag;%1_W|^_gUa z86p~`ua6Rz7v5Us0D4BU=tnHU5+h_=IvHaHGn_B#8E);S($qAnA*3`(@H$33OVgMr zi90Krh|S^1b* z&u;ggw$je);^>7bUEvVgHAbkH!#w7_|Hl+ zymLuRyh7TFu;5B&vmm%x&vN!C?n;&8!pPRyu+@K-`6QS)A$Hb!Wf7Cl!m7Vx+A{tD z%VJQiyVfc%Z6xS=Gn6;yeA0U{Psqu}blnEdXD#QJ^M%Xr#;gC&r*k&6zx6TIwKM|0 z7>$ZeQKx_3e{7AXvI2!$bZ-pW?o1_bt(QG&tnc)ztt!&yD5aQcR7WcVceNtEI_kEJ zH;6@Y6wg#XpodP;sRKoeBP&q1Z2P~T@~`l&___~Y+wgbjZz=?SKwkT^3@;G8xP=QI zi7YQQb?t3bu;l*uK4s&Kuf2c7wCL*PV3XwKRQM42e{|)(a*!Mj{=|pwx$_0f{~|*P zg{E)ay+d5W{hJ?&nmMy>SAC4!FQ@68Wa_@J{^ChGyb8Y*miJ^W#xh+xbb6ddR(8k` z>}b+cNy}j3q^wWM zRyi%3e_=g?{L}O#`wA_6Jg ztrKsdd2EWR8a}m;+jW&eVlCz6OnH%L5jGl=u6a#ol*JOiM{n;r{cvASFHd~v|E9*_ z!gX9wj_nC9zQkS%iprkj9~P)u#KhSFzbpntmsZ%_b$(1qRoE2GmYg9}mknS!dt!bV}w z{;r5}z&0n}t*wr3fm9b(?a5&jT7K0`3x;nPFlLG3bzG9fPj)LY_mtk8Jz zEWl%%N>lYGc3!h+&8i@1)bUPZWa~kPQs1=EKjW@jc(f3)ydPh0;@mB;$+-rjs3xa2 z<9MhZ%&2!V7u$wTG+8FZtOB4j3zGFJ?V~F$nGc89MpcrhRrl1EhUX5nOysoqe;&u# zlZ4&d3{YPtYCZosb-sC^`1kp~X4uX1NLW`?6_$TSVtuFTMh*_)ad%a|^KAH0%Gxtg zLU4EhRb-ZnyvIz7XK7+TPSu5!K{%xa{$gTYE_%E zPPI&+h+e@!>rKqx7?LBw*x_*{hl4@o=v3l|a6LFVL$gV=et*iH zc-TFlQGQwBYt`^?&TQPdUqpn%n(`KRsaBn$_K&m4YFypK>{l3vfi9^9Ge~6s5Me9C zVje>Er2AB0BDiZ zF~Z_ejx;6zSD(|gTOaxN@l?gl*7t>Zy9e#!$IK>3k) zM*dyhxpj}Oi67tX1i$G6IJcxOlG)DIJuF{5{badj;k2!DP0UEoq$ z!YVkrfAX?q9G(WzI(hm6A7FHm@VEv2?DN$h?ny<~K0U#VMC?G@@UY6N(h^Ts-kQJF zV*grlfp0OwT=#dm5m`{ZF60dBTTob4%18FC#DQ5)7WOWIhPe}N$i<$_-9Dwjal+g8 zPl0B94)oK&EH4fbs|*9q_zsDPeKfNCs<^|}f7UD!g_XtMLDhGeb$ph+`|Ts9k%{G8 z`DN$Z-G{X>u0FJ%%49SQpq{CFm%rMs@Kcx`M^R7 ze`=D@V%9GqN@1}Z3B%vi0ZciGFz+j=c@jkn{SI{}hjB-j`t8mB#4O4*o<&xf@Zg#T zg9D3ciBGV`wvXMe^svh95z?8(nmu36h-QzsS??=oJD=Zl4k?uaZ`vP`%RAq?y;^j>urXrQHk6c`CgX$hxj?y 
zLnz@_;e*kX`cBu8d%50h4-aZ~kjjmten8maf9_N}IfP_1gDOqyYo8k?6b5qMi1`_i`@f zrlUXX@xt$i8_ib&tjP!dh}QM%hSGv9FIENgg))B9&6GoA-pv0M0#~w?RTYFFF@gED zKoRx)s@Pk2){6svtgG%`3J)cvfAcx$G-6;rOu4coLn(WY9H_^{Y&fP?HQL%$Smfb+ zY|C9^;&GjL?Kw`4d1XaK+ilxS;|5q8q(=ZSS5YBgU@;V&tI~w0@{f8M@ENv;-7w|3 zj!>>+v%zVm7huY+ToEVMivi@?gX@VJCGR)ZxHN=3%L=pwMr4gyq*KC=e>W9J6{)}* zJ<->EsJg6)xeUk1rew@wY~~aU9Q^td7^!s|NYnC(Sa|>S_e*iQQMqo$Lld z=K@@&!~vi}QhVVNER!haf1v4N7M5}#Q;q`3zx-*+?m$|c|UyU7|F`+u^bk>Da zaetKLaXX$;Vc2ih z2`953_GvNCR^e9F2}6V3#`^=7)S<%@X%0^^@q6E0_YDj9Diqpu6NVc*M0&>GB%uflXd}mi`atYp5 z`k}$%(hU_g=Aaa^5p&63?~~=kbwX^^G;?cl(plvBe@e5)kZZ>H-EQrlwyAd5$>5~C zVbZ_GZEUGoT-}~6$+ueI1hoC84o6@9@7px44Yjo32qZiVEV}zEyBP;8~QIg=F zRPtQ7KHuDz)`w4 zG`~!+e+>;rE2Tr;Z*8w+Isr}&Y=2xd{Zz?%q7W;X!SS}}4S~vVlkz3kk6G0C!_8Pv zJ!AlD;Gi0PDx|a?!eo^4@74_oH0>TsCjHSCClUfYq&K>{O!zDa7~VZX+n=Hq5H!R!G90;ud@x0 zzJ6PbpKaTZxGkxFZ$FI3OMKk;O<}I+W;#0(>byw@oLXoCi8+Z_;z<4B2YEJ5Er5pk ze-pOv#LVr)-qT3T#{-`L1e}ei7s{JmB)LTR42TUGXKW ztuXqxW{(wlD$3GGcz(2&+=XXgAGJABe|D1z%&E!nXqiP8qU&Z3(G;RkG*jYR)HLkB;lhiA%_)kx1)i(2Wk<^Ymd0 z>`uuXntkh1DL$Y5cg(h;pFxrYFQwp(Qmbpc%%fGn5ODuSaM@C|aZ)kpd;Yw1e=(vu z5D9`@JDllg|bE50KcA7Q23vk)bT}>sh9by>+fSO(Q?jB0nN?f170E_x?3$ zW-lRdl%Gfm1jVcJ_6ex8%~!5A?0jev;A*GLA$yleAL>WNAU;7@^DryYE7#q5h20Tz z?i$f?=3#MqJTma`9xl%nbzr^w~9he|hf^V5A$6iWnhlm|%^`UTG6V`xBA3G)lJ z+&(iwE%ZIs-#z6j9zJuNe_FAj()>et{l-Fkf6WBb1agDUjX{c3eJlR_tjx-_Ag2j0$n{DmTsiC`9+$2a{3CWJ6TH zZ_-K_{Qh&GAM1=_BisH#@D_$Vs5WWa^u}$HDNK)fky^Zjv{GEBW{kDUDB}{wYq;sC z6E&i$e@ceWq6(V!>%MI^^*U;CD>i)@;a+ZYo>&$9ayS;S<`5wJF657` zd2dUKuDzAUTY^~Agt}}FW=>;5xP`&(gpW;A!6x}ei`T0+chjp0`~^NvZpp#0Hc!Ue?Wqg?g}T~Bs!G&tkKkq zYwzL-nP{SCO=oy6g9c(EwS=vKS{97Z%IkC`?F-|ixY*D;-P;WJa7k3&kxL!?7m$!! 
zlB3r&XEHjJQ z0B)zqSW+R&)O6FpVds;VGVp|=zf7Gyp1QRc?=W-U2po{fzhBNb4 zPI5)w$B~n%6hfFC{%8SBhQfiV23_)Ob}{As^WL&|y9Hv`jm14jizIWHYQu3>OkG_* zxp^`lch&l`!#wy`pC$kmTcO3psC$ku+Fea*S^Pc;7i1qI8~**i+r}$M!q9-xIrSVC z(DZnXe;d`0SGn=1khyy4TWb=ps1Zt&M}$8b*XXow?HSinWB*Cusy(4~ILr&_& z*5+Pf0i0(Og)X@6^Q0q^6U!%=PzZMWTC?;=fAa4S;_3pTMW3i2Y}w~;l-IVVks&Gr zzvVYDSDsDK_UtmI$gWj%!lLPau(Mi(4{px@bXosJXdUC~o{58xL+rXKoHS4z0+KevNCoBr~+lM6yT)ACgwvK_k_#vVbbB3T_e@y(% zs_NLn8y$xuG*1%e=MlR>$;6e-yuTlv?E5(Vmu}9w9?JK;^H4YHUU;Nmjt@N?RW4M5 zg~vZzox&09WvMqFuv{8SJ$hV(LZ^Lqd-~|zU9EXMp4Awe<#x;+>Q!* z<^xcemAV8Z3u*4Q^xfMDC8?Hxh+W%;ep};trKQBqV$EDNE2og_^Y|adJh;;~2Krnp zC4@6>K22^vTx7kheA12%_vr=hrCpknr)(k#Q+BS+d4p}W>AdvMHxOYTrZ28h2lbCE z!61;f9H1A8If-b&G`2d`e?u$ir?gRswN_Q83P;-ZW#TiFD5Lee%TN&Nb@oAHy(TXU z|Kac+|17X%geaXBlM6TsuxKetXXjs-^q)-L{VB2=d!wkgJ%HE|4kHm63a)PH^*=y- z@Tts++=~uh;Q$6+Q08~F)=)ck^eL4*r?hMtovaVex(pGOfHtsQe~;QRmOEIBCdBI+ zhuA^YhM4SPI@mZvDOj5(8k*_@+LMn~fvQ1<8Z;?zHW`#@+fkdg zk}6Lgxot$gSI+#Kh<#TNz2P1M6l?)|FDIR_>h$ru$99~&s)kz!nuUZ!I>232!38I8 z?kAClhm4nt&W!^%0Y-Tnf8$jVT2}u38;p{bUG+^i zwk`Zl?Z$>|gmVGU?ed?V`w2s|^`VLiW^VJqrqtT)58?BVM7uZY4_fXxlYYW-!T}1M zB7UVME23zEIQ}_Y&?!}vv)K>YB#aDvhoTE^G3oK5?@t8>h%Kb~AGEMOEh`g-Rj?%p zgoiLa6z~TLe~Gf&L;`kdjrpr_g3|owFq-pXATJ1K6c&?0Fon=}z%k~pKTc;(ig_5u zv~e8+L1zJB!K291gIpw@7doOJwWSt?O6d)l2R%;zb>p?_85DK=P)zPOI3dHWG2#%i9`Oz1P8eAzs?NF-U7#1oPDd zX>NEOAQ`{q^eN2F>>d2eY^PkciQVK&k{wei?yO&exk;1G7n0qknlvf?E#I7KV|?Kd z&lcTHe|>x6$j6&QN(|I?1Sjfi-7O6s%`+$n*est1bKOww6EoL!c5ip^b_bD1ttXuR zoh23j67VmM_i3fh+u6Ph`l`d7)#{yiCA7@Z0l)E(;JbF|DW36QFW9_Weamw48&Fm2 z?ud})%x6M(i5==6e!%9yxuv_@K2qNoBxe&i}K=LiinNjDZTb(9mdM@CEYB zi0@40IyqC)q=zsnXoPosSDyuAXfJlB-pnz(j&DL|A}DB)O_p#bQu4%qdoF}(t?L+H zf6CMuZ3D|ao>E6xky%)-SWtucLo!0}jXmOxvy!)9HQ}W2qSC|q3vwRPpy*H{~FG}M)gHy?XWP)&E;w|CNs1haEAsu&?z*-Op`(~6H2G>S2EKy zAxSy>)bBkX7${SQ>F&I@-|l|9-|sv2?e`Ey2#L@TB2i(uBvgw3g}+8fatB7MY8!JL zt8N|aMhKUzePvKsO|xbQ!7WH|4esvl?g4_kyE_MhySux)ySux)JHegh{l2|-@7C7t zk6T;yRL|2rbIz~RU2|r7diXo_ZM7YUmnGH&f~36P4=zkBh=Cb=o~upoj=EN7!No04 
zQ+5GtJOQZF1SgS1kK7gyw1bM?T~%n%oJn@;UN7vU)P(zT46-L^0P0Pt_125uF94iG;IbgHeX!0sA<`3V6RUHyec&9#^NEUAF)JU|0kZz;mF|P)0nt zZiX+<`fIo?D4gV=9dbce{he=+VXMtbWk%Q;c;j<^jmJ|FYCQJ)&H!ys{|b{QIK-iM;P?LqrtOvQS6=LMS!}Dc;rAWBa3~#o)3S zq?JukOdOYfK>zu5+mDl=gg6I>tuy!n9|%OLGMkFtVn%#qDrPy9$Q@#Gr4u6GMbICd z;ti73nvsj8T?SEBmM2poU=toKqFck))P%eZ%VL0aw$5u>m2g?T{S*1EjE9x!Seare zbFElya%KYi(DLokuh2H4A?8$MmzYK6zQ3+5C?2CiWDj&{B*YAa2DcnsTKvFZ;44KZ#(q->RXSsOO+D)U^ZmD~4fN$d!lztxVNYk7@+_k{Qf zbC3d@z6>XjL~^Gx%(5Q8bYi)=1l~MO@7lB|7i4roTVt%=kv2lbMSGjnUEp(O9i(!YiHsl?+ zpM{Z+XjUkVCL11m4PtxI~;(9BIqWjZLMJH4wp& z2qS!)w3$0Hf>seKpTRPGC$QPxPOSy#pn9;-aQ|YgVK!&XYcf7(5Vg?t7~wrBCx!rj zxN@h&2Hs?;)5zN0srJozp6HBpB0d$JQ6LG#%{(ft4x47LF)lHp?pRM77NB!LOzhl5 z86M$WRq1f^sw*ebj8(7tT1h)nq9Hev)?&O~ZFsp|L+R*qJ(ZIrlT6$|jY|zIoE%yj zSmo|Ldcq@=9T-^*U}r+gKW*+cW;3rF(%RYTVK1(W(>s*()%UhOvzZNy_q!+bNACV8{2Sj$zS4928)6~5Vl4PRf1%h z<#i^SVIa-{__dA_1M*aYhe!y+JNE--p8$^gpMrLNdMg)7*J%6sD-BPFXv=9ARTaFdY4}ofhS&QT*|GUPA$v+@P{gQ|X28KB_y8?Y2)X_G z0P)>l`trFwlr5ucvUMAPcn+g%IC<9VHL_o`Ag-#jDs*GENIXi;lt;eC?6iw(^$fKw zS4%wD^-sYhs|YWF$eM%%isvEi?Pl=#d9p&ii@+jE5ErzABX{}f>@zo>i!2j^6)r3d z_JqBXNkCpO)Yojpor#){Z+`1pziDb*oUAQ_w8i36uytJdHrP-BH3emy+xh8no@Bn1 zsDlKPd0x~5Bj%l1)$a}!y3dH7u}cdX76oYdfB=A{-eS^6MqsVQgOlDuwgYq#hQEOO- zeV0+Yl*JT9_)8d?s(dwOojY^TR&LaG+J`wg*K@I2wUnand8`)D?6tHfOKU%D|8`y_ zC^#Ju%B+>1<;e`%8F7r^!%BAl=;rO+@8cB8sCq`r5 zH)#uxvD_a3CD8!teAh@6r!_&ea&)cX&)vMKuOx!IT>ymI#DN7YoH3RgG80JSVug5$ z5Y}|Gc#1FAZ$@$ZX!u2`8LqwF_9MX{1c(v;6Oa-pyJi`aQi56aZXo;qewy&N=CGv{QmKFqXN!+#k?Z)%02zQ>ZHDmuIu$t`b z7wj+Et7Ccgv+rPT7;3NB5p5sUq3>WXd)bho#4N$;2+jfN^J)m2du9p7%wv69$A27a zwd5y4^OgwoAdJq~t9N7HPUHv;t`*&REPV=sVY%NY74Zh)j;>)C43UWWz6~WYG9(X& z7I=N5jPkam5Bn@P6#&DafQ;NMM{n?#z$H)~qm5GjN1HhamdJRN((cC>Xq7jwL_TI1 zltdvm7$gXKdl;!Ol?E651v0+{4uaYcSr3l`Oi2I3-a= z0);vlN&P3^cOt&@^(Ou=M3}5+OFeMk3n8Pvh%eXa_5YQ|3n^nuUP_36Trj8&k&<^{ ziM5TX-qqktu4f%4vBe%_4Bj0-GvZ1ProK2u$Rgs*7#n+c`dHpXIzvDJ=9MnnpS9*o zO9IFO4ABoT{|JvI@EfZ<49albLibk5Dio8ZK^*`eyouA}{o%JyLR6Sk455bQu(id3 
zJl0|RHPOx&Y=2oiarKf`GF+;njMb9zZ=cj!I!Q8cBj;)V;us}ylyr(@1c4KN+{Rww z5WiusUrNjIpH~`wB=d*@M>qAsQq|Z`y*t9Ngt?K-;l)Q{4OUGTg|M>%56hsp&9CRu z(wW9d_qz_``S#meDcUS=39p2}aw}uEU50BgJKyzro{l7m8tlgNq-?q#`Yu6$DdxzY z$jEBvn?G!?27ov3N{lro%2JFMSq6?XerzFPZ81Dw6GjFQioOBzwubto*&w30Xb zEXq_x9yZHV!KGC`JaCA$jqe$mVW@d-duxo%B6cq*!%(spMlaXLcqPNAOAYf_hwbmD zl;FyZUhr%Em#+bchLc{(F)WqhxAeKqlD4DBviD)UhD=k#m%~F|xddaDm<(>1OhZX@ zFip65^YK5g*7XFt2b=LYIxU8$NvLjzL)sCZ>Dva_cf~t54HDc&STC9sz{B++s#)!M zO@^Zs<4ZcF8#(w#1J8Ses_^>-erovb23dQp`*fwFgC;OKl7utwmP(v_YbEk@DF;BT zU>CS=8lDuV!#;Fyesr9nDZOpxn@6bLS?}+De7tVHMccZRPB<8Wjgx*>lbkuL%qFwc zb(&eaFboSzuBI0Z3;oL=UeG7?>eJ7))Z;+ciUHO;`vzhZWjphRAFDTz|8iqJ)_FHw z*$+G+er6wTKGa6YD{BX$Q)&e!2PQ2h^H$k8oe^q3)K(6i7(gfUA0%>1Rgxavq19ZD zdQrYuj#pq(YBer$g)R2;dQq!2h;^EnO`qj#(pagcsd2SGGPTpte6E)ynys2umcIQu ze=q|S8*3-j5=&d_FKpdB8!ym4XxkFX@_*M)AOKYm2DFTnrj`m;n-wR==jN5=mSz@S z_dbGVci9l%O7BK{d1(>>4(yNN>0DmDyd#7vEJ|w3mDJQ!O!L#Y_V$vD^9PwrmC^@U zMVRH!!PJQW7xu^W=iumpO_2lZ-)O1PHC}>>sb++wMvR{q7CPs>K5Hjgi#b21#FcOZ zvR=Dbn^7Y_zKI&pGQ_RNjXd8gF`rA192#4zw?uPJxu%}=kEfp|pyj*80n4n#xaG5O z5=xUG$u(Fs^YWyhXSv$jJw0RW-3`vwRfZSRHCvuj498E^S+LhTQ{N^#)!R&k8w5A9 zZZXDuF3gwk>Kd%f%T`-j1*^9Yd^6F3CU@!6kYA!hbGmZ%R@_H{^-q9zz1FarEUiU( zOnt1ilIbb@PE~t#{8|**ayEyYBE6PM>-KO0oVj`ea|4Uzk(sr6U0d|r;zIiR(&eTK znG@bpQ@f#R>`brm^YfDPTr11vMq93{_}`U`h6zW>#mW;C?!9wUsf3qxN`rc!oYRTb zF}Zc|fh^zA4vziaec4sdEWWQI=J&6XmD5$Mm6tg44Nmv1u2J>D5C*65^})!^0Ur0^ zJEwX)YdNgo`jk?R8pl?Nd6{k%C%mZWmveRV)9006oQqt|)rM~YD@t&s3s!8Y$tbVw z)F9RnonBC_3Qq3|rlSKDo+F-sdVO?o<(yV$Ac2} zd zgpC1hx=Wab(%OPA5%(?k;0@x*a;TGDx!Xd$+B}4c+Rr?zcU>>-tm}R)dyN*9;*usG zLX-xXB`f|ZqXM4l^Dh!jjg6h_wH>+oo|6~S<>jdxdW(0VBlV??4vmc$D^=G6Lzc$P znys9w7a$dmilnXgZyUh4*-J*ar#s7{oB^j277o8@Uj5jIuarn@rRJ^A&z7$FuS!e< zc{b z?)uU0ud7(k@CZe5D-q8|ci|+n;U`8BIl|@vQYovHWH5|yi7$Y#<}atl5jr6@r+C6D z_vM;(kU;a*0P4F?v_

eD|lfjrMJQ@ZRdHjfcaf%CdwPDUwW&`Z?s8;rAp5nm`ey zRwj<)oh90im)Oi#5i{454G0n$CG&UYc@2vBUkfUyhKCIo_7r`68wLBVLc1#!>6Vg)p;ktbn{CF{ zt07*~VwOMR@K>PyIYKD&gpq zeNa(RHO)e6Z=A1NwBV$K*+fl9Z@<0pw)i5$)bYVmkx2WrOq%yZ8lFp{SIUps3a3+y zDW+#R2w}8Av0OSMr;5%n^P;kN34;-y?Rko}v0T(Q_Em7El_{a&RznL|D3?EEbn*!U2B~H>TU09gSaBa9SIG-P4>sY`Hta zHZZTVyFU;&hq-@1FkIMYmS&ikB`E#uQ9xP2`i@LK#at#pshFASvhRJ?MTvaaVrUf7 z$9%>)SD&f5q7Y3Spe(QaT3I8bCip^@YtdgLDH$L09H`XfrHe8<0{wh)qCEfWWv$S{ z>K#}es5ei+Tum5Su;y%SbE-9}GOs=Es+g=XU%XUp9bQ`Qbi7M3XXbSNTWVM%TuhC{ zQb(=c;=QvI$_;6{~a=-GdJlu-2w|-d! zz|}##=F$t={)&cx$rkE(M=UogU1XxsCIZUMN{`>xR8>7Yxpd5y9d!opKhNf!QB-t# z&X?SKRn%TjbliDW8afZQBP_>4u-2VF|EOLOQ~pR{JLc=F%;=wi-iVmePCgnh+j00@ zgyYGN-0X1dEG*2@*z|DgZJC#*M5Wc5u5+8cUQt^pbGWcH=aS<(lRR%Pa8}%sYX$6H zPR>1?Sv&O|M@Q1<6o(H^KNMuKQdSzRD_y2xH&Wy7fcmLke_T<+(tGPor!4z&a{SB6 z#PqkIjCV7Y)d4}MRlx_RvxX?@<^2qr=mqVD;+hTS#d=S=(OHo)hLzAIm>oX56(5U#zMjWM(#Bndfv4 zlO)u~Ie*g<4ZH8Y0rM&Wwbwy_3v6L%Jeyggt*64x-JJv{phD5+Y#5$?eblRG$nUpIz5o6q{I+FLKQiyKg3+Du;f~- z5p1rO-uzQGCeql@VAXz$0o>2gj4vpqa(+Y5v<6}lsrjyFDB~polhF4j@q6ldn$xefXX73`&J>^^0)e?9JKdyHfL9`Xot5H z^aQsn#0*Kl*v~ZK#-#}f(Ue9?b?4;NlZO(7#?gtKuy6%!nulOTz*@7}a_u;;}( z>y|+>U20ae+QFYloYgemvZSG{%vwyRB=^ZUZ=X;l$r4X3;=^n@Bw!xq#@MkpMEGna z-n{~^PK#8#{c55@0eV%Xe1(QAzU_8`@?qe-^@kw+{6T6T&1 zQlymJVV-NNXD5?2P`OE^uVRmB+F;gna%T9ldHFV9vET@Sd>@PONEPqJL4O}jxrA`W zImEIba6jTvq?rVNb^#Dm$IP2m6sdLIQZA1stfwQS<&`xxII1{WRLj@29F(Bf7g6}S z+_1-cxZZ?FvIA~}SM>5QQQbY>mk;p8&j6YjmXP8$}s#)K1J(;PNP-IG6nO%AW2`1 zSTsv7*4lR7?YV0@U>+8zDm3R6I?JkWjFK(p>t9+8*76~i$B37&2Udhjm1$zd-HwF@ zGl<(IyA8t~axCTQH6Afvj-0gH%6pOHoq{f)S9IGl1;Hbn zrAz$8>A!9?jacK?WXVideOo1F>@%*uT+zkf=CnoHgvgaFN5-@>v1*f8r0@zZ@h?0u zkeJmcR(}l?`_LKyT}Eib{e0V1+ihNZ@E#~IcTX*V?CR*fB5_2(G`sGM4l@NE%F>gF z&S}7_3Ixj4Tp05VXb8F_UFjFugPvdqgyGYd_aV0i{cI6?w#xpgNbWhqz)mt5#z-g? 
z6U+!z5L9TRyJG>9tHTtQ64grOtW8W=8N*BjmQ00QoM2MoT%N})i5g-+Fey|@Oi%-$ z=9#DQU?szPvP8$_#!x#n4xwaGi&PQ`bBfzVuZZvnY+9LNx>!ehXTJ<~=)lvXRuCiSN9{2P%VbY0|%)Myo6 zsDCkvau7}AsUgwkmoh1p?M;vVe8zvO1q&mdj%0eGSPutnq~j16E<|xtX(|C!=}g-w zMqq4PR#=Oj!TLP26Y`APPEwg_1QMHDQb3Dk6-uYgKAOy6p?^`r!NKHX)QJOn1|C&L zNoEH$H;UWMLAE5WsDcQhCC-vpJRtWWD)gv^BUh4o?cQ#LMXF^C-}733bPm!nyvx$5 zVYq%ge^!{tu*4MciZAoafUK{yIVGY7b4laJ8=JS~6nM7Z%KeS=SLGn!(WjZ!P;L&)^`CjZ+jFRvG+4n#Fw>BXA0Lmh{`F(D zw6pltRWEiQ{DYk`J(xSnIgWSSm%&om@2Nz*(lC9AN9*s~RpM)mV*-E&V`rP)VW$4& zx*TA4$6dEU_3e7|Ns#|77N_`?>S-!;p)&o4>Gqqp8~wamsQGC}osh=Q3Nos zLW+mepG2|Y#0WHqryYQhvPP(R{{59g99Pzi;nYS$QTWJeQ3ct$`cE1j&BUmSdE$gl zwTJu`q4j|NiWGO$*1Y>XrJC-pg;bS?NHb;a7!i{nl`TLsYkhS|AC-Fy*Xw6ta2>pu*~8Vo zpQU&J(?wys?4rD-4@v(145|8HK1D?y#$v&<_&xRNeOy9{g7%0_ak_p_`lNf}^Z#EJ zU8`Faqcx>i6g^J7)$HI=rGDA_@dUca z4SVx$O}S`@g#c*hOPbAUd8#N^j;q{CzI$|SJn-(ll&UOuv;c>^E}B*^SAiqUN-ru8 zj;j5al9ehnbFmu;@6{U!HjT}8)9*hPA)Z}2uDzZ%4iN5kUq$1@7pqGzGV>caDGgQL z$3x)=*+A2XEdU)eZ!~W-D6D0k6;aljji=1n zVL1wG0ar%KOdwSblxeb4%p{*<{Or5IoIY62e_FyyzP8-d>WvU4FL$ljt}fbT#LlP=B%ASdrz(uT=q^FaS;r zoFLb5|9k{?T@lQD5cZUJ!H2vP6eO~-3t1ZH1AzP&YyowIz0<=^SY3zANls#dAi#UWc4L!h}J@#@Vi#~ zZI9TiU)R3IiM+$2r|PPka!ppP<{>$e8n91E?OFy5wE3xSna;5bx`60=p<)`DdbLlC z0Rod$w`2c@5H|8=(#SjN6hzO$aE@+{IJg(aOs-_q_MQ9(u^vV$vG2}M&R;nXH%a0^ z=exXmL~0&sw!iEV4CN(TX48M*^n@ZQyk*v!2WLV+VjXshEq!aYe24K<_c4}#PwYy} zYc+Ti?nzew)N3fSQ&dHYGCvpv2&ckQP=`SID5t=&do&>o5RbmK_~2$CpnuVV;DUwy z+S>)GOX)Tjf?NM5Tv<;%X8xwKx?CqWejQ>C5g}C`U_7xFc^fb4#JFS4eK(rh40~3W z17J?9LG&%pSWc|5O7l1@EokL@?YKt*$ub+FKX{FS)ox49-~ukR3Im*7*=5+M?c~K)X)m z?6~(L`8z<+1;o)A_C55rzlIN3D%=}2R&x#?T9#oK5p6WccM_fTYML5Y#VRBBcZgZ+N${Ly zY5b+p<=nH3;v^U)cmSCvJBGmhCiB(0f4vnSkbFJ!M&R7)gGgAbd5FTkjY1L27J56}g0E{UW# zVy_O#_IA5gy%E0P^{pdE@q#@O?J0H*dKXR)GIo*5UBh0nZ{2iZ)TM(>AYKSnEQ#0V z#$Pe*G5V#SneBAOYC`<_>hAMZE|Nn5{y1-$V4H0uQlJCx8fu8tCo=c_&`0)5rbxPd z*G(5p5#WmW_>~~AdQkVw7Tq0t04p$hKsV18{Ty2aGjMw1AF7W%fF4Lcpd0odbiqHv zDZ5ZpCl}`JTmWc|`r9(KNn|MVbs6aK^;&$(w9XUT1^m%<3%RRM z{sa4p>*}kT<~Ip0GE<^X=_UBMB|z>cDOqC-d7%I 
zE;u-2@5vkka5k8@r0Dy!#@d+-#U3Ly?O#ltVhK)&elffh*P=0>jyRkEZ5{~yUbGb% zVwB*=ilk5RSnzW*FU2``k3Y@>`tkdf{2DhL_rKrfZ0r*g*lzGwvjcb?lGi~>0+abd zy1gww{cAg9jL!@5M83t?b?C!r!U+?19q?H#BRs84 zL`IqLp@cqlu5WJ=YLS1#&(3tLC^~Q+o!Tr6nfV|E(_T89I{=fbw0clZKQem&SMVRz z9hUhX-Rz8hZb`rB-yKHhUHn_$%{cHw`AE%L@LPQV+&CA47Xk?H#q2$|Av|9XP&&UD zx;@A}GCHWSo*lLenXEmwE0(P{GT_U#;L$Gm5CSMKj0da>rUp0s6YL%-J};ODrdx*3 zEA|z(!~|$&ArhQ2AKbViRHJ+|xOpacDUP|UZ>mlmwDf-bNuTi{xds-o>Jc- zEQsnx_8`p*eixVT?C1DTPd~Q{<*vU=@W8#oUdeCOc45pVe>#0)=qof)V)`pI3_yb) z3p14Vm7&>Nilz$!Bw2&+0AZ1Xu*Ry+F}ED+uOnfHRbNl| zV+*1-t3wMS46EL@h{t+RCDz|NzD@`Z?x`()N(436sS!!wuUoX7T6`M{t+-e4l<58@ zfyLJjB7$*j#pjMcq}iwj}70}`s79f}J!F)4oLw+w;d-mb~7c%VEGZQdMcJ~JE4kgfbn za6MMvoqVgX{MPZ$z;#*wUnapI9Z1xFL^Wi$9;7@B!A}AqqHm!DeZ+oEemeBuC<%T; z5r_}~ePn*<{<_3ri0A?Fbhm-oykXPm0U2~6`slgzAsqBNpO;}pGR7R*x+L*giQ_jC zMX4hVQ9v2Qf2)x|Ei>viL-ic{Gf(w_Cn&Fok_?EZYd4*}f=QaGZ;4xHp!I zi~Ucv^S7rxNRchXur9(OQn0b~uLm+9_J!WffZ~^bschsr7btYjBGvB^dT%)WCyXKS z&M1_ZWXhTHWo@~?T?e;5&|mR#pW6R7 zMPBHfEXN5&o-?Ef2aHh^u|8Ml;9rJTDEWV3ZNsl<{S&}9D{Fx{t51mh3*f7)gl7N1 z_a+swo}7-?pOi~+3xJ+5Cl-)E;>4aN88)KyGQGT)2)%Y&Um7iJP|k(9CYxJDf zLX3qPYur)@2cubee43x2!qQ}Qc)Y$&+VBJ)QfCUS%I2Jt(ii?q>AOi-jrO+^MQy=Z z`Il5Zf;VXb#k}wi;LO_oH|;8FMRvRJ%izpDlsRhtq{{e6-zxBLUEULO6F+T$6|yioU$*yRw~KMg34CROZxzFY6V%c;>m0T& z-KXI{jYm&hS5OAih2FCIUfkjJm!@Mih1K_;6Bt=SUS3eE%+J~v;A=d-4U>Ojjk||y zxAs(RuBq5u;nTPvWHLZWjdf3{%F_JbYm-KSLpd*ap0wU|`>Sp2lQdt1L-|9llQhxv zn<9H$$BEpDo2whw$0x2=4_vPa9hPw&+;N?bF`bUB7dh0;K%~ziZ2a0rS4^hhjLzWZ zx3CLG7-iFpzjKYNL?-bQCv?Iyx-q$Znce>BQ$EBn_{<2KVEmnKT*Wb|VJ>rBN}Vi)*nDoLEjA_T>H&{zWawzmF73(jMCm8WDT+*|D4;@>`XWZ4_q8I4&r$}E$q){KTUABE5w`-YD96R-+X8}B$MF3aua$&|W+(dFi_l`@Q?VZ)@lu~by{Uiyaob+mc@yaD zr7Gd+xs0GaKlt%?Sd%wFT6(3T^_G{Vg8;jEe{W@%yK(Gy8vlH1=f_BBuC~_AY;Z$Y z?(g_joz*RS8q=Z+8;w+Z&Fnq)EOLytmiUE2O>9L zBCa23x#wD5CT{WvDuL%wZSFDhOG;|id?S>I{!QRD(vceH{B*r^Cj)psaod~(UczZ7 zc{}Yd3t|Ub{gWSLylUIuCm+a_S|^*n9)6rq${M~ut6#$Lbnd-x#1?dBp4;%)_l_b| zwC-CMMBurdc0TZ2J4br(Zra!{?rN%YPloZn?p!v6JG!bY+hae4?ocdp+}5$>p1 
zdjMgv?dzj7Zt2yXp0e8!v}H{<)}fm??#FN18&{inrq<6h>e{YCmYSaD+GVfn`uRq^ zfZgWxE|wR|N1roKqtl~0XXevLJFCUxzP)zmk~7zdDa38Dn9dAU+hXP#8zC4Y*>CBun7_5az%>L(6-LY?@1( z{{r1TDawj#%^8_15-o&;*w0zc{xLIcC8)uvMbQFl_`=7r9Z7z!+g&b!G2kCI3&ahr z4BVxd2tdLO%?pC|KE+MQDL@6-sM7t2q2cExN^qlE_!}gw(h$GCdmR-^Yc&3xvBtNO zgAmW%p(CI)7s1&klHNJWii>;8t{@&eiCRq}q04%C#0a}u4>~auC0r39eCzM6ob7Ce zb6lrR6A;FuG?j{aBcP0TckN%Z0Hj7fOPl4Z>mJ<=%o`~-lI%?$oNZXhlBLi@9=XK- zOgNyf-VRc`%^T%OTt#Baqgtt>?!!V772t847+C!wSYflQHlpgf!F@Cdo@3Q0V0s&e z4cgK+m>ro4r{)^0l5}p>L6(&Ej95;75!~D_pEwu=lW70c9sFnccncY{7?}Hwf1E^j z#XV58nt&NK2;)|bf;D#x@iQRC?93sJ4}UU z{>A~bJ2&j9rFOnh@jcioaj)ypwaoF?tqK=G6TeTG`(beTGbB0o-6~~{FY4ep zpTBv^ltv!@8t~(_#DS;$Lj%;#qAw1%hulWVSffW`+#r^(Ax^uy8k~%JfvXbu2-!U&7Zvau4KsQ4=c|Uprmhqv%V={&ed(_ zCL8GWCECan=Q10=~x=$69cpWio~-F6s!-krEx!h7(WTY}dqm1-Hf!5X$6FrA#7S!_*>0Pha_!*d5=Hx_4Y-wNx4A*ji2_N})?$Si4ap z2W#*(9h9b75&a(h>mQ)NglbKrP_xsehFs2B& zh~T1)C@9R?b~k;Zp9O0)^2sbsEl)|a={0_{R<)8+gPP<(W%L8A*)DW>H42_Bl-;!A z%x!Qn-vAfl)NjU;Xubjq2lH|8z#389k*4#NrZqE1h#b@o(k(y*5|i-~1Cl(HyuH!_ zL8|B`La58gE>TIQ*{K>9(;ZG!{ zBnw0N${~?>D1xZ)M;v622`!m#R4Z#il%`^^M}5EXGiFX=-}9LJ2kY}6vnIO+UBI*< z4RNlgs(Tls%rGX?buw($Taw?T-7Q@QqrbJbDgo5;7{ zBEHooWGBg2RAKzoJrt6ugCAV?HS`yp!W^J>YfO(kwa<=IO_4NGS<$n(3CK~A6FTEj zqCdF_)T61Dm)Z~H`m2yKndAxDt2ey|2@CB8RLUeyxIE1zZN}Yyw*A?VU)U~d*v{5{W&A1S zl!YBR8e@^hvWoZM-S=%J#}tQ>N6lgvx5Q0%;njbcH=@ULFK6*=<~n1zf#ZNQN@kA0 zQtUvF_<*&RMr|n`K=s-P7@~!;2>xW)vBRuW4&(qX8IpQ&+xg9Zp>jy;(NJ;pawHCB zKt1>6mB-o1O-OJ!z(;P4+?l}Xeq_<}nBX=%{N9JE%-_4X4~HQ63NE;CBuVOTOK^U& z0NWDV%pH3?A;uno87YW-ALHj7fXXX|M&Pv-1Q(DN;h+U$S0M%cS#_6r$ z_;DG@O}@6l3#O;`mL}IRb&9|6f)f`Mp_*2JB@H&Roi9f=*UmAGs%cw7k6!noOks|f zEYC5knTQ95W7j$Is9LvIUtHbNpW`Yvx-!(Xh%TGHJc&Wr{HowMCz>KvycvEr}dVYM_PcdFB>DK$BGr$RrIVX8(9yPQQ; zrj+`UCf;IHFn%o8eKQFl?8!XZwkDpbzZ|b@58!paslp4jWPKMT! 
zZCu8UDTG_uer!s1j$13XvkrS`GMRvH9x<$sPzi&FJJpxTbsb8N67_{n6(B_e%IPbM zpg878codaW@1i@tSi`^)!Dw2sZyHM#wFubuW)YLa2Bb$A@ToSy;MR8&FHtjnR)7ZO z9Z6JJF(J2fY}s{6=PGAr=N4BPFSRGrbGK z#~O(pqDX-^m*9`Q<;=z@yRWkY^<*lE`Vx+PNn}LSD`F&E(U&)r)V(r16}ZJeDe8jya53RG4?j`$;A-58&%PNsPJ^{_S8ZDC~OGR)PIAd-d?mn!ESlXE_?k zA;)k6bXs;zE&S^tH@^ol$T>j&+RN zd!pP8zU3n(+26G+8?*>)z0VFl*M9PqVS)V@Roq#Tq-nOYbR;M}D>cnoH^o0eFaGq? zl8KBFBV3Mso1quw!oKkf40GezKZ4Km73VQb8B)I^4xd5r_DwQ(`P9 zhe}PO--2Slg0yWFDtQy&K(T)+~2Z6}Ep2%|Vh zP1J~Yj<-ng);g=j3R#I-)Q1o1ulTLHi=D+CqIEgUhV+D}8PoWC#LV1bjltpro z(!|T_UlX;e=XHO?O37KnD-?l6W zMeJ*5Mciquo)y2Jz%lu9>sa!Tw`kDP+lI?evA=fD7V9j5t4K04*kUlg9ED~>!|uuz0NQ*ZrQT1{R(;;3h=2oZk}?3g`z#rJ>Sl|6=gc;_8#IzpUQ+06_R|XtwMXL z3O)UghW)Th_yZX-SdWpxD%;4+JolkQIWD1%$T6log8zc$0$Gi#Lg`sBu4J5?w>+_| zG|m*WyWL?T#>`NvndF%Wh8Bn;#76Y7bWM`vDbFQgEX8EO&pqB+C_=oMoOtwe93-$U z>VDwnuN?b279d>=4&L``$#4rthJF9Wh0Qn`A_u;5>c0%PLmC^t&Hhs%_13$o#5!`Y^m@|`Q zPqY`O2rzpL$&Z;UMuRLib(`4@;_!3OPGx8hj{bm(%n}dXsGHzOgYj2*s+0UYXGMCe zH)X!939iewRiEnVmW@6}79g$4vxA*r3^9sYo_^m+<4Ht+6e8-IQNx_qi1tTq!#aL7 z93<02E{?Noz=a5vLNrV*mITpmHSk*?!P!XOARtIAyk=lS>A0b=ka^%+P`0u=UR0kE zCKoczKD;ku;A+inV2&-Z>X)0BWxMtLDTZQ#GxTdYYr8;WFScTW zAWP-C!Kn-a*61-};V3qA0ap$_t@FtYai*5(!}PY^kO37O0u)6nrIJL;PCubY^ur4` zE&y>pD|&am7R}e#Z9wq@JBN#3((5Nx3ur*rn$^MRA7UcbTkyLC`rOYs(&!R|{+|>U z{=Y?wIFTZ031y>s4PZ4s-qWQrCwtH%_7QO``8mEt?0K;;$=NXj`&VYqCK|=QyYTK) z*~5WCg0$KSa$IoBQR?$(XZL;K#~P&RdB8eIJN1Dxj8|=q#ti0UH#j>A^#{s>{ zjRh#-tAiG}l0)$`2DyK8WXSfZJVeOcM_ko%m5dr0^kA-ox3j}YOc;uH1lSAl?7r0% zAYJ0baf$>Etmo3uC+2GoCkanQLbnN5k@nysY%=CCiwFg7Am({mi&2ug?Yumwd`wz2 zT&qN%h0fp!Fu=Z-Xtnh2BvYzCg#zM8wDeeVG=fh7qx+ZIcN)si?Cts2nIV#fjt-W2 zlF}+cG{@F``5(IvAO#7kex0|*$Lk`C-j>gNyJ;_)!-?N%L z>2Fm6S6QiRxauouTif+hNZ}9d{D=Oom6HzxoBoFsgcq$hPNiG%lfmw%2ey;ZF(psc zBe)x%nvuzr4={Q;&WeAZpnK(lf&F(Ogfny)ML>v)uM6~tb(l!_){lOy$e>2m-4b;D znqc~YqTt|Zu^{^C`SF%a(Vb`u z$m}n_f`2oy)H-K&JVLyJ3Y?4b9S3#y?(7t<@hl?~Xh1(z_wR;`AmHHAVV-P?1ii`H zE(5nQTktYNgKa)hRMiG%)37Yzp5Tt4-d;C-Im?W45P9F8sfJET656@853{yR{MfRb 
zha?Bl!rgz-d*IBy!4fp1K))W+qh)nM4vEIeP-m*pk+OhX=UHGlsQmJ1qQYJ21iQ4ts{#BO3nppMK9?kKHzVc<8I&;uG|`+wspbl} z?`|Fh_ZlnDqkh_TGC$=4gnMm$^P>d<%Hqev2p89a6PDhs83qJS^G#mDb$O6*Uw9;B zA79ZcaIL*J-V9a^4TQRP(m!(}(c7g4$bJ*|4xvYYsx91fr}?z^$bo0^_N=3!A_7EX zeXxF5SXe$)NvmR_10V{L_7N9UQ}e?{>e!}ymP5AbQi&aETfG=b*v`nhwjU5Z=2 ze%Yq;lT8ipUx#=4n~UD#0-2is$_&Zn#|8Q0yOSU~h%SPk1f(MVTKvjn;z-$dJ%Ts^ z7nPP+V;R2R2)gLsaQqq+@QA7P?KS~czM$ym-$Zx(Htk>kM_bnc7Dcjz4-gfGj7l`rQzrGp$Rhqo#hpB&>o$oH+N!{IB z3xp5%q;0y}_ifzxEsEN;jy{afl(4$Y_nMUfCm$-rRpIZ>MrL}i5lji*f8V&h#VIel z&URf+=KFiCIGxxcr;qU@m!&&Cq!QOx%$p<~bH+RQMNxbIjuT2IMFx)^ZT?xlNij8U zv&YScm4`ge?0$ERjJ>LyFM|tCGiLu*lkGY@q}ufTYK<;dW(fy3cRAnpw@#p$u%iFa$bhBCmiv*J@untuCg;xSwAtl*EVcCH znYRYwbp7i`+MAf0CmuC8@nn4M_3(wm)-jK^my~{4Ia3}pdi0A`Zvq~5^=qB^_WjZk zkE++{t;_8&r`nkah+dYXboM4VZ5og{(eK*2nX?}WBIY!& zwV%>qPkR62uvHy(-EHLky$;Roqn|ZDvSTh8yJzsL{|@<+TUL+GJHFO%5aYTsebvR& zdhK6*U_N#G>iKB4q?YI~eNEs;@!$CS1KrKfwAAlE_r`%G;jI$S=jKGt2C@raD^c0_(i@$ZjTODvvQn}>DbQ`u)FdDIlS^wol>O}vE$Y?DW%tLxEl1Kr zvpnXtoOADcqSZFh&u6{1$9&oQu!BNx?@Q5&7Pi0pD@Ih=DK96qc%(bY;*3dk=G)*` zhKhwLk4EJCw87EDoEd8Jy|>djc>u4NB<2WC$`@mP(4+7>V;Wp z@AX^i^v*8wxzv7XpW;!?*G+EMS95pLHZtqC&h-4n?(xZa4*qLGE?=8&Rke0Z?RP7! 
zsre3%?_};3Z@Jq^;T)s0J@)s!fwPxu{7n0~$WO`io8NWg)c?*#zqyQ<^*Y zM=mUj9dvOk`SbJM4;g=Vo{zVBKQWs6NUt0ro}Jwz(#m>`-Jc0AVW<4Z+%%z(IP;C- z!})|ltMzW%^L^heiC%1Kv($Qezc+e1%^pO%OFa8lmTn0)(UFv`DL5W6O#g`Ahi0<5 z%Ieb-7kn^J+LB=WqIK4R*&;8g@6w<|&E*}gb=^Nt^2+q_gXo2Rg)NIJuTk5LhFU#6 zIW~V`U(9BUTn#<3) zwRU(t`i}PH*9v;WPb0T335G5|B~Gq}58t}Rd`uZrUOOSxDAj74(W}tUIlt$fee!*H zQ2TA8{R+y@cI#qz^{G~#bt~<0#ElhGre=oNB>2x7y8Ntp+&+->!$otkV)IENn?!1Bgv)!TnC$9eREz#YnyYVrq9`#XVc6ZU} zKcw*LUAuMOGnH3Y_MSV#TM_re=bOCunsXOJ9(!da>^|o-vT({&o9H0ZuJ4*3KGbYF zk+G-9tkNOgc>8?2bA!V=cQ$!5XVkIWp^>Zn-7KB=&G@kSS?7LpgaJFXrW8#dv#>|8 z{A8ZZ@y_#x*#;0@3=4)H{&wzc(VU=7Ze7<~+;Htx{$#nL<2wUc;Bk}b; zboEH~EtgISHly!qtvs(WWXz=Z9c}!+NBB5($TJKpHjSw8Vwznitgg7=P1m6wPGPm|VP zGARfV&wQ{F?G61iG1$34^Y)h8W3H}jotCT{KhY_Eo7oP%>3#aWnLX!ex2e;1nT%St zoU)BcZr*Z@i$ymZc-6XfcQ8F=~Vct-KbFP>`mW!c4xlWksS z2tYiB zE^jrL_-Gn**od5L68fj-w29h>?Kh9TZ2WwZSIr7v-!H|V+Go8?ji@M#c|1NGonQ3d z)5+cR4~rI9&pHv58+_oCrJ~<|Blo#4d64X@W!UkRbt~br%+^1?ng6ifdnY~rWkE<6 za)N)4loq?DZI1V2X8S$tIFDZD^Q-lMGogozyWK&bHanK;fAGyBa`O-Wc=YD%$d9+Y zYP-0%a9bXt_u^=`d+oOP_0;%zeo4gnf{?zwZ!X!_v-J)AwQ()NXRmk2Eht-4uE^?l z=t|L~&l$bTR&OaAJx+MmtIlopw#>?OM7?f3Z=r3$8pnQDmT#RtbXb=gk4g`0R1Vv3 zIPlQtw%u!2rJnubYJb0K!RLhkZap@hHrHACDq+;41y;RuD&spmPOh4JZPBc2vXc)6 zE~vdJvO92lLig{n@y-)}S_Wp<&bC!tS@?d{14Z(_56*#EcPfueo)_P`+T{G97T=?8 zW_KPxL`z?GqNWVJ@k%OiX(xAhH9xz*O^zJ{PB*tosKs@t~a}d?aJYF zM(3=v{=24J(hGRqquY=pH=e$@G{Z^0=>3GDp8_wO)5=QoKHBBc>m%j+BdPMGi!777-+eOEux0g}*~fbxAM)rw`m%D`$x%hk-7$$Gzqv*AFqqN>8$ciA32iR6WTFP>kz^m*ID`^$0; zf7U5vR$TX$-HF;D6Ghe*{r8sC44Nhx_0^~{lFAT7CfoFQ>%a7jqI6E1UAqs>D>my9 zDVdkIff;@0aMY&i%yxpw#~-y%`CjMt`)5GK{@_W8b?=_c_?fhEzf0;D#=EMfV)j|{ zKbC#k^x3wx=ll+NM(0~-dvED>dr;8Lc|%>T$CX|SnS#C+n2*s-ij3+pYO>|_Z7r(S z4!#tZzROWjoikcI-8)oc&x~q?P79s(wJndmeycM_VHEvjf1lQeoL8?4KUy~T>X0ss zcUtZJ?gTowX2GwgomRzOc2c+ro^1KP{Xb%mGILq#nU|eUY_tm)(y2|_>=)lJKY9H5 z^UCW^!%GuwomJ}n~N+J zCE8PVSPC_oUwHj(m@MpI<=8n3Td$j6r{@?N8?edWkDMUb)hne&qjyMRbHkQ*jhjVI zdl+$DFQeJ)W9t?k+WY2F)`d116(84{teq+T{qt0>0j^!Bv{rL=_7qM(URM5KhSaJu 
zKKFNrE1{t}Uqo?$w@jc$L9NxSp3&EXzjoWm6}T+Th{a!qi*br-$P*6)_Yxs;fu zme)CSAa_itG@JXkW}dRBjxb2fczkHuv%N0W-gBMoZbVNC8=IQZ+sZvFW7qPdhaw-2 zS~_ls&LrQvK?er(>D#4Sy4*IlaGa^^@wdv5aT7ydA4rUkiu&jr?pNs9deBaK+Lck! z*}V&1?|pOXiNdtSr9#oy$a5F&G5)Lm9u8!7UYXna)f@cvShV+(gZe=%mkguPF_C97P1R^EMlV2xe7i55rR_bL3)_Gjgc<6~#F^d6vW zKH_d<;Gf;e)yp@>mCSvAb&_uMmCEo`-5o7cd+9wfSo6DF`p#!*`MSXePl*y|pE)b@ zdRlw>-Y&-@zdk*?BMB?{Zc)h`oZSBRk8vSeC)KSQ*g4KE{?W6LNzWe?C^QsZ45q(d zWHk+ycO|S8@mHtU4LTO&s;9C2>-#TBACn*1-3t4ryXjM0kYxM;lgue^omUmD-t+OG zqnjb~{jt=irg}lpk*_r)inmpc{Q>4MPbR-# z_U*T%@>1xM(K{`F>xJ}Ncj?ryp+79;YsYDlUCyrWZuegJGxw_^l^ECc^^o3vqrYrx zxnQ5C;D>$RsB=IY#LcZu^Vjh?WPed?!0TTu5{`^z@sde{ug8}hqc}g_3#SrG(D}Z)wMm>A_DIE-=06pBrwH1PrlFA zqKj*%>Gn^{^?$~>Pd3T6(8%3WHluVw{>>^>(VehUqzKA3d+%^fo?JQjT<(`&(PviA zxvk^+qvGkq%VfLqy5dt)UU;S3-FbZf&Wshqk`TyiIG$#wa`&mnnR-z_)0 zKeJ$EY+hm7A-(O-H6|B-UmJO@nQ-lF8=|G-no zJ@9GWgbq7@cs^H@*qypE>gu@tpUpZ%>TPUpmD~L6FXMy*CvEp{E?!3tF#V(LSYvZb zF}aOZ;>7n4rsX%A{w3lOS#u?uvCZmr=XWp3l(z?ymwj{E?h?6s{9t+I)On|DtG3=e zmLfa!XX54I-!8=+OL)J+OMjjIy;gx;hul?)flRuIwNEP=4mve}}8>zid04wBf*y*V|4_>3ZL1vg7bj zzmXrE%En%b6a5@4mUu_79J5 zwpi-WDWGj?d{u|Ry*=I|%ZWQRG-YsAMVbka5-GBABJYcb7c-wnxkL;LocF?0==QV=FhfYmy z+5D-(s*mQSy}Q@naBR1<;;weAe&Ctoo4d9+t+dk8^nhY-{~c=z!&X*L8#Hoc@R-;U z!?jmew@kh3@1k|o(e0zxsI2z6@S8c8?0(s-Ra*CXRgk-X)jD&L(K(fnJMev^6lieSI>2r=X-66`@{$C1I2wN8dN%- zyfCxNw-thk+e{7g*3~R4n!4@uyzvPRuG`PVp7gKTX<50Tt$eNYbLaA$ySLAd&nP-O z!QB~F+DLO>qV6F#bz?a(e{M{ zMlZa)D);TX#cQ5_6r4VHm6o+W_1NqD3EPxQ{kIzgk963(_jlT<>tP3_73=3({Fvh& z+@ru~K!8EZtUj81GJFmOuU{l^{~dZfJ7J%`L3{82sypv7{#LL(Ec}+w)=i29@nOPc zmb)~soE$!LcZT_L@0Dg}R*QcgGvAvQx*?rBy7=7U(D&{kb6-4}^0t4jpt_C8dNTLr z;TXHiCpkTdoa4hD~29WffyL9^ph zGQ3iD@OaUjw_8iJYqwp#*}q$8FR3woLGgW5g6@|olY6f7&iI|XYnRVs7vqaDJfm#OP_pQQB>$LEYJPk!_tdII=!PO z4H}b`BD>|jh;lgW?fk0~*%nUd%SVzVM5vgYPc*D<1>=Cbbt*x4XDzS0uKY z^X!Guo6yIDyX*_FSw7n@yk^dt^cbhS=4;v>S{QwGX;^l?cC3k6@9&-bj0utJ=&ysD z*R2^OHZJPvy(@Nt%l$K2U;7zlY57$gZ+muNaankb(f#KE2j&e5Yrf_3*jp1T1B+hy 
zg%oPLW=fBbPPXjl-TSbj)Ust+=RwZxUac$9xjJH+@L=m7!REhRR)w8(+j&h8)Ah_( z|DuO!N&TwpjE8^IsIxu0E~TP!)YuIfO9IT#KQsDitABDtsq%Kq#ADl|4Kt*z?$}H| zx=-LYn8Z`$0bwdkuQe%lx%!JeHt`jmwo=NXMrx4 z#wp_3xQe^&2)wD;$tTi$n9rv3g$ET=$M;(jd~eJqqb(zENeY;)Z$jgq&scV#jo0fR zQQoU!*N>l`xbNfRYhKk)pZv`d`*+(sWa7m^UaMEw?y`J&W54Y-`}Fb6#|p2Sfd&COrJC{^HfxIY5cE4if0#l{210%b6k4xr*?a9Mk#_F z0=GZyH`;K)9QhMjTjiQLTfUS&H@}vdpy@m9`i01x!>5;posQofW|zHNa%kj$m({hU zsNa{w^wL8wTkHBMG?q*ZfAD2z;LG2??0*{;eR-%Gd#J*p#ABzT$H?zbdBmOW3Dp&n!n=>17)` zF?E?vM51zYhpDl}i{?B@{BS>YRU+NP&^>z9r^Ui89^zG>+=Q1(+^5JqS_HK0UDL%Q zG52cP^jB+_2+t1cYFcEosy1xy!>l;_^Urf_`{rnz9O$H|n^B`{A>QmCaPR^dzE9(p z--8*$&!77{#C)*u{D*g1+d4fvXtH6s{*fPF(tFKLvi9@xICQ~+Ufw%Mc)Ywv#zOM! zi>%#$uKayX&#c)z)a>Cf?|Tux)56#H?DlzPMyTdXp9Rf7=E?u|{!0YS)!VHu3+B5L z4$XTH^BvJI2xW+f0WHK5k+p=R(PcSdiN1=6KFB%`G0@?^5{k4I3#Ha#66o-q zm=>VL5~7C&g$_!H$td2P=&VJOQgqFm_@OB#QJ{?Y*fPS++C?CsMQETC5rb|{Cb}WQ znb1d5q6ot_Qq`9d65e|i2&iz?Cn1gcITJQ$uM^SMjFhR_DOC$WwCWQjMO&bS87WYI zl&QWX)$BBlG+YQ%w8EKai@coRN8|%16NU7^`AuQmQkwj#OmQC>ftH%LWLqVpL#q|{dfXo?@Z|FkYe?OqtP%I!p15>J+N3T z5Qx>C2-RJRWXLF<7>VNh5_H4f30(qiW0KE!8OaIiP((T^}gp`JuO>J|f?WcDYV zntsKM_9FZ=kikHrGjqk8u+u_0!w9NLi{3~lkQmb3Z+Jl8!9M6rKVlnN6-St$u0haI zJ@ssVVm(UpC(Kc{Khcu?mz9=COfx(DiBJvHcQ|3kDu_x^WE4XUA_6qf(!qpOhon^^ z0@XS-nCRGYjQW$5c{G?OO>06g8CSrz50PjG}0Cb0~2bEe`_`*H8N5Fv0~Hjv|IO4;a#K#9%-4F^*`1rUnwu zI<&P=BD01graoR_aYQ@xIgsdtv=U+EcB5hCO?VhZ5OGoinT&y*7(1LesAWd0wL_%d zW|4YI5-G9?1yvOfAq>&@P(t6VUO{MDy^bi=RE1;El2AgfBT}o2ghZc1i4wFe8g`}0 zpy+8B;ea~Ii6PqFetvy?P|PS|Gy0qY^K*_NoYX`nYBaG`0|{bbbERX5$<2`IID)Qc zsELO4&mRYS+c(IkkB2{+CMULP%4oDEj__-w#Smj$XaQ**Vl9)&tV69OQfcG=lM?a2 zFDM#}Mnfw~*zgT{FrIK>hQ$*48h#XeyeJ7JW!Ec?V)rdlN=HUUgpXk$jLd4An%5Q{jxPiYEr5uTx>={@&if!Tr(3sbGO(5{Lr$!xLXqVD(Mt z%;ZEOM+4bRC+rwO64AREDo6(<`q#n_>CGa9$R!XCrcVm7SwoEO&LV7)A|CcRG=?x| zdZiMNG!T8L*jP&hrqN~~Vaj+MfC{8@qQTe2fku49tXTwHB*(Q$RJGN#q&|!u+Zr zy6KW?14>hF$ZZfvr$-tw6YWnUo`Q(TKzPB;+KV92)o3tFSJDYL*fir&gsFlSSqrQK 
z*k6r~nh-g0%vfC!EddJqe=aV%J=I>&S|kvtn)U44$JgI45IzglpVf8`K1O%yd#p=(h<1^S=MAi4#Tdh$*sPE8YkiS=75BOZ)p?d4BMRa|K-X6U@J>h#= z-JzQsvieQDWiHPq9PLnyhu#I|%tpeOKvwVm|fL`#lVWChN-)d>O@*7wbuz zqv8~3(RqVjDVmz9Z@^S;&`TSQ+9nvtnZvsJSF}*nGy@^}wbD)(RVVA4p?C}9Xr%ks z(GodLH!x%p4E5hcq1HKuvB)}KV8K9}<63BEp&^ZKr0AER%pyZWCL>jUc`d55G>Tzn zCm1}^K~}a#G(yu1N;E~Z(q*~<<1*dwwi!BUXdJ^#&oPwfAWI8l5O(StlR7VT}- z#z6Vk!WbSCcubkU7OiGSqE)@jqL~*nTg`2U()*jic`j&Gih}*k43zlo%zWxG+j=tinF zg~A@UD?=NnSsN-%p0s03p0qz)gC9g;24_uDoUs(RX?6kWMvW58rT>)@V+ zc6GFc?~|>Iw9yoSErniPvawamC3~8+74j!-4V3$*Su^{m*_01PuLPas%)LuC?Fsb4 zwG)N9-?1r0huu0EC?DLhVIJJ+c*_Uv?b=z+SlZf-)JLbu&NSK}uq{CaRA)mbgtT4! z7tLO27sCuDJFU<|*6ZwOWa-wa1l6vyGh{Bhb$a;$tzp&I({L^)n9-`75C}d4^D@85S9SoQXqT9niXvfkXa%5;Mwn7d>H#4;Q z?>r%+Ai9H;<5u+$BF{+y9EOLWH}4%|8Sx~CUwUZZA4dvx zS=OU|DsR&qtTbdwopc9AC%wmdW5nbM21RWU7%2y>?!gRN z?U*+Z&9k7#B5AU=xaB~l~huNETaR}1l`mZ-x|X$%7r?xC~%0zjIHmf{jk zl2*2|62l{5T3JaorlB5FoMM<6E2Qi6P_qmt3I&!(OVQm7C$Pe$Qu82GzS>!i9+pU( zF$;c5duyY|o1CfTe`KX-|7K@HNUxhKTwKX;Vi#~P z!-=_<;r!47(Xw80bTq@M8DqD`xm*{0^zKC=)6LFh=&VmKBjwr6&dk}(E)_B8?0$DS zqtV7yw-xGk-JL?N-CRpGDO&lYn=3qS%#&`t1{$F1iav5CSJrE|0SbH5heWnMz1C`o zl;?eVG3R~U|HdQXE01WV=K=R$<|yy02Z=^rcVCO*Yds8=#y8v<;~TxN{6XQHJfj(} z>wU~EkkbKA3T40PQ;LkrJq?&XZ~J`ug$}Ipk~8_QJpSmSu^YT7bm6N-T@knMp16dHHHvlQ&s07IsIxo7@UG%+(k&VVuNr-LSK2_R9y z2CucqZ)<>oa?nOEc)a29fyWmfKY04V)1Mi%(Ldr8I`?8|G-DAzfbc`DwTF>t*nt6S z(Hrey21=ds0q_ijXAq-PK3IMU{dgZ3jUFBtpv~ON3|QF-9nlS-|W2x7{J;MOk@buBc8>G(Bxw8e_&p%l{J8mo_1bswox zNjd6eINTCdJ`Xii#uyHVX9PSUOpM{k-BxIDRk$2&N*dpmf%futC|P3+g*IFZRk%SZ~HsS2-eedu|pB@*aF8Y*j^ zhr$yE&nTwm`RH-A=x3>1&g`oSKiCm%JSC^mI*l=UNFDjPeB_ZdK9-3nllxesK`X~o=)oy@8S0Wd z-awgsTFzvjj&Zqyh9oD*8G1$RdNb4$C6GvOYwTJuUc0u&!ZQ}0am=o*aZ662FBcM{ znT<)~*V>|#yNMJ!mz+?)9R1w!mT1tuL<6NLcRV~3;F-vXa^u_IM`%luoH>x3kYt1Y zo=Bq5Xp~TbZk_3bu;FtW)UdacLs@ESEQDq z{(dtIl^<86G9On=Tjz=VH_VJ?{@kCoLm%}opGl*~ucnn~N=T*h^)yELdiv0TNV}6F z1_5@nV#1tfv@t-ByC`7yd}oxRefA1Nmb$jY5j@ znWgBLZ<+x^9-Mhoj;6Ismowu#DJE;7)As3b&E6%g9t0KHE38mq*K`Br8G8kD#y&09 
z6dfYxMl&m1(z5lCW8b+X3i3@W0~`0-H*My1wD?U%H1nuk`cqxhLqkcSI{Wl8O)04? z>Y5JEEO=(aGlwbaI=6jGMD)&-Gq%)RT@y65UnYe{^_>fH=$~n*H1(Xzn0jWUu0t31 zL7OjcGMri|yB*17x*eG}-4AW-msx`P>CQJ`Li%SW#-U%w=L6u{pIN7kyw1*tP1jJC zAj@;}4V3yvGa3D(^M1Oa?l}wO%*4O*v~`hv!2$}c)tO(4{uC@QWQOa`FW!ef%vvZ% z0^RxAjQqs>s%A)(wU9zn&dx7Jdf5vN8T)hdH7}rrd5h#L&dqPeEYDex(*jj*SVWL+K0ZT7?ktuw!Lt`u5olD+ViL)-7P881kiC#G$X=w-MJ8U^(ah((MQ;p{ zO<*>KM0Xe0d*-~2i>y#xV77ts#KuL;iH(bYIijb+rEYNvLHOcW9j`@Xw{wEXl9q=vO)voshI~^ zyU?6^l_1=fSs@FpJOicBZ5bnU%h}SNbrN#MCp5>)09D+F)9#~J zr;8}0^&qc|^~@U|goLRMV847VMj$xcbRu9%k3#G*rIvlea z+%gjpyZXC3`n{-F4tufskS6-Eyx0__6|}NOySA(@(WFRa@YdB#@YXe&X~+T<>!aVZ z7KSj#9gENDpqVS-^Wiw6`3j<2M|4zLyc*myGe%aNHxv@1k4EOyM`q_17gH@zL~ikh zRwALJNGukLB-XS@>L?(^LXu{z^NOuCkoDK%GGtp&9Hk*a^9qV5z<<6K6d!29BB4c& zQc@(A2$<7_#T_)z_ASNF(7jc~kf!^2|3)FDq$7=v6&AN<7Og3c)r5w1 zC8~zerJTOS63UT=*jhj_!ZNn+g5u&jBwJS;0W5{A5JKNFiCD-a9%kol zty^*mWgJy)>YJk~mXKo{7O|9eBq3u0o7(j_JBs1?;?wBE3AGeYs#s2( z0@7J>s!=-rXH*m0c}B%zbC$y*WETy2II);{f3{KEuAf&;Om;!Vvi<^x1u|=nQc4Pc z@r#*!k!9&=T#}DcE~{9oFRNGvT;Z@#0+FMP7C|zEX?s;I+UN>Z!b~c`RyF+cy7++6zjH;wQlb76{;aqaQ}IT9vRvM)T5fvJ=@NwQC?Z)EWl}yg zt}yhIYK130saUAb92Ox>gHWU(`@LW4we2f9^;N~uy;jAMSIgmOB9oXpwI1yJu4-uW zgVT_diCK9|MHKV?NBtJ9MK^w_WFq^mVp;#2!$QMSDKRO8Et>p?Wik9!e0uq`60lRt zt4mm`IY6VNsnvuM3+S-`3Q zPqt98&@DMEG%WxVAr;GHjE0W7?|y`;;Q@lvPz-w|panvhM5Z3wHyoxyG{JzKHmWdC zvGg|NvPeOFg)+!IGKNMh3oP^$x?o(wT6=+sie;4vmxUx9rBW%F6K1?=BTLuTDwf5q zRV=?-b66mA>Ig#!K`Sq`sh?N@T4JtZsWn%zL|JfHXtBUiM$u3eVQ9+|Yu11cZCf(I zgw4w~&D2UINMl#=2HSsXDg+xX5eQ{sfpw^E&w!{&6O$7oq9$2O(dtvhgEfI+@Xz8P zO^FbZmx`lUj|i{|*}N`}Fy(hHLLZ+OJD?9|iUr2JZxMO6cw?u3BSWDLd=z<&6+v3Sh?TUc+Ck{+58VBT5mXo-(LH8Cn$S70rb3ef$w zC6q#D4cZ48>Iv4ms{avy!b5tPLnCP!1^sYoaPVm!O$y)vYbO=(`V)g&Ce`f}jRT9t zG7;phIfDS2knw30&8N{&2E%zpP#DH(Vlm|Gn~b4=dV@sDX5gC;SsLgzPLm2``0B(` zN(9+>F0T~)JdY-VN;cjOR;S<*;lScFnSjmca|e-;P~XC7w2;p$;%$>yCT`pkMZ>;} zWpFfk9YRZtCJ~TqJ%TeC0A^5&$7%4VHF-1L@eWBqoIDx@ z^*?Mfq!2C!;53nxwub$Oe;Sog0)GyaH)~P~R~WE%0MU38q(}*G5Q=6?LfkPxWg2-E 
z^~X*E66CE801kg(8Sg|$XcCMTHV8l?{CG-ep%kBtLIQx7H!v;XPllH9^aD%)Ppjbu z24DO_TF9G>P$-3MQpx|H_=RHrtc4Q(9s@5=)FJ_taxp20D4Hi35#Y_nb_%uClZXJZ zHWC|X61Eb@-8&J$0Uk{TH&d{7Vv>fU9G6#2;o2{dD8zg-3J8VfX)vehlLM7%ui;;*ttQ zd}aVlymk^HpI0j5tyl)NWSm#lmE`Z8jOH5)nGg-`RAQD#Dyj$X)&Lssx~+3v1+?u z2PL?|OG)sAyg_6DqHiP%ee+_No% zP?NVhsaT5fva-U@5zrJu+((0k13cgk0yJ13PJ{RvQ(aO3P6FFbps|;PIFlwxidLhc zCWjD+kcq(?f?dY8gd`;b+;YH`M1cSw7>*H+a!9yP!`V7k{y-G|pN9W{T|KTNDN2Hi z8fdTq*rWke<46V65`sd!ofPDcuNZtu1E6m(PbA)|t`WkL}yep)7$;46lBi$x%u$q3;j;`$+!U={)7PXZ|6iB%{g zd6N;!Xr3;JC>ezhETTbR*knXtKln6CqQI2~@&Y)p11C%4o`4iX_6wJcSOf?V8yL(x z9y)=Q5jMEb#+VI=d?`o-A`+jVM8GphfR*8X95yfjF5Z!q0L14?OCbPe9yA8zF5qb} zEJOu!nkt-xg1c?_z?2j>>Cm4H_ckQ>8k)jlHc|rFGhBockqX{&lnam)@5o9Zw!;mL zF;J8YnVUB-DuJV2=+0e$0M3}`gZ znz7CaKX%}p@Gu%Mm;i^%K;s`{8O0hnwf|`}ATlv*GB#@&lreDSf>;)Z(2!2!;eMb= z@JU044oqgl81*q55Jk9!gA68~VSw~G39hYyS?%*FxLJ)WDx_OAb!Yu3-NpmU_2p? zhJXf16}&^xLCkzpPzX8{o$YgasuEY!L9qJmsPQ$zwKvf&zyOpCAopkKJNc{*B@X2-`$7s>l`6 zpCnH)Ap*yJIke+trP$OdPgw!zH3g%p{fET?_`y~O7y`HbfFUIKQ3RR{kL>{OL-ri& zPbdT{#iJ=e8*riw#K5D0*T-m(BftYl&_GZmyqys68jl9%ho!;#8^teT>lIi-F(7En z!9Y=fm%RcSCB#?9DnFkl;V3IBe=vj)PvQduy27;|3?@&5Swn~q0=ql?` zFn|DPinjy1#ixmyYC1bBh$SiRmVo?$D8xYo1!=;6PNLq7Fn@ReEyD7gMaG~z$-i4klF^p%}w|P=tq26K^P%;b@38-@HM9wEHc>c{B7E}T_4w^zraWGjS=ApZbt&lx=aO@b;SL7RDFRK>phy1} z4CPzQ+Op9jUJe2pFIxw+Mz_(Zmq3KNCq5YnD*1y*@RS$uLMaaC1M! 
zu5tTD!*23)ni4~8u)e>BxC9zPIExPguWMpX5!&%;jWpbHz&$k3NE`+LjR!RpK!gGU zKClGBV;+r{6aijd&X@*d#@7W%Ve+;Z+>iu|OMqE~17o1^0wSPE@oYNKAeDwsT1MhI zF0g_i>xOn-YM%o{A&y0XCc?co&>*I7U~c4|3^=%W_5iLT z@XA|26vzlNmAOaZ1<_U2)@1Ox@=IJq88p96|7-HOO!5B~;Xfg(iv7m&; zOWFYqa7~kj>UK2GmBEqZ=`kog@9@D^^Gplui~=t+fte%us{?La-)S09Gd@AM(ARWz zs!7ulJlJPVX}ux-hq0hP$ll-`LZS>m1TZc~gsMRx!r&T7kSZk9coP&#x#|h>r)faz zxRIpU`ylvapj^hMiTDzPL=tac5g<9dKe2$qb8IvXkd(I)@F=*?qv2I_o@B&eC3%BD zN{2^lJoW#484ZY=HwaXzc}ge|@+SlNIo_nf|MFH0X(($WK&MO zb|O3yf&O@fexUJ@2IwFq#eEr6L+~&Llof(*d|;?w@RU_1=C2qsKm2yG#-rWn%Y=~Y z#a9Q}7vA21?(>vT2tW>(ARH#%1R)&84V(gEc!<8R)d?YD#P?kYK#w;;l7HUeItHF; zgfV#KLa<~|{J?z~z38ok(@GS^x77wLvBP|1{)9o6w<{7YM-h4xZJ6VkHl9KoY8n+g9h;Xh_xKc@?&l zf_p5`OEE4&xY5Q-@`z;;9wvfV6O0O13KCKQUOQ7*k8ysxN`2AyMR|K?%qIHvnut^lW9bi}9;$#Y*t%h<4B*rmjHj3xd#Q2c~ ztPbcJ=OtzMtsn`YD|O%o>u(f48&zS)2^z|H`T@=uzr-T}NQ38KfyM));1VJA&}4$1 ztO!{og*geJ@rpPAXCTsT!mQ#2Y#>IA3Bu%w4T=O!XQ1i?YB~5R0Qblf zxe#c$gaCx|pfp6gywtS>GMzY-hTqPBAOf2-xMe&)E@kU=*rXw6#jUHdsw{;l5kDI2 zHA%ee0QWZF6$S3VaKL#Nu^{IS5e2s$R9f*e1|(fYA_W#nv6)z0gm75^sN4>L29X|j z>%g+Wx#FIC)=$IzLe6BMO4vB*Mkoz8Um#59o;6lj_&p~Q+zg(-Ai)yg5`=hww!R}L}71R7k-#o9@QkT}6s2T3ppD7pQ?RT79saT)}J z3hoht>=vjOSNqvAmjtuVton-iG&UyV3iSfGIg<1SbMcOw8ra?dBwBzWQtaxPFj$J#;RK#D0B z7cN0A1vCU3SOTe2%vnG>1bDf73^bS@HW`2oqDHu0 z52ZoUyc!6bX{H(*s?(ZmRsshJkUkK=Rcp?cuxa7?3ptIx0-}A$_j1KgE1+tR$7J9Z zfQB^$3Bn5i+(DqC1vwy`Cc@L%Eb@XZ8n>Mg02+j3Dp%g%&A?TF55_taL23m}=8fZ|i!>|`z+JU!CxY`QuP=Rmbt{Czl zJPZ%cg)Lli1_8maB_5n66{*ZJtN-;_4t5U0Ztm*934vwht_1SO0FklDfN1b*tQWkIe$YNZf{S#S#(_7!6Vn z+&u>10;O}V#RM8yA8wrssuygQo+AYk@HdOiIQs;L4%{}^ZvzdmO9N_EA6c+aY^sJM zYSwDO@8WSJ$X<4a3^mpcJQKJq4*p|f6_7PY)T;OeyHf+<6>HQv3dhpmdIU#;Km)YE z)gzV$$fsd~YWxR80Kwc$wu&a?f(;c-#N8$p4WMz;ZBo&&7jIY^oJG!dk^*?e25cd= zVxVCrSjAj_!>7SwvNZN)7d|i)Ke&6$wo`k%MrWtuW&Hu`f0}j(g`5V_s1PsP5F8t( zAME`S?vYis!)}zZG`tj}qCpv=VbUu6ry_!vXdtp_Qxy#hyI7hCi(pi=Cifp3c)=m_ zqyUGJW4M4<0U(JB9@!4XSSY5V!P~p|1mVDAS`7pYNwK`F0mpeE%K}nR$-m(^K|}1y zvWgi4nJf+B8hj;Sy}1rX)eaJO_`nd4{+kSo|Ja6L)_JP~D~xF?w8QfGDqg6?;Ddnj 
z{~JU-2H;LE$N^pvdo2MU1Yiko@7N3jJ_x)nfg2w&;9D+EWCwvz9bX(9;j&xD%D++k zpj%v*$}%@zBWcnOZZzP>4z5{qv7U+-tL3sZh@6_}42-WLVnsp~4FjVp8U{90H2A#_ zO#BcZ@N_{0*bLVNkq}C`_^|_o#PwM0AP{2lwh3@BZnz*C;9@-$FI;@UCkUrbH5pj{ zKk>tb9Nr)RGkKzhW5-)D+|1w&0`WUf3E4b&lgWVpgVn($dEUSPW_d~jDTXF{r`|dU zU3pS~tQk+L0Kc0KtQuD;gHl41*{F#4^>ucFGKg|{?f#Egq2RzhbSB zNxhwi*H*cy7q&mh+Tc=PVIpsFw1`yj2ElJ)u^qx~1fB@lIx9YDIAXkoK)#4K1|ZLR zJ*vll>>vP=c!R*(IJ`l?v+=|aH+^{ni}+@l&0llJP{6>jO}s$>GT_Hf43Q>p5Fp_v z0$$MJ8JhnCP@y{r)iL}BwU7TJR>%nORtzOt-ee%SNiV_!%MpAfiICtbvt+V9O$L{KcI8x2mk;8 delta 674564 zcmc$Hc_5Y9`}nk=Y13v&w$N(3`yxw=N(&`QDugh!kfkELpT<;*NVl>irp-Q#Obc12 z64hMpBuk5^Q4~eG$g9)tjTbQI*R3ai$z^f~vIi@-^mGiy79ss1&&1 zO&=vA0@g1fKO72LCu#5uB*zV7@P>_-Q~DV4iiK877qOO+FlnfTECR|qFKIZ8PJ*K9 zuo%3EQImNSq-G9d@rKc5Bv@1(2AyXvrHjDdg90jBhr{N@N-Y~kiEMp0Qg$R;hfO;+ zN>v1d#?qm1dGkii`vj&Az`I9Btto8YqfzR(dWs~C#?WDKdGcQ`LGY8#{r5phj*R166htRC)WPWJXgt9AJk=<$x9@ z%JD8un1G5U=`6Mmi_UvGL2U?hrHLBwCL_t>(sd|w-lmCjFfv(Q>_oQcKcv%6QMn9{ zGilZ^8d8*u6iAUn=dyVIlVoxHqoFBTii;cFUL z51Y#dvxaU84({&WUf!Pe9@@U{KAxB~dq+p)|2*?4(?+u>6df9e#-)((gt98bn30su zkx2JkI!{NAE*3A9s>9`gsnp8I@{Y^N@FvTF^^kIVA|ubMmYar(zGi`k(C8fSp)*ru zcyr}7P-*1$@n8`16edKzQ6478i;`FTNY1=p<#k3g8B856jYg+X5#3NE5g8mja;zkZ z5KKDq9gql^Ae+Xavv}rHsl1&kvJ!N#cpC5YR81b0B{Q1Nq=5NS7%V0zuX<;wM!}MuH38%;Y&LZi06d5~w$aDDLf5z&EZTg&s<#= z21^Qyw^dzZDAW}7IjDO2D2V?M`=Eb|x;6@Ocs%+=Lz=f*Ls1k?cWT6rWa`klyooc` zf=|E%j?Jd>uFsI+1u@RGpTtL)HeH`Rh37hZ9_cHR zY_MDwND|PO&z_1RFNu7@9PQ!2r@6ZPa2P$U=jfw1a4s=v<9J1LREM)T*rrC(0XA>g zTn#7~!qbAev!UQbgoNx!6w!G5=Pr(n7%L&83eimmq9zs3NWmk#oE&{TUA=upZVz6} zBF731O=a}IBn>+YRTYY1`IP4I)}w%;o>x{DHtS7I$*SUtfht;Nz!4ggE3cLn|x)H)2FrC>Zn#3 zW^X!778Nv4I<(g}_MT2&qFD}DKLB8pA)rVbOdT014Q2=&-Y6YIH00qJ4H3&;#~{-F z?MOKZE|?pQm!qRe*3w}@Lpi3)f$>aM0!2qI&q7yAJbhtfguCGqv;w9htRZ-I#=9-&Y^?4d5dWr=z#|f zIt|8XCKcvWH8i<#&hFc`IC)~90OAn{8ccwhyhpT|$RK$#^cm0*@oGm`A0R23&KfRi z&Ia>i@|x&s;!4s=ZRn$U`ix0~MvV&gGDb)KG+IJ_It+?b4udL|&u(|mtzO?aIeCi} ziKnZgknoV94Gsn~EJ+p_>D8Fp!(kF>&RmHeh_stbG#Z+a|@z=&^aT%2jpC3#* 
z{>B+7q%ml8D8wH;gN1bXc4IQ|*9{Z`RZN(!G1(9rIcT!QU_mYZ=7Cym7|Q7^DvQBk z(+MVnPDh_dOokY@HiN>UG1&~5c~Kb*IzV8e-eM{k@F7FvP$*m)oy}r#nN%9;EG9>r z#-?ys6e`3yHh3VD!r%a3Ofg{`CX>ZtgY$7&R1Q!;CD;I){Eh_5peGEC(2+`|fN#(l z(2>a;AdJJ|0%Z(fp2B8xxG-R|p;}BB8$Ng$z!aTLg*g|@96@%>0lK&p=uCx41cM8+ z6B^=OmC2$I;CyS4Oqeii(OF#J28LBG1SR-tX0pU0!x(`p8k^0cap_$6l!IC^VRR5P zoeJOaOeP2(#urp6CW}Rf85$Lw2|hfS5R%yNO$*3GcnKPXO#z}AG!}fE!l2FIQdOC3 zvApOoe$l853X8*HaX_4GDp181Q>D#jGho_Ep|HRxSZq)wsufeE&7?qIHigQfQMfEn zDjlf;Qi~LN-Cd7dV4GMsH7zi8y zz?uP*THS}ZZp5*u)% zX$zPdFhxKWlUz-}y}^1ZAaIZc&wwE6;$ggDftvtUn7BeM zfsG$NVL=%%(dWV!CYV2rr%*2z8iNC0VKCDLikYw=0?eb{V#2gxmH+~#LXZQ+!b}K- zB8VMW*=MPVXqVY4BKLy&`j#8PDecu~B7D1jm;7$OLV%>|bQ!oZHnY7hnmY?uu` z3&suc6;wG8+_k|3K`IRN4M~8l!fF+l3H1Ynf${(z9f$|LfDgfX8JM#eGZ;IALF0k} zvFKpr6z~C6aBneYC%nV?H3qU2fDAW#62H_Ps6^soWDw_cT8c{Y7`?aA5e6Im!7(f66 z9vTQ3uf>ef!LGr4kpF?TK-9$D#iW6eKp;S@gJ6(rfxKXnDC!L22g?V)rhz~}yj)NP zIFnc?(1iu8KtO_!!2k~e=fUf|*#6n@@rOV{7-QoA4ys}e#C{|!4hV~mKdAsM()2*= z2YZIkCRjOu`4Sx__7DR>N@AAKKo21od>#f-Bz9O7MiEJ5M05%Pg&b`BOezE%3d9?T zO`>`jz90lbAfY1ThCvPKaxnUXuL5BpWeNRbkvBL3^;^f01ZBK#bxLtL`E*8q00zj#mqz1aLcfJK;Bi<kiE!ME@0MKHUfqpu-YOLel}K6+2={x!dtDN)BhY0$j5 zm8Y z_8BeGujWku=2pj&@ZCjw{AHTv9QxMJY5T)5Q|L6oa!KRclfEBSuJ&zAd`p>*K|-LN z&3s~F1!FRG(G2%>*Q8$P2&7*Dd5PQgBv zmM$9;_Gc!o-0)%FlS1DrLwkR@{wGD;%D$#r{Y~fhY|;6}L@^`b8~((Ddp-Umb9JXv z(}gy-#)8`{s|8oGD!FPSE}0hI+Sa`JEY;+pBR?kMVrWU+={P&v9@|qgho@Zkxlr?b zO67uwCzC=htp9oH31d#X)wfS?oX@T3H2=QkCMP5$cleIoi6BZ$g%t&?8{!#`*q7ALS^lH-3N79KR)i3>$s`%wjjl*wpf~I)Lj|g z-%a!^>NRLy+W$lUynfHl$!{7W1+kAa`&(7|+ob#32%$=Ue1B_yYi*_7Go!v=RQe)` z4*w#f{#RW+H;MRky^Dgj*MdBihD%Bt172P2XY@T=)pjkuJ10D#JGPs7d4-+tOU;I# zYlRI)N(KEVr2FE8XKTHB5(|2-DfJr{93)D8f3xbFy7i{ftNs%$yZY|O_dWU0-&tqW zx0~p2?`vfX-gI};iUQ)T46bAdBDdDY)g3(8|L*3^D=K}@h-+Hm1%fxVy#{UDdi*bU z2Xyvk5LZ7e{LsoB?l~$uT~pO+dRVy+} zs~shcUsS$ow12)X+a>Sr@Uxx*7wH!@bK4)aczm_v#9o`FoUg9j{eixn^HuoW(~bv5 zyiKa+F5Q#=vhYf6{4C;lcT)AWINmVx?21J0SJhz&)!Uv_^TDvOrOAVAlY?xJ6Wtk7w3eDeWQWh~)s)$)iA!lO3-3}Zw 
zFL5!|Mt|GN3zO=byb>ID!s9_%CdX39bkUPN=$6h^t?*B(kEvTza?mZV{Y2QUD3U}mNX6bFd^gbeN|AwbK*F7E;>i2Y~vX!2!+qcEmLN5nP;aaB| zOPF>I5pL^L^wS$Ahuumy+`|tz;Fy^zt(;}{128J|y^;BVsF2#Kqjxj&L4{QK-W|!# zZP)8MtMP+d=8*$!1*a44+XTGr+2*SMq(Vv%a|^c>2u@3u+XVc$bmD|y%DsE@)XU`s_m^;JR)yt&E0TAw6c$v8Q)0^kz*rHqv3 z#-O&l%d#5N4cA8&xvocjge&i(*W1yEer5#X#VW=MaCillSwF!07r_6&Me*JOP17v)3$1z1FpP++^G}_09!c%5#sg zI@q=O^nc27^~$B1_D9rLfHVZ5jEwPZ*XQb$q%I10o70Tnwh}vBZIv2>s`QtpSO>g~ z*^PQW^=Tms0{~uN0oY@pj(hsm&jLy~M)4Sco)b`#BwZ?4DQsM=68rchYRb7n zM@>C;kl?D@krl-HoHPSIJt{S4D~)NpZcXHXK}it}S22~TZ$NSYr|!X;lExrm6ed7e zjq4hGab2a%225SK0)v`j{e(tM={)>PIbd_CP5{Q-o{)XQx=~rn)z!e3FbfC3r6Per(co^Bm@-qB0NSr3jk~7PoPP_5J>iSS%Nz> zSll;uNU3)T!LT3VI{(Wg_BAsxlq?LTqr5N<^x(gw8j&O*sU@aj(1uIN!int&E3$FG zXXt_peYjB=Z8ixy9fOY7#5}t+yV`;T;(cuq(Jn=Z+Vzunk%Wa|ihN0GS2SWt2)okt4rF!qJ5L<-8_@) z7cr3nlv6qnbyxV=2Sn#w(Nqcn;tPA-3^WUY20DD%XSO>p)`F?BlPK^rtMCUv0p_Tu1*RNIK$5tHiFS=61`NFc5b3RU{kkdESkRNWjHY+f$7~zN+eU>Yn@aruL4D zZ_erH^j)ngQ1ZDOYd7nv-Ot6tiTvuS?v{ey$l9yAcBkW1#zt+_+;=TMBd2DaLH{Y~ zb42_11%X$p4!*K$bWi_T7$6(2TikFp#oi>ivjyh6o&9YpeZ@wXCuG?WT{|}R9E{I9 zRnV50uDrPK%+lW7-6poa1gD>CYwtiv<*MY@74*0Fzv=Hb>XGgX>y9^4c~ZWfXe~3l zJmz0))hpJ#sEq3RZbRJM-LG@bMMuoty~*(aytQ|HlX06c zIk7i(gA;Yl3*}V?_ER4ku5IBqu9-(&>tR5&tS-n;TFs)P4AF4%aS&Y zc@_psD>e}eR;47ChAs7JifH7f)SnygmeP#r zoyXQpfk1fPuQbKCa>}HpP_R~I<57rn|^pw7dW8RWXaI2}P9&Yg_Lo(^y z%yIzYcxrb_qQYi_wO1k!9BU@c+b`vaj1Lt#s?F?|1rU7_QfC7mjT!_bHDQD(}`aYN?Q7 zeo#)1YYg&vLL>;Y5^h->^GyK{XnXj*=MIil>w5b|I|O@l6|V*QB{D-JBwQSAJzfgMy;n3Phd?k=wZZ-9u+UQF#H)830cnA+f|%!TYt;MKzA(jh{elkS`vFv5CY#Cp;qY%-uz~}m&z5f+lgr!RPF3RcngNBC5W~1?YT4K*h z+{5N>EH21x%77M2VWyQlXLf~GzRFndgsna10dJEJJzoa2`WrUHrMEV{TFdvU$&73G z#UgUuD}@3b?Zerry&7P=sg4Oiaiz3hDN>Znw!Jw;D)9XgztdJ7g#UJ`fS2A1&>J{Q zFy(?Xi~V3?<>g=2>AN!EKgVk(k6j71DXCAep4$R3qa80RZ=v@@+q-})-QlpWA=1T< zGf%18)B*p3A9Nz4QaY10&!JWg*`17Ai3B zKZW8=*K*YQj9`SxJOsY1Cz00P#$~EbFvhSM;HC(<=qLbxX!+fKGq8`@m3OQq^&hc6s|I z1kCYq<0S10uXQEehPd@Qx%H@Z5$iJ9I)$5vnMtJE)IsOQpA*pQ4@*uV3W!B5_^7?cU+b}xqPC$I`wc%e7 
zW~GhI6T~A4zpl$htSph~Ige^9OxscC%=qpTsMb<_53V)MQ-8C$DZ=4>ga){o-4C;< zYlVBat^}d`+PO+;YMzTJorPI;e{${w*uX<)Oz6#?S#LH2?)UxxoUoo+XOVaW*@mA2 zSQMx;=dJ;k;~Wjfk{R^HrJNN9?2nrq+2SqW7Hi8r*z(D+P@NzeXU^LeJ^c zTA(@MvOSnDpF_KFKs-XvT+D{S=_^>swnj@IT`z67cx<>krzpN9@p479R^_=>^Y`Ka zaE{1%dK$%&M&HYcTYEYKHD826Ka%DnuP;bRE%voEp9u754zH#1xfj@)_AYAlR z-R-Eh(laVpS9ICvl|ug|8q3$~kgxWynl^QUj9tI+N(C7w%p>D)RpXp^^h#|8;S!f< zMJ5dm@y#)I#|UNbgtR#;^99oDigce9NNd*RO*EZc+)!J6cm4SwyPjX&*B$8VTtTlZ znV+QY-`8V9u;@ib0fxm!RYZ7(WRZ)17A4)i->~;m-A0M7hE`p>{%bAw%6~Q8Shumd zUi$M7Zh^jO5^GP9w(DD(Kf6CC`1VW5+_}66nIiwI+oqwgF zK`4c3LmNZ6YF5+`n*bpD_zpvC0*RIuGQ=j34c*rI2ng{-p!@-l1$?*9A%y^A9<fzJL@xAi$o6wcA%<$m>k?Mh5J#`C9^7E9Rj zLyd^u#K3^YruHitr)xc#eLWRr-(=PG$CaF!w!c30bY9@is422VC);7E4O)GIv5Iu7L6h7au9| zAJlIyVoA|2bEBY~@8CsoJQXzcvACQTQgS_X`*`PV;_}-7glsG-SIv4LZv00;ApWqV z{%0r%2sQPekV1$F7cu%pNcrzTCS-|3ONhnwFUaFVsQwEpUNrNn)m`xK1bTWh~@E#TM~cAxeL?&1j^q5A`*6Rr>W$^;yFZ*46C^1 zCNYauoXidn*W29_`4cy=by2}%#j09oubvJY;xczvv|_i_);cCV-QBt9Lshhbu25ii zB)saad_jM=EqzO1-TRJ)t(S{+S4!tiy;5*jP*C|kL-M`tubs~ddfvW$c97`M`K7MK z>%(p$JnpgIl~>(k(;mM(u*AxEJTblH+Nt0q*RXKgj^HGh$_tMUMkIR{KlYnwp56A{ zG)N2EMHUAqRo7Ksnyfary_)f8;@RZ$$^{Fv6_z({xpL5x$tzb3JUHTQmqgah;yr{In$DT)BXV?)N<>*n>*nd+GzA_{#|u#Jr^UOmSyfy5(x7M*U0GZyagRAi+@{P;-eH5cI!+`2KPQBoxP5vqRfY& zy=bEqnE36W#I!^OQm?n6-ikl!A_LFL8kuWVYF-22#Q~HRe?pAdC4% zDgM)ZGfp?Z$y;`0YDI2Kytfun-reZ;l*~8R&KONk@qN7tvdDywGZ3-5f8P0@AoZ%! 
zQu3OgRPPuDy@yMecn${>;Ng*df|6C_nh+rvWK046_#7drNSG$2$b&&Z!o#}3X-|{yJO5C#haxg2bpO| z0t=$+aqhX=b^a5`QX=mHaty_w&{g_>L9MZy-FP1A< zAP_M-P(C&707#>2v#F!&8UP@uTX_z0xwGxmPy*CXe;nj)iKo7Ct{YIL(B{3`Im10} zYAg8H{oIu8MLsnfajDP{H@aJk)5khqWEVRxveFm7+Qu7{ zq;~tu9j!V6iH_XH(In~_I0Ie2j;llfB{`E;2~+Y{^C7zkDRrILB`D()mx6I_O~U!- zq!KSZ*~TE|N}S(Ln;D6ln&sp4s?YG{DEXTv2MNjIYq6D&zdn?0`%1ecwa&)O4pP9y z*OFs^WJoMB1t=4oTYnm5iIc|d;>Z$^&&dW%hQA*K6K=)S+P)a099102P#jw03!g^2#oG-=d^{tRwDTx)W!z*FA5EJYc8;6n2!j3b%(je})YjuUKUkcmu&WOnIQQ%D0>-!=HvTY}F z*FchRx$T^p%}~9m9r8cNtWbKfE`ayJzu8h)#dfU=9P9!~IC0}CkWo8krID660CBPV zDtIB@Ex*+MDdd;Er3}=w-Vded1hOJDYwu`6&MM{kiBuh@vfR18i&c^%>QP~}6*6|n zB+vD|0I6FsnaRzN{=)=9qbihC{m4Qj|Z}-N{iAU(qvgh7M^j%51Fu?8wlubTnm0-A6 ztz2u6Oac%``{*GUGQQcawm^HNF$ulSgz28r8H}(Cy^0{!s|I2hv^!rw$yazl1{~>V z2#Op?PKyQ37bp!f-_?_hIE;>OIH4b9UIMvGVE14NuuHtVTMZdaAokJb7^|k~SlJhb zB$JohCZEy4n6L&x+dxm&ax+BmlY}@@{;$&I?r3_=L&71=g;WFW3Xlpk;-1c?KkZHF zT!*sBVj5qbV-iSELzNcYmWq{ekTQfZg#|-9#gd97bl9@hUB+c z`CTvngEozDb-gAfYi&Dc^z2s4czM4}IY3#^vR6ZB z6rX#s=Sa_Q2baqPo;YR~teLToVyph4xTWKi{4oK0;tN4Ynr8sd+R$$C@%Gv)8D)q1 z<_vq0d(7kPw8@6TOUEj|iFomhxxqO8`m2MHEf)>@cp8`dO$EhktGkr7i5n(MLnvkU z)Iz;~8l@H{Yk&USV)<9!OkAS3;k$@1*#(7Ujk=PyxM8r?J+*zWzKQtR`zMpldK)w- zP!)P^@$mZ}6@NE%X~@WgkBUdp!(_i+`BCviXF^eXQq;%gd=3UWv^Z+#04{McrS2mV zP_cc{MSAI~CgwGE5QM7{9pu^K9x{{-iHwv&82a?H3itQR}kIXAyGvTxg1$4guS4H- zXpSAXG^FRf4KZ2U)Bnh@h$F2}%J%g82GFUcGt{1ktvevA@Hq8x&t@lB0!#~~o;+BE zwv}g{cm^wgvF@59CT^S>{NV2V!gZ3)aa2Hkpk?sf83@d;` zYyy>UksWFipys32X+vxRYzS5W{|E>n=Kg6?ijDn88}x~@(z)eO8sD=jFw z+tGThujp>nWs06|UFXS&olC~l_4w_y9`R0E_JB&?^&6Z9a+e?xXxO>c$f)@7jroPL z5W*-EGWRdI?O?C!Fa`pieDDKYK0RdrzF=*LcjEHz(qsvURg|dA{ntaa!Cvjb6Xf|1 zZfq`+6_tZi+EX0IK)@51(?Z0JMdi1(#Et&}5JC+7ze9nHp184y#4kV!0T-)%QHI1M z7N&icREU33wqjELO0_Db{w@&6 zKMYvbXF&;ys`!$m5MoBZ6hj~L^It_v{EM>vn3O+O5HaBTNONCOw$y?x?t&mRVi{ZPKnJ#$4I`)m}I(#~LT; zQ^r9l4EfMT2}ofmj4kSmenyz!f5v}z`x}b9PwOeS%FUFby^dq)DcNR+Uafzf;+yL! 
zMNjE`5bmvYz|Alnk^^;0-nF+TLYbmEIy)z5m)zjo-0u8@t`xF}S;e$QN9FSCaYBQs zmk=Xepq%PSFsby8vT~q zsKa5o%e(#SqhamY*5{mo(imp`xxUMgm}r=Hx%~hY@(<*ZxzvX}q7=q{$=|0iB5?}i z$cw=ghAV?iVH}Yr26Gr?5(7Dm+sxAkakfHeqXXHR1}4r&gm>ycTmKrCb9VH@!MuDs z(u%!oLP91Z869xjnP3iWj$b>9@)|arM0?ajKsSX{##7(? z6;_Z{S!KHAkZ+_VEP8#$aXA8=SUbJ~r%7z;$?QTwJ$aZ;h+lIJVL7tlE^~b|&CS{MEJS1l+82bYiae&|XO1tgjQCMyZ@DSFKT%vQ=uGZ|&`n zYv?Rp%g$_+^O$%gb<(oKTv#lQIFGUdCZ120!ZNZ3&R!ft%fFMBrK9sdYY%ClvoNjs z_I`%DQ#zZbCBS0xKp(V(EozMxzYl2~D~2^|QT5p?hw;lJwDSHDu@Tc4uuQ4c2Q-h_m5WOoXVk8JSl{C?XVRSwzq*+Yb+RZQ>fCPDATA z2_VTaX_kCz`d`LgnzA7|qqYn!@kc!OgQHmhi9qYc z8grZze1WE>VdouNU)CQ*`2mf4(@r2bfv$4>;6qD;s;gA{N+Y(qR<}9w|$Rb!@M-QuN$bV?esb7vIfaJ2w2W5jqprhl_ znz~5%@2yuOdT2%h%}tP1Mu`@av(kafP}l4L)c4Sod{~+m$*ufCCmiLhkP5*%W0v1S z62PJ-1+ZluoOea?-&jPdbKa9SwTLY{erF=}yRh&`t)(5>wTf8_2R{dNGAV%XgcXhL ziL{Wcz!OMJUGvWm(qpL(OZFlYm=ut<2KS+t5!jTn6E|LmlWcH`Y=GNkWtiJ`IHxEs zJKt)AY)Nk#QWnVtY1lM$22R|{*(Twuir>{_HO<12F@HK%r`niO*Q70C90@SG9N{8lW`M{r?h8w6)Mx2`$7jowcI{#)@osj~mig93-j0M=*6p^S2(0&x+D#CpO z=rjBs*5^IOpU^y}2x7I1FpjrB3c&Q= z-+K**A>kODl?E53JxoyAqu{Ux*m9rNaL%1)ImMCNRl4^4qGEi^?glYS?auMjoYcvE z`S)9U8YVA0Sr7r+iDQap9P3%5<~#1Y)214udc)2Exmj8r01rs4`Si6h}^zwFw^b)<2pomTb_+uuV z|GZgLU?$wWC2*B+^ZEPw6L!W(sh_UDZj$Irp%uWNGFRnbEm!4zuNdJ!d&dwo?D0R5m(#?7$>Xg&Cesj@4Z*lSU(EIws?|)PrG!>vE zd{it&7XcqEhWW=3q@r^EH?*FZK~lt}oB;|jg;0Fdy6hjK9l%NqQpx{4f~cJ_Viv`c z`AmWEpL`?@B)dNl(uinc#_zcL=Scs77Kq=gUM%lwQr%)QVjXi%U;+MP*7xy`eU792IehT{f624|E9;nZ;tTlj@62i+ zU*M`depiQPnSP*3U-wbU`z0m@hZ?I}-F58AOoj%^WEhW4^d(R#gI7=Of4{9?weGi5 z5j*{R6SYDGZ!^N(3zWOr7IpC_Gw%G>u_`3*j9p7@zCgw_%&)aNzN)bE`t!GNxI?i! zOF!2?vaL1VbUg8v%l!ALNqcZQn`3>03%7cyiuxVLb4u?HptEVT$QCtoiyTy&ka#$Y{H~yPI z2#oTxp}@BDKPH9$P|O0XSYlG6v&2mYsB;HsTQ}(c-!$)`3i?CQ5`(4#9{7Jm$`Gj#f99`WK(+(? 
zOc0*x2?|@&ap}&mx3!1vnEQMa_QGHgiO>%{wXBw)8sPav$BBwP*iCSTTco{^}Ccw&q!<%@WauX8Dv(fA_mi^?~kR z8R8wxHVqfhC0bkI>Z0DhR%xgU~wL-rIZg7De%-uvBzC}xDhHugGI*)GA zY6$R(o?6kf2{v~nSlR48?|3GCcexK-!R5BM*d-438@H@wKY~TTx_OQ6^Wbi+e8;`+ z3muohMK!Zv)h0it9rj9>_k{J@9E0!V;y&pA!OG<%9qI}e-Q55iN?U8vN<-ZY>tSV$ z(0==?maKf+&oL?K&}(G4um&K@>7Q5CAg|8~)XmxYn>4wF`Pz>Tu-k1bxse%PpH(3x z{bWsye1(5O;~dy4SuD&(d&i2s&Oh^pb-B2Crio}JFH!=p8#Xko6ryd;fz6EXVJ(m8 z7N6`)z;V@UWx;6C%p=+vuGD(l<2RW}tFyViiM%|kb(tfqv+Y@T-xOBV;wmU;S8bZB zmT3xF@QZb|gw@3ti-1MAqN>hy$kkbU;#$+;_%i)U@ zuEnxBLEe%zCK2sAPD^ydE0PLsm*9YzoLhayYqZ+oopuvnu1WabZ>PjNs zFKUs#7uHG0#_swuXp^Uf#(7vQ@mx-I;WU3Dg00iC3d6%NpT>MJmQZ_Mg5nW!eR`q+8iayoLMMg<5Wxuuhn>T7@Rs2z0gRt{ z$p48fv;rkUPO93=XwXiaDxH?cSY@;)RKtpMMh5!@HZ(tnLE|LtX|amoNn{wea^Z^p zb1|Th@VkkUW|ikRy{%h-v0$7!89+m}s7i*06ju`L$w zG3Meu#-4YnZ~@o}i}%^15U@om?Q!xIv1r@zakPeacMe*^OO5q}JIn-X|G7UseXsw+ z4`;&pJqyfWb8){F*U7(=-x;V_-s<1EEsM~5*0NXaqGx!&|F?JFU3nLxlBWIeL+7Q~ zcN<34U9u%Q!ZO0&8GQY`Oz+L9gYW#kR+qj!#_xjTAyayfj{fZ+VIq+{zFxG;t@e4w ztK`diR;NdQ8*H&)!p^y))XXHNZ&@a>pfFiAm?JZCV&b=|zgbRLc+zr$aV*)05Vtaw zurEqh<#S}do0|A7rOtA~7HH)hJNibb#eyhkL^qRI;JoaMHzNL5eAAu(C(4O`nZLha z4^vJI5xOwY@)wh;4_mRY#oCBYv{-P|dW~2IxL6-F%s)0%HTLzdey_1rpzf^m-%{v$ z15|)QaZmnBk#Y%AyR;R6^xS}^5tiFpU=E>XLpA;>G>6KMO=PzSAy|*taiz?Sc! 
z_;2u^h=3zoBS>R&ZvJ=7YXI@@t(i0&;{Suq4<(8ikksGsvH#KLhnhU`FZlFO{yt&v zca#&qH`~8y33w;nO~t;$mPcz3_L~)kSTAmxtW-=)Y;};4eka{G({@|2-E=#xJE}&3 zRka#HeD^uZwXkJ4i&CGl#(*dlbX-))FvyCL|K%pp`RrMM@19;bE^B0vcoOyu>-X<) zpp?y&RNsytYAmX}>*Sz_fyac*L)T4C$%p~T+ zJqvgkV(-WzKY=#XkO|UH{+J5VT|U#2|JWp9jrJo6f9j9e{)al`zi9#x7(Vq!;!}bU z8(-KT_wn7|jnNzZEQ3OYgv2~5)mu8-bF$GkhaO=8ap+KMh)U0sLQX$xa^1m}6n92_ ze0qaQ{$V+0d_Wk{Inyq8`>Rt&&5d40*%5h0cIRleI|9Rh>9~-Tx#~&(57}>|%wg?K z)va8!Ogf|1$+ySF{H9%Zr_--zCDml%2C1Zd!HdT&lr!t{Ch3Xa=xQZv~dg$VD&RTGHkNJq`#c+kxSxLIAxO|JOdC=4l1^#+*`B8vEHWro7 zcb+J2{Fy)qS-8#W??Z7|_k~CyzBEIBz>li)pNkg!AItpTknJCk@;e0)AFFu*e=gGf z5H-_6hS&ruzpXXYCP2-qS;0eW0{Gd-SNm)rgc$l~LgCj8#s0sW0Ri?tME3P=nD-$( z@+&aHAx0XVid|$V!B*HSeka_IWs6+vcLYNQ^zU2$5Q0eu{pUc4A*%Zwl)p0EziYP( z`0l@r)0^|G>a66?gv!|^d6l)NESlIvZ@RAetd=)@S0*q6+ky&06kv|%hEJG9w;HH9 zcFXd12GTiOoO-eYd*ky1^L{weR2)2RXU478RR!+8FI!teR~{91w-^-!_GcV0VHlSF zAbFvxvGaNdq-W(-6r#JHwZV^k^!HS^RRsnbTx#7Hd_>xKo4=|<9bB~6F`pf>H zgiKtqP_}Nw#AzzQ4?54@Lkt1M zfzJ@#p9KwS0dZ&9%iH@4Zgk*d8Xdou7%<;g6`XBtZ6GGs)yWxkTx+HFv+C-KTe`AZ z&&<0TM)d8l?{PiV|JKS{-hw?j?`~;9<(BmNx3=3pzHUnR3SBp~+i%v(ZvT{x)dz}} zqg?E~@Zh8~N5XB72Pef>IF%oaAkcMFvgo=gV|jGll-xCZ-IUKbeBIQpiDwVlXH3;S zp1SXhnepD)R>FlPw|8^Cg5Nh8u?ZeOQEIPAeSPo8AFD2**ZcmZ6XSFhWIl8&t(ah* z-M;g{i0>Diu}#>>`RbIU#P+ru6CW^UzZPi9Q_dY$IX!WNm+MjDQO4?b2@B3R7Z_CJ z=8bshb@1kouB($SdmX5Qr+me<8~dP7XfOR{1b!+_h`9GXrO|;?k+#4-w0+4`l$=c@ zle13Jketm_d9{9Qh5shF-Kl6^M}CL~lyR5n$u45Xw~V{QuWl*12D#LOZs}WpLU~!j zza$#nI<@uqN~TJRZ$4Waop=a<%xy~j-df1xt`!E<;3FG>aQ~B8MNh8#SXg(D@6XDn zZ%D7sx%fPTFE_jS)-aF%1Rv^@+ei=e?C)=im5^7t z>GKk=gsqilu9($EH19Xd3&?XOR<= z+6WC1wBfs5nfvZdy{aORUKn)`e(dQapuBF=HQ&Lzqya9_4YZ|XxEh#NhnO}zHH>$f zOwXzJ4fD`toet{GZNblL%2KAG>79N*nQ!qP-`w7`yeiGhqoAD;yF*W^y?gonR|#Hk z*Jr5T$a~;DUO7X(B=HnargFMVxYYL%Qw6jaD=iwKfqVud0osTAyadjAd8Mcyrf!;gs`Pr@o|8Y`eyOO>h?r4tz2P(@ zVgZj3a=AcpR|W6hdrcyoTY_8WLsk_!+Cd(Z-Fu@`PvBjmCnabbt%i^_)jnrr01cN~ zO+i#o{m^j@0O7YbhI~F_lrftWH`$>(r@Y6fpMh-HsnP}8_utR^m3tK(*nqxO)8W!7 
zgtz__+#coS5ikO7o%)sFUhTVuj%>&lZ)lrU@BbY-vf;AR-xiXr0!de(O3!b;d?{^#||ZF z8=m?XU~r}F=y->hM?TzJMT|ckl6|7T#x!5Z z3W)%`Tv`k%xb+vq&_RLq7tzTM(ciQ2iwQ_20STp;OOzCwv_ckb%oqJFZ9~6EG-<{A zU2+&kRMSeZX!ud30aPNir)>v|p67<63v@o>A{O0!1#n)Ybk)h1rm~O<%iMA#l|Vb- zPiocs^5%fY)S$B?Cu7^u2@Np?-C>A*d@5l^2ueV%f467YE8A+n_0Ms$_d)1-Agz0K zjwFR7dvIcA(u#x{Ikin$Vlk1z%Fci%33#XiJRCv*=%MiD8&z(%;S3VyQ)1lp+w zvJNl?jVKF(jA7S3dZ4_amNh>b4l0PKY4V1orT9hY18G4Ks!4flOT-qxs&|w;M57i& z(^>YnkiiAu@Z&luan8rez&qNe^8_8Ui0Gzckon6r4nGQsaiAsreR&8pgx@4WZ()p# zMmbU`_3)Eb0~ozbf7+N+zoH6$dT0>w8x)-AFKJ*(PvkL@W!R3D|P5NG1_^ zCFpd;NiAnMhX4rcPby2?=U)APynS^*RZI7_AR(cEC@m?H(w!0l0-^%aC5?0_%@#?e zLpr3TyGy0J6jY>3K)UOpZ@W8HL z!BeVEeTO{zNCtYjIdg(r8BW!nK)mpqFX-lk(}1TP#Unok)1mPT62Ad6M1jtX2JOE% zH4}h&`!~Njm`^ZwiL`*2!QhS@JRagtRxmihqmsbN4wdt;p@VWn)dzFK98VOMBkE)l zMb6_vk1kigIB_m;HwO6WqqYN**u7hfHw1GstS$B@?H|UvQ3&SI<^!TI8aJ#xA2#|8 z-&vt4A4H-8qE@;I8?@t&oq7h7-$5HJl*nH}8!S%e5tMICyTw81 zpayzn67B5#7(fSWD7zNDGHgo`eNaUx_NsMv*SdB><>>R#OnvZX8nB91yRx@ooj#e5*{GA{vf~>d&f+33JQ=i4 zJQTEdXnL0_7^u0zw?D>+7_brhfn+g0#d*KdXHv5-TwyCIW7O3fjo7;kR=5{EMm zBOU)ykY%5J4vGm$FxoLqJV$0_I4J--(mqt;_Oj7;zG$aKGpOf>08EZ0`|NX2+EtqF z_j>-V8sP8s)UW>^ngjgS?O)d|A?g1>`RC{WXGY`y4&|SN3_yeaU)cL!nrr<#$NCe5 z0nlInjRXEFlYw95$UlepbXxtX9|LFR-JihoFVXqu(-W{`DpY*uhL6h^VfpTSq{6aR zBO-?WChh6$#_T(OEwpQZx7A=zbybe#d*n7r{&3Us54w&kfpu@CUWFQQi&(nX!8^Q+ zh8Q5|bp((nHt@StF52e;-vr52Y*t2RYV%Qv_ZXXTvPV}o3J()TS4xkpiRNoD7DoG} zOOFIU^18_L=a8@m>=Y(8?N+-~&ggX7&?Px<$7>2y6=Q65^$Hv=%x4~a*~;CyN9Pt) zz=B5uE5J-m?M4Dhg$AI#UA_1(PY9sR4-viTcCepGw^3Go96ulz5B}Unt3dT zM2E&o_yk`x_G)Tu^N1`Dd9!>qcl`A9h>n6^tt=ECzH7BRVq%%bK%3ZJ*&0ksz%oU@ zSzIcfyQ8hMU8iZ{>s)gDMjUEf&UHRgVI%}kwE$B*HG-#FAdz)6f`kGGm;#C!Gv5z9 z-~wGZ3RZ}k=#Xl|B;9QY!JPkKHm61~jX(IFV-7Te6%qMwom0VE zsh`itgMzNogW@(xu0k0H-#e1?B&vCBR<~&!${;w+OQzj`a^M z0setw{rzzMrHlW>6734e!=aRdr#An+i~o8{!1bpo`rl7B@7Z5B?IQ%)&}qGvtqISM z)_Mbe`G-xl=Gkn@FSiG48ydGgMwB=jcXB`DX$sDhnD}nAlmJdPEbE*OiStI<&Fz5+ 
zE0yiOhPqk2WgX(#JKDP7Th15#K+m}h2-{p)(o{VxUea9B(9LIudIg4yi=NYX9POqWTA0_0Y}O}cXtJkRQs1R=l7WW155rC6Ohn8GbjJvU;nwg zMhM!V)4o_n6KjmG1F$GW7;_q{1_a%;>z9sjC?;@Sbnx-lG{N~_* z&%O(<%SnjYl=*E)GLiBO_Df~pQpkn#+K93wwyA4gp&+gJYS7Z=&0t&Gi0;j>rId?} z-?N^VZO^EdFxRytAcZ5aRxJ>4ssDrn|_ti$s z&L_52vn4Wt*Zu+BTQ=q4#2;==W+w;{)>t496A+ZB76@tCG?doxB29m|UmQCYJVHRw zPgIs->>^nA+1X=oxxYKl|24%|uToFX{LU8^6kw&s&8RV%Dt0#CyLIHO;75l?F+h&8 zNFAPo;SHvW=?#9~A)mVgeAitN#0IL&9V>7}2CCw7Z5SI_5scHXfOI*t79fKp_R{Q&zt{vEFu z@J6@2mQBZAR5^B%ahNtXUaae00?A9TgHCFyU=Rg>Xq%4kqK7@cDK2Nnex<6h?Kr06 z!#E9yNO?^$q%6! zY=PV6LG?Rdz}p=L56TW1nN_ajC}P5dk8Z$JzF9DbMWN8G9EW^l_8ynu4*ncvF!Vzf zJd6yCS-A)$^t8G3SR`iAqw^i?P%;p&OXbJH0OtrSPGX+$J`}_8Zi5jzm#k2<|BE2= z3`H7v*cYGFBoqo_S8`_!G%CS*Ii?8)X3R;@M^s%k9uyb?_8JTf(XfT7SpyGYgTh5r z;h$>BptyigfUy`~#L^t0NDweJAK2243lz^mP873r982P)39B+w;s>#-GrJ66zz`Vx zad610*mP|(L>7lvHGp5(y(O5KMt&7mikG$87mg*zA8jV%rF{-@XL zVPJSba0|5WKKO-;Z_G5<&OBfT1|fj$fjgK_ue;^YuH;C1{kzyNdN8t~4236_oYY?^cI+c1Z1*Lnm;z8#XjB=0F{e&2|AB7g0^3uwgu)_rEQtCMCh$9+Zb(z ziG4$ca%t)&inl<99-FPp42~;hq;$2oXqoCt&u1U2iB+L7dl1|KkG6u?Y?d7%g;aP5&%(4AzNh)AL~7%G0JC z;-Q0_SRxtuonye)8mtHFsQ?qyBFjiFq=~$mu_JR{1U5n~4n_jQvrz-wWl+S8Zh>dT ze>wrn0C=n)2pcgKxG^8JR&6#N_s)&AYA^}``~(JQoL+YXpdmlOTn&36EN7>e9m=}^ zA_G8$sJSZ`mvU<8u`gI3s6%jl>j*vr;ej^H?P~F$KCXvJ!O)60_JT-Nhbz)Eqnlum z#i=HT8fFNY>5^kOA(OCa4sD@V(|jegIX{vf7QAsXtnH>}e9^jE8cYDP!>8s5;=Z$D z0j1SsQP#x!Pj3`8CkMCZgDv))Z*}c~oeieTQhMwKSYTpd5b?odtosL!&4qQFA0rf?64}0y4JOtEUBem-APRmFCwUhdHQ;!l zZo{xKnmV@dc%-3y{?h$ub6c;VcVTlEWD*qR#nM9v#UP!|WVTc=z$an7yjI*rQgv0t z#bqN%dJ_u&h=hfIu)xAUbPK@v8RzTR)wMzp`jx{a{9y1RCoPh#$YJ$!&8?a1Ru`(PjO^RZ3g=y zixc}nJ4#2+YJ$m^rv$Et?Gra#z9_WRba^g4U~>#`*;zI`jsS@+`rvXG(6m)g-j#M{ zl1;}DquqEXks^R;Ip-E6`8Rir(gWVg&rvYF+Pxs1A^ORRxZfzw8gEem-2 zHU76Yj@45z5#+{yKxVlKY+yx5IwPdYlDMXS5Z7Al`T%C_{OvO>>@m{>_~`i06UkpD z1fGZaFWV#SJF)pU!`iui-y3i|edUfj_A?PECgaM=qp&=&^D`Xmxq{tMv9}JI;ju*B0WD>BP`wZ7v=!R zX8*9~=X(CDZvA65z@NfX|BF%nxjDcu>egSQ{Bw|jQ?EJkpo27x{m zVx@zD)qJMCLKwzpLHjEq;79X+B6U1)zhFQhq}%ywmt{uc9kt!@sl*~IwGkr2@E+*{ 
znSa2<fxoq`R1c&s(CCN!nQF)FoNewmwV;?#Dy3Ki zVENa2v>nbqKQ;T0dj3WY)Qdkg2Rx79w6`#O{;i(h)R3jZ;62maU!ep1H9Y^jIe*hD z0M`3oi?a95;fE?Z%-TS_9qVmIZ4))?gA|3RBP^YygY^CP=KCWOzD@^OKpsa)xF=9F zz|$|tLcTk{Cw_U->(z?VwfK_8rIt=5Mxa>fV_yG)@9~fJ&jMcsrS`THg1m$kkA@eFTs|!@jh=!Lr=1t1KZ8-lykNMd0h10? z00s!D-@#DfSTIJIH6kBYz{~xRZU1z(3MdNlK4)uyq9FCedZq@b9gMX*ZRAAxJKu%V zHcmvf8jsrm{?nkd+5>-L^jVqz(PrSZfB%iqztCndcp7HG|2fbQC2 zz_k9(x7Loep2cP6SnD50yqWFP?o%)Oqs&=FdQcyZTLXSL|NrdDKSB5>GwZ}=@b~u* z3;wauKWhB%HUt0hKKR4v7Myi~{{iSfu^EX4Fa!wXiN|oi`-1y82%a`>xCo-X-oQ98 zSb&8n8-O9ou~3MzZ4^P%M%{2={tBluQBU`$0dqUSxelP%Q*v)vu-0jN3Io`oUziyA za&uu|o&B?_g~Pt7;iRZ8?&`k$<)Ae2L~0xgp82>NIOLKX<`dj3SUpoYjKNr#>We^A zFD{n)VqcxMX!zb_9%c_)K0iCXm`(XTbRY2kPF1RyXn3V(B}nn!8K-$fmMEm{D6HiB z%_{y94n@URDz^rZApR0yah^>y3-A~jL#kpIHJrd*e5)!_zbTprpG4lWg1x?hwK<& z!}wHv&EX(yu_4D;U`86EcXOFp)z@;2%H4row#tr(eg;BdVjnoTLXwN<;g~~uiYNhGkOiGkrk!s(_kx>5`6lc` zJV>!N6{<%q!RXT&KG@FrnJVx|Ly*s1BU)^L2v>t+!nNXf^ZpjNy&u}29}w8lPBXRy zW*gGPh<>6f7N>odUU*Af_QtTvJY+r>XnD+FN`qjM}Z z0jpc)9}6?ZdSnC|piXDlcKVnm=#0dfQ9f`pJxIdNUsZvb1m#c%xy_p(uOl$Q0yV{I zCI=F~Pi!FKmncT@YnuPgJ`*j2Ja`kZ?>MeJ5(A^S*8F*)UH-$C=@7e{3!bUSgPg0V zmI;x(bc+wc8tjK^kPRA3;Gv4(vzs{zO$YnkjEf53Gh?4Nh^Z#9tfZaQWd_HJG8RUD z#>Nal45JK;**&>-&V&29?*LRFbNBIJXKc$NG0~R@IZUUdtprB$@PVc48)gF`EW16NM2izfZ~DCP$yUJ0jWrkE#v7? 
z1s;K%7g#L^w*MKV?S9{2$P)*U20XQ4O#sZ;aN7K_5mv(x)63Ii{Q!EB$f*V9{7w#G z&jF9Rgal_Uz6tE%w(A1PF&nF4C?Wc(1xwJWg|Wv}Vk|wBe!I720zKK$a$YDHCf9>O z3v$G>q5)wbvHYWZV0Hybo=Ar-Z;OuK!#TbC6ygD?R*j%qy4BYq7MIHB^OjAAx%wIm z{b?BDRB{+;$&M?0x;D^pmH=2DqYUPN_|Y{PC~d+C%q*H`%m|)OiI1GD;qo$fq*|AGA)|eTE=;7WLf;m`m`~h|aUJSnj4?qo;S+|4I zH^kCUOWv!N;E7y;w&Q!x*ABZ&Wf79I39Ia%P>Ni>X;CY{nilz z62d2Py4MgO@#9K>sQEi^4?2hxc>V#@D0@({3yVoF z1T^>s4emYF#5I_>FAhCZsVH&{GU_nqd?$_o*6H(md z$3%TGyv+n@0FGY?$?UMP{W?M+m~s=JT(EXmg31wMg=Ju@a4d`!=J^Z;kO+VnERjOV z3e?Rf;_%e1|3eC6cG7$A;}ZF^OpFq@rCf2Iopg&;r3(}J%C$~n&cO>LInG1|Hsb4zGkX+B&gqWeUWYm2nc?Ff zlUo&D;6K|`hGKUY3Kw0y=fNxhu)@8l0+~F=k#zh=L6&{?IVdJ1k?Y7bQGv{wb6SuB zeRJSG<;8ct^QT2KsOL8UNSr16>~m1sRXX!j!HGK3f2#)kSL)g3{-~#Z{p|B!ZVt2$ z{0S^@Sn(CApti8`>*cKA{4xc1rR zj>T@J!t6NJ4vPJ!8USqm>2cd9Cj5!e?@au=wj!&4%&60;RH*pF(g=-XMAp$rVEqB( zfPd5eIr9&$==h@p|7rQJF&qHL=I>GZMPUDC@<}d$wF&_FEr8WCUN*p#@M;Zz0#4jgB|+^wie(cT@s|T}OM{ayhlU@#4yts?(}) zaJKAB0hjs4eZiTH+5@j`wdIY8=cNJ3t!skI>m>U#M}b|nhuAl#2ab?O2VS^X?e=Rw zIntbe*qV|CpPYX%J1{aoLH;n@-dwPTIDmx2@c?yfe6DNv==^81=_`gZ@ONSkCde5eDbSS6$$6)<~5+7nCFvG{kSY^R@dw z^*tvG&wYfEY{zKTycoCg<&xG-1>T_!sfYP?bMpCK3pD#x+RO2;e3j;X!}j`ShQhB= ziaprIRbj&&TRFe)@)JU+twk%+fP#;An6s?Q~;eqUh$D6f$OT$Zt)oTZS z*!A}xXMJXE&rt}k)}j)S z1NLRFMU5MZO^bv}^A(618B15Fw_Vb%rbTg);T!rQPSs!N7AjG>OMvzPuZTQK45Ni4 z?8%iH!HLd%F>#wcIqT?w!4B$gF1bl=0WLSc<6LX<17iJq4HHw6^H!B$5~?3z5@|{D zF=cO#D*Qtl73U=~(d0f2`@-o1co|t!!YCti8x#H_wsox9RT<%uABQA_h@N3o0$_(7 z*ZOP)o%b%995t;i*72vb6HgupVKw?CJd;weX!BjZ_u)IrRWHTw@q0~6Gi!uW2wuvj zf=2=7!~BQK_h)czIDBN0neA8E7A7vC^Dy5*DD0=D$<`sj`6$>|xH9PkRP_~gtsu>Q zE9#Qx$P+Huh;AB1;n+K8J4_|LiXip^vm#5h`okz6^V4VNU6m2~Ze-PK4j)Y9Vk~t_ z;+dmt^V{3+(>B%izGvKjc!S>!QGs3{{06`6EWXb`$`P|4jm}XwvLDHAn!zEX&Ppy{ zlcvKkhkt9q;JWm{rhOJ^I6!iwH2BKppe%&@{bPjMTf4+P)^P%kgyn1obJ!-|mgKId zN$W)96F>4zb}LY9&Yu4?rb5OYHrYAvGs&>_neB!j$D{9<`@u%H=kG#dwjH!JkODqo1SOmRyy zQ>cBvSLeEX)4X_y9Ew?x!>~b(8QPk6dTJk)8Wo9}xZtHnpQ&}&2@^@eky8pv`^wN* zDqg7vJitg$5POqny~^O|Un!cR&akn%c<~LXj-c;re0J2AEk)5QYRX~6v3eg7m+T@3 
zpI%7G^DWVsm^s9EmMnPY_`o58+_KJUTUpHLldN)*RK2mFcB=Y&{TSbgW$8GtK+=_A^ZN`8rt}$T_FEgEZ znuQ+6SIT&#?Cs)z89JboX)XQ&9K^?Mb$;q}Xvb>DeDuBv*N58-&a$*AMy4R#n>(sT zmp-B?#y;NT!Y1FVQ5&}peeBI@2N`%g0ohT9GypFY#|Dp z4AE!!aLh#uQwT{8;WJ2nk%o=l&$S4=FRH(xvYLCE16s{+HCOEE?TClRX&c}3fyc> zC|pkVoe-373T9O`(Gy6S){SL_pL-}D24YA(xOPD9!B3?#9ZGpq7DYOP=LuG5;yD{h zJa+1^i@11*!v#vt&$zA?)zf@6jEOOuckQ88@s05(6Mw6u{L-Hrc%wY&&7J4>9DU*Y zr<5kWs+uY$QUvzyYrgl?5!=bED&>jVMN>>X8E#B+J$QzEl?Lsb^!HD^3yA5GyHX|- zaqXk>+~FKI=565;;o;i2XG{6-OI=b9*SW;KV`7=}=nWxJF%?{cks7-$N~@}GKunJK z*FY@|E}6kpOaF;m3X=f3X>Iv=PelGcS?0!vChY}bjfxhdAy?!ek@$ad+a zzFtQ~NASAh>S2Z?hc{A@1e%CuPDY-5(~!o)Z`bEDQJqwHn+@&_=q{phFI%ecM$Du> zP6#`qYAMELi;ZWKo~J^)9C0*K(I|Y;NZ`u{B(#E-*^pU{sze-kU@u6i6~DO7=}v^3 zaW|%TSe)(F!|&E9Y66_oY2J+j$ZO@H=RzmC#ous|wn)VmNBZ&&yNCvrw;~Cz%2Srx(Zdzx;|2yaEpHdO zlCml|piM=_A#E(&m;s6ph>}#=gUwAJadCGse&D)ozQpUHE%PZ%qCMgMhCwoC;`fWU zo7QD}AE}HXV@8$?nhjGB=-9Z_Xt8!qe4Rr5oB9D?lXRaa&p28EY@fb#neqvi*lV)l zL&I%86S&haX#;!x?`Nuid9PV}xVW$di0jgtx1l#wFg*B-Geo9I!@Q@rTSkuZDbT_U zZ=#x-^#WpYG6IX00l(PG<$?$x{=-OMN1L7$X`?$i4$*t{2rZ=^&Aa_g<-Xk)oVy2^ z--&dwS8Vvu?Y_?t0rdG5c=rkLX8JL263v(ltuD`NI3Q}>*G?Jxn%#l1$E&NGRC-CW z^{b9c?PZiqsnqEC625>97KPiqZAQZ7R`2&yC#eGy(?s>8ng7b?Xih_9Fl%)0;``OLX()aJ4FG z<=zwDFof%orpSyq4Gy ziala_&zIUGsk>hu1J~1MMR$fII$EoYlPM@}>3EJlWW>G^86rw&K)Ri)@~xDVB!)n{ zcqL?5JH2X&DO>d&chK?;E%DUgGRCJnvc1kmvg5UB?^0NIkpNlk3@x%cwhP?_%tY{1 za_Tg0$^tZs7j8w%*$nyylb$MYRcgBn@hd8j|&TbNJZ;ibsvVJFF5Fo_5P`y0%&D}uQBK`FP z8vP>Kuh#%)o_&k$Da?3VA?Bv?wMA-oAhg|EtNI0hJ@et=Zr|9{9VVCk+0r^z=_2We zK&Jq*ork(2nu5$bq_L5qQSA^w)L zAB8vcXKwNMTz~J`zIq4nrt5(yL~_Dl+Duft z?wg7soiA;Waw4XDJVw1P62H=V|K(~x@s4vJdY!g4E2`0woxCv9Y#+aH$pz^hkD4_8 zSm#;v_lhesWrr4#53?yJwBW8~CWG?y5$~q;uk;S*X9!u3-f_Xr?CZ4Ex~i|GfjF2i z88u?tff`-;K+>@(23wH_W>Rwh3%NA1TdShHMge?q6rE2N z+~qP!wGcj+KF@e{*SSlcL5Ax>+EYD(OI+~C8%CilNg*-#=HdOqBu4z$PbJp&+^*6q zePmbNE^>EZxn@dV^sX!=GH*4GL$PLFS5#DlJH8KfH32_WVC*XPjoAUI8`&Rx02gyj z8|sUg?+zpeQuyaYL{;a%Bndq4Qkk+;S1abIU|5LFxl?JNRpq15=KFwBBgaNvl~8>^ 
zzZ>O~53V}vH_^zirPAVr={Zu?nZ~_p+c_avSQn?94r?==H58@L>_Gj$pD=FvdYi+-N^s!JZim?BjxxGnh zR?lV}dspP8+2PH{91Bi^j~nSOBkJ@BpIb+6+t8p*(#{<)$IuGFL1#UGguBrjftv9M z6;An?V9mkK@@m^s?crgUrfWx)v%voLOE)oSh1=|g zFCA`_a*kldz8B6Rq1XtTr{0?aa*+hPIa;Yl&dV`tm&fNcSYg;Xl4;!BdQ#Qd)iBd$ z$K};Euq-~$BSX+P%{|)thDVbCdFK7fxEh(@Ojy43A)P4kp8ZbvZai(ki;|}OtekZU zQ-Ym~<+W`E7B`nx3Y*uz$`G2E zw^Hzj>?Ot+sn#XsvPkI&VDDtm3&tB2h}!DX5A}Fe_*-u}+;i%6;plSPwCbGDyJq@% zjC`;`JXK>(xcS8{X)=W2^ds~ETtY$p=fG+yt_Ab)RiQ^g+DTWfv6ztxvs0E?omndL!Ij^s^GL*!PC zMYO%gmfpexW>1vbhO!_9sov8;Q83iav356F{$#Eu%Sv+Ymb7mh z7L8U!W=`QF(fkNQ3_-x?L}_3Ew+(0o}e6%oV}i(7LyHR#r4XuNO>04 z)EU@jl|71UnvI?1qXa__=AxV}Z(PI>=;xG|oS#6_FzLRFucgU?B!FILZ`{CzU+JDO z9Ms)^ZcLIkTUafc8FefY-j1vR=T+Ya6uB&HlxA+fTUBm8ch2A7sW-YX$%qD@YT?4f z_#8tj(4>Z;%aQyJi&?D&K&D={oN~L{1`YGEE4fkI;FZ93KbjPzH0#`LTGK=thV4pb z8ZvteGL?Fo3QOL5K|`eMVqe5=k)L~W11daxM@%IxFe8z8J{3W zZ7O*BT+qbjC}yDS)$q(uWwLY2`UISh`vE^zPT=?j@6`KQ?{!WHXgq z(OIQ$NL4NbHz(XZb5b_Tg3!oVZ{jdU_4OQW(vEJPBTv`Wyviv`NxR&N(QcR?DZVWM#G22(^1o=LxAY}V+aWT}U!YFRy z+RZCM-2`S&BBFbJQTMhg^Jdy<8|bd5sXAufOfl;bj$r7z1{a2RsiiazZ|E5U?KyfR zWUF{v;U%B2gZC1KwH&pxKKYs157>zPO<8Y$X{H@Ahwnp5G1UDx;DoHv6cxU6H`T z;+9QWNR{`oj2-Z6>f9U>P;g0;naC+_=6CdDL#fO7*i6?JtdPMKqhOI?_Qne%LnEmU z4KE{v-rmn7$z~kY&a?>7As}s-@f=9weLEi(9-1T|5V1T*M*J50-Lu?pcD`-KmFGUr z8@I}Razwn(E>rk=GD{eKZ18^L5`sMUgp9+Zl2mi&ae-TG7B@_IzUDF86Lv4t-&WlC zjQ;^0g&{v)7Nd2HC`67cGleL2Js$}UJuFH-Flvf2{8{u%P0jR#BEal>eo2Z_ksqzk zv^;vt5?VqQSI~b)T+K#k8Q(>sIEBAu%s) zG!DEU=JM;oqM;eyPs4`s4g(4;hu^SoJx(q_fYvk{Diirc zyaj0iMKtPz+bIv@cQFS&J1#$-&Bw3Ltx|c-?BtD{OPd}joz}EYM*!@u$RwLn?Tz2f zS<#j)c&n~5Zra8BjfBYN=1ylialRTKzh)*!A6;2wdv)-PG^1j3F22tAiVwQ&uGd7^ z9kZ!%k}|SV)$eJPJB*q3U8s8DS2%*pV>9jAM(7KqW@QPJThe)XJm2W_SXWlcOs6YM z@hGlX1jxUx#QbcMVy6U5MwEZ|&WS9?vw1iawHKVI5MqQTQLiQukND^z{;S&_bC2tY z6cMjfl-l3+#*f+zW7v#Ov{qndd{l0`#e18RALHpqnhO5{@UmlHL?}}_wk40$^=UTN zwve~%fQ65)w3K=jo>YCJ%9i9w#X?T88{OPF$~nj?R}epyN;(`2f8(CA&POu1Ty;^_X% z_3A+QS04@Xs+r-3u3CQB(I<5-9+#2L)8}U@Wty9q}g-NXOd5kMfS8a>nAMw>c>U#hOh$u{Xtyk^G$T 
z>dSrY;`%2-+C{q~O6Xyph8iz+3UaG}_5C6QF-2^8gm~2jw$hVKdJGVc)&jUgOtya^iXQoE(f%$FL z=NX&U%)P>o#+pVtYt`6MQFr=2nfTBg;kkSet`)p+_oD2cPWK1+B!0GphjqQg2fQ^o zX-C};k>2vG4w~U`b}IHGroLL0B))hbhjejyWgzt8j@8{p_cdJSqO@x2Q1TJu$~crD z4Lw6Lh9vs6l;;sQP#i2JNe~fuyFH!?`{TEas@BI`yl^D|%{tvL*IUG>c8Bq~3g=ze zFV|8>9k@Ywd{zc7c1{*^2vzQ|cciqBf?I(qm27PZIlPyRG-Bq|c zzw$7jvXk%733G)c7jaF1E3>OKH_e<1{Jpz!FRsVAr7nJa#GGm>0!@s^@lur0k_UzX zK*fjsT+iKX@Bw*yT9P_coHxqupS2YtFr<%#1N}t(JwB9lZuf8%- zu6-&=u4_T|&Ir|0LlYOhJvrcSlKA4PRVivB_Jdb>(x3QBZq8Y(!X-8W%2wRDJP)vT ziYeyB78QhS$TJjJtjzCrIot50Az)PCK4DT$XknIRYD>F+j)X&Z{Q)4{p=&ZJBLaP6FXVCuW? zDRuYkWA+N7LY4qiAD$SeXK$Fb3f_pyz#jL*YF}k1;2f|w@p_ZgBJMrs?0fw=0TX%c zI-^zR0CGMw0sL1P08WoIP`j zZIO}ZjHGd-2fbPJ1X61c_j3WK-STT8{ukuu%T#x+VHqE0Yo^w!HH>iF!JP3niet7s zLS|dSG15%sHsZg(-_&Fn-n%?17bZ`x{WzS|hWuM6Lr8<4C>BXaIdx7)V?mPVC;?$%9^Um zOA{@maEvh1qB!9J6L*J{C9xrbXccl)d6OAhOOcpRY(=bLl}a9rkUzL{jW$stdU zithOV?{1Thic%+hyC=B6y%m4>SvaUd171Sto=ppXqmYeRMg3f8ibClXF-fH?GPw7z zm3hYe(1*LdeJ!ZrbC!nC`Yk5lhiz{HZ-@ogLfz@v6;a2U*Sggas z#VUM$S37S!K#0Phbul?Wys-E{AA##as70I8I^a^<(sP-*rB;(*=(SQmWw!_3!->hz zz}uULbgR4g^d@&mmdr;Z2l?LL(rt~BALMS?#doAFE)9QCAP+a;ZXaG>^8*` z(hDb|`*s*ZSC+6>r?E&cap1EKZS#zV`k(yYCMqxZpLl%sK@VI_k1LD92c{9LQzf_L$4PN_c<% z`2)8)H7k}C#A_vlYY+CGC0t_}m|D6>G)pdf|KYX;qH9c9K$y*wsbX^u>#A48U$uGl zTQJ{zMQyngd!7(I^xE}qn|+M$qnGMa%eBM^*Bbi)wEX&){Wz8Glvm)yJZ=mpdr;zz zS?^co4K$CVvsDo-Rb*7p2MpCb?qTWCh}`L};b2RCqSffG<=j8sqzUYAFanF+fp_i* zyuWx=7=44e3n>Y+KpC&xUo7!e-B61+E2FYQiil+^euGh~-&5C|b|-{qolf z(Vs`@0SwF!{tFMY`B>|XY;QGk@6GKbCcY2V@2w>DyV#GA{n3HY_^Vy;R%wkw-YlkG z&S)1|QO)^Cg2l*54reuY&jBB_`@6#D9K~`j8Tx3*+T1Z0F2$OfF8=TYRmx9BP2)AD zP%zeFKrZ=3`Ntxc-;G&%axqz{m65U%Hk;TfpN|L3sUN66D683Y?6=?+wKpF~Q3=9A z-E?>$x_7(Gz@raAMt@MK;hWNV2}-J5zEo;wDR=I1f!c6NULSjrcqgZL!AmF;ui1tT z_zG~EpL$^#MEEtVk@_~Ld=FR&HRD3nygJU?clQ>Csr+rVxkiJn{9-}(MH}M)M73o^ zvP+*Ffduh~A|0>X@3@;-%(GQuSgkbF;LfqMGA>lQvKu|Pj%l@ep}2)cEug#q@uRzY zPv2M@jhZZ?``njaa-eYkfE}ByMp)a#aX0G)UI!uB`_#Mh`XaNa@B>d;H=StniVd!} znT?>%KJ0l-B_QBtBy_!z^OEuqGP@aDpMv31Gnm-HFFGSEy0)7E@Rmc?wB 
zrBLho6jMei8E%K7ELl22|2X`o4;AyKO+UT3CdgV<I0XZ%gR+Gm5T1k{uS|p2v}EbKg|GcZgh4FE#M3a+{(9*g^;KMU*U~LRi?Q&vw#BN^D(x?kwo6p#B4gL= z6T&IQf!fll`J=Gu3Bp|z^D1e!G=IV$lDuckih%a8xqJU+8dqqYxgp?dZlXF`Y ziRk0v%7rDPW>egTDZ{9~?{i&Eiv@b<9vI_BA`PReF zXs^rd?1UZF7QU!4>H{p2NhD&4BWRC4QY(ubJj5~ntkkVS)Baf$4TVbB^)0d5f=k12Qvo?3n?=z69W?~D;GOADJLsC12;PtTlm-w zxa;96)^K+S*%??^*_c2vE+z(MW;SkSHc}3D76vX>Caxb+m%^8=;dqF+SQwa@IM_g0 zP9_ExCUzD!4pMdwZU#;!R+eyP8aRsZ(Ia^D@FzBK0@t`0xVczanOI2K+1VL5Sh=~F zIY`-_VkavP?Xi;aVsnVpoCn~i~)Gdw8tJP=Oy z6pjvH=V1H&dYz!njHrA?K(l8CQMz#Z25I{{47~wmGP>Lt#DZoa0@Y}Qr|`U9`z|k+ zv~8^&^mRX7PF}p>vyr2nU-4*sdk=j%UhZwoTK}8bWuqb17JLw{8TN8eRa1dOz>Kl(WS1% zw@@P{3LC%PcQ;*@woWKs(o;jmiCIBy$FE>Llx~amcvGbuse^D%OG!B=7F)LM+{U<> zSF_($$~SSv;@Q!4LcnYu5t@hdPl^`R|+UNFkZ7&v%h<> zmSrONX&7qkIc_CmQj#o>Rq1C*HcSttln#0A(5d(a@2;U zIlG+!ajm{u8iCg}NCr2_ud*Z+vpr>hF|irU!L^vYNlJNv{v}&BT^Q@4an3t*2Q{25 zQ}!DuO3hXW6P1G1RkIwx^*Tk4>mg6C=gZViCQSKSB}J|fvIA@VF+)8u7?AcUs6& z!XeO#apCINlIxbeO{y2_#Hw{8{!UjxcM+~3$sii-#R8YmG-H28V6Tp%XOMg?h)4@L zeP`LdI`uoFjXAh5it-od$g;!mtEJ|2&1LblbaY_js=JOHk~p@N`ThU#^^U=vbY1uG z#F;pgWMbR4ZQHi({9@bY#C9^VZQHh;yt%LI|2*~FU)~R0UDdU#diU<`eNL~l)@r8^ zlW{ROLNYx|N^?*YF)mq)8OYIH=Kk_74Kz|!eerf3s>|nQ)iyJ`2v#BoJ9L+l6xG}ntI^jJfV(UZ^0To(}*esFK;|os&c)$ zs78-f87=~B-$PZ9IF*=04d*797vgFA?G-2|Vz3>cS%W;DEZXQoU!{W%P%vAeeQDSp z8relwWYYbh#aG@{&v@Yk;R*MY#jVk}3ct zDjg4*trpbMRhQ?y!Ky}1-#VxfG@0>;-Hnmj)+DOo&JFV8U|uK4+*#X73s6PZnlh1w zh*RSw6=ic9`?~FRXO5}>;BN_?#&80^n*$Xq&uzt71hKIvST@HM<%oFJM#xV z-pq2)>uXvM^Mwh-QOu1=mh`X8G&W=Kct5OT&*d-as*!O?|zz~D1r}*BMeJcdz zSszB`7f@sq(#Q^K6cb=!O^6u1VXjbbg##ZQI?UQrJ$?ps>bPM@pPPd!z{2opaRlVf7 z)LK|L&ArH`INh!VgqyW;W4U~pQQM||hQQliU+I>VXFNlN-1j*;Rr4opReADQxoKp6W2F}p57gu$e>i4^c-nfnbqaW}%(-ITK2jQL zMJ;KzU&SZ^!##*q7hqcO8SkRH(vgdix{Q&ggUjP!9&ObC*tZKp*g@DnZuEp93oyz> z4zPkz<;Mr9$~3Wozv_Dd{TqIs)CZZ3wikgeC)t$|mrD*D4nv9_ z*i@Koo?A@i^{B{G@KuA2VQm+z3)wlaCvV1ZWZWIH$yyw?@{ji-_Zk=V8D}`mIab{& z(c?0*8>vVD(n$`>P}z$~iy?=dyFM(YuP>Km@M%2MUpc3{EwXyjf3%m`Il{UsBP1)N 
z8|Zb;*fEEqYv+moB5jN-WJy-Fm_7XsG;75%Em&Iwfxy`gPuPa5%A8)59^`ieLDw13 z#2uBI)GoOX6ID0suKTf92ktd#zeh!L>~83+&&3-HSeW5~1tUF1xPRd-gmZ3~b#th# zEmJn}c*rSE5t7yhC)c>D4b^=B5m#L2Vh(lQI@YL+g>Rlo>4ly^+N6VJ$)J&t4IOpV zB*yNdG;h7tP|zEpXegJ@aMH{Ry(z|Wt3D)gyV-qBSO_H;m8b@gRl3BBW#cLo`BLU_JvVt!4?PmBRSD$+QId7lbISnLrL%U zFTs)yp)i6^fSffdH*PJ8gQPldZUb2c-z-Gr*lfN#uW|0r{MD-`P=tY&0?Podyg-zV zzH?u<|CGjX{ZVOzdC*>Wl;mPz{zv{nWwnqBP}fcgZN5-9TuQpn^OKymz(>ClQS6~+ zyS#bMo^@J;?uRMAjmOS;Pz2S&nO`btM3B{66?yvuqF-E|lDeVcPLyRemA~+oX*oC3 z7>!2uT|T?XZ3479>MO=*HB)NZI|j!{e!mFdp3_Uazv7QYx! zm(S}nsk=9o=mB?E!i3~wwV>~lnAZSJGN5t1xOP3$UZ?h}5-Ro$)fbZ~&@}~~+-46= zmrhl4>QbFA&?A76;lJjKm1=7?n5+oiORDFNKjs+suffqg>=GKJF_~O8 zz|CeL#GCwqrJ;+z;GKpKj9nTr$3%#iJ$LNfOxz9yc7D8f@O`2}Dnrh2V!-9F#$d{A z0t4#$_wnJkuDA9E-3|&CHeutCgM9JfZ~_ECt)C0&0i*Hs+aa{@88I1Mtmd6DBJMwM z-+fCO>(I1o#DPQZKW@z(TLr=TiUF*mI%ZmB0PY*Fy5A27J*8*`a5m5Fc_sPSJ@-q1 z2^~dm?z!1bfY$)sTP8w0#gQ|bRxftFK3Wo(wY5}9&acJX+@Apk#n+~69cjxS;S(+; zRyS3ZfK6v0l+}t90x~001+j2!CjHaiB4|r|7z!c1X82-OM&y_&!MH~>MoBxh^wd^D z6Vun9D0mOG*9SSMp!}M$px|MV4?TZ(9(dDG32_+>2CsU8bE6jG5ISVRwYIDg66vcP zp7MEJLfEC4gqeI=_J{#39~Z?Lnk?IwEiWJ8fac2|-6cO@e2sT!v%mb^4m~5h3UAUk z59r%Ey2}|R?W4>_AF&)n+;SeRg=XH{T3VSU8*AXbqdNZG37{bH9kA<0XlBh)b?EN9~9{f%;#ude3vqoHS14(P# zhWpg@<4%ZYr5Nc$H)=GURD)EQ&*##g1atyLdi|dYMr<#IUS0Q*2NT1MKbOQtV~+;) zHG;5KXsrJ>AkETJek;+{sss%V4e)<)@KKBhg=%g82Ew!0%ZgZM3p8E8ugnjg1Y|#& zSzpPbnbW0f#tUWVY^`s8xXq0Ov{7qYuaqN1Mw!7_TryZ;)Rq(OfI052ia`%EYx*B` z4tsLd9$dI~dA&`%*+kfD*0%>oY2K@9Ue<_tN*e`$+u)B7LGf1L7u1wZRYiBNhW0ha zRaeyO_BC}zJC{2y%3n=R-ph-E0T2UXB1VSW>Dap~XqtqYXkVOIHv>v!8c| zNgFjxg*U9i?3WS_NB%Y?R<7deU)8R@<5?kjI8LH+%MK*j<9Y%oGkazmX~xLkoUN?p zZ&?u6KK@F(Fyv)yu6&Bu%y7|}TO4YzZL9SeY0TgqXdlsWmq!q)#C(e{0{~P8b4H?6 z#>+kpSH@NeRk=_MscvU&)!vzF_q+xjXY4awMF*QV!9@`yVJ*PG-UM>-%1nu8b=Yb~ zshE>OHjh89dX&T1ppzTHSNP?d!tiS0mNLgC4Y7my-lz&&nA<&1E`2UHg>0i#=}IvZvIrLA}ZvP3F`Nct!vJDcIh;Q&eFcj zu(UhI!Mh9Km&A~SKqd8xbK-{FY_YMmv=i>Nu$O~JNB?>OI&2U)%7a+$2Z{yW_A>T6 zL-M0sXs)UkNq$7(t+gXL)Lph+p8fKlsJ@V*H0R38g*mf%ll)LnABITH 
zxb+_AwODofyPeQ862)a_*M^fs&O1f?*hGrT#%K^Z373qH4Qr-0u6;bH;j2{TqJio# zvlSM+E1oX}lM`rIM`a$B1;MYAtCz9$ytHSRy7ruM+h(WoR$%=?+))ADFOl7e~-UySMbP8 zg*^R3Mx`}iV!gS+eE$0UFET3NUpDHG5@bdV^2=!6c1|zH=TF}YZMkv#%XUsEd}2ld zHCqHhtec~dD>&Lqu)U!xA1*EdQAgE>Aqo@ab0x9kEiwFwo0n~-$aqXbq99%%ErtUv z2v-d7JR}!Afm@t1U#-xVIlpe=W~%NZp^R?s`qgx?!h13>Q>|lkfX1L`E|PwTZw{PF zjh-?@Q)Zg_<{wTdDnpRJU{Y%C`P-we7m=Z6WJly>H{OCpP1oT+k(9596I3kd(#n}+ zhqi$Lf6jnKy?+qWRsD{Bh3q_DCEF6z)JFYw2mpSPlHj#$yeD(lo27En&Woy|L~{au z1U9tj$we{_uTpeH28cOpcUM+Xw%lrS5)xpHy1%k5_-vLA>cjQ$LVPZn;a}Txit?DR z=QZPS25zZ{f^Hs%c8582z_Rt?oPxRpOpA!}_Xx2{1j>jWzXmW7FmXVQ(tR^csP_ov z$_RmrI0&{W^k%Wal56F$5mic}A-5VC3G(wYQ!ZnhN-n20046&d9!;F*9e8lRf_dcZ z{aAHkyTiV2qYL2G_%KnvufhO0q?yyTo6P#C)T$dns7rNPuU)* znSx}y_y1MtWDn-e-4sozI1&ObVNVTfw->sT7Y#Pr5huw|L(N7X4xPjeM(d0;RYwFx zNmEW6!0!y2#F*1|$-903zTX~ge;9!#6tP#L4pX^>nuu)eXkv`;j1ij0)|y7>{Cw>d z`(AQGK=(X&c`rX&Fxaz;?bU=Q*#ZeK(PKHB+88z7HOIdPh5!xpTxcB&Hw{weEA%>o z>-3&>XwQw!m!pnDH|l^oUDOkp%qn1#SsXA4IDs%ATU9}<1FddDN=h^gXS$YllrPQ8 z8<3p>4aW}Nk=|-=Q+6l%ffAwlb4Iwc(t1$N&N!Rt!e*>wsNFZ;&Cg=ceYZv+jvTRs z=Ty58Qcmn0WVe76@~{ZhrDl^n49q zz^1!}QFN5sFKVcO!!NdRaXKj<`*+%Pub*FEVD$&iUaBb}1DsJcx9~mS)xe#roOZpxb>i}L(NkmsJic$Q(YhD%q=m^F(J0Pz-52m z)^09RNXgKz$vMFFs=r=~S#^;lLAFe{r432u!L&X$iU}Sg+x2)Ewf4MwC!U^R>^uCqQ8&;HZ3CwGS4$tP5HIKRAQ-(+$wZd)h!aHl7k?6sb$ zo=~dMhyJ*zo0LV5t&{p_U9iqYIG`xQK}Jj|Ve9^xK!VK}+!}#EyjIvR4U0sIITd&j z{Y*H27HkY5&{Q@?7{V0u+!~cr1gu!jvW)evm-^niuTL$hMSC6z{Sd84%sJ|B7IO9! 
z(iH-s!)MXrAvYqUGJ*u!(K=OXeslF8uy(0}&!2NOsg?_{5=WM@F5N*||4r>%7Mm@UCsvI0CznL)b)O%E=;h zHNW}03Gt8xk@ao-{HVWzEIh>;{cRZOfYp}%YeEtDR`Bx3pcJ(=kdS`$Py^I24J(aL zM8!(E7Nv(LOW5V1y?&6N5iqJhmP!zRD6bs|UJMHKGQi85bgj^=#Rqyx75jrfWAEsz zlgFTvq5kHgI;lo*OfD<<4u%!p=uvamdf~QGZ8P)48jYITvsS909sQIsaLjyA{#2&b zs`RPf!2wfvlVomm?Iliax2eVsN!y(AS3QAu*l5a$Ey$u(r_DU2#PTr;1(yM7?5G$0o+7G)z-s zhHT2fh?KQ(*_?MMpaUgxcaV~JV{rp3a&jv~(D^<}r zak8)LA)4Co0A`5&2GVvpaC~3O zEu+RG{**2?|3Jozrpql8iEAlfU220qTQu^ZJMFKr%VmuJ^Lrl4KrZtWjN?#aU)q=e z`mmsiQ+)X7J>(iX6XJ*($zjn{2pMD=r5S<=WCg>KzkE9gD1a9}+RT9O`~+2f%ASC( z8qbDdY0;5GBEn73tlJ`EZP8?L$R*#Rew5s;zW(h+t@W>!RafxnLiF*1xgD*R z8{yE*Ss2%&kx!GD#mfpVcu5{u1CW@-fqXgEU=KBZKud&U-t8Az2JrOROFi9EkHMx}?KyQ?%P8+TaMixvQ`PmH`gtE)7D~=V~g5W;%i(4>_MG z3m?x(z>I_*#nCnVz8}qjGY?@8b@HhsAtJ!u9k3bKdq;eUNcWO&wi!PG2V$KJCHF ztYc!R{Y*96)w(JgJoTJktl*yQOrlyoR;!I5qFQ9D3TkNyLH$F!;%Dr_`}v8gYJmvl zJE=;uZ2MyJ%49Sot0^UM177vz%<|Z%+LuNrEH1HAoaEoQ9M9S8KzR3a%ZQS~l(D8KAbtY^u%b$slEX1XNFQaYYEL~lX;#xJCgJKNTtj2dA z{m9NvsxFjz$+5s2I!{iS6#6z2mnJ<&M&=(~+lIg^&Ww@U(op+a%GNtJ4dZMQj|uz6 zs34Nr2FLKx?kb#|wrx8kh3arffWhBk5L^Ef(IWj|{fBz-(8 z#dV3LsPloNYM-yyu`Z>z38FC>Jc1>)l5~vfymR3>agN`KhzO9bedsd9@!hnAsQ}f@ zi9U&2b5q6{Cqu;y$%DR}dpu3<`oo(<{`Gm^;wq}gjD$4k^h}<(dJkl1MucvEme@Kn z(kQt5?bB>uZ{qu(9Jo~|tBlZ-)Yi#d0MS{7akb^y)2g~4hT)b(Ba39JKM8e2$#vpX zTev!2&R=t~S3b|5Y#(lJH@Noi_JG$E{A_@%j1_9$D+l^8zBMdceIBbsSI}L{Ya-doQ`BvLAckocPprR z=#B)xgZaB9S)r*STDWFXNwC;7nWYRytO`vl>L+z`#C6yZf2?3f7$}*trRVgS)K((fCK#Qx9~NrVDFrwO-2eVDLsQ z+Tx1E@%{O%NFb2)HzC=Z!*H{|(_foIp0AALdAh9>%4R*0^`EpirImXw>=Xs}FNy_)?w;%O2yY@b;dPw9`#3aVv$8j8 zr`Gn0f|pqx6chjVw9($1IIPV5olzP7TuCSF8h>oUeRD8sDz}57{GM#eplm`iFbK8= zrJ4V?2rlP$k1eJ6kJjr-eV1x5XI9@yxcpBN-+})Y1Mpo9*Z;rBVd;hYW_v~Hy7fQZ zVE?%(lhTMih*N~jiVRECEOD^>o$J!_DnkTnre%3^jGT||{WnbOHio*BG+814H+>v? 
zF4(rU<&=8KBYb1m|EaVgAV(BSw(pYV@!;GAthV=`M4iE!lfR2V7+tLIiutDCQe6Vs zCO2yOJit6^PB{o|VUAK#5ueH9Pvt+%*i4LNPW_m6lf-R8`u3yr;H2(c@L+G1gZihG z5&yJp^p;(MwIkhUg6ERVY-6tpA=(_X`x@QRe#_|zEUa({I3Q*^HNS6%<+eU%`07Ga zolcjWlJlUSW6Q`dRdP%a%cV=(zNPYh+#CF;USiyuZt#RAuwBpMDSc7dDU_W0WZJeZ zcH&zs|J&UUUVPUFwOyh^^+1KLymS=h1vs$oAz#x_d(z{U_~)Av|HJ`Ev|k1OldkWM z56hh9Ev9SXC4i;X_-#zehW`j89qVF=ouqZIe`3EqT4&?NI0+`rYY1rwQ>DgKTPXK!gN(K-o4(m6On1O|! z9gm)gk>)#sg_RYLfrXidj)|R#o;^iA9f&!lAOnaDz|2hl-~Kii+}~ivP~6oA%7+!- zNISeQnjW!YQzPYaJUQm_0M9s+sTEavuL46&R~MqYh>5a_GrXq|Pi(-BlIQ`Vs)&e? zh;jsBNKQ5A`%$CbCWPt3&Hd-~PInq3*5|+R}?^sJ~ukrB~2!6yDM za5YY1x9QLa8$)3Xkn*&HFhI&dKH;-#oKAVFvb_{JaKieXSpFQ|PnIMh`%duUyzj_K(K@F|AUBto#{(_^F}1D4IA2qfHbN{ zIr`*r_#n3zzIQ#U>LIzP(=v;&n`7zSAoV(=rHknDL^4#y+IbxnlxRl4ue$?3z|+VQ z1~iHb_$bc6FO8(TlP+1%^=Bw@WOb8Qi?uDx5Zt$GB(?=Ko{vNVmeahz)~6{TFE|op zyN}>tN|5bXq5R+hB)UM}0;D=jlyGcqKbB!h03Gx-0*iyWNl;(dDX4tMz1w|o<1jG4 zgsOu;H_h?=kbtw8$02HcIZUjq*39!<)7?!+mqBgxL0rufISO2(!SsJM?26_pLH|s( zzz1SQA()fb8;G*N$3Iy2quj7&=*?H=p@rqSn1Sg;2rtD0yj)Q6=@yV8BTi`@?upNs@U3<^rR9$t$ zzIZx6Y@*RQB01a!M8`j04~Lq+ZUpIzxPqOStx09!9A`^pqvYfyJ z4Q{ft9@F=7P2J9p3#W7Rf^f)uBosA}0OtxM9*umd+MCUs{U)Q?cue}8)){jc{bv_^g_Tj}$QGlj6 zdrrlkOBguPepF8_zH@tyk67~Y#69N%D)}h7cdVtx(GoctjM=ZmZH&P~S$CKPN0zww z7Bu#k`NK%>Z&-6%?Fv$crr930Yjp@w%d3~LZqoYR8Duk0ye+yi6ip`3CFKoS8mMLX zV?RklxpCx>rTs{KrlmiG(=h+{7NFG^8sWLZ0YD+BIGA5Y>{VT7FAibA>G^^Ia%ST_ zE#4M>sbc&~sD}$b8c!|l*pr{fQk|!AJY|TcH^N)C!$GoJT!8bZrS$N9Ytw@{4EkvL zINP-|Sm@nakzU+NOw`D7Rk0(@@M;cJTzW4531{c#lbdoO7b2H25|{RS5FjK`M;rP6 z1X$6uTbx0Hoe%<6J@qfPJv1wdt6-AGE=6*k4!3@cuR^FzX(R9hmmJ+PM zxz0FF)Kbc>U@_IqaUDO0MC8zX8Z%+-jH^gi zze6Bv__>plB3WIT4j8>s__1TM^m?B3Va`?5qR>%fNR0+MxoUO>kt366IWRnrZFR{D5(o}=Yd|qB}NKCEm_2A}* zRGGE5^_hjH=&Hk^B$Q{n+J0Si50GLcL7gs^>f1p5GJK;nPxit1C>~wkuPK|65T^!c z)GK<)9_w@keQ=!WP24|vEZWfUlM?M5xZ4`7Zq)QMin;Nv^8nz%(I-wavrKY1JW2Ta zELHo^=~xjE)V}3NbOfByF!BaOgoQ$u^b-`a9!RX3z}UoVafSg@P!8m!P*FV}F!pUz z5>Sy}=oV?R0scl-87@ExL2clF6xI&(uepe`vx>nx<&^cSwD3~m*z@XC87=RO>hdw} 
zW=jhy@|ozI@BqCE98*ia`TC_sXQT;OcS3bqa(EYVg(Y!zlYh|dfs9A%1AJccb zWO}HN?KB3~n(D!S&*k|Fz=3O**q7LTJlD{Bq8Kdsn6Gg_dHprb`=TB%7k0+mlMKh}-&cY12_R%5;OU;>XfJ;;8 zO;s~BFa{!VTWsvrk7E$od~RnUK?Rh z-)`J1wd?X+6*iA}+|;=DFi56r6)>n79hSe2v`p(UzlPykg z`7lSd1d|7f%#A+981)0#?!1_=Q&mPBZNI*S#mw2l7MCOZV|A*`(PK9_CX5rJYu#_a zmR-k6l^-Zb)v;zNIfM$OepZ1~ADrzs=fnZO}bb-k|x1 z`T)c>JO+Dwi8S_ik54H;X^o7lTBhucUGyCq-IN2cut0Ak1;@QapC`@u&yq~=6q(cS-607zp zFDz$ejj%SYw;Oh$T!#8%#)OqFhkRlrwX*yACdWo_CVLHtF0 zM3HSy5GS$%TXPtmtjF79+BU_XC)%@HHOY$qP`FD*%v=5FM}iqId8K-f0!mxhNW>{y z{=hD{2oLC4CMP|X5oh;{3gKFsH`8hW;ia`f;p2@S_*xZIT}O(NM(x1=nzb&aLT(nC zE>S*F{g6VLXf9iNY(S04?Qqc-GOA@8VrWt*kpw*W4m-+MIaCtz z|8`L&YDc|^1v>!&+0uwLx(ikMc@a*xBv6s&pNJJg{6SvF)aE1K(F!ef=X+9wb7zcb zqTTj~sn*jpEak#05q{u)5IbqOP%YFPtGRJLbP#>{ATC44A#kI@JprI5TuYg;{qCAG|) zNvHkC%wIHpEOp;HQ3FPM3G76D)iO~tcMgxc&)wtT{>i2M7#WeDA5$RAT@JM!Yf)celVt@>PMl zfCBhB6Prpg9{!aSLyc9EWkQG=HC%e%7#30dum^!I?aq(dFGLw;)`vSh({4K$yv zjQv|8{-|o)H__PBgdgw$t$!eh;f-*G@5dBD-8YTAU+r&y*Q;|))`HGqu+KL9Qfi#| z81=P40*VNsp0r~psPV{`@zTzlQ$M@-s(PzP0o`w;8;a-YOi{ThZjMJ*6_d;81`R}l zu3QiGeMSI$T;t#FEQxSuz-OU_rNn1Q5>eG}^g^6fzfM>RCt-AY5+L%5nY!evX%&wo zh%gasup-v%8JOsiM8=^pa&<1UO!j}d$sXyecyZmix+*ko%st6twv-q&uvZDj6*l5-TqdsGf za?D|?M8YbeRs{I^o-i%65dWo}WXw%Z5#bKb>W?sW4z~PFHFs`HT{b#5N$8Zu@my3@)Qa`*9IyPIDXw-RNqn`0xbi}*k z7&tMzGv&T-AaK0%3z~W+k7ZxIgV5QR7WZb&GW~QCOMRSa_VOK)%86S6bWgO;-;o0W z+ZO82oJ{5FIPsY@+JxeqAeW25qgy_A+83$ltUNBiCzfY(<-VX|xWG65@#tH1F_g+) zZz&QorQAb38GDeY9ZB&}QZu9KG@%SQoy9sBk5`dJem#BLRH2~JDLZ_uyIY8W&k=-z za-KaLkl%3QVdsLC$7!$t*k+CZ;|bf$lpYb4%4BODqcuI+so=Xj@TGSRy9=|I|A>(9 z6!&YV&uYioFeF3Ya;RW)R6#<$M%QlY7(C(Zx*^^tc!`ntXLUgnkEYf=!OC>{AcLYC zA0$V8bc8ZWNfqL5{pr(4{3)QZ&~?XhLu3Sz;98P69xYIxo*ghYqKvX$-nIU9rW@1s z?#1=t;r7sV`h0eBR4ziEDf^YOH2$a2)c^KAiGh{v`s|D#xlxAtX08740P!qF5j8YT zHfVVg(L54v5#del3qu}Q;QjD7s*pz`PiKI}=sn39xU6l(p<2ae&xvPsk3hFCA#xXn zaDUUrT1{54S~8$~V66swv8G&ZvsBvzv^S}unwR#O`8X_ot~>Sb;0`#QzF|f*Fr^LY z?I=J1>Z1%u;Qu{P%+*rCrK}^UP`-Z5P~&+%e*L$oD94`eO1}y~^^U 
zGM8;@WsPow>l9BZPp#foZ9DgKS3@n+CtWpTo7qe+t07yxC$4T>CRsU&>SV9`K?fH> z9iD)~fmUnnzWs;OIxZPyg3ct1RpI_-#1ZjUM0hq#G}&K&32>$bxoUVNaN#T1<9c>3 zL6W8JaX|fCzqf=A+`MOJJwm13z7UUhi6n$;Z9j?S9mxiP?pl!ElSrT~+me5};d}s| z-YGwV-P-5Ke>zVA4(Pvdlu;dE@IQD|mY75RmkI^uALrYktYKP;>oBG5U>T$9=SK8y z24-IKf`{^cakJ7U<8*2cV&;yfWQ~mC1%s_iV!;4GW;TH5UF4N`8L>JMu4kN}$WTw$ z)oKSEDL<^2>39Ts$9pCaaO%6ZgZa={4pl#;1avNHk|@}6X)_W?;j-|~ithBxLRXSU z`+c`8iVyOj!PDPUxdD?c{3HHbQLm${=}mHPWGvjY05{m*w-i00I;Tyu^k&>QeB%lHJVmZI>25jCwgDdlVyfc!E)=;U zsnyxrHN^jq1}*Ym4VuP38Zi#r=$L4TQdl#vKc>L$EOUaXcS0;pGrUeBLK#~NiaKZ%xfcI#xmv4+sp zIlgYa8^o5$z@J)-CxOvI6r43AYSjGYR-hb+(vZ(KblvT7&^0?!Z;_fS@_K+I*b^1I zx&3H?^4Tcm`pII_T)@&*eT8jwd^?0!lgwEUNBf=u$=-V^P2PJ`}Dy zy8x9!%yN?~<~NzIqXj^`sv5u({Rztv7&ovdCd;(nRgj_$aX3vdC<{q!nDU1u$mGhy zb{a~R(s9D83A-W-c|MFImbCHE+1t+j(YJGn!^6+|t5>@B+8r&aOv)Hdq9cr)#%|tb zZa?W@{SQ?!g|v4}J1>vmHQ)LLj_IELrP#;7#v0nsuzO7NFsUIX954XwY}3z}EuvQ7 zj$vr?a=~3}yVLY>CaB69z-YTmsAs~qd^SM_{yoD)O_7U`Dd!fZUM~W>@?ejtd1xok zSgOAN>=vsBKSY`}lCj+ck8rU-{VheOpTSd#JL(qpjjvP!s@l!gqQ`quo6r;Ss^m(L ztxq&Zd26Q=_pSDfxEcUhl{{ObLWte}W8!h@=9|#e??vV__=bFkKVHtMOn@So*OCw@ zsxLcGoySOh4K)!qO=K{q6uyJedl&nJ%8Dx{Ccm^|RaH6@)wNSldN8#J_nVvvRRLHC=BBS zal=3;b-Q+-dRr=(XyVeM8Us@m103{6{p>ax!iMwBUNvtfl55X|igi=EzGV%T><<|O z(|U%A<4P^v9yP#81>{mek+N-!Lk9{-2MQH1H;*oUyNY5mzH;zp9TY7S41 zw`~lE)607MFIMYJ#9y)wa1gGw91O!ci*XogjDmm8wN0eWdo0tkEfp$MXKkX!RuIi; z8A?r{8piwB#F$y{C@QPw2Q`3P(0R`kPxY1rfK7Y#WUc@yBphAuZZ|ubQp$xHX;x*k z^vW~~LXiX-OkpI{J)Hz_pQ84OD5ASm+=JpDR)U-BP{w#=+UK#{gu~n0hmGYyY z_gX(qPXi&p(6d~}Xy+!=YkIBuO6lZCt7_I?v}%)yoM{Ci9O@&NjG5!arULw+(OS*r zi|K8<4B*4JGdn%%JU_w81dYM7-(*YFz_JzFi8p{7mLSmh8(2*iE6JKT9d>6L+}}e} zL$0HUm)>|H?2+Z1dtd}BI7t4}+lhDtIiCj&gO0VZtfJf9WL#Y_xS!Fk!UsOF>)s8X zaQpg3`}pkjB&mPJonsy}_oX#PK~rUML~@ z7i<9O62h6{rXd@~p-X~38M-&Efp+$?2@9Ts3AOUaav*XEk4hbm9<6V;#$WezGE;pg zydGZJIGcihQd5E<4@IwquIF$4g1wkpB6AF`-%SFI27x{7=heEn!J%}m8}w$M{eiN5 z90MYm52ay)*9E9-3_GhtX7`Tb95-#ytTY)&23$@IUX(S4rtwvJAkj|IMn-#Ik$(Y` zLZU|ktIH$oV8z4yUV4s;9Y|390(Rs<1pasMFc=1UNP*&jg$GQiO53i{!F0T-K6B+# 
zkca&Ng<(hD(8mtW6RrWmbP=mBTGq#0{Q&6Ar(E=lr{oWCK$Kah5#($YnEm#g-uii( zvpo}t5aiwUIkVxc7swZ5RqLF9t9#mVmWdW-6!dl+1rH>64i>-k&2y=?efoL9ApBi~}KtKA*e#Y~s|;^ZR^sZJyyHR(hkyD8nxe+V=$~4R1jiv=Zp0Mn$8g-9Rch2T>PmzA|AR9sQ%Um{oKBpxDEN4-a( zC*(hOg+7a61!~y_PYgPN|{1T*$ASfRfos1vI!D)T0S_~ zT9i%69?A4A+%@tFw=j;Hi?~%9+UqP=!%7 z%Abgm;jLTI$BsiNZe#qcZHL(DJm~e{kBDz0fYu}`+38lGE1YkKP~%JT=Hz2xTqVL> z`a)$q6r5)Kf*{Z>eidPj1@R%SlY*aR~`Otzd;0Q*B_nyS^7kHczkulGRo<8sfks2J&ylJL%>4+-_tYwk-l1a;58I{=z)ZzVwk_@P1H3t^cIl3h)rxZ^e|3>?lEL>w$UzCr%G}e~7gb{)Z>H;*T7PIurxc$5{$3QU@+^ zW7cn(m^y$aslYUgd3Z1uf#?b**Xf@?k<6afqDANnJve6tu`D|X8`SbFIX%?Jx_y@` zdLkNU;qm~v#pr1Y+yFQ!Y;d0?c7&c2^i-od{Jq74Z%vGd7iDVcZFE4!rZv4uzA`5~ zxxp2Q6-Lk!u+?p~ssOt&8D}Tia4Srrip#_j3J&0dKRi}T(d}OJdSOXyg*`yVTYHC5 z_uF)BulIQ2Z~lT#YR$cK(D%`veWY+yct*Mxp|#Vqb}R@7UDfOPCD`?Qn+STO%2oGg z-i6OKr2-6b;#vG8LCYUz7>8hS%LmN<)EQBCaR_EsjhW;u)y7OuvI`qg3;_U!#jJf6GD24<^*fGco>|6Dh zM3CWnQbMhm6%_atiaMJRqd~()LIxaJp7KTyZgXCvvuh$ULOjNgt*z;JUT$yqyANh^ zyZS4ij>KOss)x6uP_a5|KQsdJxhDtF+t>j}IPoH<8;J%k5HMzKZjFUf|Ef#}uu7?HXWFQZ6X5vdXJq{vqQYkExzFQl^Ib`sj0R>h<>;EhteE_UK?01xy!au5 z)QkD|kJ#VF#C%_^KxRZghRTbV{0jIpo<6utbB#=!Xx&ok!rw@Dw zYW@HvC<*+kgLzO9k#%;6y?Mk|6Z|e0W8>Q?SHN-!_z#>lmAYXBLYpIq6GG@soIw8h z9+dp6Z#G<)Z+b*s=8E;C)$H=dBF7pdvsyBfzUAOZWg3Qvq>bBO*e2qVN$p9iQI}|E zI6KD-rbk~;@=$-i1520%LDIWxRT18*q@}2Luj~XupTN1E z;;NiM&%vh&GF|^SL=(a6X6Ys($+V%twqd!Yu8G%I#G8&YyDbG41E3)(KJ;$MlRLE%WfnDvm zl?BsCZ{T>t05bDE6z@c@)9*=gBErk)*6Ch_uQ}Vhgq}MtnR0}cUQ0J>ZfY&K$VOKTz3J%r6&0yPBlUwQZLX} zV*9Hg35|J7+KADFHCadppxe82M4Kv*u9RZYRk@eUfE>FYMCMDJQFF%jEVraQ+b7%H zjwb$;dta#i_axE4CGKmO4=?HdJSD^+bBgq1MQ~kHch;rx+B@T(7L2x|Hv}#4`CD$l zye^ca7Zp^12AMXm+x6*yg*RAtse*oM^M`u+^i)C&co5f%+k+xGhrw+EnyY*7`A3p{z@*n=a{v%edc?J^fy=Pg&9b&Ft$FHGMId=UP z5De|aS{aT=Zf(gBM}f*=<5Z>mKcE``xpoU<*uJ$P5f^Q3HHQ`2{V`shgjW{%)Kiw4 zOXcWvhMCrUxfA8~25NIsPjxS*R^7*`M%oSd1?HSpR?tQ#RT_|7Eo;4wyjI_rf8Hw9 z6&E;s)nQK?E47Mq5{+JdOB}bKnBN%NrJrL~JJ(FH#%?X&oz1&6uSC7K*L?Vv?PV3Wc(evghW|uHvNq^ z5EvYrGID_HCMoIC$+%Zn7_g;UQW-?fz`&!5AqTTiCq^Yq^XTa<8x+?=9hxp_Wh=^WRQ~A=F_y2Y=A-; 
zO5RqH=J6Eu<%87z#e?ZAO0IBCQQDV2Oyho@MFJ7PrF-+KOC(G}`Q^zN3<8zLB&g6*TF#q6qR6lVdlCM-~d~%lOZGa2I27Zcgt)6Q=9D_VX~u z^<9c#3VBelaLqdCH+GeLe<|k!?rMuI7bgh`@8p$v3VLy7B@$$Gnt`$hxmie`j zlz1+v5E1+|)!4HDU50^@ND@s}QvN?-)IaR683u{Bl2XCYfci_I_CVxKP)*=gBm^aZ zi~YZElFB~=gH?9q_7k;3yslCBp)Gh&VQ3D?a17Dfehg65+bSIoe}d(nqYr5AJc&?E zX{VZMi1MV}OS_K8pdNzT4xu-cXfZPT%eSk_^J?&32zR4UQBtIfTenj@+$ck?+bLWI zKU5DfrV!l>XKs^001xyk!0nLV5#VCRFK&Y66X+pgfuIL3BnkH72zXj;=g;O4K#u9@(A7ZRSX@qn4Z& zD`iI!t&9(nS&=8Vi!V@3USlP__faVqWoc;*KTYm#|yy4b^5F zN2Gfny^Hd^4}}fVSgg7Vyntk;SH+Eo{IPs1T+g$I<*Xkp(Q{mS32LJ>ZJ7u088308 zdEsc-$uBg3_3$H)qQzdqxA%iH7*4UTG3u*sFxJ-V?Hg~)0Y1UWV_HQYc%Bj8%v(e} z>3ib$hDK&x3p7VW3*0rrV28FjJ!pn1-CEq22AHE{3nW7I$BJ<34Pxt8riPNYXqDJ= zxsa(U?u^DYX1}GuWeP+tWuDF2^-ZnWJRp$9)DS=bqKIiJyCMCNfA^9z#n#r|j>^d4 z9E~I&S=_1@78vhybBtk4f<>*04DZv6)af(z4G!}$^vLEJ`HLL~B=o@&*zb3|96b;M zOgVyZU+)B==xVR?tjTEx>GzP}tqlU$#ZTu$j3@d-1azv#-yk&yK+o=c);ZBou64pi zE{ReATkKa2PL~YkQ)?`Svk4}$Bov?|m1=1;&s5ico_R>5eU}*n?+0v@ zTO_}=aQ?BJ&#s(sulYt@np7shaOq5*dT$01%RyR1@CeYJ)FM>{dvP$Dj{@gu9ZMumI{5RmcE;CmwS zg2{a51KU|b*n)qi8w0CzAgBOb|0h1tUWp=WMEI1wJTw8HKlTQVaAQmdvD59dpw%b( zPGHQ_D&|ukU;p+IqMQ#$rKohAX@3f_bmdyGL>m@N5-r6?*U)tNcvl?qaiht2;a~e~ z|MtGM18y>Y`DimEh}2N{Yv3XpX_yH!!toxJb!Hs^M@TQ{`V9g|4V$PhM)PNFZ@>j< zga7q1rsChKJ0t=B65Wx+*^cY*em&osi`RXmr$?|avB!v)O{b1Y;RBdp z#ylrm>z<+W)Igv^1bEfE{Zuj*?Tzi*R%{j2y~3SKu>_w${vf#E!Uu#z+&HE6M-R%h z_RxGAWxtze#*L>c7!5ZjctM8eThPedCj|@j)^kgZ-aPpU&EP?mc|v`$k26RrcmAMJ zPFmoG8kTxOdv?GbzLVbPdG-7202e^9VBCoS5q9gR8dK)UH|wSqT_O>~^vVy0`d^K+ z?Av#mU+@8n(}$FzY}b<4?XHHf;93k)*iz^>lDO!OCCr;0*25)KTG^upWUh{T%)ty3;VHr=$(OlcCvxyYLA66e{?)o3aS=TqbA81Sc8}v3EKd;6H z;OnMpPge*#pGLBD0N|b@2KfNr(zo3UryH9G6=SYz;$IF{9Tq^lq!Ur^pN?#kb1(8Y z31E9AOp4U`>d4~Y_J`_CP@T;9# z>CK?~i1rrt7iM`0slHx;eRrhdG1r_xkC+zm(H^_a<^vqBc8}9 zqfIFMXz2%iZ6mj)`8#GO{8vfhYF8^_AM|7B0{8nVckuAEe;n8%&Va!~_`O@@_(szo0suQFP zJrlCr-wBHrKY)wU)C2^M-tXN4V*!6g{w($yt_w`3k4+j%4-s}5yR<KG7vX$MIu|k0GcRGmWU!UHNGDgrC}ko^vxQW z=+rWWB3giSupT1Z($Utmr;ChY-nAqeWcT#^mL#c 
z?$c``po9$(AgZ>|H-sGY2s8HuFZ*go+1%a^u7p-@>2tk?7MT4P@PEb(I{n==y8a>< z&K#oq-!=iCMlH(xCkd!GkT4Qpz@(u+0bDxjp7vZ&hxX-D?jWSi#`sxlh`gU2laXUC z*#VyCKXR||;@wn*m^&~Wg*Mk0@_V|Kg;sG)VOXG?(<=l8ioQ3O3GhJ!>&t0W6_B&s7fHuU30~0vTCP<6^@Yq z2P$^=x_d5MJ_sOUFCIqCOmF_v-)7W*oBc0Dqd462u0NaoofhCf^U_*8oLWAxyrq@5 zfyVy_RGa^Rs?`+!J5jzlTs`>l{+sis-Nipcc%L0q(kg~6Xr$|O29{0S<=|F>YDPWl(rK=UN{zt&ll>4se3 z(AcIvpOvmRZZ`8T0PS1;8$iJSZv_7@faz7Esgs>fU;nzL+5aGz{fP`-*na^?88*uw z4EQeqA;W(Eb)Nr?pYLzVJqODFhVO6B(BI21rzoO1E_=BT%W6zDV7vIDzFnWYvix&b zelK!_`8zHG+ObavL@oYq@c+UtcKCCH|HTgQZ*Twi4*v%{Wf%9t($97J+x=Tv~24T2-6f`+NX(7Q?>35D{PbY`%`> zaL@*D2MYuJw}>`RkvF$px#_vT-EChUt~S%}Cn-TZwaXILu<+QpUg*v5)#1Di2Rl97 zYRlyS7k#N%7GtZ1M7YGwY$UMhc7%?%(5P*rS~7ce#$o6*;Iqy5Z|fGjLtpCsps{O@ z**Qg9Pr+>l%B2BNPX!NIBm7_nEjks(_;zpF8XU_hCfb-bkWo7aPff%alflH5{YluR zROa{&7^!i>@&3GP9CkTsv=!rE4ti{VR+`V7;LDA@vo+y`!cq zZ4w4ozQ2+gTl|gd#U<;7xnYuDx7I`PHn?dq%S9Vl(d2sO3p9SBZd;IsLjDN4=};1- zlz57J<4e|0Hoyj;V4R2deLT7tc7B#q{z36x`m_sxgL_&x?-Bk5{~+{Bu+R~Km7Nho z=bRa_<(e>1ersMxfBKW`H3>0 z&c`Y{#UJ7QDBp6AMe`QP-f69=5XTu^_sn2KCF>~yruvrhCgI-d2RtSnBaO+dl7jp_ zi?U=uWjw|ry$d1pOG_#z>G2Ed0lBj-7=rVy!;9gfc3GKdX_{=>G7QYF{UT7>Y~@U8 zEeNg)Y13)-F7oio^%6>V$DdiQ@#k+u`^!C?71XN)Ij2V2E4MJ+-hp=9LwnCMqhaaG zo>c|I2{iCytkSf~y(B#LlF|jdwWQhc(E1zzj>$JRG7R7q<|9Sm6CoVh#`+#Z-33VJ z^Y)E^=iOe`3eDIu`tBxV!)q(@*@IWUIl=lzF#I4KrADw?x6KL&atdIjg!yL{bJIEL znKx;YKC^da+;{G(3B7BouF*TBugy1lXO^&;mG}s2vZdogJMSD{hs;OuTx2w%b)+)@ z@e_vi-o&x5)P2O3)CZeRm_8d=cG&^mRCW<_cDQ!%3j%6CZ;$ikqitj=)AND9^mgb5 z`O_0F?^@}S=rFnYM^NL(RBF3nn1fY5^_&kMl9EAWb59a@(J1oCz+8?=?B5^4_BPCr zTsCqOR(5zlKOMLTyxTUv-(DX7I=`N|3r;Ib@aY);Mc6pep~+yIxG`K8%qdW#BgTRq zQFT=LDXEl=-(=kO5mU?YlU@w& z(W_DEU*)*clQJF*p6cmm@+ z#wRn6H4@Gto}nOW^L|A>hhMG8O==qU;6=bblI}zY?l5D`0H3wEC@il=aW8#^Guem) zR1~(m68|d2^`r(rwsnSNlQ~|z7tH^isC!8xZsyCMvMD7->m*X{<3@p6H!YqG-Gc}! 
za9XC@T>@&!#X{Uz-m9pak8r8VsJE8`uGX=lDDmYl$zJAGE-Ua(`KOckUfb;{Sv^UQUL`-RsbuI5Y$Wmgj5lQ;$I zxY?FWSvhl)T*`?<$mN}==+1|sz&cmNNuN8S0RsHZEYoou(R7)R*SY47D1BBq`pHc5 zqXmnjV3LnReA0Y|Y|+%sa=A&$6!200Js+J`;pLs9bK6@#2{H~X>Wa&8I8@H7Uj*Ft zn5vU~$e?*XJCj#Zb6bt$s_%`ghip;+T4)|x}6IJp& zz53~A-Am?8ARJ}jgzdUvU8FHANS|H;sw7x=R5b>ybOwW0n>T&aXoUo7ljtF8x-4_o zx4E~rKA+qGD?2JAL5w}Jt;wasvatd7l<#8drXz>%$icYn<9ngbwi zJxHW}(WbrW!`@Xi3~8X7s6zRc>dG_^$_@{1ZlB4)7+=R=*w_grXn|UDqS@=J_R&C^ zY5Nw>-c)_XfG>j~YxG>KKe@-tt|#1vEid1d!$R@|o%JBM1wd4w^y+uJNZDjdlP@so zhy|0OcwgFhz*p#k_9De14e$6?;l&t?d8XP=Hs}beO~ctQ;v42r75KEe<70%KR>gZx zC3E*|1mnvj+=?(D%O~PC6CuQqyIcA-|5A0dB8Zj9oRmb1LaRr4z2FuVnM9r9-xS9! za-RNT)CXN23+Ui%k@IqW3h4r{7ZN3XbdNP&zSx+)c>Pok9T?nin=5(c9qNR!PN8e? zp6yOJ;u9yg1>KP4ejv?+5ROMXxRkBue&NrHB91#da3N{n{iES|1 z=B|&nm0NC#*onz9!Gr3bb~hF7pKXZ5uH5L-q={qWOlJvBL$2^huh#CzzJT+xt>ieQ zbQDM`1o(MKi(LzO_8X@^G`ExQEJ88V6nUQAJle}ipPt0?mXulbA87LAroK|kAlfra z23@5Lp0Px?9L`!zKCGV}d0GUE=m^UJ%iivsjKmf#gvti}*Ae;-x@)Jtq@RW%>x7ia z9J0~EE}8V35@sa)2+XO~4M$lu0?!xItwFtD0a(2}5RZP3eM)KKAvUBShL;h#PnW!~ z`pOKYP1-vr%#9!&rdl&aCh28X1E)Mo-2vZ5fwQodXuFBYu6~LGkuM7D zSw%5{+~DAZd1cewm`=XReU>=h%jfD!rSAu`$g(%P(-Tjo-CVAQygvQMA%MHcP$#zAIQUMCW%_(3%zwVb!R9D@Be^A;ml-#?*OTl<^8(LgIUg2#A)9c=ugpUC9?vMvAc%w!@=J9bcvvYz=j(GY zreQQGj?5RBSH*NtJ8nUqZM>T~yLNzVKX1Th?PlJ-ub1^q;35;?Xki#N?{bBfG+6q_ zuq0{J>UuIkr($34K6$}(NhmiQLS}NmG0~q1nBPHYMr_R{UvaI^59~{%*`^yQO2df_ ztVWlj_*Wi18E&n)8wPetorSiPp_uOk_|KbXq7=CnEDxhb@~Xwv`&lxel= z2D{-PNA)h6I#~OCjjaDDTa%>3xPinDm5K&~&kIab39Nd` zYu};5f(8xUvg(!#+%X19L+giOCJJboI_{ljo0SDXMh_=e*Nqx%F=6CsqYJT8&wdL? 
zU0Us}(Y?XaSe+aI`Xw50uL!AEHE34^hCn6?@iJ!)nth%ymtb!i2Fn1dMTRsDRJ8Ja zNpV~{x^ighN9AxX(6q4MdC?LElDHvg@G}u-dnD>eV)!#5E@M?`q2YqFkgk;SCKUQ; zOVYaq9hu%N35h_p`oO*^8X8fjXs}>F62Muul_1@fzaVyGle(^%y9%XC*I1L1+zDn- zyR^W-LSz<38UI9gyU_=HE0Lqsa4-5H3c=3Y12rwHf&gQDVBhLD#*7`Lh<|Hl=*vYR zhdY-kF-bv6_Z6ws*6`S$e4iiIhPhyu5wW^o)3Qb;q)|N@y&>Yvf^S|o3wm07Se>;j zp^2HwU`$a(+LV$#=mm|fmlWHd!5bSUV2)xzt)KfB9kv{kZ($4o$9OK}s1>*Yc-yAY zEFMy!IKq4Ab|Y`Mh4l}3Ig0PS9YRZsZ9cAZ2Wyjg9yMc`;g2bj>1T2%u_;7b<AvUrylb4&J)=`rh!X&uZ#w#2KJz|~nxmVy}&T<4vIc=S9d zxa2(@zEBI4H<*~E$fo1mDDx=@i}YE+xU+UL$x62J6Cu6P`Kk6W(w859xo0lV) zc}do|V&k)FubyJh2u$KbF(g9v4X8ga|PbUcf*s6<< z7p4-&2%G{K9|y@cVDCr@9FrYEAuYL-v+GS%>DjA)n-)KSIj|BI&hI5sr|RkF>r}m(vJg><;B%=Ut(205tl$Y>22|4aykPz zg>#sXRWD+Zi>6~t(CMsZ!l=qE{y|Xh#&#L}PMGNGO@rr*Vy)VNyTkPu7fRXjY^fhRor^QTx`RUNJR=;2C`ODDw3U~q77@h z+uwZFB=WCPCs^m+*EwIUy}KhINMzcUKq0peY;uY1#kRjyd)FG}?W9BxSoSScnbKZm zFmYLw*IlYaM9Yk`a)m6&be}O&L2JzY*tX^Ff_H7j)2m1eAGrK1FDXdG|5-sLwz@|- zP)3k+^5v|IgsMbAo1i?0@10D&(cXFMB4q|ZW;+tV(k2^U&8jw{s6{eo!eIG*$3w_b zk0%v~nT9*z5tqn^Dy({?Svut2qiJ4_RB7&(=S|9cz9~)11?j0b3|D%IfJ$|B=8{vE zXS-^|3hy@s8l~WvgF>C$U<{dXfR{HQuGJD>xW@Aeg- z{0QE~Hq(P>3;GnG)a-^Z9$r<(Ly0#y1wxGT-e`L#)4WT*9P zcq{zuwl!lMW#zOkmwrPaAMYGsb^ao{3_{+N5d#7vpOWalH!&%` zl=`8R@V@iyejk_x?5%7q{dflKE%Z)c>QVV=m1cVX=}jpSs2%w-H__3&X;8GD)|J;I zdwEulH&@!)9-YKLLv>AReupzbnb$Zvp=Ny87bVvz$@fI2KsdlF8}54RUiS(AL7 z0cirp3u}b|Uh*Ydrvv|bv5tet-br>Bq|P$v{s1=Ep{LZaWv>a?KGMwjqxT7{OIT*< z!*C+iCh9~lMCa+b?Qd3C@&Tr zIH?DVWUmptT#&$_WjGxpGz!8D2w)s3HobCG0V{`bw z45^orCUNH`>A^}-dF1PsU_L9B@DLyD;k8 z%!t5Bw_D)ML%%{$h(0ttT^Snu4S1)aQ~A3psM#ugKTPU0bFYUW%5Rw{fhgb(*i5zt zjA>EeG*&;wEU7*}pCn)XzeV{c+KCA5+!Q$7zwb7>FxdOLthSW@@vL4+}!V3bi^AJ{n_0fg4meoPtKfmxcVYHXMMzEgN9dhwY5$tPgU(3($x;o$qd{NWTg1X z^g)=-$Rk1L@D-9BrOo=^t#&~y9tscsQ6SPp$c{Afj&sQ-9+P` zx>(4>s(vhbO#p@Vyqeb=^8g|Eu4=-y+s=o{T|Q`q?NIFi;l>I|mngEci_YHt%(2M?I4 z+}f9B<@GIvSm3{7F_Q7dctQPH#gDDljeo^G-9H<=d@Td`dllf)+-Y^w$CR!bKaY|% zwd|NCzOmb2V9{yDidsSLUVnB#wID&trxfYj9>5a`>G8+ZXZ502P^lk0#ieAkK7wF; 
z1wAEYRp5H`fUU{%f`D?NwLh99+!Zh~|AoTUb(PAFp|(3&Q8tUuQp0&121eVE{leO!t%#&)Rtbtv!WL^mZKd- z{HMlINgTQ`$``SolCZp5qKkQSfd;sWw1V5*YZZ4#B`lwr6#paC3Le&dM}jbvFpr!$ zwdkTqL2_J*e0Xql7OTAXxtvG3IC%R6dhm`mHBI&N#3AL#Cq_4^Dg&dHBnnnqYCDbD_UvQ-3_U|!N zMmvC}HYyoLk1{R8u4=%Ih9DyhJiE%UZsvDuf-&Vk%$i>E77w1j1)u+h%~pJdwnMmV zHcS9(Y0-RYuZRUwn*)bmxWYBl3-mE;Yh>?g@`fqmw_~Ijm@lm^!ZlG4^2`0p-Np^{ zUPWGnai7%L{tl+2Y%z;6&r^soR1;&-P-U;B?mB!<`s_v-4b=ztyJQ_#EP z8y3r=MM!0D-k9=9mFg*r9gDH+;si=?!Qyi+$pwxre;V4{|EyX<2p<)T(qe zf-fP`v{>!6Ry+&jp(2qkZe8|dC?~a`7^)h*3`vE5g6z2q&y}=(nFtG zLBj<)?7ihwj7n8djkNXT)dqN+T&~P^lLYr;0jCZ(V#Z&FXu3&J;tI_ zaSG;)(N}a(HpOWa867}B&+wmAy&+_SSmN(RFqj<`jqGDU9?wdAj@1tnbl5Gcnbjt9 zA?Fxf!^1Ji725S_E*)zcCF(H{%dwtpA!utjMxoYacUZfq??(s%i^N zr^LPvfYB(NtfKspo%g2+;>`TlDHTJ1z&J4mx(!UBk zxS|c7ch=ptV);2-NJt9Eq;S!fIK6(Xjce8ay0Gam&0bM^b83VDUl9D1Z}^z>?BLl! zCEB->vSZwleGQzGY#dfHXsQNK#xs8iwLkvQ7&{&m#s|{S-eF!Sg@~DK6%E<-jcrEV zb@FI;XQbBMHc+|NYaY9q!tX=e9E;OYE^K>@E!ts7L6maO!6U zy0;=41eAFZZY}oR&yMV2vY*HKe^u6$RD+41AD_zTe^ZkpEM>@cI1d?)&K@sDR6+K6=5tJDPrx|5#=fjt~Kjh_fn=(9hTwaE4K` z;~S=!hWRQa%Ohph9zB2~bq1`62bBR=p$@FUF-Yhs@-1*&YLbn>VX*x$M=w|3f6}Cn zc_Ka!8BXTP$7@@yy)SSdE*8GI1VRIknxX0^F=`qEG3d>oeBvim)EavP@)9=gZGD><=gilufNTn_>4> zb64}fwrw8S3_t+6prwI?f0#VZv?NDqOARr=mr|(@zxX@XZ77lbnqjD|?}0AB4=;oo zmcEOhfihU#RX8;x7tp@zb8=e}wK-rCVmd055ZGy3)m#T!;}3xteIun4b0i&(hqt@? 
zDfIDr*d5lz+@L=R*UxJ_K6EHo2BAUDNY?k(Gv6aBvtA3Z!k66){;j!hIm?_R$dKsQ z+6$TgJWW1X_S|Ip@;fQE6-=mDlg|;Tz6E5((lxi~LW0*?B!xXtFXz}OeDhmKnpkF; zH@y)_s~L`{8(UvBtc08VC|Oaa&aMHa5_s$P$abZREVf!6kaYh&t;0`rK-k-Gp~E*h zW%kSo8R`H?BGX{5fQILK=NmabNWV?O@d$lX;BxN~@b2_^Ijp;Zenw@f*!-S%^DR88 z@Y|h+sM{wWaepvgl82Od}=ir`XnT2 zGW5{^I4_aSpbmEVG9K|wd*nxU9QsS4ey;h_?arQo)tUr-SOgq@qt$QVu-;FJG>dqo z$)0qTSjsK4*)CK)rIjkrJB51z{YOuOGKeO7uk3CIsH+DBBx?_wtf@HW)YQM7DKkaC zh2XI&_C!O>3>aVXGm-Zk_$ri-zac?^=9a?(!BW9cf#VQh_CVc6Pz_)?Bp3ov)3=Nu~j@{Ui`*{;W z2*WVU@PrIxz-k9Mn(khMR5fS%@XFIIIg{cg7Lu8-w8d5hSsHO z!SoP^z#Y=+TpGu{gc0!6Q#_Sc#CNc=h)^&R7CZqh&=MNe3&P=3Zx9a3Z>8WX2hDM5 zD(h2&D`*M=N`y|dIfKAWn`32t7!(DZsRT0RicilFcdHA#KYLCVjHWU_Nq$Rol4AK5 z2^(gxY~c5e3S+)%M5lood z3shALTeGQ>m9IY(t<8g0go$Pw8a0>_aUqQiW;8y2qt?~WVjw-U2pg-(a^#xzO`fbR zFw7{)pQfjZ!~kGKFJ_F@UPZ<|A3_J!vf~|TE3rOdr@%KRW#xQ5>zaIKo=m+bH3fs) z1;$2wX@Xj~PKqd#;9oPu&*&@2WVy6d`KU?cllGfQ{7E&hNR7=XmX=nqZ*xdKzC_nY zTV)4<52j5YM%vIGgJ0e+lR-;>Ey+DbUE*P|sf7EMhz}tB!6}mh&)ZD!9^B3I`VPmj z=_SQ^tnYNArLZ^iom7NK0&YuE?PXwP{GE<47sJgq?TA466>?OwyS1B*^c|qB+rdm%q)pJPqOgi;`i!CXha(o| zJbN(BOg>c&AKnD-0UN_1#PsXw+f@7{;8YhDLEuU$fqF+sTz4AB<&uy^c5;&X=4-7? 
z7Xwpam#RiQ4rrDso|A;Rbq+qXSmsTFgpn8+ zz9t}Ln8U{V;eGAbvKy+)bB<)kZ)kT*-u8Z*uQwqczO=8@C&zVae~7;Y-#ftJZ3WAcO84h|HR#;i`7FUZaG)FnN+XVWmr$Or*{R8EOX)bo8x49Oz&6N za5?onHfNWX*5%BuohT>7B)B=s9nQq&{zL$bY5=yl6Ye0_j-kAb8}&Vv>PDdS^$CD) z=EPY?w?{l0ue)N-Yu!JBwfak#38V4((kj=3XmVci$8qJ+T@H?6J?-SB+Y6t9qrN#k zw!+of3}%cl%jJ*+7uKZmRkL;*`emNg@Q!kuJLh2}UFqfTncq_a`|^x2&Bl96mpA5BX-aGsd@bN4$XT+P zkDbj1+t1tMAOXocT{=Z+%7%!bnhOtbzQ#meO;Y5z-?S4#;VDB;7E^(+o}k!*oo9;- z5I+*b#at$%79~;V%ZWm73}KUj#BZxj4yz#Qc@i1v&c1ajIv#?zq9*E8w~Wj!(V_|E zd~T~}2D>_zbn>6ggUO3v<_n>+98hc>WO7uJ-{)8YHKW6N3gZBATd2(ncWUj9;SlR`o+0%&uv|?62W#VF4+r>&bEGmC69^=& zg@XNBk%NEGoZJBKCDYX}0PsMFA&rYSXR_Sl=pSiH^?dRZ3B zP=4I;-}6%)EHS9<-s-m`%WR7m4{c?Pl^jx1ilFPw^^-b~jkG7+N%!!i{BNpvJbZIc)9x~Q zpa>_TEfv?##0&vG9U={~J%7Z3J51+Jfqs0EslsD9>!xG`dtlh8zZa_KL@xuq=4f_PS}>=`I11mor~Xc#iyCm0+W0X z8RB?W?`n)pHQd+N^53EMAj7<|bn2)(r?l_(uch(&0**v@Xr#T*hO?ie&UhDAhka`l z7QO^{Z0`l1)DH=d=;dXn|QCtF?+{U`s106{05&tLgY<0uJg}(o zFJb2VMEB!gq?BAhPcwQI&hele4j7v>lB#!f6C-(oi?3(9RWAA?QcM@dSGWgV>bMUR z6;zK(Pd3%xf5#l&|NMDX@yY~9N2jkWmX&V*&V#AuY0K2BIH+620GQ&-zW1HgFkB2CZiJg+_b2rO|*PYE*gXY61C=&U>C zl6WY&De#KwE%~v9uAf*rcymuuU_#xsz!=?(Wab^@W@7+IEm=BeO=dxj zo5}sd@$tPp0gSHr`y{^+Ucgi7M?p(9;_;9w8QUMfhcI|NRv(8cN0*1MF|t!hIq#ya zMpof%TL_Y0_dT{Bqd7jF@9%KqKHlw~xAGHQ<9-NC3NYk@E{EMNu+grc7`=@Kc;X8z zJlV9VS=qESrbLaM^hikqX561#gMtZNlUFb=Yo?;afi3;z9?E$i$qR)s zZqOxWpepp|Qraf_7;m)S1d0K1Riy=j1Su15vi}}bIm1pLafVKiVXryuXRnU+qv2ee z05|cq{J1NgpMl8+apl)uLQVa|fbf7>7EqjpY%|vQF@hR^P;RdDwtG^*>bditQ@MN- zA~}jQV%khgZkPlCg!arj|ujKiPBOs0+o$#FSk-Ex7=Mbxww$T_X` z@8oc*#tB8Haa6bV5t0i2P#OyK2aa5fG1qdOCg2emElDpU zo%7duuJ>c_Yt_1UVejr$Rb6*g_Ohy(Iti>dLLm%!1F>2unE2187Koc)n^M0jOd+=j zrxP!fvu}AFyVh>K6Y(BjT{rhP$d#P5Ft_~*r|}@P5`X(sTTCbfjx@l`*PbAIXpwOJ zbxPpug4tEG1`L$lWle`sbG|jdmal-OeQI|j!3WyTRtNkAQ;CMN=u>YiQ-Sv$&#%JC zpt=xo!zC7~DSR8=f^6(V&36Vje5=lo;?ltL;2A1wUCyHdbn0=5;+(&yi}}jfp=i3S z3?F}N{up6|A5{^B=Owknad-8w$=aOA{tf#)?aYdP3eaOTwH9j#2U(S(GW@dg^=h1v z!YEs&^k{lG-tANsT|~-<2Huj+b1=d`0#)QQ^?_k&u zEQ8cnHkX*y<)0^osNsSOa3+G 
zZTZ`2KPWBa!WLLc=Tn~~KjdAzgal`-TcE1_d#D{1x5^x@nzHVE{BQ_(ZU@FJ{(QZ= zU~*}HWR5?V@fu9A@%1mH#GzREtAjAs7tA)NSLSaiI+zk5h~oe?XXm4pT$L{I^j#G| zb}^jonq$z)#WSf9`@WExOtme|2)HZ|!BK7X~+1{%%4d>av(p80x_TBFhEo!59bkoDJ}HOINMyJ&OCGXg4(6Ee?b`4=MvYB9+CH zEzwBp!TH(Vsgf9AFq(rRFAqlVBMuG_s#;hYpxh)8@(?cp&xpV;m@L;~q;HS{kGeD7 zagzcciX{Pwk0t|I4ki;C@GUsAFDy}CZELeTAeRb)GaK!(FXGuBp>DLiii5!^e5IJ{ zrD2EpgwKQtbAzZ;uA8bKMG(78fkeM;)QT5uosL7{4ugyLRwT(~ziQ{JvN%eF`7d!@sQwwd9r<74@r5k&Lg-f_(ytj=Jc z>aa3{rE!PmSqjb5K@C2?^gjZ`ePji2)_}-Fpgf^x#b&}1F=7-Oy>?>&?q%L`VX}Z< z^U#_JeHxQt%XBLa_a-qn=X}Zq`&j0wTVeU()dMfh!~l`R-1m{~11J4`DLq$Gxo&)J$Agm{f1-uMYv(Sc%U`?ONEMFF<$Odr7K%f;~1-K|C2 z3*OGVg*yN>)$r6h=Gr!18=V-BSNmz z1z7c5dtZs?`91~a$pVpRiJDP&tY_CS1=l=9if>T#Wr{N$oKm@-pd0kIa>JTOe_0iX z^ou4jEL(kC<7=Q?+k&6w+>*`5gLe8yiVmGx$XR0~UllL=NHa>Apr;+CX--LA&Me$Q zm*NW~{EeH;u$%mgE$_D92Bdn$*Xx(&h?nfYn>Dua{yVAHh5wz@JkbJOS&>LJRtftM;&3E`iISsvX{uyV-=7vMg4US*<`_o>=YK}Wa zV`JxcElfv8k=`_drlmQ&d8$FdZRlh(4iI)qK3_Z&t{LTbB|A#X^72#GG}O7_@Dzo- zp|=I5yl|sZU$_Uz5pr> zrjDdWKle?>RKr=?Y`QPDM!n^Qi9MwE(H($M%`aRz)>{nqJVWWRoxph5D!&*(ut6&$ z%4ZuePkfV}as`ynO*8%WGC7!V86eltfFbSl+I6U_KQn)d-sO!T%J+s$eR+UCGn#Gn zb8EVi$uoNgF)gAc2IKN?YBrzud;x{*SvA_M#D|I8(zKDYWiTx{caM`n4ZSjIZ~*o zq(mRndfEKERWqL-0emL)0UL`|#t5i?KVRN4kKR7YyWe*Ysd7Fa+UK99`2pkoG7;Cm zI~WkT-TqtxHFSf41$4H$8)n0DyLtr{`VP+GeTs0^NCEK9-Q*mNG5FeMSxbCO&d@yD zgD@W6aXqeCI?g}(FWh+7?7MF|A|>u4otvha$)a9tG|e!r$I+RbJ0`uk);i9=TCPMU zy%ieP7}U0QJrQxa)7WyK>;Y;c0^?aGl9mlxn25I1!)>+j)n<9y&*|M+dI0Z&>Yo`^ zjZI7RRDbV|^}OG11Ia7o2n$$m*54qaY9jyKvE2|z>6Zy5+r7b*!#dlFJjqfnLFnp_ z@(%DGKTY!r)>bPtg|6P@!TdqllGrXvaM!yr?}}7d-3o>s3RAd%?f_Irq6$rJ+Z8fD z)xX|I+6E)v3^0B3)=&G)?4t6~T6)AJvHEz)y9CzNJkVk<7`&xNz!gN^L6qbu%H*F0B6XGjWz8`!b_p`#)r=s9#B96~o zoUn294YObEPGA;2)cpy?O zl%o7QU6;Lzd3x8T!szsxt#Ax`xEYhwll1XJe3j{F#lh@Ew*)p&IvXZ8SRL0Z)_6(X zJYirtUDhv_{hHx7Ofy1Kz*xz`b3M3d74TjypgMdzN(*7wi2y;l7n_*UU7aB0h{}*- zO+vro3rM2D5ro=i^&3D(5`k32{iZ}zgw(c8h-i6TEexU=yH+^|BO4aSoAYu0qg_! 
zo1l|q={tXQd1f9M5?FzTSzb*@UVeBJYW+WDh0xhoJ+7&mlo!rlzvzjNLQS`Z8i&M4uZQQ zLtB7R&uH=knV4tS1+(<@2}#P?hnT)}MNHEYHZ9e|n!(xqGQxbxi6rztPt$_;WVgqz ze_ACb!^oAdnfj*W4LbPTw`%|L`)Lu0^PV5|T>dcXw-@GZClu2hmJO1rj$4<_qw;m- zT;gm3Qv|Q2aZ|HsA0Qv8On0HL5)(F0`;`j7Hanw#-7}GZ89lFyM?sDM5n&IzMUZk^ z#54@*FZTnronSDw6LK_4S>)GI2u7M3kdaf0&c$VPm%Ixcek{u`E|fn361)~FHc@yo3Ej@Zv*zenFAI(>EO~I za1DjRBrG!E6a!op69&}3BQHbG%9#6}Y3P#uI~p>u@t`Xbwr;pW=2h^5lz25Jv|ui^ zK?1hPSVJ1Q>6uGX+NTaXoc1oONocostM*zm+MbeUDJeiK@w4u-BcDaihhiILAq8 z^Xe1S%$f`)*aSL}!Lcz0XOt-bs#@b1G!vK&XTqNPq{p)=>HIg_iAsjQT65>|Z?G#g z44K`L^)(VL#;amUHl8-3o^%}?0w=g;r6XOjr(qUR6D6(QRBqw=gVaM^+cJwaA|YcT zm-=Kkq86FvS+I8-r> zek54HI=LAe!_G=A_Zi?%YnGg|&&8r%tIAj zqGb&cg-F}`{F0~qqUQMqs0|XuhzV8_2S^T;IH%u445c3vE0iatPzJ+Hh)X647&HiR z2SriWYlvz$wDvM(j2E1pBSv>WJ(WZH?5FI=5W40%1uS~)EK*MZBj7tqJ0EHOwm9&2 z_OT|qU>vMPBp!9L!)FJB-0bjzhA;0)^WZ;IHzOWW_$FwW9C;bKskTFeLp^3}9Yo~^ zo>Q@hyDa+U7HAW>I)-~g5`BX!!)6U7e9031BLz+WloThavQLU0r_LsM)|1tFot?6m zCt>$%@UC1N%hn(ORJq5+*r#qOwDT52q#88@T?#)n7$)xEXwy*+{oGp&Wskru^%cXE zfm;zx(gim9whJppv%Wy^x`F^-+<|xDs_13Evxa!Kb*WxGWdc!x9B80$P-ov6xMnhZ zAX2V%Y5R1StMa;EFS32^UZA~Ox2|}i#ro!F_GH>fxZlI)x{2gF(!dGFFxY-+90Uu=itG)5^QKHbn`I~nK4)?@cxRb~? 
zOR1OP3iDPnj~(9mCf?aC%7La}K5wA!meQYlFIT!=A7%y)Gb%rF)sPze!d1ft)mE|# zxE5&$Oz>C%1AkRfQw4g~O9zCP{ZLz*j};SIXsdVIJ@UhNL`gzvZCk5J!-sR${1>@9 zf6WA@Yo~8LyhgigU$Tu^+j&tg+u05|o}8Okm7L*6ctmoY4>WdYEHe#Qc%QEYS?dV( z@3lEmDAMS**mTZTTeLU1_*-J&l{C*YGxBwQb7n9A;1|tOR3`PS2NhWS49v1vr$9cX zq^TPHlk|zuYV~*VFZHdJt}XMhgu)@$I!-TIqokITI+61vdOCeR{$F_>-%p&TveZ%u^%(nh~s$Lb|0c_ErR737>I=o*s%GAD3=jRih-bt72WxwA(_Ev82bZ+z^lBZs3S)#?ai2{i#ZRDnr z``4Tut10Va*@LBB6(P~<>sO|(JAGXemh7wN1E5D8SB%>)UhGn<{**Vd=K^x z>N5j7HJDFyB7gzm?r1_Fer@U{Ubol7YkG3oN-QCrl2^r&=egoTNeC-0Zz8p7qRHBp|t4H2g7+;A1081H7 zq}0z&xa9n{{@hovXb5*{p~+$pUhEg;sa`fY{bDXRhYhk0o{VFM1&Ac~>j9igc7#}5 z9*zP|+-MhZEtp`r+g)>8xa^evSzbe)VXwOfq9YFM3w4K{*>eA>FlVcspPQd-@0UDq z4DhCLuh5+3;?7<-gtF5o!hhb>0ikZMGp4n6J}u=qp!rKpIL2mfW;nxmay|AJrKPQhdd%f4 znWq@UZa{i}|Ci1(ZlYo!!O5w<$0019JSeXlCvQ)yo_?>8EHbq29H4^av6~|w_GjJ0 zZ=*b8uv;F49Q1Ay+eAQOo3>OCvE)lIsKq_OT#`pQ68T za3+^-s@p}=Pk&E@aRKI47pMb$WCeA?dE5q>^$1Ek?Iw7j~6lSV!fr999>Z(;vEzB;n2uzYA?{%!}g zpFeH#6RbLX>Q@790;m4w3oY`{QUDm;<6J$(TM<*?uBi|{-pV@~ZLnvu{Uqbs*M%CW_brv$7`>bO4h97?|l<{xhwmE@g|&2GccNo#N=5DAEar?n%FftBDc` z0s^LZUxVxB4;-bn{_;gG(#a0Le$FmH`(Vg}Bod2)hsOe)KQIkr~SG3eCxO22!adF=n>9gxzU?3pN5b0g;nk&zPwVc@A7oF>NTPgMQ59cYU-1nG3dP z7Y=sF3EPV1Q<;z1QXPA_C5Zv=A6APQ^?`}Sm4*+SQEDnyeOI) z5EiF}Po4hUBFG1(>!rY`<@Xb%f4RZ?9>`lZqG8TTDg+6|gAM!VY19=>4J1bi&{%G5 zyit?bevnAqL*b^Uhe$=$PP|9qz{Y4W(Vr}UfRLO_yf7x_Ar+)SmYki~Jr z@(8>;_nIiW_0kcB_JUx`XnWfj_sD}1?}9~q`6c~ecH`33jyHMI!iAFAR4xq}Fe}ke z7i@Kxy4_+n)$2rtF_2#etM&dHAn8+JpnLop1>HK>Y{}sX*&_DVDK&ir(z?Uqu5B&Z zRp8i0Hnj((w<}>L$7p?+`TDBVB&0DP7;VUf>XYhzVlmz_W;gw8qq)+-Zu_+OS$%Vs z9M8z3v#!{jBP};F^+Q`=4a!R6T;|a+_o*+5@6RABhELbZQVpEC z){hjzLQgnY=qvbKhby^6~SH>=|=^jSho2T}IA&1MheH&ao*=S4Cp-+Hko0Og zR}n{mpx#@G?tN@!&&0W5*&RnkjH@%nLDYE)b)k3anyLis6j`hkdaVt?+Tyr0Zzs*~T zcd0m2P>DQ0*(IGRR!d$!|8DNy9j6Dk+k0S7eS)#^%DPq~0hBtEzn0qx&yVk8f6?qxCe_cY5z7+5X|;0{@J#qxMKcT?4V*QzLpli0?8K{CtM2O zU}s5HK!Um$_I+f&nXdz*1d?JxJ@*#tUqOaAl<7fSt&hrrmA5B-A* zYu}vPN-r|k360D_kO6y!zh#P9GoFgba)9;=y=ZY 
zzYR?FVL`Z)FpzCjjMl>!bbYCVk0_tKCfC`C5$fj=NpJJPo5eUJq6*K*mnFOb23KJIP z?>JUhUp}8Eu8#Tvmw=Z#u71IJ#lz}YdR@xF=d7Pn38vslA2sn&?5HUM2T!OPXbvpU zd0+3KE5b%4L>@@eo?SW!5tfFLGdT!59uKDBHl_lBi6L@+0V#OU08Eq(O$8~HkwoPs zlz-%!6)ot|V3CYOIT(PAVjtOG8G3LrBeX82JyYU(BUXF;%f$!1}Y6mRQeZ)3X;$E zaOHWXvLg;LI}rm)?Sddl&6K4wV4t#TbPE&$EX1_DB|xI6FktHsPz!4Vl-LxQ03jZN zd-%6+nWh64voS0rV}qV|4k;<_{_ac|p|@RxX079f;vzac*@9Is$}&o#>H;ZHkBu32 zlZPm&%f)09{z0`oT4rs{5RkxLi%z!iX7!bkJ50Y#&ZPmiR*@%Z;r2aBJdeoL9`pCI;mHQ4UA=a6rjg^QT<2t{DD9_Gb?#)VgY)S$XuE>QYu(0LI zxXl|L*CDMV{ZA6@5pYTu&94zJOqC&UN6jsg5E~W6gIV;xpOXH}(;5c^B7J5ZyK?*n6VW>=OPk=}h0^2p z$L99%dA}*q4^yu7imT*qSikdbV1HAub8tx(jl_~(1AdJvNUelsNG_(>TDX9?;U0|9!l zpxo|Bie4=|2RKa21nGpe{wvfmG1!!^JxI8b1Gee_OO9{`zTZq?_OKlON^hh{#}egbd%I`>WXW z4KK&X!Q_XeC#-aKzjtwH&@eez?i2OavXAUOM%w&Fp&tuo{2azKyJcZl+T`i(Rj)sQAW!XFzBg zT*6h?C3UI6BuXC-^)T}Fkz3`xs1>ztzBFn4F76)VsDp0*mU|_OhZlB_;BH|ky~dJ`823IW%|BkWHzffpelADkk58A=M;3``i@uipX>dmZD?Sr5h_G{_e8 zouL28xHL|l3uhKe!-hFILwJ5wW_#p9YH*!UABW-HBdBh+&%2b_zyaJ&KB9{rPMr2xw%2kr`j=LH0qWEP$ptx8Cn{o&0 zDM!8C!YFi``p-Kup-^Te2xZM{1t3eZhic%V5k-Xh2M(ucVvl;7f`zkYLgOk45?wHp z@Kj}_#ex~^VN**d5`uzzoFg{#PNCJOdDfHTjV=+Q83RkB(wW|3-*Y`dfu*@WxBU?@po-JtmC!Qx*tYJ`t;BBO%> z#lZzeu}@_rN{XrG5X~GF;)4{f7@pd_H51*h= z7ltZsP4p=1d&!AC42q;?uqw>_wPw4oyJODEl_3K=45Q`0|J_u+%efD}lX6Y~0lj7q z?Xj-ONwFL9E2`RwPSH=sz)|7Ck>AMaXQ2xEJO7)bB(yt`73D!0z@?ACCj2k_q*!8~ zBBfyC@o#LbtbKLMJanfqs#Bt3W3 z^qz3z5nTr2C>fv>P;mZCx49r1@|P+8y%&|dAd2d5k2*gRW>{NNX4TuC>S~bbX;P9# z;$HE>6tEq84zp*P@=QIc4Z*{+zWpVz#%yO+F^WUN(&U+=*iqo4{$h;Gs?aYw*j#tf zr?f7(j*XV7>BzW-DM8QxhKdoU%65(o<^xp_#soXAy_uMJ0ArMcO~6D)L1Bm``nX^| zw!aV^Ugser}i=_RvV6D1=CX(C)ovXS3I?S?tmxrA7!B0d(|&>a}Nwv+_Q zF%dQ|Z<8*6+j0bk;RP6|2bhg6|L~I<{?qx)jriQ1-NZAZZ=*c5x|UGjal`i#7VOR# zx4YBW6x%?++~k#>H7_Z4jO0~Og|$)j`VRNp= zS7MT@m8M~Inu8xQ?V+BC9w#Xv^Q&TG6jHBN2En5Hag*!y1R3QFs{QW;LCn8%NBY^L zqu@?yD85+DEE0`^*oUTUjF5W z#$dD!D4u$U*odZnaHeoMB6MLBKj9H1iL;F;c7I0pn$hjWO3xvgMB#Ua8GNeRT9??J;|QO#3vIt{9ztsTH& zy&WZKK=SNMZofN7s}xr*#lOjE57sh1K+J9d(iMsNFa;t@WpfTaWZz`yC=9SuvsT&J 
z?r5+R?-#X{wKS=QL;lw7pozAdPtp~cya%Q%WgxZF9tCL-$oY^nkXgf@xBmkhp)vy$Xs7IIo|2$%(>TO{u-Nb;JRMi_ny}&DNnkD51}= z1zS&5T2mAu}rDsXMRrk9Ws_I`KWAB8PFWZoIPrQjWYC;^Sre9x>wK? zt$R)8u3U8fNnKj+t0&JV0h!f1SM(unshdF-X&xSdIr2Z_r}ng`PeuxM1_ zICP)SpvuF^X*L0a{-)fisJE5X9AS>Q^@N&$e&NmBdF446bEp<@Oc$v1#p^%6FwBP> zM0I+N)?qcf`eT$UwJSiBQKlmQXm9v&t$y<8hPyzs0UZpV=`@rANq0Bzq`6`>9S^?y z)8p%9|MT9&^^VWK*-XHE-jb(`0K;!$wTVBf! z+cM*?s+WONJnbuMmYd@AdiG^$_MP(sO8ai)zup5;2VwX=W&M*M1NzRE{X=%e#Ln{n zq{p1CAg_p&fOwAT3qGax=X;&(c9jzEM?rsMJ8`QiAK4%BPgC498I;p*L|eTlbG zvGNWsF(yDNf^mo$T|aT5JZ6BV+>$xX`%AE`oL+VpU*RL~*(mg`Kc7_Tg0YEHFDb0M zEFYhjtH%pDnf>SG@Vw`V`Dxh)I}gXE^Xu*bER3Q$6)F;+ue!{dC*>o!sQxb3(eG59z0Kexn+xoVN)b+jYf z`ujLpYqSPoN0F0MXDZ8&aHhJGIXv)cBa86vi7&A2j!7XXKtpdU-0~4uBrr((R5E~p z(DmhoGxiYr^Ug`ZI5T5}Tf*8QiQF@sBBZD-U6DEr7C8GK5uirm@VOWU1lKWl zH*f&I_8`?5Lha)J2j^?LT+diHRD$FA*w?5smeE;2<1bJ_L> z`HrWzrVTZd6$2Qg1tgRJaxnPuD$#&j%ySUJ2pPRQ9$Hd*}OR zI^Wms1J?D?`crXt2a+HV1O`z6O*Ov~DfO~of4TYcspI@98IGgI+T_)|8N<2XM#uD( zwGUn$?uj~BcT;0JA0Yb;7M|{aiWxWjRs3VLB6;Y>X120P3aiyCYfbeHPc{4YVqja-($1{=@PL zKiuE8t?TTy>^zqCz%D=m&~o=W`TO@aV~~?rV*T4;sx@1(DPFm3)zZL$azd!#ks_Q9 zbs}!Zi=E=0)m*srpw`)RMj~fA^Q9Bm6;2Im$Hp1ccY{1amK1*CBbwY;QR`;I~*fL7jK}KROdhNQZH%D0A`!=SdD^Zol(;< zIZPJFf>x^~r1I%2UVMu)`Z_f4yGzC7*gwiuw2KGQtyb1<6ey5DEElxrCGOM+dF-l&Qh*u)_xY%3=M< zuOu4a)ZWf*-j(<>U+(XEUWc^!bF84(O9jiW85V6ccGk2!)!?oQm{K1zc62g9N))#`LdqF zqHa}+_@EkPRplk{MwIq-%rcfPLSf1sbe&>?N<%At`ZgflSw(N-;@;0MFwD;nfRmAk zrd{?EuV9*m2yi35at3||)pI$)>d~3d$(lTUXDdp^B#5ezI^h;hCHIEZnK;MsCoi#B zey%sK{D7Joh9w4q+7_Ey{`N9JRxt%!zJs43kWqWMs4~4taIHDPsW}n^&DmS2T+-50?UCfPA2H!k zdtI8gTc!F_$q&7T-jeNRA~S&HJFQO9SH@+lD%nzuRq3Ql@mMc9o?+TGi6maV*JesK zBtaeZ!@o=!>`>=nW4sy{@OrwA*Y#bO;qH7-jyT4n+uk-T)UU2BPqo~&!RnY3h@A*z zu}AS01;_~;ZZ^Nr+wPOI*#5+SsZ+tv_RM%brc_+F?AyNc5awnsi88-9ijSUh19vNYijC2M53!uXwV66abXOD8`j|U!94ECx zlz?xR8wMhvaV`Uxu$9x^JyCK^R*Bq(%NZNXZFn>}WV4e0MAu$)l-ha!Lk##?IDz)k511CWDE*cWtf88aOkCP& zw-6V|4VeqGp22v~{=-fBAdzo&ANL{44uH^%4Fw+vH7OcQl55}pyN7vN52vA`z)(~{ 
z%yVdnH!?Dn_*T5K;|5%uVPZ4^1@QmE+QLiJt_{ba_>Q+5z=W@y?%qZ;2N$Sc7>S4V z{4d_RfMUii1F=ZYtt`6#w=rtC0s9vO6sm9dadjRJhBAsW(&A%fq@=1b9YhodEfqW@#r{}nwx zw&my8j{nD|VQqlW?{Baxi82nG_PD)7__6|e_fOWGcW)Z_e8vi z-JJgqP-y|=zkmS$M&kJgNt{IF|7unGjU+@#0sMcE#7G7y@f0NxYdVmJa=sl)J?-X6 zNum1|BT4x~fc;um?q3ERA{|n#`L_Wj!_;_-;t2m0yarrUHUFi6(SL8-1gQ5LXWELA z3|szZot-;dgu(Z^7Vdp(HN}cshD4Nq84zk+!}nX!Z+J^QB?%D%vU8m;R8V44xmo_- z{}YuL_!O!y=>G-w&y$jM^B99t?hPaHe;wl~9=6CEQ~>#3&kF!P`P45%kb8|fwfUo^??f=Bu|KGKBPsNT~0?&WzRB~x~kMixS ze=U6K^PU2Eul325D5IE&2IyYHGbNIR2>OJL&_$cV{4GL898(Jp*?o0WACW_EecyRE zao4o=`Ma-}rQfy|jMyZd4y7Ws9$8@}56F6;6*u5t^lPtFIR(EUj)^lqJ@SVf&!7 zzfCteN{kuREZM9mf2 zc#ES41NU^2unuH56=!l5q+LKgKHCES_7*$ML|e4|#r1p*N2j(~TRAROC_>7@HR(w1 zr09Me_nsJCS1 zc&8U}?FG+`_G-xlRm*3-ghcVhV@jVwGoUyUy` zX09_jFJ>k90|E!Y##HZLyh4Q3>xesnrT$8V;tv`V%Z74zMdxMxtiyJ1V#7iA{Om<_ zO)3MR>iUl%EC4uue~;ZeOsU;M^%$Cx$<=G#2I%d-x3k@^$?C=%Bumm!6QoS84lc44 z^sHS*Qpn47QdQuuJ!mg+&neeb)f>&4Ip=0eFf}x>0`@B{T;|V6)j3KbZL!jm-oP-& zC(0BmRxchcf|Oh@_S01fq~F?A2`srxh)QgJ^C=IobO3XPEn11Coo^!B%NR`X3Msr4?it(WS097qjw>%I8V&=1LvS&q;f&E4puwGb_HZx!5z8 zU2KMz6`9AJev#T+Pq`?bCOLoDsCF-2TEpngmlv3GM58jfyZ*A|VRLqaJGZgo#IZ$Y z$z1ApRC6W8+-Hcx&%>1#Esw6ifRy9{x+vvF9bIQ2wTDM#3x zvk@Q4dVWEVri9-UAz4PgcE_t@eO$^`BxW|@sISm=ACc$``a{inov3lRQe_@c>+QJ2V&HCZq`Glkxd_qxv=EN9@7bR7QZIJ4UB)0=kQ z;#Tj-L7sdB@iz$=KW@rQ?gzdrd^BJb@Q9C!eK_6W-!I(jJ)GnAO#{q-_O*l8{bk7k zS|XDB0_mrHas77vTV%%G@g(H*x3JbJkp}(e;=$*>VamxQ1t%L zLy$NClZg_#gMctNq*z;If+_-Y1iycvY>U8o7MZ#D4{)`wZ!QiJo(tTTV}=FpA1C$z z;6NRxv_QsRU#<^(QWvoSgknz!dfolqbPy(~51N|wZu&bJc0RRz00x2s zGsMsJ8QV^p0MtZMfDp4R<2B(*1f}FiHfNoFR}2?L05AFP2;)AYj29afC2bK# zvnhv*3}sCZgub<&hofD5;=8(`o$|*L=Lfo{2rYV58u)DZ?@6AIAN^O`2K+qSI6hvJ zt!~_X=lQ07T!o$TlH%AMaJKhHSgvB}^GjG&Q2-6xE}jl=K#1>O?D-8%L{VJRl)E)L zLgt9DMlR;!`oQJ4vENkXBo*d@)Aq77$87$GwpfBG-GrKY)blR(2G5G_x0=1FvAXE# zXh^p)OBfw5UI;b%UVEr$gb^|wkiKdfbd*Zti=d3UIIMk^KA_4dXxNB*s&+F&M4@In z+U{tA!T`=B@T3xuZjiygl$NSm$q9##m1|#)@yPeDF>PUxgrGP0Xk3GvM!e zKJ%yz01kSbk!aT#yrikM+1H;IytomMWmqU#N|*gMD}4vY701fKO4k~f#|-ng-i?!K 
z_j6^Ye&x4a{6rn%k0j$&MmG{>0J1rk7Pm$ahDZq=f{c*|q9=kuM@0~3 zO1bZ_FCVgoe4=4LYO)hw&A4>WLl4&Vi> z1FWx?)crMJ!Gk`w`_`qf_OQ1w_tDWoL(e!nScX)$k_VcXhv|o#3pO3EX+$+H=K{n= z6KJA=_qUUG)x}+*;%@|%z#5?X6R5#QGaw1iy*;BB)@Vm=Bh?(Jm<3n#nU8^0mtcyy z1HdBP$ui-7RKhH?jzpC5`1E}}dhqNh0rL@q@=HRm67%%>k(*~FXwt_9g=S|sl@}8x~o(Id^ccqf_0EW>- z7SkbL(8AAcr>rkAu%2b6GhK|R$9*jz#%|CRY`47bhFzKSWMPF>P&^28MA-&3d}m94 z)I@}td~M_|L51D6!^x0pf*FwcX+9{dEcPOoJjs>Bc!UfczSv%BL| z;0L>)IRw{Q`^GO{yNsflf6g6>0Ak*hj4Sq2#G@FVX56<)x3vI6>%U)DA$E--J8Q*BOC>|vN>Y|T@4QnFqZB5PrRMC?)L9WTLc%Qy#cF7= z*&E_&G>r^PZN@08&UySoQ>mw`ZnOR&M+-6goYdywGn?%c_D z&RO?<_t&iHsp`GE_FB``yQ-g;MZ_5s(Tq4fNry)JZpHcY54G2tngQ%eW|I(aM343Q zT(ZVrpFRu#v(RuT;@L8Ut!`Y+f(Tfr4#Tjwoov|1v7?B;lVDXJ)}UEAIqD@}uF8V4 zeS)#HWzLj!j3mOvDz?ez5{pHLvc4bZ-@>HKz36)Iv&PsJ6xx;g9x)i)+Gi+pj-9mO zP_Lde`zX>Q*pfr_Ml-7zyO_@)Q0BUmgN}x3e?e}b#As)q&D+eFQV{bJ@fZPc9am#5 zEZxt;JeF_@$qu}E!QU`p2EXwa^KgQ?8&xvWm9=L=X#$oaxi?`n&g$DoUXk4z`l@?# zj6b>_O^jDpXi!_KfdRR1J^VV(!-;3}|FMWdoVPP)IX;-nNHf;0P?#uYvJ%7ID9%ST z?72BI*!BPU+tu!5?{p4%#oEnz!ouEW?h(n7+^BRvq{Juc8SiMOWeMj5WS?@BbZ|C_*>@AyiM+<1%Ox0Iv*85l*$F}Y>-fBZXi*Z9quCT+_lC_D|b%B!je zp?YmW2%1w?zhpBrq-3P0f9i!lx1W9ib!LY|t7}x|w+;DpDL;^+QLA=7e?u=*7q3;f6r=Wmi87TYXmJ2YVQroiE&uSunE@6yCM6 zfv9+p&a4zIyS1OK^KM_t&a{i${SHPpVaJhJ;Z90=kBYcBGRGnzk4YzH!V(s4W#*e zo&ih0r>o7USlEL%mY*KGIcM%2?dlKg`s9ssA}C;`YV<$Fr(e%7m|#a$C<$mte*C`$ zD2V7IEr^@*|HqReT&bjxJoW1^$^11_@+_{C5h4V=MJA;cEY{b%Je>J~2MLky;(1AZ zS5{LoD|1<2>Z4g5@2tIM;wc1~a8_dp0}`mYP(*O2E!e}F0DC|n0v5y5T_6-ww`qG2 z0;rSsv26!mUtu3VNhzH17L;zMUCuE9GxbafcP=mR{(5={DRpEz`u;a{Ak=vBVgLMZ zrhs341UKk8W)_oS4jd=Q-SzM}&y7WZp(PmO^>K}Mu3$hS5?WBvNOHpp=jZ#Iq)=fb zdIv_FK7nClPg8=Bn$QhYC3}RaUR{9HM(6e2XgKKjD@P$mSU=;ixy@**G8DvD_Mfj} zSA)j$1#P(T)=u;q*biQ?3bGUz);O?PT5hC_%lK#}To$+gY&gL6 z`Gm5p|FMDAg;uO%@ipNH!u7437$DGUkA7J%cXycXvY=i5@MA(!GpK=^e8Cb_rMSR2 zUt{?w#$;-y&+f*`oqsY}^*2M{^>p8`{cRV#L1iGA(cy0%>399vfWNOlb<4D2T^GJj zvJKPYJmDVKWx3V#rT6P(70CbT#mz;rAYg0P_LuWbTET#KBUd#m)nB1>N=*BT$W%NI zdbQ8S92>NgE6njb=j2fBwRSV0rv0+*)YkWCCy9?N8y=i93E(snuF 
znwQ9>IDc-hX=fBx!Zlx%aa7^LiBUDZq+by;`1N3UE0q-Z`MfMx8Qn;N?i!iFm{+H* z?tujP;)ucRckS76?j#4kXLUidq~RF_%iVm>-uPxuU-0n3J65~UKO3k(MHRLP-{?*s zV#A9bu~t6*rWp=6yw5pTkpIjSz0Cg^{%aVSM=NyjOqeRKlX)$qbFa_$^fAlv(&gb9 ztjhIX+HYpQB6?YAJe`Gh^>HDfS(@I^71k{DSydZmyBqxMRm$vt*P8YApvjil%J*=* zKROLa>%F!@#?l^HNLuSF%_|%k2iKsAgP_lEEjC}HMk0>WsPLskf^0MaGJ$IpMS8{ZkRtS1ckGSPuqM=%4Op=&A)0>?s!Ab?kw;t*-=s|=)Giev7?j7g`7Vo zT*Ak$0%o`wIRyD;&>m;pGRyBWjX6=dY8e`xP)?8Zx@KAvMB%dMaq;&JLr`(i{P!j( z={)l56TWDN=IcH>h(Wza@LJ8W|Uyn=?gJzj1#EDG~qhrPS; z*A!j4&n_E|_|fc~pdlaJ`@BN?5Wq>kJ=+>e{K=?3N@C z5>)A>k8*5Az)cbyZHE8Skz<4r`}}e<>;Wo%oE(L7(|oJT1X;(ww|Rwn3|7AmJ*%uf zgETH6+ToVa>Aco(@|`$2zW!3X>M=5e?Ga+}$%*%9oMN8g*5LMH#)o=132dE^+!u+hD}TccZXaFd>Qguq5xDiGmY_SI>|0 zg-AVz9bb7JPz+I)O9SA7mN|is6)rN{E+ZtA=kWUuZX2(u--xaw-##8-KbuG~FH-a| ze4((sW0t4B%<9{ac^VS>q}|C%4SzvQdoDr{I7DxnCjc$WzKLTB^Gn@1@8asNQoqEG zk^|pe^U&X)8Y6i47K3%p`l{%a&sOf)nM?|I>3@eRD zauT%2^_J_s6;qFT4iLkRJ7>Ad@4pvW$L9)n)RTZwBAqIp(;m~3-~-_Ij3UbrDAls51+;yvN`*nk_@rj`4L6^w^s5L)+k=>ptgUx%HkVMI z0u4#}WD-i|E+xYkF|XpB9BG7Xcca`|y0|ftI8IjgU(l3@U1)86q9I(E(#S#v8U?i_ z1-IK6(8~M2jbN~CU|qRkiARL0F5s&CFWZ(=Ngf+1ZJ zo0w1OojZKV_0y}Ai9Irk7Cl7UeeqZKB`PaQ3T{NbaF{JsTqy2$UfpqgoS`drdAuR# z+G3Dnwux`jT9w#7OXS_E_mp()m25f8UWx0ZK-Vl2eCi4{@gTr6b!`!aqJorbVizx> zU=WL?uhhwYsNjMfI?z0Kt_I}_CPMD>po>?W69of==Wrp}3gJ0Ga1LX$>`^k?diXxu zpF*tA4xWh!)m!^5`$~;shspmJ=K%T-&jCfYJ}3-#+>?yhH7?BZiY)8|fO)MA^bkTE z58lSeShjM>tb#4J{14GV@d5Q8(E-=$(-S`s#$BfThZ8N|9D2x4awHc;`@gXm*|B`^ z19Dr4a?T9L`X7~)?k0o=pC@HQjd+XW2yDm^a1L{H$y`ddzMOmvGXupZ&LMeCI?^DZ zz)_cdn;z93V&M~+68)G!&VP@HB?vPNsfdIbw;Pbemu?Zkm;)23J`w(h>3|q0iO6!T zv`ncpTVt=~q$oz1zD1m(GFSc~ET&7w+Zabw7uFd4LAvRX1!G9nlw5n`M}q2yZyUYvyNiK5 zU#G^ozPA8C-9`XQc<6p-KBBBS&nk_g>%zsN92ysIuFb(Y++j-cQ{sZubeo?_&RroG z>C@j7DJ@~5pT=?eqn(wNe&K@Z>f#gXcEjZ^UIj#>XJNmX+9-YCy(sv3mPI;vNoN~D z-Ff!c#}J7iFOL{qH{k#d>+9sFy=1}ol`d}Drh!5N&}OKv22i2Tc`;Hdufz(L*SP~h zAd%|9UkD}lOb=141}(k`Va2FvYv(QnjcuXpZF7pNLUtQ zwDEweedI_zj!Pv^W}i?I2W!CTkQ=T- zy-GaMKD(%!cmc+Wf1*o>v1mC3Z`8h(I(eGZ$K!E@$ibr&+A-~g2^m}M3WL=x4JkBk 
z8^`igLP`CEoH#*^9@8U`*J0N zSuEWnA8Gf<7|bBuRRo?I9?(LO2&(KzWDq(jGSFOFhHA^dCq#vbD=r*rWq{g};j@BbwJQH! z)d;ItaQ1pne6<)Gi&JDcA5j#`HRC_vd=&dXwgWkY?cnC_S@cv6WxXsKn%y4<%}n>> zAKO7mF<(^5hTocaNOz50jDt1W;`XwL@ z=xf4mN~vj(HVc~OSV#*gzAsLOmmqZafPYvuv*1`#VUT3ieLX;4)lMgs*h4op^UOgH zF{V|X_pGxL;qa~S>Q&?6b?K&-(ashPyItKcm-KC^H7S}}c zr0hlK3h7HSSkD7-<_Jr3lsqs_-Qnc*mFFJw9_?Kzt)*?=t9Y|Vegb0{4KfC8>YZ=9$bg0mGWup<3#G5O@&@T4jJYv9QVhX9`(lh>xy3Q`V|go9xA!H(gfo8 z#{&AB$KyJ}m#wdRl;W|(gN+^Q3C00t4A(xuIz2~$v;^OX3r>&e-;(Bzyv`0UWp17) z1+yzEk_Bqxq=z2jROGG>&&eo%=adP?p8P;D1PPPdqc5?GHCKVLPYCUwN%~$Np3M}qi|f!V5jltERlI8=b7LwnYxq~6>=<`9B)fh zUC0*K7#5jEuXc&L8<8T)!4NSA!qjCEVW5n3f=NOUP`pu!~{=?YMt^W12vjMk03;p!d zD`oDzDLY?OtiXhmE{%J#6GnGg)U)eFYAzuR8-z5c+z_+R&Zgz38;Yb4gw+%(dOejI zl%ZH3SCHIQmhNvB)+IyED%*K#-9|yBn|7k}gc6FMhO*us<8kR=3lg4g}>2zW`6@B z5e`<{922foq1=FYDVjJhoPD+@8W)(JgtV;kD(r?X&3v;ASd(bnxjEp1T+nNG3-P%61SX_KfwB37J((o7wcYvhJK}FeF@QzZMa; z>s-1g?jL>65!tclIQH|eFg09w7Y-w~Snwa_@52c~TX7kQo-}nJj`O;Wt)?#$sm%Ib zHJljxG*`)Z+tROD-p=3|T3$6)Uyp7%S>tBtlQw`|6|Hx+c##5)?%`rcNE9iG zDwZZFdoM`uB9!`77&uqZl^ML&fA_X?GTS*eZ858S`LmsN>&>drtoPKc4~-CiCncy_ zN1KPE?wRaA7vy3UH;|X!dK=??;icu09YOV_&dbW>es_{IDk+PAyC|4Cu!=68xovV0 z^_m>qwC!tiTJNJqp3My^-jL<>+GNjVtT^FohkF%qvaP(jYVD9-&}m<_wir+S=B46H zZ$sGb!I1W)-a|EiMOSunTz$0I_}W*8$#_cc1?@osqz3CYK+bQxmS0CW4Rr12TeHW* zTZw2pPP*hkFC86w0xKktO@#Qjlh(1FAozjJea~dQxZS}sQ8_%DKh(fGn;>g-fA7DAKnUQkMM zW(9F?iybqAEeb&nlA`v0-p&+NXjRLp>f@!;;!L|yL!n#dY0oALf+Gouwys2+Wsfuw zcnz|Z;y+i$C%f_3yJ_Rc&yO&F=w>`k573Hl8OD3+O}ZS<7JLaxS3pJmZhc**;QRRU z&H20=3YK0apX<``Ht>f(MxuGgGUu_H%WavS0j)esTHh&1ij;P(U80K4;_jQS{V1Wl>JV4IW=Hp{ENQA2bGKYvbUf*DFj2~ZlWP6 zstj@^&330f9@1W{FcMyhKXP@&8LLiBHIe#PqJRFpCP>SA=+D;k-hcW48u{=(+V*98 zMpO#Xu^I7>%ogQm5#a?txJ(3>o9>Pd7|2y#e|HSWNdVnqf6csMtl#~=W$ypr6*Qqa z|Cg`8`G0%`c8>q9bGJkI3Om4me1(e^AuMH#+YjHlKdwLk3wAhAi1i1?TV^;u-y+uD zK-mf9@Qf?l()hM;@t2}xg7IH(m46D)?86;oagwy5O{5yX9AE5`{1BA}S@oSztodi3 zgM-|AVAUp2yFYF7Kd^9JKnsEBg~{AI9*9jaU)*-amaLSgIsQp5=Qy}W`$VuIfpEd2c|B={3p#sW~)XQubBB17iS+a{rrf(9Plb!A2k&5E$+FbpY=0~G-Afw 
z0+^vAx8otruSs53>ALqI{c6hR)y|X`HQsGN<3?EL08qDvZYv${uIr^M7=7)>KCM&a zqT+JVIIf>Z*7o{4R0Bse*Vscjdq}@UF z-u~&eG0@o(*gR-DZ{qsm;`wq(H^57eOee@H5a%nLin2BA0+R#^h-q+8?nA92fH!Ssf>?s-ufTdb_0q>Ht4(uiS5Nw7 zS8%evmn7Xc?EV6cR`(IL7Qi_l`o0g%O?;fOAs{1aUe#v&?j$_ia*+DL&Q(=E!uDoG z?H-@-xOUW{^4PwPw52=0m2)9J)_b?&P9}$K;R!+`Jdc`!s&_6^Ph!hTQ4TI<%Vq(t zgE|=8e}#!BH<;X2YMGUNax-~c7ipwY^AWizmGLd&O0ou}j)P?h>yBC}W{OPiLvhm> zU0g}`-T=Gr<2AJOH$yjLjf8S0o#dYu=twK_TKLs9IjrzZ9Lq@8osI+gBmEerDZc57)YdxK|W``3KJ#02ErwWSEWv3p$onM1e&L6OoTl`bC_eO z35W_ES+#ul#OlA-!+!P#^UoVswVFR0?goR+8dY>+20x$e?5fx5&ezy3zlfE{((C1d z^>R#_h^$JqRJ;o5Of?dVypC*QIGL=vG`H5+HuXWih1SPgHp~>XoN<5PtyKZ1?NJfT zl3TFs$C^xG?Gk^UT9+hUt+Y$qd38Wd?MdQ%??tECOh1?Gip^I=ukcHA&=<6qiZT1= z3KRJMme%tno^5+i&W+|E*l*)8of69w{dM3wRxrkP>+nb_z=Zc19JsKii9lb8O0e(L zNPxU8N+yFnY(ZbM4Oo28VWii3`ocWS{P)&XJ>5}e`L1Kz8~O2Qy7M7aeZ+3D{W7e7 zv!tKBKOk(F%AScy59@bgd7Ed;9xO(itGgtX!pXzTh^9tN>V7RIu)(>AGtE4N4IMw| zZWsHw$txbd(DmzI{3V62O;F`+Xoa~kX<{OlEnEi+n?jY_e}nTgwM92bv)L)GK8ndU zpp$Y!(FTyr0&Z>6<17`@DmhoZs5zN~D(pnSIg1|yF^sme${vS`{&*Sl#S)^B94_&0 z3su0A(bI0s9R{R+N1vQ^Yx$tXc6aeL_Y}_af2t@U&JbAh$yaDOXvp6$L}Qx_ise5_ z9nf4X;6OAeCa`!kv<}!h1k!%Q#>vY0-xvjg75mlZTYbZvedru7PiiCNsr+R|9jrBm zHkzqiG*fN^EXNt1<#$OmS_4#CE5NqgBCHO zhnB8Aen}nujcMx??cttJYP5W@Cv}>>Fdb%J5-A!A zHcoq@qK-V^vs@6TLI^s`&%exqXq=KpzAul*7okYwvh#-IDZiYQDJib?zUkf08ey9Z zH^2#RDt`JRr?b*sAwr>sUy;&9?Ne-Q&|fnvv^l|M$CclLu*yySsv-Iw^2(=y)M z592J)Tc38uezatC(Y8TX10%hBI6qjbq@FbWof{3ubLP4aU{P*@3%tp;h~XQ z*CuO)@B?<$tPA=VZ6^d5NghWc|MWpDU+B}f4|l-J+`NX1rO!ORM40=#1P{%0F+(mK zRS0~;)1OmH`|1Y01!v-Se_0{8tQTFjC31qgHqW=$zBY19Bg!#a$R|W2SniaE2|xD1 zu^B68CWu#(VBy7vWzpqr2GWRVn5e=Ud=$l_UBqiC<=RK=W=Pb+a~7l>luh8j{#fe> zP)>?*BNX#S+wdlb&m9X^T=uNpvRt>HE@d%3v-xe=&Ou87!`mQhtE%?+W(O~G;Y8GaPc}8;icQy`trG{R@sRL{ z=^3l7 zLuqO-TCzx9B31enQ&Kvk;to+-^Lt)H`}(FfORr~-dX4lSvwH4ODUX> z1ZVktmTH4k4jsVTnu9zyJw@oWsl=p~%5i8*j}raG=V=_)#lU6ICoUeSW|M^#nqV2em*|E*O0IP}0Vp-A%YE7$S%t0C-VjcutO`{s>qD-1P(PTPQr%R9aSoB*=HVjI z8AzPA>W9<0=xsr0o4KCrLArJAA1QJ3-v`>WtG3vP5PUgZjEdGSnmpTb-b3k`Z+@w@ 
zJtfmAwwBn&rNFPC)sj~O^{#6nVn7J+*jh$pF3rk4=YL=CUY|Ys)Jz66>xKyjcgbGc zli(@5-DdchieIV^OlM?O9uc;9Tz)j9hVxTUSIsUFp0v`D)^08NG^tlP%g5~VvqNK* zs>*KF;-!d)$vr-ekrw>yiF0M)$aj9C@s&L$1Xhwe50lRA4qWqt=u-QDl)0@f-s&)B z?Az<{snW#zBK4=uP3l-WGXzst$?nch}0CproPD+~!Q|S`575H?1GhcYH zVJUQ32|{j6v<37aKbbh7{jICZa?8Je))=pVgcNEo?)om(x|4*hBr@d?RA7_K~vEHQJ8 zz-f?wv^-O?nEzG4%~2E8k*MHf5Oa$+e_*nB)NW`+eecC;^?pyZc*MY4ll6_9 z^F@2(7Mgb*G=)$gB7GFBlA)rf#pRJec{+qrn9E4#bY{9(&`j-juV<>AJEu&)ybs-X zsCJZq5W4qRlwBomN~@RnXv>FDy&bH-qB@F?u20tr^i-S~OouL?b7tmi?`d=7W@ZyKp{^|ROhMCl4m*8dJ? z3@>&cx{98wJN`Lrso1(%Z5vfKThjwDYwcw3g!TL7lSxm>0`&Mj5KTX^&1%03 z+tBJ4W4_!0PIQU3S+;f;88O_*-|$!mf3CNy(Lv^{!NX(6Yv%63s@eUB!r_@xX;IHd zMfmPqN`?-GCw{g!v&RDZJf*FT2KBtX?mU-1TaGeWN?HnSVecH$s~R74v$`nZ7FC?4 zv;f`R8xfp+cBq~}>X(}SgV}=mAwPH^VT~BQFu4*AaArjf%lItBW+N*(1*&B0V}}rn z>`8AY)K{L(^?w;J7ML>%`a2pc7l4F?M1_Qvi{sxXI64)Y8;oNHr3c22hGwB;NDq3&aAz z!~)>>Ph_}RAaRCV7l4F=19B++Xa5&GE+7{>34j|CJT~@! zju1m8E*5rfAdm#W#mdAE_YrIj$KN;&%s%14SR&&&Gq)N8293qV+~KY3@QcJ0xd}?~%;@`8MP3_~ z>Ic>6_^@^VkatDiEIGRF0xoP|I{PjgC1~X55D!}JJyrERP`=Y|^|k(aQDLEIOTHGk z3qa?#gicHja-5=oypO-$E)ZKYJNdr6CNf=a{YWw3jT%U}P$!+@axwcF9YHG`g*W6u zUu(0In4s`}h?Lw?uKf~j;;!6TY=lKF0-N`B(`})7!Ujd;d*`9A1SVhm*o&>^ew#`y2lsMp-E}uMm5g-r`ufRG z%9b~3hfX2s&1*ydQXlK%;hz8f)1+ZX=i?a>#?hYpeG})xObCDHGQ248S!u|}nMGhI zquPtwuybI!f_LHkU0}_(_4!Hy6U1$aX)8mKG>R}vnGj}{!_ye~c_JO(fi{D~Z2?fN zvuv7Pa_Czf{^CY8^hg;cApd32%J^Z<%etbP>bD5%ya=1z9#2p=zq^OC^UpA3_8qts z40#pe?=CcGJSou|LMk#kli&}FN+t=Ps1k_tJ?5OCJ(A;CHWWUD#Dg~1P{W^O ziqC9uBM56GD25x3wHAb0R56VV0R?c1C*%&Re6SufMs(_hEn~jT=$1Il@kT2nU z5oS(DX23Kel;7t~do9+)GYq;_sJiZPM?Vq-=L*e&SRZlTJI_3y46+bJ`-=~K!^->& z)C`9tFm)>hwT%8;uH%R=wVJh`^p_M@bv%wPf*$C?ThdYYFI}Blp?*C$ZvKOhrj-}+pVON`sFIx zfD^-COJBeKimseCi)LybB>dcqcMI5AhI;h~QWc4*Q(bB_7yv4QQeRd#n5}UC^JsXhzP8qpnAue)sOP8hIN1NU@vQGXUcTQw zzY+WPkBO>Vy8V~NWN%T-5^8m!Zu+rrMW$4u!fB#00<6w+Z|B?gQr$vGO@__AU4HVC zE*V6oiY`tCcQn1u^8La;iD17!-ZEF2^sq0ZCy6F0PW%A8Uj79!gFZ7x*>^pLPyM=j zZ?)f`<2@hs*|4$2z24pW{pF^s#?5~wi}ZCG 
zHV>kbqD_?{?W4OLsH?Aod({)$J%h(4&}$YO2kwt(wyNHJc*Nfez$tdupc;#+HVTt) zi}IuvJBGo)6bk`er-WBJrQj+-6}5k)l>g3(`}ZK17cO>^&3KxRv);{ELwkK&n|uqw zN@8X>Pk|>Ne(WGap!F!T@o8E8T1kXYNlgPMCO4KENK8p_L^s09{jl2Vwtgg(g-BO zO-ObAHbF5e#G*i!s4|xc8gL?>mOPp9jH!`k%^cb2NYh^#p13yY9WE7DSzP6i*(Om* z55P~NWr#xGbz4ZUSbX_Fnq#InQaAaa^&$H=4(lP}joLwLIx($-vq6nik4t|}qqLC0 z?anwkQmF}O!p6AvOASU2b7O%=gfzLVRjH(LNeE92L!NXNI#5iZ{|VXiA?4+xZRkF@ zmuW_44xa#;F*s_X^JKVvnS3DAc_NT|Teue}_=VFRh%wnCcjk?Z%jtx3NyW1&(Ylgp zuiH-4wBKjMn>#5~+10Hm?u~94Stq#;JWl*wb+cM5 zi2%TVLLO1M5?C~>D>}o~HN|&e{qnU;;DAKeqeSi5;Y+jU)}rxh4Zk_c;g;t0=1giP zN02;dL~_2#m76KIY`L##rPjyS3$|(T367VSukuoV)PJIbaq9VSe=={;5jc@TIox;p z5P6Om9TYd7$3`Kd16>s6>A6!(#N755m(x|Q5h-v@u6Zv6&h_g}0nWMVuO05O-QDhB zt9woFXJM~y(pe824uKr1V0yRpo25kNZQeK#*AmgbXoDBm!sYLT^u|9jz6vA0~jcg@^2H!s2_8*KKai0OT*s= zWA-d(dw38uG|fxOcl*92aR2S-A1kW&OhY4vLYT<&7qHm8U@PYUXWfL{7-4_TMAHzq zryE!_l<5x^h%oREmOtPq`cM}xNz`I1l*%4&EnTqP=HrbBTo)N>GdBmh;OgF0)Ornt zKFV(-`%zCA?sMf5?K0nPES4zZO`6IebJ?*@gkiRfPOL1m#Np_SWXFFozScXu8|y-a zeuGbAEG7M)eN?VI2oLE$-RRux|MpQyp}4`9zhR8Qy?M~AU_Un~#IKON3X(~1aj>zH zK$0O2NcPCVNx}|EdmzWy*;v8G+0e@1_f{Aduv|Vg9f+Nk{r{()N1*RO*mU{o-!A}v z5#H+n{XO7MO?)#eeycl7{Xy@eZ2_ZMQ!UKRx#iESy4Bq^X;ch;v5;A(kR9Ln^pv$9 z(i{8e8-i8FcLH2xMZFDS&E(28;&e>A>k1zF#K~qHwOgEc-V?2N5u{VD-4Wx z(=-t<##L$Q%K9GD6$fxrQk$clF>cVjD@@ynsO;5{XILTdlqBwQ>{_GFq9gS{3nC$eszhT~dK_kMo%ave>q1lBYAtzolop8M?Iu+uy2NJ)ibkIsXcXT31zhyf;5 zH#=uMXFBINVN4OY9J3e73KZ-E&gJ;~c<@}U#YPj|MYa$-JBsm&j^?e0?>c2jSv-0V z`_4Qq>l$Z-3p_3sRT+TZoloOQ+Suy%giL&mlNIf6f_=s=^N8!aowVC}Lx;GvhObUm zIREl_O`GAh)Pv;x$(s(3a{&FB1TLr$Gx<0;Gy?N9z-_IarKjyAVv$wo%**5a-}kF4 z^>_*5&jhg5h#;cJ&(oCeqb#y2n~Qt?NAb230=J96yW`^<%4EnkGNE8)j-+GC?KoQ6B>#pfue>81l4=#!Bzr=hMeBPw|YqF{yfD+Oty7@6|!y+e=?Kkyw)$@A& zyk%A0&YXXzdqn@A^M=wCS;xgCP|hBf_@Cylrf<_>x3}Bp27K=XD-$<&8g42s;UaeM zY+c-`5fozQ)e7&o5hXK>C55Yar zvn4;o`~ph=*Fty!apI(2JL8oNx`HCdd_CBIv}g&la{?0f+QW~!UXx4Fa9rHOdK9Ac zS0@l}S-4-O-WQ^14?f}tQow^RyTl5wNDn~EMP%5~6?W+(7CDBeFUYhJy=8w;F(uuX 
zr1?UN^+Mhq+;N2piC3MeAs|CBz9EESGU}4mBozG+NeUGT*SLhBHce2RtspH0xjDB~@)5<4Z)KEuEU#agGq>*rx$eXn_NwJLS|PQtD{lHKB0Er9 z^n$Ll+iC>M+${!h`z*dFnEl`t|NcAi?0uC1pU-135ymrNgzSDW+Jvh1Vn5%Z_mT_l z9dZp`%J#iF+6Y_fwY$_{O!fS#NtB(GCBr28`b}&QX`|1uOANQgm(iw*4WTlB&Ht= z3E@0ORqyksG`}~Sa$9IwyRdH|GMdmI4)h@ENB)s?>a!ll)iUy8WA1&e zAl#iWzRT^ZKf+}zl9TD#)VJUZ`Ti?(0^th5BOK9*F_J;f+FsPf8IMInG`RwF8C`>@ z?f;8>rpF$hnH&1ox*j?o8q*Uql#k{64@P*C7>&%;rh~Sgvov>LKbLe$sD>PCn`gBk z`K-}Uea{7F@XvDRjB99BE+zv{qZu~4{+DgkV8#LPl0`6*NLqZF37kp|M#f5(;?Rn= z&jwT00#={(vcVj_wAh=yPgEj^NEzpz={RFQdCIqXA?S)v#{>zayK!nQ)bZ$YdHI|A z#%C@^JNMzpZE@1RO{0CLvw0DILtH!-(pX<( z?Ls0jr-W{B@gobB7&QAQCL0`ia0_{F>;}a?|%n@f(FRP=BG0i z>QGB9<&+S@mtlkGS#1u@dd4@Ct8~O270`2*WTplTgNSji09;!*_$eGjgu%I#8reU{ zLNwk!c?5BK*(Y64@#hTz8cNQc>sopfd=fMCtA(?3Ecb^a_t$K2T&h1U&i=0HfQl+D zB5<%EBlzZOVq2UX#Q9s2k=TgK?5W%LA`)GFF z>{m!y7erY=EE(>j>8NrTUuL|!_KZSb>5S;!l-+8G*&r!LY#CdjQ5uAoUU`HPwyrG3 zUQ4&LG>l)R?w>hv?H7>9K1r%Xb46p{fjN$iR!=HwM=~n4;Z2X1 z4C^fXQbSHtInwSaIc%aVohY!7nk^7xb$ zhVD`I^rrkMKsR}=`G^Wt_nX8?-_DXCB3r)Px~{BsO;_E4)EcO=U3NAdyRA%zCMEVI zBMGK?idVgMA;79eUFIe?D^ydlB*DI(el?dRkUwV@MCY&|jS#6KqyZD;D%whz6BdV% zoL9SjwN9{eP?LjQmu-{qpv&(5;)Tr99>Z>7NlS5H7*eIo59-&9g!gyJ#QE_Y(mh*i z@h0S{ld%5vlVU{a3(TgKau*1fkMVdNEx5?M<$@v8{>!;<{UR@)(#hAXoLn&80f4?` zfxj>vR9c0)uJbuLO{lbmZ=go{sPOwZN!;|f{(Ye zV5Q<}E6p;on_LV9_`*U<4y#PWgt7K0QI+M@NTwqG2kUbJcR^Xj5#|@c6Nu*bYf;SX z=JnYYD7nl}z_$%E)nq=-d2K*b42Anc`^*xj$!` z6E+FD17tOU(9V`6Sd)Guc@PMSc83cbn4OCW&Mk*_0UI?zX+v}WUjrUD7x2FtJ;rq) zy4@U~{m%`jC#8ktx~2u<5`^PeTu(^!n+ z{c>$TaBqDcA?l?OJ7sSBWREQic^SJkU`e;|?A`{L0y z@F!8f@Pf_8C=!z{I~*k7^?GV-8{wqb#(l9pmmWxjzPQ>L1CQo z)$w_6etc9qZotOHO@2j;RL1+rvw|-hRESRCO>VKW>jkRdCBH(py63k7yl2*XtfBvH zu#4Jg;^I9ZXvpo^^`A6aC{%Yvr-@(2yVGCY~h;+Igay zPL@B~u30)~b2!OEG4ivQY&pk3x&DP}TGytZ6c|=DCHa_cpAYU-BS(>%R0s~JL~oBQfy4ToHxlDW;ZaJ-xh7|~{Jsk6S?I3$ch)JWSO z1+_&JZRuV3Wh5?L&cJC@5_;?lW1|VTbTx_QV|LJ77wJ~Hf1#j0RjJ;%bahUmy2%IZ z1kg^fjW4YSC#74*nTB85axuVyR-p2r+N&qvklD+a4ck0#XXUTc$5}r%9f?kJ+)}S2fnOwvI!i>jCavlQ-6Aw7X<;AjMyt(`dCht^9wu 
zI;Zf=wsl*_R>ih$+g1e?+qUz^sMxk`+jc6pS+R98=X&Egm0Np zs~&Uwp5>*HUC@RU#&}*v3!wqFb7b*QH#BO?ry+u5EU}-ErisF@#3MX7y0)S zKCx!G`>KaTTUh&z2F9o0x*`upDq|w;^P&yr<8cg(9n(FuOobh)w189~KOUza$|5D_ z2(^y1kwsx$dc>10(dZkq4LrOUM>C{+{x3eg8^PN&er>RKZ}Vz|Uf;(3k@N++T+j}V zoWGLW>`s7o3H76eF8sL{pL*7&)3s)vb5CblC{k}yAVGhvZjdwxCtAT7u1*^!usgI3 zC-&P0A#<9(*S0dJ13-!9K_s-WhMQz3#TWSgsXf;YbawF6GiJ%eudF0HQ@e=Q$>T>Q z%^L`>{I1^Al4nmu0Sqv8dNx=LspL&vLG9+B#SG_xKCE2JMsni8Q#!rYB`?jCjSb~H zKfn7Gm}^!y^#N^to3+C=508QvWFB0 zdO6}d3s#KcNE69x#?~M>+@GoM6FQ2pS=>&$shy~v0dQEx-r=MEOUbQ#eB;>>xTf(( zqe3q%$~K#EXaLk%zJ`;!b)K}>V+#Us=ePkbip$@K)4fDA{3ql_>aijMP3ujbV&9=$ z7jw_O8mL?Djg~hmi-)3AFMdVNDU(J|3*pMI>&_Z{Q`%0_ZN&hVEXD`YsS=!H3Gd|9 zOhpKaELkn*p@qs!&8e}aBXi4XV_)Uz%iu{p`BOs)DZr|2O;#JV#k4oGvS_g32uyZ* ziFC@Xtir6A2IM<|532+@R$Og%)x~l1Zy!2*YbTANC!DsNH9D>axj^1@?LW8acm#*$ zL67n!lmM?JiY?)`{KZR(j%cK4f} zIteHMsNZ}DJ~j)dRb>BX3PA@!JHW+)SDYnR8M790*3%xz$37h7_5S6so_GpTF`O{LyJrtK@+SJ8pBgrvWFy88Y&oNwxCkUWM7ZDu=|X4Qpt&s zO3stz5EB>QGE%Vt%WLj{k$hhV7R^w4@0V-8QH4`*v^4ZoYQAq)Sa zvqbwc`!+YZ#~BdBl?1i`P_qA^h#crgu08ve85iSJ4g9v@8Qdb0lR)drw4~4PAwu*y z+SRnaU%uGP9XTCyXk+xVUn#a*b5A*0m)mABWm$jBtyJEBEE?OZ(1Y1zMxz5U!TUM^ za*SDr?+{uuY?TlV8Yw9S+*z3f0bh$(k(jG(-)GZ3<7;ERoY5iH{Cqr|NCdwSPwkD18;XW4r{)Z!NwpY0yNA+CrkD&q z0~vNVn@K$F*L27l0(dZGKFG6KhFO?j6Sz^k2--xbjl|&G8K3%6!$b{%2K=9}b z392ZH(FFCU2 z99htu2;8esFX%9x0wLtO!yB5miXQYn6rSN?2VQ6DgzM(6pg}dr$>pufOQS0uee1mo zW=+UOAA(m0gTO*z8kiKOgh5U$&tlh#?BWJTtpGCeL;PCT3Ds|Zyg(GNCALp~*tNoP zgg*)K6o_h%$ z!^lz~F1gq-n12$_jHbG)NOsLe4W7FyKfkvdoKHkNNK$@Zn22yrFwOv}vHZ<6O~MF! 
z?s6~QVE^=aL`oiMNstg-vkW{6jn;g!btJs{eof9vg)-M+H`EK@T`n|0%B5SxgEEXa zV^-u*>ahZ`3FmDiKb)(y_hx24y!&T z=U(de6+8_-ub)bQa3kk?gy>&Nww%=ycQ6qGc17EF*zQfP9Z*(IA){ub5)#w+V(8=d zQy#C#viY{ZHEOBi5)(?V{_-HYWGrMmnP<6nA*!&p!|h>5ijZzv=FH0eKy^+nl`AUt zM|q%&J+pC?RB>@J4v`wVzF-arjfV?={v+_05@FVckw5`{4F=hjGSW}ytv zSDfPn*(R4tk1|IGp9vxKNim+fYB?mAj)cn*lAa2PgUUwU7nPU_3v$|L(HRX85iiPX z&byzB8Y5ibLrorJwI^)-IGst@*PnF zb@ffz23{{a+p}Vbzy&J#dmH0SqK~au`ZT75O#U88569p2L5I|}Mx?EFcm$eZQxSYi z7B_TnibjEa(?+j8#4hX$XPY3_na2gIBAexrNywA|xYkibQY{m4$1JI6TrPLn^0!>3lS5eGNXx@*LR0adnsrx z!o5g^W|KyzJa_T89TjESL!7%f6IOG^qxvY5!#YcR(+Tq>O{?4kpf8RAmR(|vGyR%Y z9sH;O;J@anzTGNQd$D2+_uI^PGD03_XkFXWQ6C%2X{as+s&3wJ+h%J?q7?r*2d!TZ z0^wL;f1=MTYulk*$+5GNWku1qx$XMxj6! zeHOE)lBjl&f)HQ_YF|0p&}5PgkFChWUbIXh3|vTu<#P$W8arTF+}l~w6W#vRgGw| zErc~cF^UlaY8MKUAP7CZv5SI8m2^B)mn%O@Kl z;U5u#EZQKp`0B(fl@$g~EiWVR$FSG6f%vxvkXlX@P!-IkDU+#(U!qrhOjSLS3oadw zZw6Ep6ui?Jh08cHGf{Ht2`fCEqg%p9HD3NYVzKM2{JRgjChEcWlQ)lurs$uRa7Y#+`+aDFB5t55$gVJS;h;7NGn)l^}H88-~YvNhIuYtoT}LE z{wtP>e69HxhD+wC(dMX8C05&I2aOTECg9=j@0aqMolKN{+GLyx*?m6l$L=qZZPE3J zXnh(G9>gZNBo>fN$5f-O+K#l3zZe9ZZkwt19YXDUO zR}Kq>q1;FZ+ir>|0pItBfPk@w*+=lCTEYj$amJKsb#Cf=sInC7s%Gj?ymbA1;5&qOY_{`HmK{ z`V~HIy7i1HDg!MrPA1`I$U_Gr9PU7_K3iB-O zrdDYWeN#-1{?IWsQ08&o!TMQ3fiTXkV6h_B4Ri!+P(_TX8P!BF8nu>u4s5pCtp7Mz|JXOZe4f?J4W&7Z0?_`9Jo5w2uzlWGLyMQ?TkTn4vPG@># z=f?bx?5a$ZxaMlDyKE4(fasIy6DQZkN+Z=ZC60ARe6o|&Fu}`4wkRSCX}>deEp;zW zey2#YdIa*S6H7xn>}jpBm1(tVZ>zr6yt2g{zGt-R?IozBtm#~NuHF2|-=|-OV{H{L z;WAJzza@*;n-P)96!IePy=rl*wm?>d6JC6os^3ZH z3=-k)j7!Pj;;NthS^>oqyLGtQ5?memPU{M2Q_}%!R6m|@O<)XXUr!*KW)yUW7t`-D zS98ZU>G=u}^XZZ}Zg0M?w#Xy&B+Gj=pY}^K1ek01r4PiAQviIO;d>PCDjfq%)Na{# z6m5U7~zom_t- zFrl>yr-18t4G8+RHrdA_qG7!O^nUvaZYU;BP5bf^D=gv>!Hp(HC@{%~@4Jb+ClJwy9zT3b>ujgW+XKWgj+M}^ zw>G=9UH`K$qmpPwkin)~ge{%nsjj`jhk$ReVKbK;gM{HoA%>_yQ9XP7EKj2}jD|&# zV8YIfyLa8}YegZTaiRZokPEXAl%YzLkB|L2p49B4Fb;X`O;uolh0q`7u@s}JCERsS zEu50;#GKh~DaJnlU0(a_w~_@g|8$os{}K(pvV=fO6D3B_j3i zn9*L9uOu7LSlsjg7pt_BwdKLL)k%>3d3|}FaaHZml(D{hY^L0wL34L84ihW14y75` 
z@6}fHV;wI%$0>SI9e`)2`{)9Vk4SK}E181P^L2OC?fb`B$AKhPg+G^X`BoiXzU4894XyU>-Vb$agE9 zi(g`~W&aZ08{Uuj!{qiQ$VIsyWNe7ZVMg9KTs*U7&P!?*HmL9wy#i0AdaX$#Yr{06 zxXr(_6 zLsEw+Ay$-*zhlMs zf7u>oWS1q;X;`2-U$D$X(YuAjI{SUE3MPwNe3zIQC8y0`tbzA~=X3sxJ#v*Q1l%!}P?S2luJYjN@`6lo_(?EA~(#iNm zG7DhkK^^%uJS_A6207m?%^5>h$CcSqI)0@=Wp~AXo&?3U2yX+)Fz2IXvG*4cw?il1 z6E0uH<@5CVC*r(jHO_vNDqr{)T0WvX3UCdTSm$N5>}DK2M}?!N2Wvq4eQVdHqZ6NP z^UUScz(ZXfhokV>Y|<3JcJn89yOq^R^nIJgLc(j1%Pacj+D;J0t1A!NF?41Iu0_oc zPa^zfJjGkT#0iN!+1i`1b8Z1ZkqBYY4jBX8sXHgAq5%kPCv&3PB~(vR0lj0D3w?X2 zDDDIT>~@)P#PJOw6O5tcoqCwz^x^f-W-jbgHhGnwI4xL8g%493{8};L4;VVdAty`s zuo^G{cHa%7jsqb zN^jE}JMY68>>evXrQ*c-vtdW1kRfZF2ALuUZD6J1>Ecy5F8Yfpz>7`?D-fmDN92Mg z-=9Y+&>6}sluK;f?==tmx%Jj4#T$1FT~EAe!BYOx?~8yRXWejsb*Q8%FF0W~;r2$w z04vu>V)P)32@;dA#c~3n(O9YHdoY&GjrqYzSkN{x*L0e}_VTb=78F=E&_CfjG^Fp` z$}no^(BNp00bEWc@%=L!KPu3}aj2d8r_OKax%3*#!nB<=xKOQihoopkP7~|gd9}na zd}S{oGBQlG`(Ca9p~g@qEN)T#N-l+{Ge2RW`)P6S1|b?4P-I@i1lMU(K6oGLjHSL@Dmuz$9GQ`277k(mxgU`*Tl%5h$kE8+BM@=Uclubre;1$Tc$K@v$eu=gp&nHU5BbQY-1z_PFI&7DS?Rky7Z zlw*l~fJ|RVgm)Cx%jg(_ZyLUcvk^2$kYH*{o9g3N;vKsy{agK-Os;X-<^bk}yuX^<8`*e~`9Et}S2ys9P9hungus?S9eR*ns37R#x}j)7<-abk%Y&9Ie@Z?o*0wb zZdG{ZL)&=k=@V!V46Y36&LmQR#EnkH46d1~H$7AyjQ2_-uc&?H%C70d20I$BiMY7| zeF`JfR7h=4C!hCPMP7DP5G2>LWSJdiF(WGeS&r_7T6U9M$vZ<($?3_*7w=VXj{!bF zo+s%rz{>6thDjUO0aPVDa&ta*+J@ggMveR*UXHFS30`~zs{zI0~d1oeiJAmjQhPjG|V z52DiFXp0~;8;A-$0`ITq9WeyMrwiBjdZ~1Pbbj72xf^pSgsTPhWm+zR;eFU_BXWZC zcecvak+#Dkk-_+GH3Kpq5;5wwHmOo~!D~$Gw~Y?8L=5A!&1nowxB7`6IUKH3ww44z zeQ#w#=6*^sLN>gmHhD#r3Ptfdzq_AigCIRf_2s(1K()N>(*7gi;QRq`NMIEImGb=O z+YHww42lO}X6E{D0Y`Jw?i(WkZ1))E#GSdn+mwxqXu+UTuTlGf?d>|s!BEl_itTNi z4d*m?b+oiKDti#lWi3Ct^Jdp|_+Sl}*d{wip?-+!yvE!o`^Joj1bmpKh208T_k&xcK> z)Gz#cvI}XzgW~FbVbtCi_o(>1YP9FJv2*~CIke$yBW&d+&TA@qze9{9&2G&McL@>6 z<{|XAa}tQTj+3r^^bX^38YIpIbKoEfi9M1}-Q$yjycR?1ohuOOJh+_b-Fw^nNq~WylgFD#~Lyr^VF2tAv~+ z+vp6B;kzRxT5-!ihUKktLkJLLt+hwZSWgXi+4CD$G~f(Ptk+<0lnIk`805%;k zN3nsmS&v3^)N0u+t9k?23%G4ZmYFNfynTsW-i2TvnQWway6543zF0(h3)QFa? 
zfR`GSCZcI7>y}WSgFHA-8LCmugcU)#oWFpN`)O@wH*=8zO9FxH&-a7TSi1<54+}f` zN_szbK+j7;(My^%F^fyE@xL?>g|LNbTA--V(Ki;V)PfiX)cXR0`!zN#^5$N}zkT5j zL-zZb*7Cazu_BVPCdVL7v|{wp><#qAqz_(%zeIJXCRS@K~>7B-|^qx z2q+GP?h{c%1w=t5LZrgxDG+>V)pIa`czi#iqAK-vBE?|DQaC_u7uEKbqTp)65v4>j zOKrUF*;?DAiE{zo@`L%j@rbM;GH1k13gfZ`i||YfyFnAA>D+2d)7>XzbiE(j#le_P zdAAM^+%k|=4^m<;JW)bi!0gGGN`VDNd-TRX+AJFM5~()LC24t?t0FAg=(od-a7pS) z$bSG8_RSA&NEJUWZWXdKRKiDnvL6^C@SzU_YVY6#0B`_P@9fwXId(VQ{PM3zY!P4< zJ1CeBc5I+4i<^Q7;KJFpKiIcISDvgY4ldHL@{sq#u#s0KdSBOn_K!=$BF=YHLM6** z0C)`9c)P}PZ*tC-)2AKkGdbPf8(bDXw)ojb7gSed#R<8pKs64-7wTsHZMxZYHOr<; zZDTIbYEJ+SJ)@~phwT6BhLntNT%pgR@A!tc#Zdxpw*c>1Pn(tcNk5-Oi`#G)%^y2BdoH&FLc_YeTR%=)(3D`^TteXC=0yk8?XL3 ze?Bi^W}aQ(sQ!41OmB9N6P4iXFQLv|4xv2h)FjQpjOcCX`ZwY*@O~r13S%2{5wMc^ zlOTXHw@}4|;1Nu8T|X(}FcWDX3z&$NNSaJ5E_3ZM*ZLa!+qS4k0WrF-zVzcW6lV{>v-mxn|*AP!ZXq`L73EE0)Bt9GLa-}W{wJF?B z=2otFA}#W*2CJV-8PO1f6?wgy#oMv_oZn^yF4&pR_&kKkSYsobc@e&r@y=wVq33Ay zMUVw>AMA2Q?CO$mgpYCj>It>DMqTwm5|5On_z1x!|Li<7+T-fcnWOl>vMm#Xk2ELT z%VNT}vxf)uu{t(F_D+9JsrR;q*1OP!#B;j%&`!mqI&oK!%aa)_Hsc%A_xpYvw-o?D zngh;*1`1w-U^N>C7#4k}wC%b4 zK94y6t8$ei@#0Bp!|5L`WUe@0k%p1U=XrlWBm?n0acCP*y|48Pz8@HjmfuuslF7t2 zymML9b|$B{cgo_E@drVzvug@hU(_hGESE?eL&&LV+_B``wH zroMEjUXKb(Jy8QONw|pAz_+)FR8OZW#NQy#?r*G@A5PeIbKQEI7ztW!1GprmiY63T z8y2%xixDJ|Q@xSRFL4PHAiIYA%#0(#Lhj5*4*aG83QJZsiy>hs&QEV z0*hb0%((|um>cN}H(? 
zx5EmRIPKw;hf7*Pwh1tg%h})DNsH8k;+XU-EBGrI2-J7KSwh2L>&u9TSb3_fFzBK`zlVZI%Uxh(e8^S`cJ>=AI<}lo+|H8U?d!(=5o90Ps}dVsR(u% z!&DDbSe77whlOrZKr7Q#TgGx9alP4$+iXq10ej=-9?7v#Woyg>_a&Wa&6sj`6s1o5 zO|RQ7VYZIRl{!}$$1|V+D1a3F8Fx$jeH69R(MCq3YZ)0{ENv3|+>m!fs(a$YT0L^W zX+<6Z&Hau`@gK-&UMEA>s~%-nmyep0icMd9<74@T588b~`&2|M!O+lK$F@$SOLaQi zY2Ay@&$qLW4u>xT&NrE~n}N?wDeLm%WwWf7(-gzxfS>?#Ux!$gL6_HxIz><2d7jPp zpGaT z&hM-PPIj*E*n|JoI=^!Z=vg>YZZ5yIPOtrMt@Aq=;lC!OEM++vP$9UknVQswS=KaJ za;#Wmj5iZ$iSDx}e9<;TP!C`s(11|e!wt?>X*PJ&)`x}_L@{y9UeTL2t*4(vsYud2&K&)L6PCKa<}Vr!L?DVabUroaTG6CT zQv6_xmIK}UQT#=G0KmVB%9Q_EKa`Y?3lPVDht;&p?_u>a3Jmvq2^jx-TK%KF9*x@u z?{igiatuxz(GH6V7wdAdxe>d9W#mtLJ8Rp40ek_r^)7%UT?AVcUv4!V)nqh~ZJR7j zOojj3xDFy3_lqC&eTGVZ+QVw`=Kb{g<)3=ZufZQvDef<9pKso=$r?4mY!Wr8jtH3m z0Zn^2KiF_lsx&oaN3ZvXf!&gU9>iVWbFzj^EwdOPVMh2ZPCW5mruUDBlz}@GM&I|t zo={l&Mq1S`&EO}RDj{4Us^KOKd3*u9L^E9pQ>Ygsk>sR}N#PR#pHS;~FEBG@(LULo zb%}4+C5^uqT5C7=ulT-FOVj84iT6CfkLM0bhQn!U@~)u|3zNzNadDcoi33w~;VY9x z18Tej3hO|#bl55%ihX-VnCS`qY5V<~8HW!8EtOCLa7Zh65^5rV^=K<>+COj1Dn;M` z?ZnW&Kyu-I9?Pdc2{)e3JY3ra(R2sbx50e)7yyO~{M~5wP_bkJ3UUHwz7L2-%t820 zd-xbwC(t9Z#-|U>x#A3Udpi58%VJw~ugm?n^;R>4#?$kVwWI&=gXQ`7W;e!ZI#XZR zgTiBGbz867%bY0zr#@9)r3AK-7zXXTU)4W zYAJ1kMdw(GoADnjRJ?2e=x^a8AsN)YYdU9SU(y1~Uxx;o*5LIEo`}y?&68EY=h;Y$ zKmkmlBSz z=ks$8(Q%e)P$9eL1qgdE-IObk5@qVSo09e&v&DSwqMH(K_AQKIPvM^ync^dWmf0kY zBT818BW`tk;wXlv#0Q%R1?_>BJF#?`eYvcP9`y`vjvl0!0Uysews8lG<#6YWzeQH% zVlf){V#^*alUP^kR;vKC)xc~8zw)MTL7{u^-MzLNq+c?(Bk#f?HI z!(WYlxfxY-KSCJ#lcIiD=!zuvG-;3s{aL~MoiA(Fi|5_|7gZ0?v9fMwF-&r&t$!Hy zNUKTc%ZO^kBm(a)cT&enk@J4>>#LdF=!hMYbRHWI;blTnL3t0Ub#304=7tMqYD3*j zuF1@v%(cG%>9LBUtI+G>CsHi zZm)4CRg_9Nr>$OcGjDf{tn8FJj#eKXJHOy1cWpEK;f8skb1O`8mFRN7)0kQDl!wnU zDbl5>y%2p&??|&Z6h?*~lWDOE!@~c%AMM%XR%J%}<}MGw&p1i_tA`7aZ`?Y!!!@}6 z{2i>>cY(|XEBh7YXdCd;;;O1$Mq9E!sni{J{}Kk0kd|edr^zr661vk)`|cQ2awt*= zuTP0{fR|J8?D-T4MrfF37w2_Eq?Cf_3JDrB7(KG4TNTy~!0WG{Z z@u7iYKob&wiOlTKjK&6Wq3GX2w5_Js()kql$-<2>GJt*P8 z%tNyC3rI3#UzoXTB+I}c66FnMXDjZF#q!JYx>?}T)2QJXk;OTBr#Rq2fs=14cP~*4 
zv;*4UQA^=%MDl3V{7y(bVNgmyhO4eXD8eNGt;;aZ7@ShP&vf|FbUJ%^W!Bg_a7B5@ zsfygeQX&lEGU}BO@-J5U!Uq=md;J3%3vQEMwLfRDbN-?nA&oTLVcEooc}#X)#jrc(*m-bAZ)B$9 zE7DzW#3j;b@9}DGs(R`CE!KJrx9}0=o8r#FN_fcIBa}KM{E^v7oyOE_rSH~6=+M>F zxWIO#Zq+%y@#N>%g5srK=L7L2MeVKMtpTjGvE>%(I>m>tCBLl891h^e*$5i8GDj$} zjZ{s9CM_e#v&It(SnSS1lR%Y!ZZr`o@2m}5Z8p>u*zTqYcfe;4hGZjD|Ew=gO*pv# zD+&#gW-(Hgz}Uz|kuG4GMYa?hQkfsDBhsjF4qZ7UrwD-GEK9uogLdOQ-%q;;7}qPx z#_^!$TSR9(1YMsKfelWvaMq|RAph*&NYXugS(%TxcOEJ)tLwJf4471@#^yZ>b|vgN z-N!&3X-WveA<-tYTFsJYQmQCv;r@-Yz7{k}7&kEB8cw4bWa3jW|)7NcaR^RMRsAN+V7X5Ogg;Bz%~M^n-8~!d*h50*U~`Qm3v6J?wk^B}M+{ zyQ>bzO)Jw@r7XXnKC{CnCV~#G+_ecSoUIfj5+M=CYmq()qq7m&Q-aP4%@v+R)oPXW z*FdcQV~b0!)6ZCCB9YS1Ok`um2%pp&JWr*~?a8d$Zph|5t_KRhAXWgyC=67@({W!c zlrEsr*0cbg5!(GJrU^ySdZWR^=p|GZ#-)&hCc(1aA2ag8t zM+GI}Jz=Y3nRtuF5=!H zLOsRE4@xKEuH=k&O6RGGJDYaIokzy`x56JBnY)5%uvdZl2M3eaKC#=CHyhizH=PM4 zf80NJldJYTsSn@B#T(jlbaxwQ&Um|y8#`)du$+nUb;5C<9?FUkaCAO@Mi+Zm>QHyJ zK72~`Q_8nqZ;t$_z2tdXjcc{})3G$>s;r0d7Q(#{web zZRGcWkeq}%w!XFs@%pusD0h+=JIywW2+!<_vek>tlT8rM=sp%^b@0S2m&dI9ioXlS zOhpVI8!)^LnaDd6X*F! zhXXDt_R7scK}YLDcb_)MVSJNh=g9Ud^^nsrdQs}bLQboXQ`Xs|Ax zwalJD^V@;2i(7Gq#zdo#)~bpTjV;1g^90@?0^7Z`NdsucJKpZ`Pt2G+a8Sl%cR=LG z{e)h5@Mb)l6MXnY^0VN`+dgZ4IO%;7J-x68PmAsO+#lg;3T%#x}_*MpZ3@3!Qx zvO41eM(@{ZF_am;T~+b;Il3M~Vl#;uIF!8g(L+5IhXUpEBm@>SEB2d=8|R{hqx0*= z&Ffni_qD&!FWkH~0z0qNygFrfhyeHTo=k8)PfDjCJ==M}Np__>`NGv0ly;&Wgp;jJ z6o@ZeFgioq&VDF7RVT1WCK$9-_XXZclv|)eVI?b<`8cdv@p|hBB@_oaKS+~F&b&I7 z*JU3yCrHa~)0n=15!p@fX)dY##J&<#uvFHG_mKY9)bbZ-vs14NjESwWQ;K-U_erb| z68PT~9WjvaJe>arCSv*zOq4Qo^$m;mf`j0vBz=Ksq{Lr;pPj#>9REK~R71-S@7t}q ztyecA>A?H={0*w-2@?4IF8&?#ea~Q=p(6S1srl-`^zupT%96ps4w95y#an0Dw;Oi4 z5oS1j0y!aw`KKNtNZs=cn8+Si3cdH9NA6XxBc$2esPsva*?wtEBSQr1^^v z<+a*LQd-(bkm**7m?kePBOWgvf^(X!ABTs6+dw6q%Q+G*#5H8tWH=H(2=W*?ff|7p z>e{qWZ4=W)pU2>!1&)y2n{}Chkqsc6mTovLwf$1fRv`9ctVKIbAUI zr0FLG(W3Utk;pNTp`K%*7b=(rz52i$Y7LZ%jYxtAOklc6crFvm!EjePTO)NYaF!^t zxpL2MR}Q&B=pf0zd}nfqD3r(GKwog}|H^H?J z+u~v-j9Mc9-nG|nO%FE!CR_P$FkR6tBI}`V*8EG%E?3e=?S3s&6;}*SFkxRt?cg1! 
ze0-xTF}w?(Est;{J%bZXAEFu#TgBNT`w6u!9y)!Mv$tUG;bgpY3zyw$)W~o(Lr3E;SyVJ9s%HqEew)?BASJ@874-qfmqsLUXOG zwyt`k)BQS?wTgqOnQT6Q$49^)}K8CkE9^|NnghO4(AS8+lgJHzMz8dveKvQv<&S~@&jLqOi% zd<2Orn!MdyD0*fv3MfWe5yzn(S>~PSSBV_LBiNpNTV>R7Z3;=;mH^>*t6Nq|V3prl zNMG_|wm&My>+EUJHya*?MDB;{w8DE-5oVrOXp^1e>eXnhwrZiMS}uN>>C_F{6{FlR zRavGDJABsyblY++IwUq2btCtSTleXjypTnTZq+O+jAQdlbj_R~b2sKWCYQ@0j^X%v zKJ2|bv~;>|?EaaCDyPf3Uu-s~j>?Lz?_YvK$RP18;b`5=o{2iEYdEy7A%dG(>fOj< zfV^UFvpF~Aa9Ga~aO{OKa1)}|&duFY^^d|D(oFIOY>kuf2m7ZV&z0a7krc$@x4gz7 z!)qFICSS;QMuR{({f%(!2z$6P#mI; z{264lDJxj92XRp9GDY%4r?t1KJx4OxRe(X>mokcCuu-dpn>XAEySMiPR2kG?C zWSjsb0G_#|WsR8?WCH04$F!VprSdfg&8-O|OXAgpeQdHG_3_36nk?XK5GYYf4SJib=z77 z%aL%uPrdqq><(RMzT;W?3Ao}qH3$9QVCRP-w%YYhPT$z+> zV{F4Myo48%(d z5UN4b#xn95)#%!%ow&^iFci(aseUrJ`=of%@{Z{=GfZpp_A=g9UE@F5W^3c~FpBAQb-bWqQsPETO~Jmv6j`nYc+5_C-TB%TT&T+Qq%Ut@mJsg48} zQNhfn*sbZQkY;+jUFcL4&`^H0$|tNZ1H$DKV8>Woq~YXdV+t0>Y^d}6GLkuia(9s}0i1iKb)D z(|==f=f7K(;tYjH$)+DK$1b#Lbb>w?k|=hp-yP6K-qYnNlTxAh6!%DYaswZbK-#br zaB`7B_8>>sn}5W@g2T=)13)jJ3sMXsz9S)mK0%!Rkw^d8h3kYtzxP;H7S{g~s4uZ5 zY)Qnf`SqC0NZs1oEA*5PMEv-GXxNnslLT>bjcQ`+fhpd;@?SSuvf1kb{Xmcd}%=%mawAsAMfljIaz)$Q0U8|8@we~4-mq$$U_%Mgu+#UpNoSg1KSfp zQcj5N3s3W`Qg8y<^hNK@$|A_-vm57qQEeajVY$~P(il9 z%-AH&46I*;DS!weC0&gghW;sO-&e@Q(kg@ug2K*hvhhQOB@g7UtN9KBYr~pB{$?-T zDGX*QYBU0_(}tLNT#dgijtlnney%?kotZvhDoy&a`a)AbF6so`zF>SGO#8li5*vkX z%>!q{JvFwBqS81w;JnE+Js@28NG>~Pd}p%024&+d5S6H{*!~L9ETSfhMPfF*2G;xN z973Lm-uT{f_v8bt_23*V>y6ugi~^Mb-BkZJTxDoETnE-3;o5@nD`P?*M z{p;uDwj7(Dd((a*+1HEZ0}*k+QaBRii?UxNYTVJ#hy_8VimG{&p<%5DaQCZ?2{?y&Boasx$+ErQO~$s!P#7OIX2f7I zCE?F_3uxHLo92!-0zVcim%nBe?WFsFj8w+3laJgVBVL9t1DkJdEqVzcPj9=&R@1>c z`wyB*n%DznR+?0_m{L5?_MhMQ2i~8O=F4n{O+4+1>FMGYr*LcO90mAGa5MJfpB;nw ztvq@^Z~K2ERK@xqfv_g2g5?xRnh`Y{ts907#&%+MGbfC zq=SypNq1~p9ox2T^K@+6wrzE6TOHf>o990F``&TiA2r7LQFW@uslC=-d(AcH^rL5; zjviqSZ;CNfCc~jv8+9}y>~q9RpMlm2_mMjS{`{Dx4jhG?k-JLF92rac;W z+wQa=4>11pdv?M;rvD?g>YEFzQEj2~wX}z49b;v~kxe0T4{Dv~U7eZ;^q_Cjzq6jo!;P!R>8%t(@Dh 
zjUs1xi;mA{hrV`Of;(rd(-PPh5EVLSK&$7o_Mt>|S2v`Wl$46^yEI^W@^Xt}u66R- z@naUI?JlUKv+0H95V7(t12r`hf8~92=LJ@Jv1uPU`kBmLD#5YE!D?}0FO-v@Yvo9! zNMg>MGt2oJ`f%`5MM#hv!*=xX{&~83ifcM#1iCzR!WRKaQp-83P2gBP?FiZ(ppo(H zXPa;c8c|3u*}0Wdsk7ToyR<3_aTfIUi{L&t#I6=YV!U%X8wr^#41rOa?C)Sm7;tHJ z`xC}rl3xF0be)#Dtn*e5o{aYv1;^qx$A&KH1kNeB@pnGSDgG0$XQdu9wYlRG*0J5a zOH(3$T7^je*hKQeR?5_I=0Y^#^X$J>yaF4C|Q^r?A;0=lr! z(M1w1O(}aNnc&Fw8Pio4+w_OZkkV*%oF`>=?v0wC1rD!_UL4U=EMr!0x&47MG8-<~ zmb|okLW(8PK$%84b+e+B2Lo3={h1gAPB|leMgG(g{&{SdQt92-qD)Svrmd5JBC4r( z19AEM0;WVlCu)CWGG8*jso$!~-@koie}O^xNynuq^qT-CncC}xwC%*7;Xg5=jsr?hny(dPA-Ul2_Q{PNrfsGsU$glz zxDDImgpjQ0S3jV{r(+A)lu1G&!l~nk2ueg&urZmkH2(7T_`njzy7WFi>|5o1%h?xT zl7=Mm-rigth|LWSeEphXPskSx#xMJ2Mb+CBsQ6`4QMk2wvi~s7o5qxttCY_Sf&C?1 z4tg3&5}PjnW_58q2#k!|^UcP=P$aI*-d#3XOW*DYjEv9D49`R4Qd`rVo3gFc(!^*k zqE?yEe1IW+GoA1GTp5krHhQk7nney}Wh{}|O!9C6pAvY48-9eA78#sm!urMfdRes# zn5+K)8M_oSfF8)zllm_HNbrC&HKMMw;GhvSMCs={0U@1yk8ihBH>i`z>*e{Jd36z$ zHi@L3R_%bqsi+&wR7awU`kP83`aHU?vhTuD=&R{aNkvjxVi+3sG#=ecf;ZU8^ znRwk*a}hgLo6BV{o3rTi^jwtI1P6T%B+rUW9cb__zKLG^TGyH7RGojAt;+Inwbh$A zIdf~1T@6|##!x0aZ-m!7L36e0zJVKYwVA3JJ6X@_eJ}DBDYyB$8-)Dm1T}*Uc)#x~ z79Ff0Q=fT8k4U(QPZLYd(ne&AE*hx6iABacxkZL4$v8&3NzO7-En2ch%zC8&y_bS2 zr$$;q%gV*AL>?t9M&4M1khIFL!}l=vyC=+}VUq%hMlW?$-?<&($XQ!^YJyvK;jJ1^ zNIk?vDv1|*{eO&OcxBaK5bR78CCm?&@>apjd)!3gU18DeHkV)2YzwLHvaJPT*=LTc zI(VC(a5t`GQ(b%DBsqF{zCNx3vqk$5=M-pap|RFELQ+0_kmL(4nj*dL-B35$1yooR zSlBo$k&De`PG+L)y8`WA-pOQ+vBy{SXo-r^OFGyq9hI~@k`cA11ypGas);9(cvUJA zDqTzk5uBGuDKt4bsD>>QssrK|=y^YB4AKp7EyNmlX4$5xpD60v=FIy5ldAc8_SRhg z#g$)G%nkQ7x~gU>n5nV(XtQ_VzlzF3rpn^!Su6igSW^5n5g15%^SOtm?n#aA(ds-8Ze|IMAdrP*|`ftO$-0JdC5qeoJt`r>pl6eV3QZ z%%X9rJv5kaRN`b>d9X67>Z$2WvNEo;HpaneWhP5glSm{Y<>JU|A&KtV95HT8x4A}w zkp!u}sY_!a+1L=SY;lzAJcxXW?%k~;qK z1sk*xDzl?zM;i%pX@F;FexY5FcB<}!VCj3@AO@8&UhN9yax}Hqus$x94xOu^IJfBY zOGYk`t3)(_*6zjj?*4f8UC1`}Js$lu$o}bv*PAmPzyGe|CDJe~P96|?L|9A~AG`9^ zG~9Lp6AS{h09M?PU{jlb4*3;QhWU$ONT6 z1RL-xnAA9k_m*9NJ&KLo-&*vOB5KKn?=4){m15p!B zPScF<9QZ%8yw8)PQ5%dn2zqG 
zx|7UuZh;fveF-gjX4G1fS?3a1>Mo{H|Mhcd@z)ySa;f3^<5l`RGVmM%L%lTe z7CYtgVhPhu{hP-#w^o=lMIYZQB3GYucZCEj5uw-Z_j(x1L2h-A=~1CA=jiKm%gQ~3 zCzFvx;f-D2x^-jOL$=8c_%h{KiusZkAR$&WT9xd7OB3KYzC93WDP)2oEZ@c1q@X@~ zNKrr#-^qm8lK5r1V{jT$YmFBS_Cmfbpb%VO?&<8%?>8QM_0Y`8n_?Z4r=*o_ceG_DAKYWiFdi$jod^-alH_$*AaP*uT_80R(}o==sc_%C2hFn>cF^hJL#NA(e@Kn;(xmS?{k` zn|hCa45b>{^=5i*Lh{*W_bds_Od$)31m{vU&V>+7(l+}?qXqBbl_`%&c3GjD-aegZ zkM7Lep5ulTkEZDC!YtGak)f!7=4p<`W-E1jeo5uT?E8{pA+6$ zORlCG=kT(0Xg_`xIk#&n188(y^$qOSBo=vQAJ262&}V)MIZ!;SRsG(o0QKOH#{$pQq?qMU3(0j%BW14^d$gf&7RE&qn%M zxut!^=7~b{Vr{`r2E${KS?`K@LRAX#FZJ|&|Pm)xui&bffXEa2p~&=-+L*d&e}WK8?~xKiZjpj@Xo+x?I$p8 z9kZ)I>wy@2QI08}|7E6Gz&guhXEW0}LU@C+qKT9^iAT`!2BOS-URhU(DI%HEL%2$( z=AvwgfozwSc{@Km#VX?h0bo+bGfo$&b#GvwYWX2m-p{onk`|e0FsVSH4wbmwBZsr9 zIqiVYGZyYDmX*N95t;@noC%aGEl9cwmn5#p(hfaCq{eU9qi*lgT>Lura#@2;lPv9E z3Sn9#p0GTevdI=Pp6r(KSKt}0BM&rDEL&-nFf?7faOLw{a!9>02e`#?Gu2<(_7oQs zx{gJm3c@99IErTQP-@(gWByVV$=|dyWss`5^;hlAReSAemg$u6q;H>MQ@c2r%nx7x2j6ErLwtGA~1m3H@Oqwd6B@-N$_xhzpEH&lYz4HrgFwDL$a zE1fXlR|~b3i7mpXmUXZ1uQe+)4vqSA%bv<K_{+3Nv%CI44V&bvx+B#F-O>CGcl-?2*a8l$&~F*cu__C$|-VBL6BR z|2;DQ3$7>#iVs>gK`iJQzw&M~J-K9aE(%HUMynV&T1bzrE@W@e-FIZJhFg~(P$x$j zH3DX-E-W6jz)T*Mpb^EZ{KrF;j%zh$n{-shAA^=+CL3*SEzv)Bs5U~*a@Fb91LCFN zWb_=;tOiOOoEf%pP^*d>55En_rXsfZSo=gS^ZNMF=8%p8@4v4z?TzHm`I{)7Y_y6C zq8$y`X5;j>-I3Y3nxB30n0>kL$_P;;^n;J3W~Dqx0sXoMhV|l>cTJH3aqhCNak75> zi@m&fJ0Dmn59L-h0Xu%BBDi#xg{4}8iK}oD4ULCxJ+i65Ek2w+k*|4u$d|_b&k+{)_lCbvZVwA@oaVk@^853SDlqw?xl8LID@=U`I* z;Ne>&*YWt#Jj{Nfv&a*+bDfQTnOyt+NKKo3H%QjJ;MqFWV~9De-*7cRQ?RR{gYYJ6funTogdrl^`CZ)Ul9 zWU*AQ*_J5^30M`RA6YiEgvZK~!(p<*yA>%5l~l~nDl66lt_IkXZZ&7=mCn)=nb0eF zfO{UjY%Lyxv6;2HeR>rAJ>KGh(@3-H$kDG6)l3?n{-j-SrJL0)lv1>Ss12MTws9?T z?~Dc1O0xZi@`@0d73!(O6Moq4jQo*b3t->jClf@M55YsE)pP3qliiz#An0i9LAU}* zzXs_pMo`-$C`hAfo&^e3ZsitaVT1viJKHZy0+?Z%%Q_6F_~d$?4?}k|024rn5f60>egpWRkUeu)ZnC7=bO05#u=p*cWeTfD_lCfkADF=s16)P7P zxcNxyQBiG$B?Dawe+z#9b~qReb5Cb%tXask_0$7hW`05wlfx(7jm1gYu8stt^e9*W z4=KF&b5)&jaj20bP~*Kxb5Dt5(i=z%s8CLk6`Itb9@W*zlyW 
zpfp|@K@F1kA4Y61t93tg47#CQBU;1Do_50X!jI4s21Xq2*5cU_R(y-tnbfN9nA-p)K_rt*;PKuFKJ;a8UQy4wr40CD z@CKvxI{sMvO#Xn$SrxB;V`mKpp2LYaoe@3qkmHEa1bdFb`nOq{U_%8Sbp+Q;o&;Q$ ztrcQDFPjWsc7c-5E&>#^-8H0FuJt2PW>A=w1;$TziErbL2=KtaG4>c@D=(1(Gg-XQ z;FL5pRk?Zf`?30<@5=hW+f&y1U+(#`3j=X6zsTV6Ud){~6V_{CJGG~$Yb)0HqDL^P zBtL@vd=u#+jg#fO5hQ6D>Vz|nQZD=-2vR?bIAr$iuv10-oM%w2^-)M}7nr|d!7>Tr znhjk7(y;Mt(EiVa2-EoaIW>Z-`J`NV!V3!mM7ewdeKCRX1PgwGmvb|lUhX3vr7F|` zp;7st=Zu^hvA)l>GJQF(Z0+i)mA5=`RpN0h2s@peh%>U-+@?aXR$VjJoWc8nb)2fo z>-Tm#rlHfW<~iz#u`23z-&o-!JyPUHh8+r+n6>^rA*m@y!}^AxkN&yK3A>t*tf7oY z9985Yn3(dA>aYr8Nvo+hMQC=LA(owdN~6cfmO`;O9^1bEQ+PMJvcXkIsJkV3bAEOeBvS7 ztoYvgk6)qQ`uzg)HWsX>3EL&!RS!=*TXN_#;pQ~{uEzkmFv%Kc>qTW#B?_`oCnN6T z#O43@XVO|%gFqJ-W+cNk>x|_-NeIIK=g9Hu&Gu z&sQ^hhP?OctbgnP6D1cf*;WB%uV^}=xa-j71i1sH`R=BxaHi2)+?U zut4=m;ApoltOg{$b1%cC?exrKn9*Oms}8*Qq4(PTZRMG!C9BVN036fGD$mT_o7s(Q zle<^nGRtHG`OOAlXp7=J$L`l~23sQR6v21Jsq|p1qLelNv}^m+l~$;q+2O(PP(%#v zj9@=OL-=@w1*?PT>AQ;Kz@mUxU6e9mx4v*POwgb)#q{i_lH!V# z>jsodZR5EQ0$Cn&ug?Pf#ZRyhKGCVDY3x(I`ogEQA^JhPv`wzB7XQhw7J*OpGx?X(kN%^3OR+!1{ysCZ;J&+F}fPa`J00iUGm zj@`Y~Q&}ZWz6RApw|PAr*0Zgz{l^djFE6IMA@OkBjBeYn2&5*)>&-Tmc^Ex%WG!la zwL^9W!yQJ=r^`GlW+D)o1J@cIlq6eaW!lw!kuJ3z5%^VF&qf$;4uhjE_s>^RR*6f( zfpv?g;>!<8i5g%YBFff}cXTRA0KuvN?QC1>1sliKO<_jA1D`}|cNKxDX5negTP&rL zy9(0g)9VD6(T>2Ik%mr7s^gACy@9U4IqOVFhx#*m#tJQX+>=qs@PSS-Gr$-D#x#?_ zn}+^Ylw z!jP053`PXX!1x~qQBn`;XNxTx0t7$FoE+>=QY6}Ev;Nae^?#t(eiaFuHI}cRW}=6h z-DB`Uy@Wpf_-jA;X0gw(!_Y;id)<+txbp*MKadwP@^dKIv_dbO$2PW`eqDzb$p+C>_w{wtwkj~R#My&MQ_7z42n>7I zawY~;FS3U;0G~vU6>gU5lv9t6&{rRefcLXizg0knVmDUVYiFkCY}&*zn6CiR!Sl`g zrU{Cqo(Sa*hT8YopAA^}*!XzflR`fnKG2wO9jG zt7clt?9dGEuIW{dCYuO=_4NrQab;?~YuAy5_@w7B4icmA*=9-xGn&y77?&c0#&^%O zRh|e~JRIXWYXI@&`T5PJE$uUY&6VSi3%N*LhtoMV89ucTl9sd$v#`YV;xN%+ey9X9izICPrGvESr2=nNxO-i!PdqOf_+yR z_U}TF<}`+|3F@I<&wArIxXt+=szy9Ycv_hm25F$H!%cUjCze~SLsc!Ud(~A8M=jU3 zGqI(wHlEl)V*zW=-jDWsFSIaQWESA%m1J ziu^%YE~=}C^*g@)&q&HhCf%7MHA1M3bay9wvHM_=1PzJFyh?0#7CM=9SK%$AMA-t& 
zicvLl5rZj3x2&J9i&<2K!zHxg&FMLyW_QE(Z)|Ijf&D9nz(x*+n8e-wX$t)5clFuv{%`dZfo__zOu&?D-?-+QQ_1x;OGF+^B=6 z1Csi1CA;%yF6wZ4ZiaUnD_A7VZ3zmgpwWe6S4wvzqnk8~2!@bstr1!fG|!b$3_;W4Wi5IfFk&Df@ZU@AxbBAUf7m6Zk(O4RDcqCYNO zbacDvYEuT6AU~I)0P|Ay5DIOfNj@T;=t<<5mTq7PzyZhjg}t{%wHr$lxa&E#?6+Rl zOB=P>b&mh--J6B?9OM=JYh5Y$+i{9)Q|k8$3$%4PR#u)Tp~g!%t-Eb|R!8%=@i%Yt z+MgLbj#!o)2lv5^IPEr#WGo%-(qW$c3D5ZLb8am!C(a9)UdQdoMNH*PmojibM8T)5 zh|Z5nASNv)tJH%!ljvs7$tL&$bF%)AgLT=B!D7W#`KgN6u~UkNWsp_qh{)P|{mbP3 zY$wZFvW!&*1I0=OM$p}@>5Gw7({=qhr+oNn_e*>o@!1gWU*7Q`O}=^?5OsG}9;eGI zD!C)@Q!73vI(fH8Hqs1uDvq`xIY8x^V3s`$H~gJL)$}{1DMV1J*qr$HJCEisY_+W~ zP}B~eg8lzME1zuAPYpd4PzIKNkXl<%hJW2y!0GAPnUd%kz;J+QloB}IN4?d2AzIT8 zJLK>D`0W!MWK;D^w;w+8*yG{*M5}EYI2-e#@vg3-A#=KL*B$&%J1_|kJ!Ozp2R8YvHgDL#8-uI*nRf!k>7l{O;3paintW^<%BPILI}+I0l}AH;J(I# zktfn$%_gBvvV66p`1R?$*QSk=UKcRuvF>vM`5pNs{5osb<=ESEO+D5wng#Qy&h0dL z6-kTt#2TPUJ}YwmP{2br5BXWtfaoCF)MNu}zH$5tz20T~xmZG<{sK<)vWCv?yo%GF zy%eT@(cvSZscQoD4E+mj)@y&jSOaPKeN1i=uI~4eV>VIHX{$4K5!d4Wohe1uKl zw++tRpj00GTs}3%HK7VgQm9!756jH@8Ml}kP5`&D=qc@%nP#7M`xBq$G{DY>Anx;pY<_vX z{0o2~Mw*X(H-+Q~W^w>fC=LxV?(j>D#d0={q9fV6zeuh@sC|G! zf{DMCb#G5cRI>96CN)}7LLVOtE|BYW3p>a7l_JL}^QG|f1`_+48FT4q<@OJJsfs50v% zx9mn*HM@tS#+&0>K-T}@b)=Sgttk+ERNxxQ(~p;_(X9x-{Q zt6ZDF76p{;D?N!n-suK`I(b_pW4Hs1JAr4bqv#wXp>0%%d7}qSQNs(=wlc6IC!yPd zw`pFj_77DC{_BseeCN+$!cUO=G=KS1UUPqNn7ZoIijWa$tk$wo5A}O$R!)?M(RCFSD!S;D1#EfEB){$Acp6O=!$51}={ z%{(xg?m23vK&1QZML;)P&gPKdetUtRzJo=>k@@|*{poAxqWlLR24naqi+yIZBzAf* zge0nVP+WkXftmS#cESo(D+^rKukW2*`n*t77EFxpAP%x^(&DRA=^n&WQ%DE>7_ib| zpY<%BSwVvH1&c}ATFt#VQ>x227t5`BC&qVPE}u{73W>MN0*~W*`|`awHsi4Rj)a2} zfg(-a9ZLeyn1{at7u}R!tYm|OflwH^w)Yc(hnkn7umdf=y?W=3yDtM|fuA`J@o?wi z(a^kDrM?qdTD5(MV^^R|NKzlU9G;b#|V;bZ4 zT+2)S>pRa z^UtN@kQ53=W#dIUi#@-@>W4vEILb_IW1%tndH*gG5p}Sj5Kkh2Qe#{ikbQDYMtD_4eYt-j#h74@l zTsU`G_rI^_yf_vxuh|@$1HG{IB8b1Cr_mxg+goo^8TG70cJ`~hA8uM{H^kUEphP;` z-;SbOSTD+G2da*;mBJQXx~(t!X961-!^>tCCfk2SY0vjQdmQVjuP+~Xng^I#@Dv$! 
zjP}iFQdzW6&}Vh6y*;hoSMH-e&@WuO#=uUqK5-BIaO&~aj0-m005rSjheUo_uyz)C zjT~4e3OT&jFP5hWFCVQ(*2bns-x+Txv$P1)8fo!lai=%74Uy!`>UpBz)e?Berjz$@uIgF$ zQthEpEWE@>&fbXj0CzIsprz@y7;|BJ{Vb5>PIeHMMJzr1j;ZtUs`1w{y~(*9A}e$| zxqbUmHVzH-ra~}@Pi6pq?>ZcE{oGI(Bc{--&{@5sF;A{91K}0odiZ&~@L=3&;5nXsqC68vRr0zhP;swxOm|Z>Lx8Hqr8DIt z1Z#L+Y9~E@FSLx{b$wtbUA4iAohF(Q{t*48e^yd{T?hm-a5#B`^1#473yKlKPAx|P zc(o7bZeRcZihyOH}J(~H1@%cLoSId`X-n!N3^zZwbzzLU~OZQdPKFKt5_NX z$}Hb^Sm^5spwFqbiPk-*s8y$v)^)Bcfu#SKx6RF6NLFARA`Ezq?yST77V6(%_4QthInF z<|!J1DNY*8VaM2lk?6SBZeUV*TIs+-r$p{;zaRGq(}7J?@8O-hM^U)iLdlYz*RP|_ zF?P5HJbX5_LqCsh8%@=gvo4KKFY)h5e`?5w0(jij#-g+7K2*LZcGuT3DDT;DS6{o7 z#S8IS-426(VFm3vVl9$4)}A9wK6Xt@cZ%k6I5mjmYU6YgR7vh`B-{vC!&!{)>60ZB<$dF& z{0%|UML>|L0o2^E{higbrn9Pw@+d4TN18&@FYQh6>_2HxbVqKk*6&a8%NZ};m+=MV z6y~$OB)TN5DFuR~H<$hdPm6;ij7RXp`NI@o2b+iD%wj~^m^|8}KjWffE@X#me%P(4 zp$b;2yvQi0!Pb!l&HI9C2tUjTO_#NKq^8= z7lMD|ve|bW2uq=kB{g)jf(f>HodwE-6pFUg~rGyd_OWfx1{z2Pt=G->@5tgxJ8X~~i?E$ycou|U-}sm4hxw~l zKjRh%25@4Oq{X3;^%PuN_gm?{otjNT_s2p92G+m*?-zRM6x zp=~!D#jW|_T=BHtrklp)*5nIB)`mp{qx|BISEbuq^Ep$e_@7#n>y4kwHMj={313Vc zzNU}u{jH^8I|*Y!$088B7KBt~^hQn$ClH?VCjc0Obp@g>V=s9-9^cQ;`z2jz_jBj? 
zXvI$I!goZ6KKh#lw7L0P#IrI;GW65Yl#1m7BIi7OBNMba10;MdJhZetsutvCXPuPT zh0bK;G2)sCq2Qs8<@rh)_=#%dlR%e)-+wDaufVk3ifTugAJa7Fp8FyZnMK4ouJA}{ zodHpWG~&!MJ$RSCp>ha#*cgg(E7X%P;rEgwocaOzR@zQi34J^)E(_q1;3KW3(6k(`Hx^6sJq!BmIZ3& zY_=t1!CPOhXR6$<2k~VkQ)cR5;(un&d{n@^;&N8H_?YkAvDlt(-3B%wv|oL{BHXeP z4kCBQ5GGj48eAE5xZlH3@x;Y6X0vt7If%Ws~-@Xkh`5E1xR7P7c21cg;IBX>~Lj)rO z>L*dSO$Z;IK))xVt{Rd&(%)*nGY;}+K8N#fAnLch8v{6KY)!V$A7BMc?y@T>8S4=- zYhEbdIOF^B1FTU(8I>Z9y99 z6uy9LP!f<7wabc|3uM!S_Fx_r5_~$4cCNmeU<|hD`p3<*EToRDFIv6By zu0j$YItuzYlV+OcQhKrD+X0a~5>unCw*z^-wS(dZmmH&eEbt3i2X#@zd|y}TBf$ZS zLBmEF6Ydm1k%aJ({}DKJr))wUP@YlnyAsC|0)O$jB9K12|*0unl-7~UHT&{g~E zBi;`sXBGfqcjVXpYi@7CC5%Hu*|*?QI`<)86pxSmK);GrhLbpezEO3n?A|maVHq}| z-q+tjTSkV!rRw9;K?Lyflp(ouTzR|U7vH&GnAMp9<(|@6>yp2P&s?z|pdXvO;{L*2 zw#FmB+RzCD7a@$Db~Sg=GJUiUgfMwoueR9a3R$s@-{jv1aVfLnySKgoQZj(%VrXON z+*!!59JS4c=iAwiPO7>iI;9lBvVFjM1-a)}v+R`_&)Baf!+B^&NfOSw*xuUHxdy5J zY90kP)9NA$Ms}HLiKFeFLQ2P7D$@*OdKZ7d>b|UM@aGUH1q`koNP1ZUue!`J2huFM z=(3q95CbhU>6J%cL+{Wssc=TA5K1lU`%$~+ALF}e!=@UtkmcO62 z{5G*hH5L+VDrS-%bInXSq01xlRTiLg%lY1=!Y!lJQ&j%gyYWXr(NNhRoqx12_&Io~ z5sSKAc_U>P7Vvm@KbnjJ#=p(o;N#zu#ot2uF$kN5W@B#tJx=o&z+Kpwo2nrRb3wbw zP%x?Wd<*IVZW4tM>o}nBAB>+6{O9SbSs?e0)0D=e<1KI>8(2>aY?b8M)_ITuJ7$nP zApVeDaDzT(Qw?GI>MA*4G`EG}KXWSEe5zMcuhj_KE0uLAtI^5=aG$oF`OvOTUl5Uh z(#nqx%PhBAb`@MxqX$y65VM_gr8-aJUN`UHl3OTt(!+?K#Y!f)i97DML2mxxb(t~D zNB?qIfj`eqA{AnlRfnzS{5-zLAUwz*VbRc_og`mKW7P6vnTpdPS4BRl0IxZp0LmB) z|pWS zL-P(>NrG{ZWZG8fh%`iqcpAMaf4oWk)#8)^sZ4K3qvM}!T-$uXrIod|@uBGvo&5Dn zl1Ygu)}!TcNrkXyo)nNVGwM8V?p-c6I*fWES`{h;8cb@(<1d5*)R^@(T-)2uHSLc4cMw*iNCa*c z%G3zR1+TUK01UNy!H1Mif0&7W&Znzy$=}>JJK!>$<&_JqY*!`;xGAw_DP)L@H@F+u zEOAl~)hLcyhDUMQm8gm5xY^-yb{4y{NEPNs8O+XNXUIQ9@2DyzrRswQAVYZPLSVoi z^b+J3Do3WGktge!&by~AtX6OAhVaYBr%d4Vs!bQ+08hd6Qp9CSv)eBE7fU?xmI zNkubsK3ocOwYXmCO0W z#LCJ`CRP4EDLPc_l{zL&Ma8WnJ|toGIb48n4c27+_sL&yB?v}wA@RA<-}0#Pn>&#^ 
zd?kKJgtNwMQa4OkLD1FpOE_ue6rE^~aJpthk&EFw&fIYKh7&DMl9g2VYNI*{bvu9S6F3PM#hnRVcuctd~3-kT9`Q-9DS>&T+TklShR){!|Ha&ZE`A@;6dKU}BYsGX|yz;Op$!r16}{$AY^2iE#4`<=+{TjNcvS#{T4-C(8V*}gZo*|{~h z38mMw{f&2m+o}EMM^&6JY>lmRw2M!g7*^I5Uu;`{t`L?DFQ2&u@9A?$`Y-5iw5=MB zb%*NHm^&se{d9L*zLdYX5FRBTRMsXMwdjF_B!;)zRlMg6OThZ(LUuB#o7qA zKE|}~(fSBBu_c$*v8@GDiom%RLYY8{h7+4Z8OWsYgo!0$&(n@||K67MH#;%8ib3Rv zY=48qPImh!<)&lXePz52U)Z!5h-@LR#S_ouwd`w%-`25z~kx<%OZHDUb$)f#!TGn)U$6jfEr0+02p*FPE)5R}-T)+gXAb7(6;+qz`+ zw>>Qp0zGDSpXiNxwUwI=DEpO@Oj`SP@>aH>PTmO8UHhCeVJ8%hshvp74rr#DT( z7SW|ozaBrh$cB%O_{lo))rnJL5e_v&K>HTM0v`>iv5%JbJ|DeZk6tgvt_Jl{nBwWH zhJ?#S#zxU-Or-)@JXBTIVn=tqyrXy=E3M|dG9&XeS|>9i^1I?mBpiG7O@?&>Rc$qR z16<*LbPE(D5vmI}VdVKXolK$WYzaAecg1>GveT_&A+|XMyqI0jH@@;y^nw)%`ML(; zkqf}Z2aR8Ylza&071WtN)2U>wAmJ`}XPcTRrST@ot^QghNF7J%VcvB6M)qjd#{2Wl zoyJdFho)EOl7=35^|FRClQCEJ)wZ*#@VYWg&D96CuT`VYdLHMmrWb1`aII_SjM^)! zdM2CW&i9I5>&j{SMfU|7@XI=$`Ri79UZsFtC#sdqyp46$L31J3uRG)uuM^6qI*pIL z*8Np4=(Y`ly_3bRDpp<^71k8}VS{4cVqwp?=Zs{*Z_$%{y(_t5~ANS{b zvGEtU%KfU}TlaRz?Qp9Ia`(1Ci9Ah@Y&H0nbfpNzaeWec>?hlQ>o&$OqIHMnVePCE zrluX@K_6+)rHQf-PB2=RvN>pZZYRGHO}Pwq@=~5RrA;mvx1sx5e%|*ROfZlf5&T>a z&u_ew;Dl^IVHX?)QwqP~Z^9H{jv!@w4sQg}FnVZ~atXeC)%k+%3Ok@eTQQhT=;!@^ zpC((QWT10%%~!_CDH zZ-!9GeA^{zY-RyE@oVsNGO|R1i#R;u7Xny}Pp$Sl;7Qgvjtcs6aJHk@p4y>xgv-c_?Efg~T_F3-nSK@cO>^8k+DVwxi9}P@ zZ9tZA)aMSQCrdA#oa@%e0Q6lSh z$=(aw`mV+2m`xSRyf`g`No=L|S}o7m#@ zY7YMdRU9u|zNHO;T`VrAb9;sFgLktcoJ}%>Zdg)x+yj=q>dpQ`k=3B(imate@2R;dEk6D>aoI9ity&#DbIa71RNO=(1~?PPt{ zaN-G^i&G3PrSGEFOTh4;cSq3c_%k<#9gZ!h2xDQig^KVP?G-`Pe)!gas+SF`!SQng zZ-+ciU9I&zf@NTQf3v?vO#4qW62mDTd4G<&Ar|wme;h@k6#HXaN2QvJV*hd%j))gf zH^%D_7((O@AFyU<%(L?!z+@7M)|_i)lsKA~s1pq^C`6oZ2}qeTcG`#=fT(}(dptfE zz|Kjb!gO5VV+xe*c*_{3NQ-AodF|HGeR$eAmxf&;Xro>HQV3-$m#Au{WhHPlI~6K1Xq0~N;#=UI`J#MwXzaJx&+_!! 
zpux;m+*$LbbKvi=@lldw1Fv$H^9U)o8?HH`r`r26Y31U6t2w>_ZdY;G=n7L1usn!R zJGOlA3g3{TJ{bLwv$D`2QMsnqV`2IT)4$m@$9uLpuF?Q(T0PiXi*4v49rloejub(a zyPpQPH}=Upgz-Y-GYL0!qmJaLdszL_%&CR1v)XKE##OlQ&bvTvS_!LEq!kc!Twy?S z9I^TJqQ;F%(}{5W*UX0$tyx_LK-O>^tGZD4EIrVRl$mEm#wnb*UN?zhALov!TL_bU zf!3};cQ=Hio!!Zg^yO?gFBihDM-I3mQjzB~)=(ran;cV}Gm#_;n`9&U0gebi*>|vh zc5-&_jFOeFB#c-YBVfMQYq%SoN{2u4A>ZA<>@Ez7JQ&>Oo^>4*H^V9bfCKYtmtTpE zTg72_f_)b))APQ26kDMs1l_>+=Mq~{UXV;X@nR%6dj9MfU{`lC1(_H{2TA(DxvM%l(Dr+u zYgsmS(vSewjx6B260R}~3}TV~0|rvpG2FlP1~SHf^{zgfL;nTJ4(0zTL2|(H0CvXz zfypuyBy7+=2SyH5cEc4HBhQv%O1Zx`cY`!w!@%_UP}gsNF&+I#vSmOrZs7h>!{FU_ zf3So)g6Sx9Wk+ahqfERB$Q1eOhSmfQrZ7a>7-XT~sLtv$vx3->+AI(GK*I&aU@0a~ zU(YVy^1O+1Z+uc92~#ylC?Mk1ozC#%Ik~wDPbR^W34ns2FR3}orQof6$FOP{za9Bn zIfIo=PGKmmC$0xmSPud@P5h_w5-&@>D1@20o>=36?PaybIG7&HWxstziYEDts}J8_J>AFyZ#eDO^1s zw${(I)=b<*32eTSLt+c??hHZAvr+mMdp75$;VlLvEg{Hp>Hjj$FnX%J#6I|;GQ{A3 zkcNBkXszMk^kMgZxO(UC$hM|^IGju}vCWBX+jb_lZ6_UDGeO5TCbpA_ZQHi><(%_8 z?{}Wxf8G1qyH;Uc-MgypbyvrXDmw1Qk}Ky`)9z#Bvl`Ai0pH${tS9fAcph}ly~)VP zib?}!b-W~xzdOz*nHc8Ylj)B3tNFcy?doRB;#F?fS?42MUhgP zyYGurR)rZTdXpV|}Zfn0DRq0p z2UTAQe2hai`iT$#;YvVQp)v`7drN2gK8pIV#HRdPdn9Yl4j7O@ynYl@2+S1gM99WS zXAl!*6oZ_Lsb3`Jk5*xx2=e@!ipwW6tgILq@8|V6KlT9S}Et8-6;`Mguh92KX!E)}aT*X9AQ3cY? 
zLXvL3ZOkAbbRtmAHj1L7WTh}Gb}^4#{C;SnDLZfCt~BH(C0DZLF58SB-P; z39&8ye1mkU=Gr(Tu+D{nt1{kYG5#e5(0msTQO1w}obEen(axRQ>R+ za&l7gxY0IV7IqO_Q@>zlgh~U4OzS0+9ZB7O#^%vu`HEZKv6qmvA_HaWvYSSiWRNY& z7#S~w3#1EpZU_gznFIY)|MLEq<4Em-W!g!kz;#cCD-E{1R_v^lM`Q4cieozjGnf5=e0=v^YUt$X4oNkcnX2m77)fl7eukktP&Ro7yx` z%@JluZl#Dl$>$?R*mpR=J$Si(4Q{FsGU>g-smH31(Mk+e{J9nwx_SW^0$k)A_Nt%k zNsafQIncYDGD7#lsyDkU?cz~1Mq+G(-DNo!o_S6#^M~r|emnW@Ro~*xJ3ZPuGJw$m zB+-bAHpf|v#C6!ee?Wg3O9Ppbjf*C=BjYt)`vuoizN0#e5oUMIC4XH^u${^^>!Mec zGjI{zjFWY;3$tqpqUYn^d79Xl zPW@xrZo=@*TNirp&83$Ol-V08$1Z}}R-wld?CDru$G@%GuNu{RCGcqXX!#ri_PNFL zZ9Kn{IM`C|G#P(|=pUJjR_q>c8ENq_zNGJhKPz-9Md6&|MrSC56a3OzM}!7JyoOBM zC9Rn1!31LG%%$~=PwhHVSgSNB-a$iuq?j5ZJl9U?dGy6=vml%@dhA{1gVHdv;yy9@ zSj9-0ZAvL`9ig9Nzs`O*+Ap{WFtzT)>5Iv*fwiDyFMH(n^SsN&xvW~HE_{WlWHtaM zYGsfcE(l*~7W&V-23robsVoJfI8)wE6ItTWY(7Db+$_FOXXNeXbdoJB11XAIN_7tU zTfnIh`Tc7Bts!s9Pj}0;jVH5cF(oXQRLQY|KD4o}VEfF6vM*-q#JuV2fP40lEG~|C zb!V+W9%Pn%NsgOnNRQqqNDoC~FnJHAFE~Y~TVLFD`998%k2yi{aoFhEZoR2cBG72f zT6(-JX7;8bpIX14YZTR_eO=n$KnkOgj+&3fk#6?~!gIlh^fz32gJ2}n^n`1)E=H#Q z4!5_^iJ!^azYNG+Q1oWu2ISGpR#TdCrUDQ(D$=5luwoL7jxDC zXys<*^l6WFZsgZ)7s{V|(b#&_`oz>7zOW4rM&2M*rmIhAbq?rv;&}q_q0oYmo|)f% zI4|iQe4+`v8MmS(bndsEU0h53R@w5qfHfO#{wFO3h`sJL1?Vy40IqD|wNp_8b(%!Z zW@_I~_xg|Py|tp)om-El#~(e99?y@2j7v@nM@|GEC&AEi=sVYi;1Uxm&x{uLCY~@w zoKnP(gldMbJo_M@iQiAIjq8lp2A=No<==g|8jsiBB#5!rkAYzFM~Vi-=H!I#2+Z&( zQi1BB@;^(f%)e5RBO4#)3r!>(dgtihKi&bVv%bs!lMu1|Jt6rAD9e9(fh>Rb0?9v_ z)&3t`@R_>CU+{k4g~~}M5N2G*5GZ|`$tK$fe2QPTNZFzUu8ABJn{}UkL4SC93ul{! 
zGhw}!09uE^W`20q0NooG@K<`mV#HjxMTD%DA~|aLRDegHbzITL<%Y6Z`}NN#Hy@GU zPcEMJ&}@*pY8gPC8j@z2&&QMS!|#`N=~q6@z54s^hfh3Iz#&)x+GNqm&A|B{biz6R z`x0+eZmwku4--GK|Iv^?7{S--k68nipEZmDuN)*+d)OXiqLUl#`pl3BWQwSPCptCU zrUQgj(;*aOWgoWHQ&O>!_1oE(m3sW6ufE;3wHPXS(zJjUx+<7q$Db7p=o-?|lSdVb&93+YQ(@e6~!kso>= zpZ%kkAq*$3$p&~wujO!fSF`=P5$4nD&2`xG>#C251`q<*yC5BlE0FzrkYbvkxMRVv zO@+|N`7PVu4`=HK&H8JknK%?*r-n7ziHvdO=KKLoWS9v0K4OEPNYc-mkd{B<@6oXG ziuLAS2i$xK{5D0L`BDhb{3dia$SY1-hvBAjCV0W&uAd0`9S_<_MV)= z<%rtdauTmv=rw@=z<32d~xEKeHf`5A=X$Ze>R z_|AaHrVk*EdBt+!=)A2GQtrE_oLeepB+sa zHi@|JnHrZO$ZS4q;c)f!k;Z4oOCaYQw$*@aOE<)8?Wx2g){sTEkT7xZV^baxZ|I`2 zIFpxz&a&>EDCHPEBC<=GpKTa__KRV?9luYABULrzX%eov24j`smU~>s5G2jn6hLTfTqnZidY^B3CjYjd6I;ST!BH!nTAoDhr& z`=KwjNPQn9uv}vj6#^FhlE9A^{AJu=a}8BOm&^LyPqm9fP!Opv;J;dc2Thc~W_oe> zF12W5i48bV<b+yVJL#Fm10opMD6E!R#d$sfTwmQFo~lXCt8KL7ism(ns;Aa@5v*F8zi5F6L;llAwXl?iB&x|A7mZ z8oG(+kPG{QaFhoe%R)89@^U$Cnog^J&!q;mQe_U0vqp%#}Y`*70v(V-8 z`V?1Yqv|>CPf31ITVwL+NVlTYg=23~R!(%%+0ZvaI`7t3JPL_QOOa z^Oa6<;B0c7X>>hO^Vv9l-)yfn(Uy;JTAcZC0pZ)Eh{?%82SlpFZdk&75F7ZsSXuP? 
z=}0$_z3@sGL9x~|7~@PbxhvRg%F+6E_NK9TPQ#@=&)d7@_GZUIGjyH;Dj1YU2toU+ z>g8dxy)~DXUI8IqdIM(KPY1fCK_|}}fw2X>8mVHafAV!V&CldK6C3m&_9JknzkbF4 zLbo;NODHjEi&uj5t0mW;s`> z^aHl&*M)p4Qjw_md;$f~UD_>K-5I<&Q9EaqeK!EdMNO8a?gra&EL2R!#sJcQ>Tw}p zh85^w%*V%TfQO3YxO*b>@D4(l{X_cU-JZ?C${ub*0Y~qL=ZTfbk048YK)a?F1ivEp zwM*`Ohn(>x^q)u*JS=Zwl`EHHc$Df&>gFp}VvN<|TFcy=jD?MmORbJX zX&(v*i@64d9v3ew;9y{Q9vF)Jz;|{wJ(}_ox~Yi#R`23Kv_lg%$Qqn;&Vba~6sLtj z;@GXjElh^J?D1w3^wvTGOC?HyTJMir{N1R-`n;B~q5xp}3uH|bK$mdIlhw&5*jEBK zL0w^Xr&iJu;olP9T{A*gxsjC#xZ01k>v%@?^zw0g-hwQ!<{iN>n~cH{HsCcgfbNRA zl%G9?MmsP&wHrC9Jb1(T){N&_uk+C%Z7^<0qaI$1$^X-}Fskq%dl;iKFvpq=$!*=3 z3=2jIQVp0taZJkuNHFnr8NPK9Rx}E70^66Q%IFEf=BY-SxR+$zQen>0EvOB+PxX1| zD_a2gYOtbQjDxixAwQ*7S?UgumLr2JlK7hSXW-dJ-K}x!^ce( zxE@Iz*)*Fgz~jMx3VVP8*UbT*cMF|u-m1}Ggu|thTUDL`W9qN87uyJE>&W(J4r z2Z4y@Ft`{rI3C_4JQJ#J(6g%_r?;05bNF@pZ+wD z0NljWbDMhrd_5~(XSojs!T0qj)>|X)7yK%udHRP+?VjWpDIQ;Ibo@YLZN&TWOyTxl zcHndouZh(y<6>Cg+Eh!)XdX;Adh9kf~cc$Ghk1$uW$E%l;L?GPif5N4ehdlM2>uH;Z_qB z+zP{9EaWVk_Lju>y-rekF$rh)yuVg4mf;8BE=p%)f0 zi?3NGXI&O_OqRk_x+={z;h;Dve*5S4yk5p@Vx^(a3# z`XTir4{9f+?bp(Yny@X)Ccg^=sqW<9H_g@L`YpID{+yiF z^}}tYdi;{L4~zC@o~zW_!RS0_2Sl{(&xRlT4mo1@5_4LlZ7){(LLp0&YY-n=r%&Nh zHB3G>MWoO9b9tHzBGo!!Euaz4c-_!k*siucK+FzWT!UGftt?dtGblrB{~|O(m`l49 z|McS0%dVx=seboeR&RXALRoDEqaAsWK7-iAE>zV-li;H0@(uduy>j2U44}^-oVaO; za8)LbrzcX&3sN_vbs-vAL~AJjuoX-0g^JW!c}=_Kh)X!a9dhlQp>U0C&SbSlE3rL* z?5AwQP)?BeAspF$99|XOY^d2%&{R1@j(t^X*u@3yh#gupnutAUBuwX@GkVi7UJC5lhlXY44KH$nnB2lnfK;QRWJ}+7ij|1*Rjx@nhh_3VFVM}z~b{f1| z1O^WBtqH%=8HKtG924O>J-jf5azPr%HjAHn`S5%n@24j$kPDO7Aj8!lH)$g5g?E(5 zUTVbQ^-7HKt=Vs|_p59Yvh1>hkYy<3It#tr zjzcV_1LN4^nmbBcBC3Yg^@qa3yo}B)+n_d9QrW^HsgxU8dE}sE^23G2at=Xt5=}eb?mbofFQQN(9(~f@@ZEKjbupP}$`9B=ymhL7*Gb7P;QWNlk^TAju zIIDU;y{?Q*@OQpE&eZsF@qQpQjuhP$L4^;kQ(UR+JApeDO#-wV%_5S6u<8&fVy3sNog~XN;Qas>sZvDtTkO1JQn!C+xNZFMTdDEZ8Y+j$z&8Yy`Sx%UihW;i&o18&zDQJeiizRkj zc#s;pg#6hU#1dr(}ln7cJ+fndP>`f|Pq>!co?j(^T^kn?AzYi-XL zOq)dly9w}Lb$8N3k>$bNfG^PY4$z_hr%H?yz7!AKs?IriSD%sM)3A41Ayu=avXq)5 
z3FK5OOE7Gso~&%Losul8!T?!vpylqZ{s8BN04wWOha~G<7hakHeA&;LUc7@g@VVKb z7qU4Oa5w7^&T=t11h@1Xcol#$%6XcKGTdr4)_xT>C-17mIJbhVY?Ga`?A)GRd|_&z zV=V-oF%9I*3yUh88=-7;%I8&v*=p$h?5cFQ#n0P+751+-N-AUiZKC@4mroP%B$bWM zSMn)g|5}TN#ti&(t(g20#s`~e)R4ae&{#d+^QjaGz`}C$O}$CpgZ5zfc=Tkq>)iI$ z5W8IydZle)g5#0@I&MKQdXsQu)@Jt_#yr|CL0qdZo-fGJc5X(rUTap=QpCxAj$uA&Y|t5k>xIppiZ`6yDiQ!}eXR z#(-g$5_EVjQn*agn8tdST&N*$zl%Kw@xuqdOK~ORh!&aFQ$vokx|v+Kvy^Iv9f2=+ zYY_Xkc(3i`yP^Sm9La&*?<=j3rKKuIrEX_C0P57wKYVq)pR6Qh_+wn9ACo`!B#4DE zf3HBm;gkq{CyY<79WRVdIkBg}i(~z&`YI{pOwowz{3yz_EASw0GU%WFrZ7OaUU#r| z^yTNYC^W0sNKP`;Gkh>;Ic+j#%eeS?0dJ}=A*wAALaQ0|g#niF%DV%WF$wvmAQ3j1 z8^C98^UJ#yA{2rxu62!*z0o~6yl;3QPK##}6~nO;r5~9q z(xu%$0Hu>6I#BP#8Or9E(iOwML=NXZ^rmE@zJOpZe&303$5_07ifj7M~gQ-)(Rv7KZ=ngP&>r zH=$0h1+7>2Mo}LX4A1BaN0MIZ5(vT-v#E#|P$FIuc=s(`q^U`rNguET#geF7&N#QM zQKv`mB=*v;ygb0#%rZCl3xM}<^RhOOks6@Vm^y?9l=|4BMyrx2xh9Qc2R8lqBa(LF8(goQ1-bJwl<9E00?lpQh^;OCJG!=JdPqs^J0%?kkD^ z;V*VjFIMFf%n0TRc(pw+kXm;(t|S9Wt#G9ivMt5gk$zKMuNYLlU`uA0D$F=Y59X6% z%Z2KPZ93g3CO@n1i+a!qmmWl>>oyDqxw707A6b@q zuB9fch!vob7-HOMDp`maB0s||BZ{4)usKP;A%fL=ICO%;YU@l5)ViK%>%^+0s#NhM zzrDSz-S7k6j^AVH9mLl8PjxpaA#b|Z@m3+Iuz_8elgT4^ybitOS4aS#>BDfQ(9W}q z&NUyOEFzhxcx56GNx4O;ZQNCGm(hrN>QP!odoAtLW{YCkK=TMP8H%8dEL~^UPFV;k zzqks7moZ-~WZ!Zd7%7$bfu@@r0SSj%LYX%APT|u6Z+wcWjFp{XtDCph3?q5r+}39# zax`g_CAXZ=$#xJ=i!}Xu*rs2P#Sr7uu&9E4h;^;(z zKn%9<5iJ;yl3TN0nKHkZ?B%++a5%W{hLz~k%Hd-r7bAPMsrNDP=di__8es8ve==s# z;Rv<%tYzD?Oku4Za9bl{Yz2|CCAY7S3N>m@WC|& z8F}cq%pAE}=~1RXyLK^eyKblC$+=Czz)#WJ`mBO^sXk9F&c>K@}9U} zVmSm9@Dh%=`AN8%;@?asxs-6_`o@^VR*ca$Ote|g$X5f#ASSH2_gU#1T&?L1X4Yfp z)CPhl3T+i1@713N*mM(HqqrMxwvG>7zW^Ve%9B3dhal-TVQy&%yIzFGca z%3oqE>?{EBC9ii_&Ys}{NL`>Iq4Hlh5kZW?g%>F&2_STHKuQLqFVVRhq~rbBKOY>#uVn->_E_jSf~%O>XA?)aGq{{WLzmM17= zqc=Yw)aO>Rj;k>`_AVQ|9#l90)1)4{ZPrTbV#rD=+X&G!CVj8XnUT#) zg8fSk?25hC?MwRD2FH_$5gM-*`ULS@9{7pX(6v7&eQ=x>&RcKN}+; z%OjN7%6J4P|ICx}Yn6$Mc6l_De7&^cA2Dcs8l+l<@CKA~_`fh)mu0Ejvl!XAM zY4CT^1Cpc3*Z9iq>LbJ`?7}6X6@|brIVi}|tJex4JKf4IdBbfwDud#j{$BgsMN(a) 
zOch$5*-a5X3ggts-(r4KL4g{7F|ssDTw)G*sXd2A9An zo_HCQ_(3R-IkrrWW#PR(lQ5j;#dkJzU=%XJkSzyMRi1s|p0i-qCIuZ%B?zNZY*Z4# zj2n-KHA9(tr{P*Up@D;4j@xruyL?kw>)cVrv984BR{lJ_)W+-A!L4tO6~@SAMPlk* zw*Ay|g>*n9uSHBE)$B3wA)~t z3R$^2CLW~VN{2&S9V?=!G>^4S%u=iJT!%%~L2r0+=N zrS?tjnt5k`C~%VPTErCNi?ih{=H03DYZHuxi4rYViQJsDx*YWd+H!p;iU(jofCCF5 zHuKKEd%bV>v2S!HoWR&wp{=APRyfIV@+YVTnFrhz5|n05ljpdc!rUl^WjcIF`s*lt z(P|6CX(HRnL8S(z*FCtd_Pe;sp9pJgB}Px8(FWEF*Cgj$ zQy(bHz*E((DrPv7L7^BUbME(Z*kgyD1p(F6kgrnX5gxl?Z==+PZbJ)nOCwLn+Y zTU2D2`$$)Nr{UIUub-xj=uPp4mTl&I7q)(VCBP>XHL<7drRvuQ5J0|cyq|ccyK{2u zLk2#C`-oN zyTdl@eKxh-%9_<@#~Qil?_!F#RY!~|;w7I0KQ&zoP91)0wcd(voAjTfM#xhV1 z5!^GLFq^;IIN6ZaT}G#pbJBt5}> zZVBqE+nS2^Ou@Hd!}KR1Cq%dsf?;w_nsg4q!NBSg^vHlR?#;W>=gp^w0X<)jm!n?a zcaMkH4Y4yoCjaW6kcz>T7J{44(U@4cRzdNR@ftS63(y#?N!)gn1lhf->szWl?h0ur z--bci={Pyq7&uu!KeE!X zurhLRassEEK^1_L;o!9Y%wXyIoID9&_(9fSu9MoWI6<5E{MCH=S)$RxR66Cc~hMYKj=*`&HXr})3ttaR$IdL(j@+HR@ z0Xza!MZnB$JR^F1+iS==0u+vl#5e>$sR|~^9~Q_k7y$r|w18Eju$4TE2V{%FR9 zXI*?;aorFnhOZ_It~7Z>{Nh(DC$@yBW?w?dB|7B-b~hXv#Vy^c?r3tmIe%aCC9!ti zgeoJCRgM%6a(NGBQlprp>j4UZ%A*y`t3tc9DI)^_;kp!N3-wL)9AoqMgFcx|oy7rX z)^I3fN;QclB;5-;wn>{H{Cp+npwbr=Hk+rG-X((kuS4}gwfst_9b!9j+28cs9)}me zViKyguPhG+eZf|*U4no6=ydO*L;$~1Gd%riIX@Ye11qTHc0aNLJ@-T~?GZ_H&HMn& zzOUv1`kObN4rP<&e)%?QGAooxEccs-i3s^2Vjk?xf$|UxxWaW}Djz(<{b8>G-DcA9 z;m~gLd;q#_bQb2hySP1`DOn}Qx_f-z63))9QWTFK($IBirM9pW{bVJ39B{tt?J!Pf zXAmu0`1SUBd^WeS4m)&82RMjzHQO__LpVSFti><$^q2wqPJe3?M4A*6SNWh*Q6KU;N+5z5P(i(f2LX>Ef_Fe9jwXknie%{f4gZ0tL>MP1&pX7koJ{a-spJ2%u z$2usSOngj<`OQF4#|+?o+?MStYh!9}Wg6$fwnKDZKtrHDcS-zl4HwLIW9fKfmQ+0r zSjs&z?B1&O;NSl}h5BsbUEFqp-NIGGzOI&jQt&xnPH&@nczzewSvZS_KTmrb9CjIwiECoV7St z)WIpx{949nl91IGq%xY;J84dv$a<|Dz)1WAIw>Im3Oc8+RSx8w&RD|aD#z$mf_K|M zRvHn)pvzRiM>rmvkqLR>J4UtkcCjg0DIjWa$%o4prPn7Wd?<7*ea#kSa;gPlJ|HKl zD}2S+h}5S$v#F7I8_3;@nV6>n`<*lHo8LKr8`N$zA{r^Nm=RWc#X)!g4h%>P@EZ=> zEt=^*#s*}7wB|Em?82MNsNQYV(WTMjnsaCns2}k%0I3B zQ$S>v8xP%Q)|cbj$Qz>%OZ}h;5LDZ&FlCT{H0cXzT7(b8HW^qgk1wy%Pi7-OIV&r% 
z`GtroSD3(9B|DI5L*W9UXssjLI=j*mm**UsJKbW*kzJ1Zr2c3$`b9KA4Dz1QJkt|x zXJ6&qVNE)1k_Mb^w>PKPvXmkk>sZ1a7&fG062XzCsi zlLl7!F}Vxn`4K5M65G6owjd#ktfhW-moIHbZxdLkTVNVDaN|0X`jSr}<~)ghnmT)O zvElP@7Q0Q{$=7l=dpeCYy7v>-jHv$*k)(ifwmRX!)eB>-i<`P)abvMoIvUl7E$fZf z^&ueli-FzVxBUJ!>T4uEzzcO@qI5XkQu~^jAT{h>i@f)Zm#kukYWbr5?U5rYHC;?$ zF53J36qxZ+P?V6nM%S)SHEIhvq*B>BrkVw_Zl1qT601-DguOD%vywf&KgskosY&yo zW#7Cuc4ds-!@k%^_&!-!VG)~Io?iu%=oG^mOft@++%X|`liH;j01G=>gT*#Cb&oM~ zu(l$rL*tmpCYoKipWYJD6>N-%LY{vrJ+iLQp`ihwCfhNvLK+<2a%&&cm?>Unau!yz zDmB(S-QIqFRus04LY+xHCW`ZD4q!TUxM$I#bBc-)op3q1N!Pbn1Hhw_w@8279rIPZ za3)6b&g$6zfNyO8kQCpn&usynUuGauIBbJ(6r6SI2J#&hZZzS7O+)3Z&_7wqeWx5U zE`Cs^t2!eouopSZM-!hU!LmfPrv;Dm&YQ=={1z3%80Tp0#8Ip}Kf4+nGRn>$$jdX; zRv%73FgQ$p7$~^;>$6WSr0|q7W9*kUC>82B036<)WJ)XUDdtd^Wb$-Oc`fvG6beu)1Khgeq z>>*OJZG0*tmR<-jTd(MS3!pvd(9z@#ehIfT}UY{{3?JQCL{_8HH%B+r-CDY9Sp$u9t29bS%hSO9$5 zuPZ&b9`BDdOk{rqT#(qlO{8Q71*YXd9HM}F zH9qK_Hw-TP17{Ko0}q7xe5)X2AXFk`Vg+8cfExly!@(JViSgjYz_;+vS#KHCpd>&a z7f=|WYXW#W(CrsE^4|>AR19o%OsotHjEsbAOdNDfObi?h%!DkAoOEnV%*+f-glvp# zbgV4wObpC_y_^^WClkSmfQ)UQjMmHy44nT{(rwq4w)_4!Mk=}1zu?IiBn18&cPNA) zv9KQ~6sLsI5JR@C{=o;hLG6$!S}VsGmIJL|>l$kxacAlA=byf6By;RWmdNXXRd*Z0 z`0>r&$k79e+4$vSBFm)Y^CRQ?tFKZ_vwnWqi0 z>C5$YP#5JU$T-k^b50YKFdC-cuEFxhzKb<-7^B7@l_s(@*jP*e+T@!1MV!)be6j&HOIqcF6_)-E&DgK zV8)-3JxQlLY;7DAb>6p=2I~8Z%MZ=#WbuFOWyJz2&y|=`;XNqH2=uO>Zn{~dEDF8b zB3>0$*dV@5b3_joC+Hy+QfRB$p-o_iD*}tZPq3_Oo|3L`rdsfX8Ah~dlXMITie2)E zK?4{n^%hdc_gWf|+gaGFvi7Nsc0K|EVI5hYApx32dh_iqkD*1;vX+HbC?4_>B&3ER z`Y8!~@N^jVxFjT%^+p}(t3t%nm_#|Ryu)ZaYAn~H$IkOfGElAYrI=6oUYwxK3rSd% zv!-70+=WrfOH!GPN?<#h<K;_bJs>6A% zipxN!!wdq!By;)tduZ>kry*CvHm7Y5dxcyzoAePO^kGq$)*I16yo%pIn=Un3=+M6_ z)v>7dg%w8qtu{_7SPXX|r6#}wSyr&P?%Q+rr%0MH4{#PyEBRc?um4q|fYZ*e(ELo2 z1#;`Z(pe7RCQ&PR!wU(um*ITWKTQGdw}GbS){{@E+?FPIrIs3sDC?>*)`0EO%Bl{4 z?*DWkAe~W$l~Jy%0UdsrK@E~>%|?J(vgn>phXRB_iakQ zEm5$4tqZy>XdhdD5&G>2vZr8eGBo(K&fa1i{7>K}zSRZ-hBgJ89JVqU3k&3*3yJbl zObyWMq{7mjFQt@99&)U~+0%4~fuHWFVmXLbQO>_lw}j~~fclRZpmHiW13(R`a4)m4 
z6mu%!UNx#5#6wuQ%VA(nMchN`e;$WZQN}x?oI?$eZp}gXwq&t2ozAWx>rz*wqWqQa zEQF87xF}fKX8TOr^ftvvU+5CUoi zPf2Qkf(6E{pddGQ6_6thoR(X@^go`S9EYd-OT?%1i&7nX7|LA#v9#=8mTLZws=(|t za0VHOe+4uDGftms($(lN{1d33lI)CiV0&<~paXa_pO(V^XFFtP;HjtHd@(Q0;|=}S z`oQ}%a5_K&#wfLw=HG$N{PEvFjX$3P*jXTP-P(>$ zDC9pC`}fvQ>xj8ekR|wcRI83o%>TOW-y!_>ZDmKd-Je_V(*T#ci(mlvv@_;*kgt{9 z%DDDoi)MMGM00(>X-<5$86B%@bA3sf(GSPQyI8{Bp`AnKlM2^$9T~gLs=tkCZoqC(5Rj%*%C!Nceu5-VmeJLxx=Ih-2!nt@njs;C4 z3=w3qZNzMVi=b+BI5q=JvJBRqq7A}|SEQ6wSFw;pOsjA-k;Nn8=onxt65%KNTLe{+ zatz9y6|Hg^DeIXO0e+tPM788WH|mY;5d~GCIB_&fsEEOnPuaFXw6_e+6udYcy{a^W zx(dEXpKY)%(0@UcA=Q{XdH{6df(~6kx)^mxC_jgzFFB9J0zM{09dBuR*dbJlowYEme`ehNfi8pALadq`Y zj)X#1Ju{uimvI3@BNJ6K=!_T}~Or4n66?P<^rYinSMP(v< z)o~n7->7VZ!t(`yUg7!KYEnt-RZRjNus%VOgL6>r(u2zFHI;Pa9;Wb* z8r5S>HI^(O7)eXSoVC~rBW%KZ<-+Go6R3Wr(@lQ5iu(HYcyBw~iL!>vi0Dn4%xJ!3 zu$DD+`g$8fKA@$pYEaMZ7vROEC&2JJfLfBkF++oEd zlf3rWr;NkASg)(TK3DgFB@1tq;$|LRy6)9LE(-*>v^*nMJwG)ZIEF4D%lsO;@jd&4 z-M{yJMl&$<`^+zgi|UnMSD%5za@(mCHO@-TgT}&jLg!M@^onH7i#7S~ul~ zUS~L<>vrYks3YxgecZ;P2?yX+{zZ9;dZIk!x7wDVMAi^7FDls8hT~KKj5`NH_>e9$ zy0VkBh!6^V`pKQs6__#Efj|>!$DdcXK46uTviHNz4}$E6=R~&`2h${=YsAev$ooFk zo1E01o=pYH0kf4!lZe`<#dU4qHAKxr#K?8j!=p*qo*am- z4hOuvy(y5sal5Q&*aQ7$#KHlc2j8nGBaY>db_03wh|BkdHsmKkt0lhx$Ru^1cY?}P z8<2m%5Ul?JL$LmbJ%jac_6!OD3;X|`Ef~|(`-?;pup!Sl0a_l0e22hr-fc5YY%{ss zPAm|~lfKYV*Vi086YBf+>lcNEsd!_nTPR0kBR4a$780z0U7{)%5Bh0G~xY9O>YBs?ho4s@K=6%igUGsC|T? z(rlM715gIBzi<$Re!=|wAI^`@@CmxU&lg*n*i^E2@&eo92VFEV%P{@qetR>Z@ltew zSP=wlePR*pN}5jydPt+=fs|Br%lhg(J8~L~63z#_OA!iLBy5@~V&!O9!S6{kX^HCq zJ(GHWC&_nDV7Lx)pTf!Wz2=&LZ@SvJ|cR5PSzVD^EH3Gb) zVOG}(-rqH?IMz2$aEb)PPmtBWv4E=&u-a;4j7Nh1KRT)=Ov?px4`?Xf%o*ohV- zP`+z~kx7`2M%-~9$6<0+*wp1rh4|lNS2lg=Jw$EwUZUn>Pi^@;awD5-TXHgdsgRTr z48^OC9p$KsE`2habw@`@ps6wJ76O6W=BSy84ll zWDQLB&kQdWMsP9?TVllihNBTGAbA-?#hvgAH~ygL2sVu7QBQ<4GKgG?Qkpv=8a%n? 
z7bfKni)W8+Ok)#Qoy59JZ6ceT5}T>?u&6Z{#9jD1Q9>#`&Niuz2E{R19z_l1mEPlT z;n|kt4|VWy5t?1K+mJC9WZ6s`!?_q~&8CET))#Y*;wFO98X%M?wy0-e034ODWNTw7 znW3~UCzb%_Q&D4|RJ*36Dor)Y%R3^y9PRjItkjGlK?Uv3XDE%XNR2_#KWe|b%XxQ} zHZx0{Px#L5w3$Va*azXr#}GD!s$J}UPN8s7%Tt692QK6UN@-O$qO(`q=wodWwz9L9 zcIgJSmw6I3>l8Z^kInQn0W1>90)<0G&$U_3ig)z8)zekJ=OX+#$udS-k_$f!Np7J66l|8e!s@s%uHxNw|_oryVdCN?LwZJQH&$96Ik+qP}nw#|ui_ndRT z-}j#TSMTbsTD9s~t7~^xuX@&Gho|zb`=aHS)j1G)w7b2r99mIF1s)qnn~_;ufWp?t zM^)GB#2=l+mbRa-Gz6Pl%MuemH67W@ewwNZjY?a7Sut;AH2!owAh(Qv-^B=!D5%b1S2-hFc0220O8M! zLu@Z4|JuP>^aFe{SI4`sCdxvN>Lo-mYs_e8WNQ2_&ioP%zt+I(*Uu%BSR-_mU3s;u zI?G>`#VuCqNzgz#ss6(j8uC%60Bc+BJ)E`s4!Suco)k9c^=C`TBU7FnLx8b&%tPm@ z%sI6Dg_~Yy3KY9b=-tb@Db}%fQpOmKi|-7gl{ZtxF}iLxf7sxYzuNhT$DLtjR>61P zWy9(X)tHENS1g~zN-Z&B!AD&e_3|HewP2^sm){uC7iYp;bx``Y#l$+Y(Hw{H+40$X zKnW))+0S`Ilm};TR}$CmECI5VSRC^+7T$!!G*_5cC7TwPY?~Q9y6}@$qbc8m4rBbs z^sCjA1y)X>&bjvNyOtL$V=A->jgNH^H|1HvVkgxfjX-($%%+(-3vQ|j>{bw>nOvrX z#l6pZJi#z}$p4r$5flPbRiE{6OEz_oULQ3Pv`0iek~I-b8ZB>mU;&;oYaU`T(*SYo z*k@H!X)p)uMeyx)KisCDg(ak=5vefQezc_c#cQReD~S^4iW#0u$69vA*Q)0o_>(b~ ze3g|~`alrA4ugD6tt-@4r{|j+fKR6m16yTXwFDH5jHQ4&%iPwY zILEiQXOYv-=>sykcmTc3NKQpOcfKA3cvJ4@PMdDumH}Xv>66+q^~_Z*fb=;wv~iX$ox}{j z;HvcxV)@)vwegOj#4;`{92WK1rZecI z=6OAjEV7ssn`?J6H!6PGo5lTHY7GI8e~cHje75a`L37s=0$X#O}EJh#~|d)Xraj3RIB;JO+R{Ks%bk5*>ei` zkr2%RCVU@29}Om`I77->*Ol@y8%KM6!w}|BZS!KBv~iE2n#x3IxojbYKAWfI0#n*m zMN`EMO^SUGAP{gwYfrc8eJOj{vMrQ5xA3^;!tTDTW*xUfo*ml_Gt_N1y*FQ9~-B>M9w@fB_NwE_}v*5HqtEEA~dYcD(8g1M#bO!S2Ph z)uQh8ui%ZV)qIeLu6`q0)%)Y;k%71SruTCG@rf07Buw#-`v~qn;$bnQFgmrX$KFIW zASlvOBRRrSYFmFeYY86eK|5s4Xg2O*;!g+V&dRLDd+2wr^~`yd2*+p=k&!8Vb6&Ax z6_>pej={dC1$PB2#n|FZvupW&uYchmD`o)v^P~sLnpC|DrVjS^0S~y36pW1_sU`=4 z62QdB@}C9L3oVVmn@E9)0A{A3gm3sj419;^%50b4l&{dO5`DcprYSJ7kfw~MT~ozvhfrR7mixn@ojKNK(H zPpqfM?O)oE|?nQ_PPgguh|&T4qQOkTeukub7gq5YP)>)&6gw1107a633x~? 
zk#V#3cOyf-`TgtzEaJ;P`VwseV*_{oIi=XHHF5*h}^9I9V^2{})i$6gzBl>Y= z28JxpPX{3#gooP$_{NkEJ-$*vI&nidc9sSCk*I9oR2U7Cbeg8fB+<)FQ>FV;gVbho65r6Kj-u74Z~ zBJXv*WLz(hNPhq9nUH#Whg-@M)iymD+hs^e@d+opCgK6`-j4^$M;aGOBo$`xnbDVvz5yXWiR|Gil887XjQSbgo*y~X#Lfs zbvX}y#uyKQ&}e30S1*nC(Q}X)>Dc=kQ`dDSz#=koSguQGyY>iDR>nYpI9p)y{)hLQn3>j!%W%Q5&A z0HpU^Mipni8N55W@Nuu6%{eZ)JnP^@V-NKPp0bZ~aHNo$i5@b~(h)Tlx75e|^IcV^ z_kbgy(eZ}tkkI2gz_739D_ym|Je-A5LL)PrqsNYIQ-O{nv9u<2_`rnF(mb+Y(WT2m zgBJDm2CC31wd%xT=msph|FdRoY}oiMAk$sp0deZMu6lb%`zZdcEmOgX*=-Qq$io9 z|A@#0OP~_lvLOJ_K<$@SKA4xK%va(T9bAiEh&_UAZ38 zQ@oCg#)-L#AO9U)egB~5`u>a_0Jz&7NHXeW)nDoMi_-XH!BYu2Ojf%fdQ{@SYx7Is zyedWAS|&sThg51K-3xiphi3cnz7tiTXSAvysx`bRQIRA=jlMj$EJDAG`t(WD zhNDHtT?MDp7+Liv*T}4C0V1}OVa@$IWj2arW?QD-1Mu*ZVKU7R2lYUouugH@7%9T)g_Xi_6^2( zEMez6d-)^WWd$lK?;q;`0(n;A{-8gjq&L##)+#aqg)L)?kiXTq+qvBOqh_kbr!es3 z5Ee+Uiu0~Y#e++&=;$ho0WOVjA!S1FSb?*XwFttcoY_a^jb#?k(JHv=rCwk%4d&XZ zf#;F~!AzPE74PXpmv@UsgX7v22|+U}*izWX0ou@vs8{D5zcX>9 zK3)Uz27Q-vEPr$FxrZr8!b)aymFJ{X*%1`=C%1?HtZivXlOw%P{?Q2bqBv_|Wa+TI zM%6a!P?Xm+b;{IJ=5rOqtkA*%l`G&}YV1HA?I(UM?41p2D}UpzBg!6kg0VpV6E0ZX2!L6bgxcn*B6Az!S81dzIy5DmC=_AtPyeqc*dm1`Hr`F8Sw@y} z8JN0Md$VTvajShEDt)fO#s~ti*?6hyMADi&Tzv!-EEN20q%2zQ?k#fWv<&WP(r?bM zf&A-ja-obQtNC}t^Fe&XwWheQ$x*#yJl5>6EA4XT=p^^5TmVFN*{5u6hUT84t$Kl? 
z04;M;+zLeRALrnPmp#8gQ3zf7s{8p`^0ct%i>8toD>R`M!`kUJaVsluHT;?&^2Jr> zKjxW6V{h@I-kY=%#i?50H5WJY3;SVWel8+^$?_3Gko4$^tgb{e8icHZpp!|VcK_8@ zwrFV)7S+%tkq$T^$;@dhhV#6K68Z5mECgRGu3gTsNaBhjE<(s+jLD=bd=Q>IQLcUs z8La$~%T_)zK;7( zJ!2@ysMO4wEsIpIyZqZNZalMUH{AHtM6noW!0I^w^v-0mMW1&>W&C-deruJVP( z>-*#LRhRqol5JO8%qpQ-lL@aXz=L%!GL5?%enC&y_T?q z-Q!LQOJJJf^GZF{0H7`-A;#ZBav>zbVz6m8&G=IwFaZ=CyG znNMAXTw(~lX`t=gQHZk&hL|x#r31`$aL4SuRW^EaPhSu6y@^~Hlrum@xmuJX9YN9# zP7CvZ)=4u+!A^CDSv!+)xv}IE@_DAlKgpmK7`ggSUNhi(3^1gX7>teSpO{u~Cg2Z0 zBv5*4b~ZXrHbz!XW+GNLCOT#&4kjj6B3345ItFGYW=<9&V1z3jCorr87>r_&4W^Lv z*#^#(#8Uwb8ew5({x2Wy29Q{1zY(SL&tGDlIWK0{z3=4XIrfN~O$jhxbO5e*&I8Q^?MrUQ#CFz1U z))@{+lq6(?0xkV(0~}Qb8Y4~*s+MwJ zK(YWyp7&ndM8K#9oO;3wem8%rd25tw?kXJvrGO?1kpd+Nr{>5b9N(%l;UQL6h$KBDf8LLI;N zE0K~WW8qo|9+GO7b zW_#C7R$VQ3odf64uU>dO;Q_o=a!^ZdfHXu#L)4MCUFqLaIm<}DD8$T=)rX0)6@E`6 z=+MUDr+aZo_$=NK8CGMS%^@$OJI{0L_O$To$=V|mWPZQFf;FQd9s3-U7y@Td1-&Lr z3Ul~DAPz~cEXO%>a#VZ#;1F(JC-7s;<5wm?I`skaerg;XX%e+k6ftOw`WxmA0zfS{ z%t1N_tdoW=mBmfIP#@6=Lnp|u9V-GUsni@yxu!L7{M2D(bXvJ5QA{Lctdunh5+_`Z zT6ms^)O!94>?&4+SeqrPA1sw$tEfy8!#Iz7*jVzaibJ_cQj+f&JCIAB*^iQpbp#Eom`x~;i4{*^jHL4~7KSdW*bhYOd z4AqJCMi4WbuSm5!O%Y4!5&nF=_&h-il&5#}ozD|hotC-#b9p&Bbucl4d_4X;iM}9< z<`+FO7MDuq)et0SimOw=G;g7l%)g+1*j1Y8Liu-wL0M`ZhS#6$A^O*>WB|CWv-|KL z#;=BP!9-x5ReUGQIPoc4#m)#dw=uw*QQPyg<(fdMA>^pcQ8}mGcia3$KGMhdbBK^2 zy_z5jIgn|3Y!E@o#U((p>TcwlqHZnj3t!5+_CD`j$Oyd;pv2hNqZCoIz@S z8v4qH+(jU3Dx0K^;Wqfk>0rM+MPxy5e7_tFC(`}InKF|PDXUK<*OqMzl%@&_Wt`Vw z#Ffl(NLV3D@@-b2|4ABXHhyf_j<*Kpq}3mRp7Pv_NAo)+qGX&TCAI}DZUJpTKQ`va z(D)77o|J+&q6nyx(H7t~`q+U9vH+a9AR&AT6JIAxO(pL?bnKe3FG(2xFf*b*#zMe?q3d0ZJ> zqENGp&g|x$gPw%~wjyy6a)azJYw|4HYac?+s33xv#SIJF69u5okM;G+zmi(e;pQnm z+K@BY&{W|99$z@AGV2vuy&JRcM-pYDC6$_O$`dAkL8t%HVZ=qnD_Cc!ki$l)M}E4N z^@e5SYJf|r_K9I}a>2=Zt2Tz`-K>l`rE2|2rV3T7?m3WrOQ#Vr8 z4O9@CakJk*p!qkP`Le+aa|_BQs{+FaYpPiyJG7;gt)9EKgCPc0DC@`v5ib-tpg zx-o?%&gCE6cpxB`4Lr$LT0Y&+O?^H=MWvD9kC%kfoh%1Cp=6czxY0C$C=WcuxBRB1 zH)T{NciCrw9ZDh0RHRxV8SNfE`8~d5QtVB-e-bJ7b#{I_*KQZLuFYUoWX+|@c!D)A 
zqCMv%s*VeAp@4~hqwALOyF?>fmucVFF&)&ZGgUKngRoA`DD8ZfAdY;{0zZ)A22qgfL@EcoOii1Mev=gnEo%FVJLR)m93eS(dTF%>2X^6M zbN2xtU8dl|Cwy24FdgWzUg9<1nF)0j<3DC#6^7X^h=64}t z@d@_;*eFaa6f5zpckd*A?%b9u@x6G_7s7;*k%QAgKRv5oeF)~=MS(g?Iq`*RiX2@P z&OEcEW(*CgjRMKODL+5a2tj3oP*=yi(Fnynmc@2XbwKNz(-r4bgWqG;wweyJe{ zT#yYxT*aykokTH3nGi(zQ2H58n%kFcgQy=S7fREln5O>JEbAvCV|%dx8|9OyyGf#Z z$)zgAAw^XZl9}27wY1-Eu-TTPDKcneH4cV1VB;CwZlDgqrEmpvkhQkbtuUq|%Udx3 zJXz7@;jkd>DBI5KAoX<^e>1VQQzuHo;i&g$g|rLwR9~?Uxisgiq=mJcu%b1dnGI@I zZJMZ;xJY49XhF>sz#a+28uQ}dM1YBuF0(}O*033?z&bWZsI2)duwI?Z1$9ZLNWZab z?DlkKBJ;D$oO4ndqN+Ag+bAE3{;TBz07-%V3nEEZe~RF=6H%U1g-S@6N|Th?D(pc4 zsoHu4(>i=&27LjFt!=ZXE221EvSSZ+sJV)E4g9acM_U{sra@=}H&P?2-$^utX52jY#RSwg{+yJwW{{W|y&%~qfNc$& z_KO)?hc2+nM?Wh2ACiCUEr8TG2Anj_#mG?I7s7v1g6UcJ!ASTiKxQ1w z-*x^Gu+AXg?ohpaPj>z`r@M{9*IotE*Qigz(oqM3V)aO;_u&U$lK-LE-mN)?xa&=s zkzsW>D#WwfmxawmvW3OAU6TRKj6=6j!TCK zUOl?7S7P}iU<=z<@RxaK1bb%|xcxsZw*|>T35(7KNK+#_`6cPmoqHX9!aFt%7IF5l zA|? zCHVHg;)ZXW?3-GyLL#7Hf~Ecv+exJ|s?Ffe ziJodHF-Wn!qF_CCWp&!2&%ey3SrY)R>57gAQ}>fraCJa@oWj*K?c zccFBT+(rS*BKBUHH~x80=bqa7d8D+KVq0%#4NDD^&b+(wI2(4ZOO`*0Lm8%uz!1z@ zeHNCnC6!_K33nX~?~Xeo-g0A2oCq$v+l>}7hp`2f$zV<8wkDw==QA8NjHKI;TE_d5 zr?QQVyD#p|QsoC5i)5EnF=+rp6`#gM*P*-ol#PKacy(Gf-i$m;=Mg8|1ZRFdo-KUr zj>71IuXnCq4UV~VJE}r<Ea~fN&@{dgH)HK)jI>pe4 zj|%BT>fvhml}yITsAX;R?M@8Iy?sQg0v2&#;BC9u*d3m4eA;<^bjkp1f1L033Uy9| zu5vO@n%h#Any;)58`9TO-J_OB*M8>|y@**@gS!j3xDfR#Ak&($dq=xFXvL2x{gMDk|}0_4Tu4H$3r zm7gy5#H&DmM)E5bNvH+oBxEwiv_^3noIQDR`CVzQ;8n7%Q5i#)y$-gQ9*l?Lq@zb$t;)t?aN zSH1!pNOWiCaLK06m*y!&2c%SYFRx#HSFYV=Hz^Di1?-O;7)G&zK(s&~l=>IVxzTVD z^0wtN5D_Qe!G;iH*)*5r77@Z&o~Z##rubjN!!P_(!ZrlQnHiOYter-oOx214NYRmw za*zp?!cdd-gKGL^`*eD%Pz*HPiF}@4Aw!ZAMzG;$IP6eO9uD-#=q!0K`@0FF%%YE}c_R*ign zOi3Pr5pjji<8-d^iI?&BLSp z?525s4Q74-v(+QhFV*$P$dUa>D^Vw@sBCDB&cE~M_R|7<%RA?Z4s7X&G3KeB983}= zgbd~IWHyaqHTRsPNcnafkam{U+19Fi+wH%;5guee_9QAx*Vr$!#{nx@1iELAojp!{D(UkW0q-xFpe*n%l1#E{Xga-t4#XfCKv1HjzKHXZ=nK z#)}g3`Z}3i1erqBBdu#Hd;}-Lu5o*1DTLh6GwYb&I|>tWaq;p}>|UhV0x{9MPO8;h z$m!?XTMI2 
zC$(b@AAHZ-7eUXk!)vA<)=OvhNq3OGh7$>?#L8+l7t-!8IxO{*hE#_Saw6yEdvCUt z1)1!8``WI-0`kwYZ$giV8gP}D^oNoTIBL+}gf@Hdd8vP`-5bY7{4AhIMY4GM-rjkhXher~a?WR7*zuL`JmVT5 z{e=z#l+&A`UT*~Vqk#*t3?mA_%Ml9k)sH84?&evH}?o`cfEla$8-K&wKZa!#YZ*z z(V*lHjbFm|T1|o!P@Hm5M|&gFcZ~!H(<@6>fhmIbb3gGmoUxdOkP}98GZtk!(1xa< zrKVFycSQO%zIJ~u44JD9;uji}ofvXZ5Dz1zq2NB^hRj?)EtZXD5aJudO`~323a&LN zJv{aM;7j!^lp9TRt%G%U&{ZoI%1mi`k#VWs7aXoW8ggkx+G*N~d4&L^8e0+o9wN<4 zK_v{&DL58rtESt$*tN)#>M>;fjB`*^Yo$GM=dS6ZrOpgo;k@D!O{?birYHFOWj#@b+ncM^sk`OmWWHPS_F>=29rccy@1yPTzwYEH_jUJM$Wrz z)hn`LYIEZB2t_ydHZ--%3Nzlbecqy2bE`Y~M_t$ZSaK~Ek@I!z|7K@d zd>maRc$Gb=wl;J37)5Dj*w^Qd(AiKNzbPVjGy|8+uR~5BczQS32}k)q#kP5>oPC#| z{n0k9#2v}2GBTa9FPVW0s&Y693Sf6Q7M){vcUur`SzBjq;mo|GhevZ}`JX@B?9Zlf zR4hWztlE0nbKHg1@^KGPvt+7e4^2t#=%;1k8^-T9rkuiJ?$lY}GMs-KGcCB^@{MVr z;pAFhooVN--h}S0bNHfOKDXc+<#wFle$5plICKwU(|JHXQrHjyAcZ+Ty}Oi!OzV43 z*8!r|?X`|~3iRHsX8asqnvdg^}E z#lkc4$VS#;Jn3*3&Z%5j0TppyY??8iQKLe#T6P zx+%4U@D4^LoT8i-XkcC9k~d|nD6fEB7Y;lVOsc?Ka`WE;94Z9)O>3B%!BevAaoOFh zVY3lPs#4J#C0jB{yjSlEvS@$5-ICru(<(M9*y$Pa*MTu>i1_6Raa^b$Q*auJYXE+Z z0@8G{tg*;T<4y_7+i^)PqXm0}!iq}0FrU10R7};Gr}a5R)DQ}zj=eGUm^nC;#E7`R z9J|}^{cxfHP?_z|Z(G@yB;rL@qWfw6J`Aw^py}@(uDVfG0^lZWdgg^F|& z2sFm;F0Cdp=7qztqhvxczb7LeW}pXuzAYhhrjJ~(uI9YZM6)0*9h18KR?t8zb4@sq zan(|%&?n}kbW^j!?6s3|70zrNSUDC)Ye7k%F}>i{c&^DTqe0=#<*qm`S)G7kH-Fl& zd|YIt05n&a>h-ZPo;6fQ(dmaB{WjMao7{ymXYxy`jVWzQtX7{SU_wY7 z3G|v%&(vG#0;&|nVxFY;_0$%lsH@cnoI37$+>;=U@*5QDs323h{h-Ufr31VnH5tA% zzgSvf41-fzLMvQZxS{Ms-|vH#b#QC;{+=fOfY1ti+(crV=3jA0YU4L~D72@+?QTQx zuCn*-`MMbdIjnQ*R9e^yeR0G_%W+x;(lMg<^7YFDTG-DVc-V^iy-~izIXOP}o46I; z$f}N6c=jAt(tYxUYCl;LsVIFI{PC9~@)^<{{ZDJiBA2ioJPb1~@YO3=9Y^80tLy3Y z0an+P0wxtV0S62tyfUQWY{7-Lbl*d2NS6<~U#KP0%LMBWV3y4@obbcGs3_Y!y4q)idLX-7qDL7{>5fM9hGr!b!y)8VX ztv!vu@}TljsFTv_$ycghdztRQJMo>p1e^+rFE@$`6}}dud3Gud!IcHQ2YGfbO!!(a z6laDjclor$#okHJI2px`vtEyOL;(>7J-Ym~ve$R!a#;bg@s zzvMLtMV-!QVKJd&O6E=S2p^teQAlOM!6DPdih6#Z#hM!=a8(~hi)#Od`YSVy9593W zJnVq-I4--djsDb}LC#aE&N2Ot!Ld7uo~iJBZZ(lZ*Hd(RCpp<{)~Q@NB? 
z#frG*G|!IPxRa_;CFhhRhkeCeWc*{y8}Qxkv3}_^>eO6{^Tz>+sOO}N$&W>eECq&MJN`sg z)KQ3)bOvM+LOgs)BVXZtDXli|QSy$eMq+!C0XW^$5k555XwkK9k3oaNIY+t*a6(Mt zDau94^8;`XOt0<9eEi9@tj*7-)2p#Hc0sNoZ+pH?183?h#{>PWvY$;|$pEtZypf0FQAb(%4sU2!Pq{*j#UmOsgZ^G|@m$Ps)!FlW7J3Tl`%~}=n@q*Y zE!0c8%@l(+IaRy~D&CEf$0u=1JdtF+0-e;t!+DKK=C1Qd#2XT89un3YMRNTc(@zVt z-2zFoiaB&2h-PEmVMN<YD zPGBckhNcfJp1svnJp%r^`H*iRwx(MCw4h$U%yv+g5nR>!97b8b>OquRH?Fjaa`N&q z65F{qUW1mMuMm18NDRBqEBlz$PPznfI_q_$6VACwmaVua{p;SNAV3(F;UIyL0rc4h zY&@pw{KGa)m3ow~WbNe*^J_sJ{L}3>;9;Vg*>=Wa`y0 zw6m{CyCEHko$M{XH3OX8xx_ns{VO#2UYY!~mc6jI+4jio`w}sU4X9`|ZLyO~gE3#e zIOV<}b8Ig%5$ri%e}Uwkm~JHr!h9*j_e1lE{@(7Or5^bK^{0qJu zCzJ%jt9c^BLyQQ#pq%G*0f8m`L^x6?zv*Sfj4A?s$k9m9GZWyl--!CpuGZT`0aRD# zzIq^*L$!jejbo$@9c213Y~tMUugn|Budn4`1y-vbaZ8tiY7l5^=v&H+Xpj5mJik&U zrjx?Aq-UkSU?}paj51aR2Exm14?%>a76i~7XuRD@26Hjo5LhOwSVl{sS7t*w^cb&0 zAhISfkQRU_+8AFqH8c1IC^f@boe#Y1sZ6SZC)K(7^Qn7l;$qzGSLfE-ms8s7w4UdLx6V-03OM;=zI9RcpZYl2tSmpl_O2S=4q z7u37|P|3m5amDI*G#j>ZVAHZ1D{lO8P8JSM)CAxx&5hx+KJbbLMc!clod?U@XH}3f z2U8yUc9c$FN0jk0YPcEK4P;JI{WKh^mY$utepW3 zP7AUB>Z+5XPn}0%-u*3m+vB$KT%kXT!V67}yke25utu?E|KZr6Tc94mmUgMfQ{R@@ z>)UbOKmbK#GJ2vq$?=#0D1!WpnQN~7 zS7sttNLN3h!Nig^0X3(S%vbn|dzIt)Yo(p-(paN<6?|d=43z0Yri43!1sxA zbU@bZKt<$iWH`zgc@(;;4M_y-p9;PBREUVdE{XZH=G8j56Hz0!>HnsWE?#Wa>$Ocv zYTtDk&C~wTY!TR&T)f@B8ZJ5wF1F33np=a6D@h>ishK9>CW7(d1>JZo^GD+4+9xPF4HnTqZ@8cw zNj1A*YG7>a|2MRW^Iy8Y<5^(P)3pOIl_nD#9S0{HJ3A*48!M2lkAVRg#ze#j40B>) zPQu*>cS&lP1I972vM~QYeC8QiF?eDQL0hkC<9A@xhwos>;Oez52)MyXaCDf}J0S*X zsPOW&WEYmEic5l2~s}Qi!H>fsgl;1k2&wJ&#!Z76x_)nR{&0LF|nk|MZgHsNK8g>Y%scY z;S!-NConbYOl#*m+J)^k=hdoXcNl|4QctqSf@X-a=-OS0RSX?sP=NH%LG*Od@~Kd0tdK}Q8Ncu$p$r&f&S?*>FVy$0Jw z4~wk|=cP=)C&&w{7@G`h4$U(_iF4PY60;SUZukak9XTJJ6|eIW`=eC{i``PImF{4N z%c9K84o}u6|NKX14ir5SH$NT8o$_@t;-(3m>0L}>Uv;>V-FEzF3ReA z%)W*t*&l*qX#__8Lg0Hnuj@vH@RN}VmniQ$(l1M5d&2W~1mj!Wa}-yI3wwCNlcbx4 z$|5bRs@}Qb^dQVf;1hb5-GN46V^kZU#C%{ISthZ$PLXD!dLv3D4=NWQV1r58Is_*G z=vGtpP~q0XD_x84grp;-l*DVH^7Y|ptZq9BoMW(|@_n7xT`~5m9Pa3q8#LSewH|+i 
zJun6AYo?<3I$@-hSa0O}fc{?IFE&KnA63-~U~II>^%6F47kRyM7Hif?-k1&|BHA zqX0*K`gxpff}7hskQ30(APSVDnEm&+y`l7M;RT5j3>g2H$?5EzzvtL)6@k}FwIO}* zxXej8eY=CLxVOX+v#qBEp6IA;e;yNgsg(nv-LI#9%MM_R1E;jc$__A&p`ylhnl^O+ z2_ketrgH@9>L{C&gmMf{1dxy@t?xOKM9jVpKW zeh>C^DX;>U5Mp+;QXtHsC@$!2~HrgMjf9;0JQI z?_&N4M=+!U8uYzv7c&O26+}V;*9#FMxjRM-n)tfBIf!a7*bjIrdPK{QP#HH36{E@C z7gU$BlxTlOz;v40mKeJy81Dygpd`@9PdCRzjCSZv)!qY8^*|(Ilg)w?5%|o2DgY3i zrcE3`fXW3W!Qvig)9{td#)?7Xj}=%7S##A;$^MKGgAO|?E)^oc|CSk{@FW0@9|<&1 zUO=U36|67;zOmUa^d{u~0QP;5S#kp)K0p{y%LM*uwoq;<+?fHcAAo}4Gc6$JKnn!t z-r86t>Wi}ezeu72LR1$*dAP*!pj1dnH2TPmN4_6tQ+!d08qNzK04dRAzUQrW4h2Hk z^IN0@Cl06!BMnf>K7+tN%_2=yNN<6IQ~e7fU+iBH5%)o0`H!=aoC<)JYNT#0MX53A z1wx@unjO$Dr^rg;lChjvYAFngZ%mcG3;6Ae_yWO933QL5WR(j}V4Yxvb<}AA!@e>c zg&)viQTfeez>}gXtR&qFlldAdq=r`E2tdyh{Q>PwoCvh!R|gQkEMG)_&#J#QxA{u{ zw(If!M~WVdDG*u=qQBnQ$rSb%;i@+Zn{|-Fj8cARCuC`W*ueJ-W0GXLQc;#nY&M|o zxX1eD^gyv%$Q3{+p``xtIDfSwOJU;1x|M$hr%zKLZYu!2qK>UX3VWL~uvcH%>mLQ^`q|JDjKi(mcl6b#usI!{9;bA z{e#rDVaNI``3k_BuVSL~18=5BRa2EU7i*Q(FrN+R0Yqmw7)`=XvCiL1MbT#jAO=cb z9EG97!n2#nas#Yo>q=2XoET~W7Ru|WGXjg_B{&MePU?y+g==(z0%@gyIOSCVeG{k^ zikk|69MW#dlN{)r`hn^||Fs+}0_X?Vs2Z~!IHGvGJ=RxyII0FH%Oiv&1rRO>Jm zG?V27j2p?b<_01%#c2GL5`-p=1X>dY-+1g8Xz84Qnt1Tyu>tEg)YyOQLk2DXH4@lF zL4G}cTHufpI3hvll)Tcva%hWzMh-+I7eXYuPMGd79tMSAL**KNiUO3PkO6nMd8ze<%}5(3WC;17Q>HDV6e$`0_Wc_y;CUP%C_e5;R; z`l;~~uG}AZOVkQLCs07k1E;vPpz^;W1m4(y2g5d^-!nA8nequA=rcJ1Pddu)oH+qh z9)E9SKt5+N4vGdP8|PmLl*&~WLQ%OwqZLU)O)<{97V=7y0*K^4CuO89>tr4v+(fCVQ+;&HC5#Qx1VQJ`Dc8y6R83 z(I6SvQc*zSj6n1+pk8?a=YdhXn2m|RX+v$-4%po4fhdduI0+< z-%C7s0n5Ny$FhmqWK6oWEOVwudbF^aEH7YX1?W83`28ulr&HMY`$NDvxTL2LLWOkn zn1~;UtSIzz2@sB%9+#B1QWTWafBruJ*#UoXOmmrnvZyNMZ))RZDyy^hoj@@N}q$z4Lk{`w6&N#QJlO zwaGUKT8!)lWTz}Oo8qqF)BN*TlfhYCfZjM%pcx<8>$&rG9K(+>TH(4IlE(XgsCw(T zsJibBR9YGVX$eu3?(RL zp1t<7*IL6lv(DNHNoir+x7VJ#mR&9LHL$d>Yxc(G>jc;-frHNxZ-tBhUEjlI`z_EM z8GWAmde^Uken31Ilj`v#4|uHc0CZpAk`x7VLHiwSn1Pmd4+zC-(q+-`wnxIEP09Yc zn*w3OG0%&oMZ>x7>PkDk8#+j{bW;7C#Po*QPR*>JcGUXy{?_z>|F7PVZO>CyDHy-6 
zy(toDweg~`;KtmwAne?q3rGYihW9;_yymWbT=q7tlB-Eub~iqfKaQL6zq%}q+ODXe zSanLR2g-y#xS#F@HGjV=e>~5BAtl7?qTG-Mt=_FAX(P$ML(Dv0{$#)cneJUu7^+kb ziN%|eG^C-_MQF}|NMxEgBpADk# z@Smkj=a*d^AUSto!>zf#?~D39JX~2ztMlZi8LSdX!(#jK9$sbaq6LDh%;R#49Q~@Kd~8Mxm* zUuqVvD)9QiXa`w^HKEkmCF_dM%tscxb1Y!I5x(NFz;kGRxVZ#;rw=V@$m*jmH@#N9 z6TB>46ofZe{OLZk8=2L+8FubWzc#yW>I(R3q7+6eHpwVAr**2_gAK}y4Oou{WUx9PziyrDv*vZ* zGt}~m^UtXb!Z(k9=Xr~&W#XSIb`>-|o`Czx_a! zpm)B~$|)AP+q=LD&pj@|ZjVP0jgS~(sy>3&yXMujKi<%QSe$6hB^S)!BEOw zkz5;msF0dKNw?00Pe#&~-^SdBjo=k>JAa>7j>mEq_DcTqpOngA(#`JWqX&0oi=Sd_ z&#s!^9p4wdA#q29cu?(0U)RWR5a1{+li0P{XYdnT?IC_N5>XLZ>RmgfcY1X%=)-I) zaLHbB)aq`5K{AchW>J1rLhu|Qw>y{rb>)VTkrEOQ`INZBcMlegDM7s1>u#VU-KwFk z?;E#=YerG#OcGo!@A0!Y)xy!Yt2kJg4iD^aLBCg&xc zuj&N*I=l4d%fkIGSJ*1-;!|X%K3xpf%R#JWHC3A9sprOh6dbD#1PR+bwO%>ScAulq z#9nrUKHO>V!Z5qKR+$f#eA!b&5@7WvSLYaRzD{}Cy{{d#ZgjqnPVSp4Gx<7~a`Hh- z7=1^b_M8WrSr#4M2G#+%r-n!C7a6+^*wJtC{C2vI6HvQ#vlowLW+MF{R}7-SBk>P+ z{!iuS-}l=_n`Ki5XQGz|Xc|5=e^>}aSJ}PPtEQ)2aDvt^9729<7hL*vO4yc5a||PR z9}Mifl5CwUvJX|cBo!OT{`FWi7@uE|d@P=F99&+#*p6@A6!8VR)-`ECgPloW+lTH@}%_J8X$jL5e@w9I7z73~@HBm?L z@;SNRD(rsSm@B^1jV^1n_Q2ebPoHF)&@S)qubXm`tI3m#!?zdm@T#JiRhzS?^#$Bp z^ro^h4y>Bc$FhJ2`!ti?Z&VE?ooyb0PArV2VX(D{<=7YTOXEzvl+rYE;1TR;&=Y%6K=_Ya9_ltgs4-~mX+1P9@T~}Heb)8zln!o$+m!k z;GU-iJ@^aw49hz_91RNl-tKd!7)E{sMwyn8U)o1kZl79Wmqu%C1)5+rt~cH~T{w9L zglzY46p4Dr3S4aKJGjhJEp*@RBhSukSgzbdoMBZ1ua5fJA$UWOV>`uPv-bI)mXP>C&V&y1yb-KWng2J?!|LZn(=yB4c z;w*p^T72`7~e?P2Rx0-z2%-mhJn~np$6#ruC`dOf7Z2dQ9To3+@vu z7lSQ__PqU^=$030Tv}62BYAV*Ri%`hp-MNZ5jueVEhl;9g41o^cR`o7BGg#-J*~u2 zi=nVDXC6r`2B8ZMj)dL*bG{yfH=LY|`S(Du224;OL*7q#m3r9k-RJ{MYYy+v{#)aV zI0F%ks>dHt<{rISK?S4imowNLUlgo%n@P^g`^FMO7^g9lZcpomw{4R2Y{zcAJ~fm{ z!BVqealKu}u7j=sBl_(|loD{i+nomZ@;o|wpSB1I+&MoSONJz9S^ny{K(Z)f+yH9( z8id_%ljg|(b_1u0A4;BeEgPU!@AJS;+vDd~o%QPmN>WDb^Ttt4=hW-TEc*Q7PN$d! 
zuqy>4-5Z$LwMZH`>i{;}hPQQoy+gQXVwxc(;va$g_zovaSi z@33TxqBQPDKA4CSO*Djz8u*bA3m&Ycqu^t1)CJdg4N(4+Bb<_4fTpHaQMe>*A23s(wx_k%4})bZuC5tguk3|zO- zgNJ8sKyF6gVTZUiLK{EJBE6lu2yIGu$=&g|XyC{kf&WsKo;Y<lDfyPvkB66lrM z45}7-XJR^s;j~ly=3%;B=O#IaI+d$~bEPGx8;;{!USQ0{cj(f&KICM6 zlD({pMvI8Yn7oAr42pC6%=%+~{Iu_m7|JlBL6=rlNlgO#h8N9l_dELes8C8_WE> zpJpwF7~EQodvtO{jUo@8jPki+U=(YL;sYJBN6hDnKfAc!Eiu|u-#riitqq5|5gh0G zce=b7v+?*kktd4eMXmQPa6}TKik0;0_#Gt?Y(UXKTdQe%W4sDGOOarS{d9S!-HP#T zg=cDU;H2z++)CX#jltyodHM)i=vNVAUdgRF6UURQ^>p}c`1cZ8$q!<#-}FWpwqkZP z>>~re&Zm2)&81>TkXPAohN;~YyV8#6#`^S9(q|i2#4M$#f zA=1>?Q#)I)GhY~NE2Ki+HpwhWttHM)T=M3~8f^+r=56&b1bJ+11)n<6b>q_GwBo*s z+GXCg4aa#cRsMDEeTdo83%aozz1I?-UUyetE51ll2J`aX>Vxi> z-hpQedXzxVaJzOWffXlF`3fV00@`~uJ^XId_)SJh7KTh$c_JH@#73vr&qz$4hiCs1 zqtCILBEApaa-zCL5>yr3m@EX)^8m$Ms2QLJ%l4J`E2^415+H8%|VLM5u;XslOD zREF8TNK$SzHF?Tcc?DHv1H~Ah2s!j!KBF1#PhQNnpl17Ex6|-(v@q{RT}?WGr+SH# zKz2Qtkz|9bq3bol7ik8$rQyrg^aRYoTB6Q$ot`L6y}v)`h&0;6aD{5CyST-V$C??2 zL;qmc9lGdcx&aa78%f;WzyW zQpE4hAD>m!y^p`^dyOagz703)He6xhM5^WE1heotI&aO-LMD9Y8>B4zZ6srWbr;?t z7Y1A17LrUBk=Sl*iS{wjL$V~I6R4Wh!uwfnjjqbzybAophBdnlYu0^|VM#oEBX?0x}^%toL~emL8lqA=HeR+1AcpB zebyPw7-6QV#b}BXsH!9R&LNJWo!uGdhOI$K!QO5>IMS!=WH%#urBfD28%8>$r}fs0 zDxXH$R|PP?thJID;ED6yffGC^zDVpkEF3H2D0TyX6>s}Fy0-q4J)OY30=^rbC z1EEHqW6$QE5FE8E)t?`SwREzn=WN!qOpIw#Xs&A4awKTEzp4UoX+z}Re~M6l zatcP1de%H?I?1Krogf31FzJ*hulO!gk&EvIw9Ip7`K(f>M22?!6y;Q*2O_-9|FWF) zw|2}aXh*Y#esqTM%_qlu@l%i_gq^TaYt_L%Do7@IGoem2Cf$v6VbEs#IZIX)X*_2| zs`o)VZ@~0L;bKYc9;??P_mleKElu30`rMw4K)qF!LeHwuE3tIhTX;?-6=y`N8_nAZ zAl`Up!+`=h<-~J|!#Ha%Ovl3wXxsHeTyF))tFIrpQ?~cxQ zajq7L$c2|V9O&Wo%aPU0YL!-wG{vsw0{6PNb6)%9%$S-o5j;2dGt=Z3s1StN&vpGL z@hGc|I4#5|Rj*O91dy`0E2b+}%6s8?-OpK+ZnUlvBnNYv5N`>pT_`UM<%)H@*gCI< zNufdW*fvo)uG(@Y#4t7T%1R9Gny))?&U)xsbkkDWyDuZ!+t7lgLd2*sJRRGWfl~D4 zN>;Hfbe?9851O@BVJmj-q?go9lwSSeT6|*xRH3H+Vm{;25IiA zDG8&;NYJhwE<=}y*65b6Y7lDYhW~PmGdYTj$bBK>-RDN({`sCGnAe^NzNFl3Z^6!wdk8qyDaml}n4sXO^Km29HjOqT0hR(4QDOyoBTq( 
z5a}n$?pTJ!>K)~|*R;l5uG$|BYfIH7?Mg6&3^JT;4-t-JK*~N>fBJkWN>n~3m znXYD0WrxLEH|%~lwzB>z%;T*kFV`MVx?gvy1-Bm;V;TF)?fVz_GI+H@Z5d}5Z9VMs zWQ)s{4$+Z@&UXcT`CpaWG48v#CikM?xzAHE9Dokb@s>W)Y>A4z2UD(|IwvnXzaUeaW^;7c8 zZ#-M!jmikUNjt2+RK3J?`@NdEo8=U6zMXL!UcN`+nQKi9q$egXDb7rd{DjcswzIxM z;eYh*Q;bpx3xPj*nc@BEFiJ0Ah@rj zleYKj3?G&~?CHA3yVSpJXhDb{qMUWu(XYTEgju6_d3ENg^gF)Fd*j4=sM&MguDf|0 zHn$v%WTTy7IG0NY!Gx)vYZCI!3B6Ii6tAW)@#ur?;6-Dfbom5M2 zkCOrbbSZCB=bmo_)2Ab<)lPK2$IeK*+7Tho(+e+W581~7Fx(EV(f+rrs8e2&oqwYy6z**h8lc7&}AR+t=ie=@$pK{hKr zseLHVe~XeaKHQ<#RI#0%D|y0<`~@p9*W*^(zSyHTnWfhkO=vcG7$v0U;oBd&+*)uG zc2AMM2aJxLXQ$R_Hu}W4Um}Hz$<>Bx@Ht}}eqpVv!24a||LPgy{nW$t@??|?S4L=q zT85AIqYX9FJ@NyEx+&Yyf7>mU-yj4buyJ$!mpBrAi@*-CM?*v-XZv^Q!o>w%(6I0@ zv9o}eF@^ECt#$UYjP1jP3a+?6Ty6Fl<2ZJ20Tz)K)*7A`h! zN>)xzCN3^;Ye+Uq4sI3@hlQ7emy(r(or#SL{CmPt`R~f8Obp>Y0y~(=GW!M*+}W*Z zOO(qH2Opbn{wTft`mwdr;j&X8bRSk@mCB(cBu8$i*yGQOyQa?}*>dMd8j>E!mDt&v z(dwc{+Ks0B#`5H?*g%5wu^e$IODS!F=xcW9Qga>3<@SyrBgVIU9YK}nlhilsNlY<8Kr8o2W!%aa>V^Y!-+V5lH@dQV*lgVW?M^fsQ zho6D)A^50Gl#a$2TPWx&_Q#&WM%riXeS!wBRjVbdD-TB#hxWyLD^L?Y=<6rgdg_yW zH?X+u*vpCVj!|LG*gfu%YEU|;_B~SZ<9_9K%i(j;UerMRpn@SL<7UbH_|TJBB%VEL zm?S9PLo!MCXd~)~G36a(rW5^Eq9kkn%#&cb*+bZBvo!F(dj8!1mpjDG{qOl0IBv-C zucwkmJgonFZMu-G7xk*nEqH0TWXl2px^fSXK_WwghXP)lVh|(SUS+twAx7rFTDt=% zDRw7<-G)|n!fC#~LS=L-bAQR*+gZisQPw9!^w3;!rxutZ_nS93+Q@bT&S+UZXlS6;yhfF)Sf_BZxyT(9n`K=7gxtEaaN0~i!gHYSxm26Jz1#Xa(Y-G)CRWL)qod$)Vt0OQD zazFDTR#rq~AckjPHtJZV-^`YtNY{}7ZxhPaszqu-9@WChV`Gf=DX^wmSGX&bvv$|E z)bzQCp;}f6RKamtQkfZiefdtiO?UpB#`{xX!{WL^_tjP7jTkE+dIAIF=^4NOUXUNdnFT=?adkiBqD(Wsly<)Rs+v@)JDYl$U3yX zQaF#;vZv3TIrPBPAT*uQub8z-b~M3N{*!w1RR=xNlv7ysHTmSRBG*kl-MEA*`5rLjRCtx;2Xi!J6obnLKqy87kB~{L&CSq2evg)94ivF98jZX30JzLuF3O z_iu~?p{*jk&q1l`*X!^Y`H4j`b!^5tHlK6RGI#KhPJW}=otTH5$!`FQs(5L zAq6QO*m@0W0>_)zgP;-AXsS$8iyq7$K&XxhU-nHvne%xEA$U@R7{N!8Ny=!FU#JJ( z|I~C@l$Hd@8TVIWoWak@X(?7av#d*l2-o0V^9G#I;>~~3q3vl;ipvQI0@J>bX~u|{ z$u)u?6_nU~1bB(rVCoo+L${}>KP#Vq%SB+R?AtiMy?{wCH8ipPJ~)1GCm zF!%lkJGqAG7y&gnNnV;JO6;kU}T-yORse6`8T2Xkzxl$^ 
z5_*bG`lgeE(6ZTE37IEFW)uW>l!{sU z^oqO6_v>qH^~hgQFK81#Ly?!VsfUsaH81TV!H6e#jqpv-6~M`yNK%+lMf zRCs}GkV*7_XUd=rFuwfPnU@2yE#aZag@yk{&_1NM*(;3g$gH&8c12Dbx$SUwLvZl4evMn4-%7N z#gf^U+DnSh+R#oytM@scHGzJMt}4_eFERN{;w2~tRK24Ep6Ci!&XpMzsf)*d@^e!h@O+V4HCQe6 zlX&>N6i7J%wJMW>GAI1+Hzy4*G;De;pZFBqlXzn0%uyLM@6mS9Xyae04an4iAw?<) zXaX0L(fo4`=*<<@PrA=3Qi+nIYGmDltM^7d@}NIZ02fM-aQ0dgQ26`qS4Y~lUqq3TcErHlrriIl=Z7(wk0k!4&#Y*vB@8z zecc~r@<}kc|G|+Q0FwTM7u{bZpBz%Ez2Ip`M&4aSFR-bLv%&#m_p{n3MGGP*vCZ!! z3Vgxfwj1LNT1KR%hA8<6@_&K(hxET_63P^g|3PfhaTc`brw0859E%Bb3R$orj9L1S zulud=0Suf~Up&9zV`fm(0KD`kB9p3x8c$k&75=B}S78;9%df(q?SPu&suovU{{1gp zRed=KdqP(IFL=?p{F?<`_$b_pU-3Q z!S%?po~H$Ml>#QUQ%OY087{!+^-H!D5sa6?j&OL{`uj6(c;=dRvyzi@8zxm2gc|J( z;Q27+RK{2D-S;!C6D;MUrKKjzD^#*A*`QykCD%OIpS zkDnYP%i7MgT4t0b1+?#{>F>ojgHh)byao`nw9WxWsF^iym-~x05$i*QiR)mv9obVK zWEXge#JQo$U=GMMp1nVBqry?axKds}KZZ+L!gCzd^j9f8dsPf3r8k?Gd2#-iR(?w> z3;smEAO7OC>ibYCwFovoUY{+rJir`)`*vV#(M3Q)zeaP_Ietu65?sR`=f!{R31L|;_isuYhY@ON1~?|+t%}*^P%08 zBiY)A3x1aluhF8F-1Ia4)l&t1KmV-X$>^>9C+x85LN2-bB(3S@p9Ubh%G)QSh9lrW5*;o^^U}?u+iEx1=8Q8GeXnsyPOA&mpk)W@_r$PV9L3*V9qGl^tf+9 z|KoyR+qjeZ#;yNViwK}z?u1r2ZtZ7Jws9e7;9G8fd=6#K+Uqw$=4E(~D(WXM(ejM9 zAVg4%ivhCk>+j1-J(y%`+BCuA(SM3Z6HL>3!Qxu9URDKB(VIJj#MOL=$SE|ml{VPY z+ckUMy&WLBMM`>2;%)VvBi_cbOOmbap8Wt^YO!^XA%VzZVqki;opdNZ28FS*9x(qk z$Ui+XUGtOjaZ+$6?W>26xQ~S3m*|e@Atla^5iob2Y>MN6LhCDM=X93z1iRGm$nG(x z!^tvcJfRE`wLOn}#BhN{(2*UF&@yy0MT`AF3fOle*JPqyc&GnZ**`%uY#dWc*t6%G z%=g#ux>CQ_28dlA?!M)=vcF93^)IW#!hyZRuf9$8Fpj_1YY!>yIm_c0I%lhyay#&? 
z$Kt>)_Y;oLzO}|e7)mKAuuK>-Z{Bb+#s13mtioW<4hvzS+i=s5 z@&L2X4Z}3fC#2qEr((aS+tR1m9O<5fve0Sw)lmYr{4*z^YBy35*oU}0amr)2MV%P} zonoO%!Zv3{R@jB$_v-N42?>swUqD8q;!tP7iKgzL$txI5deT@my>GQ(+Re!YpNo%J z)nWsE9)P~@45^Z8i`Q<;alLe47NnS~fVdnkhk4vAL8I^M zKD!Mh4XO38rOoS8I-@9$xsqPBevOSQQXdM|+qWv`a!R9I8(ekj8=6`n_V$T{4NmG)ZYEY;)q8SnX zxV=31A-yEmN17IP#?~S)d-6jlih;6f9@tu=!+-y8de|a=<_CGEM zN}H17YP%Oz9-w2~zo#9W>i^_6SYSWCjLSVFs!^IMG8~`vke?#ov3dT-OwrxO-iIsB z*XNRoCCcjZQ^}D3;=v~UW`A(M948B`eV!1KPtqp)`lOTE-+w;(rM@$ zm{xsQNL%tE{b+eLwRR!odEO~>*AEy0GG|H7czT2r)j_R!U+6uk*ZJe;dECNj_+!$9 zlU7rbD5SXzBRB~q7R{HqR*bkN?ADDo9P}$8Oi}AaKf8u<2B%dWO6Ldc5`tWRQkJ&gv%ONkci0$BVJk3_%~>OD=WwaSgBAV&%%Db6r!T`fCqB=v80M zg)eX1VbP)Wp7)MDQ1W_d$x0ZuNbXps{pi7XGBt5=9xKmGCgRO_!pIVdPF2!zmF(+R z0RD3QHgDt{-W=~ZkKUcEW1h-w0OB6g@@`M>nd7h6g}?x}x#I9zpD~R}+Xbex0aAhx zh?~SCqFH~?W%$DF^;*g4kRJj-+C`seb(AzMEsM zzZxX#tC3xruEmo$G*V*s^YKm$<5C=|EjYQb7XNP|bE5+pjadoe*NZpBUB!6^7ngGE z_bC?@k3=?MmfO&(khg7+t;`$QUWggD~Dl2o5t5+-4 zItV6;tRkX3-Wv}wHu3Zob`BeTsq=gYIk@gmx+)>x_g1v@!O;C~Dqp_fGd8z36+Q2| zayHjf)!gdRu#ajtoj{5|R3lW<=(}8SAsVT?8c}{7%u8X>Y@J5j!#T9t1OYPU*{?33 zjebslV4Zw5c~gLgoXrWuYY773nuQu1Ec~h=vueeU4h@xPf@^b0dVg@{|=0Z5dnE?KPtGliC%MF?p9+%_9G9n~= z<^|P}lA7sTBc!*aR%bd_4;g^Aj3s7jaU3)xr@DcvBBipJ==KJ+y=(29ZiX$WKJFcw z-_+xVY$<2A9gTsr7kb9+MI)QW2;(U8lC>dW*WnnM)0c98_M0S1-oEoC%5t*lVt_gg z>o*i~1yuS^*Edz6ZZ0nmGHVT}z@~+-T#iln<*))>X#en*`O*F%wK!0kbVg;~JK^bK ziYjK>E&MdnP&LH`w^WKY6YKcuj|0utBxO@ByRA=6rr(`6Y3zEbpUqo*m{4=qSUpQ_ zM$-?k8u2)tPkK|$IW&50VLB$Os`mC|`qBq^kBCUex+@T4t4uryF$N+Q#9%!0Ib>uH zUIM&7W}E;=h0eyq`rpp`;N- zlr;o0h&To!E-)-F&WiC=99%~m!to(CaF5b^G{{!q|G&){WMy<2?uC;CY3904Wd87t zT!F?imIebV>3q_UUE_rFrBzBi**4L~zTWALD4KL{b9S`4N;u)f=jSm4GWa|%0qq?Qz2ra&m=mjyNHP7HH)90X;j;A`?Uhk{O|WQr8rdg-rK_iV zv)DBU=2z0JIAX=(-shKkxK+p*Z+vVo(1PS}BU%U%yUfbYGG~I1CVw`(8p55wJV1e+ zmYV!`kCTVv-}8~+W0I6d@J^6=YD7%Pf>gx+R|4pNA9|cjHY!-_deOPGsWwnyyk1;f z{D9CJ%|jC{fS807Bu>6Z3PW_qo4S68HnQO$8JzctTJ1nRHas{`*o{!PPT|X|m(t~b z=f6kNSF*o9p}E5uZBV#>0*p8dh9Mn_>ONcl}Y%PdkTkRN3gQzFzIqV#tIm 
zfrI_HwYMa)xDZjv^12Rj*mSIFwP)hN4^D3$;IBl;c zE1uqu+TcdVcqVxh`VuG9*lR=w_$t!MsYLg|ru2_KoxPnU13X`~nX~X>hf#f`A4Pn{ z@S=?B4X$_(*8q|Q!wZ*clfV}}Fg7H684T7>N#)X!s=!m!>Be?oe9oZ-)*RhhlTO7LKf>ga9kYEUvekMw>N=k z)Ym%4c{nbWKn8v%8tb%~fk|kc`H-P;XkA>c#M6XbhX*a*u)Iy(b~VV-ME!{rpBHznj&ZV>IgG>Q68nazDT&63B;pI?WZT z9^8r&ldt*IA)Nr8eIva}$FzPH_FG=DR0AAnAdE!B34uZ)NX8<#_ai98eI&|}|2B-u zyu6A2VL2$8Mcyufv`{jJ$V6(d;z^6<_z41E9KT~G){X%rD-e(JhE2>QboTetfUSGv zs(Rq-#50gSMNG=8)ho)m6*)ahTA~3}lTaYxykLx=8^uL58zdYZmFDSl_{E>qES(>D zSV6|}?E{5tx#G#?^k!NEnOrAvoPDE-!D2V~)Szyl2tDWL<*3!5HDA@awv%6&r5eV* z_UXvVcdaPB*Tt2=8swz^PUClY$$iv1-vphCUJXj<&+!&#;A=fAtOPobMNfFuJ$k^V z?j6V(OV)EaJu-@ctU7BPUZI{oD4iWu7@-oVzFMm?!h4W_HEWf*zgLuXbLvr`ltsXc zZ^BpUYGm?uttctMg*z`=(O4#)Sf%u`gi5c0jkbDJs0pTm7083=J5-G)sSpw9bc8Q6 z!z)0{8eNE|;Qb^uI4?5x-;7t34YdEnc&Gm1JKM^Dz)i{spwkdqKYLnL^bpngKPVQ2 z2IryLYyla_IP>qXD6iEtqCz#^#S5uN6e=DraSq?H(K?WDQNlLA0GZ6a+sjQVe zNDHQxVCGX#k|9CAW*cC6Fl0 z0vzP@(1f%%^gtsD8U^Y4$O_H`(-)`N9GIl0yQ1{PoCM6vY6mpGftC~EuiNqBhwSTyO=w5<$iTlqYfnOvdpwTmc34q`nR-%8Ct1JgqWyB&0pJ1(ohF@eZKKt$b^{r~#J+1_XYPL#5@Q zBh&YGsJ@QXk{X~`D_cGsB6m715$Iu5=GD+Aczuvi+$+lIX0Qi<-P??((Nt&R$Kw8= zP^(CRKA*%n31ER3aUe!tBmLJgDNxC^`maxC)D?pZ4^PpdtZ<%&K=FqD+tWdvWUfhS zG+!2N4mxn~XqzX&Bhg}={C@*hFoFDFP~>_`1Yjd0m)s2hTs{@bqXQ2D7+*koqs>_a zpX|4e95jaKr)&;Js)Db-{z*3n&G;iC=jXQ40Fy%Kg9}KF=@XP$am5o0J^F-C2J0fo z-T1u~B}?U>dnDICA!W64fX>_XLk844xkiE3?)MetDd-6Kf5FEC)#g->TSZi6{r3WI3H6MRy0LSC*i%H}=b>~$G738XHB=yJZ*oCzcu7$R zTHNBe+(0B4W`i(+mj&woG?e;h?9PpaPv@+8g6r_qd;p_^wu;3hwU}}%ht~ZjI29r2 zlaE-D|6}ChlboI-6H67SPCLr17HHpJvCDB4<@B^w12d4L zVk!u^gp<_PSClPa%xFiseg&1DIBNAuYS?)Zv-96qMC-=HGqfOVu0BK1llehw5AgD} z&%lfep)VfVWbC>TK@NfLx{%qNN}BF}PI)`JejR1Oe2_W?Ty33QZ=Xc>@Ff$VeLy=I z(El+779*N)SWMlxL^LeH{@Ga9loDRukMOqIpIa$$^AvWO7xj45sRi8Kp(~#6d*lh< z)8*W8?)k~l(uAY5EFJTm_IsXi;5jc49QTqUM8OP8Q;b-SD_3ri^s0gBGFM`w%h~GH ztM&Vi4+_NqWc8O_+5yV^#q4?0f)Ji?U>!M-I_%A)x3~RmldLb|Jjo8Y4m2>T>A&(X z+`pS1_Vt6c%o04(cZWJu*U1eJdv(=Q99rGpt$5z@p35iS`VN;o+{X&1fje@a&BDe* zst!?{b_->@<}d^952rOXckCZpFoF 
zl^O$zF0ZhkMLx6GJZZ@BOU@2idafa(d8@r-U7Ie_cK&7eEJCnM23Dr5+ZTsYmshEZ z9;>(4jSUn_!w-%fFo`CfX{FtEZHn)=b#@Y0o6`_{+I+1vLib4sg`anL)TGz+#r1Xg zW)$F>VX_6+#kyV?4YzrHen0VzpGCI}rA)5#p%}-pS@AGM{qA7g2)LNdET{1EG^Gl7rgc4b=V$0-F^rS z4tjsCLp9VeImCPWw=h{pyeVz?m{YHXT@a&%f6!HQ`XI!D$$aV0C%=oW>n)k5WhF~9 zC>-0Z-)(S8S_&*uN?yRKT|8K12BI?dTNjYizUMiU`qu~zZeJV6x9W54`IoLPlh2NC zv-tAIcwo1~bSSXglhO${ujr(@7xqX2;_m{n$9xqYd$$gxCkP>khmXJLd^`2EFsGpn zg6f;Fiz;|UW{u^LEs^e6=5AzsIjQrCTtQ6LD@}*4@>S*#lba@=|@iwM3^60!qA1`mN{k zhacTBr-A-;=j%E8S*F}MVmn-?hA-0VmEqn8@zwh-T3bPA0>Zn$_8|9pN?!*{ z2seQEvTL-kBZEn2;(B!_Sv22*X7(Z$Xab zk@4W!c>b$#%ky82+oy6sU=|g?%EQ6)KhKvmk=TTjavwDSo&HS8nWK7+M2Q@-+d&s& z^Z{S}OSvJ+GP+Fnk0UXfQL_e774GA6SB)gHSzf|xL85kBxMEL0+)K_QgSvd5lJ=mOeVRcGyaAq%?;X$_T2(u|Ei zg|6U4XpO1rKaQgjjopslF2o`{=WTrSxx~Uw{J!Jh;Mh_|FtMCWsHnHSf8=D58Ontx z{)d9#WBxKy95QhK@`vmNk=f`5FJ$X`0&A#P2m?;Gcl+NF0{H;3*6C9J_~ZMR<$_`S zK{?M2=Wu|Z)pR7!4#%N+m;BLUHd-1k;~|>wRB<&ZQydMupS9n2FT@fqJ=~nkM!ny> z^=RO(QH6#y)h+13_4$M$<5Z$8a}H_OO%Dvjz)Tm*e5CBVB(Rbu=E?o$`MnxEC|^s^ zN3!$ry^N5*>S{8vWMObi{RJ-{gsRmO@~e&5&}$Cx#4PjiMDtV}PBzuqKH4NJ9;q=c zYibG{_vGa=M79|6npygD*naQPn8ruDhc5>H2PZNj`bqfyVbVc(p=rk-%4(iy%7=NJ zTf}Eo7%}c4v#8a@5_w7jDWtO5LB_2zC)qp?S%Z8^N!u7`=g=CKDfQND4>+alcj5wmG4pqY=HGEIX71M+LJUc~|teh%yJ zj#F2VARjnjfoVqx+p2iQ^`8-lD*g0+XZJ1#RQ(^PA66o_ zD2wb|y|VckeXJ z!e~%Fpbbbu!Yp4_{Oap`58)y*!AKwoG~LMdx9gCfD_4tLs7n+VL397)AlSmA8s}oL z%KH`%!O6fpZVzF`{`n#*!$(_GC#AIJtLyJSUz4y}a|ZMM5*vVp{ryScn@DkKnNpJi zd`Z~XDx0ds`Mu;irKeEnuQ~BWqo*$oQaD{Z<5#1I-Vq`;{|5yQ?xRBaBTp-Pc^6^=!p4w6n!IcR zWXAAbXusui&cHa!nb$6qSR{m4jMw>#!0W4-;e5|oj522h8Wsa>)Deae%aYA!IzSH- z?1FKmjk@+%ZL#laZXa7xODeb&sCZuIZnzq6LS z@<2>gCn{UAt|gs+ei+H1-4}NCqU$T_+!pmn%!&ANsGg~3o2C`k^qdKTmgXfMEl@cM zapdM%*=#3UtUG+CDx322GM;~{n>UxsMi9OXj!hg#=uO4Ul#A)@Hje$*^Y;1|5HaJ! 
zZH8i%83FNn;*mP$6vnRR$`>)ODk3Yf+SE~p$xhcRO45V@DYEgGC|*3941+R{8YPXT zRqQLT)yf?o2dj{N`k^#`OlhF}@EY*7Z_bXpDX|ItbR2J$cw8muD2_jxPF0;*mLRgn z%7m9W{Y78gm%i(JxTlh7-R^Nb-dZ7%>ziBA;m^5d)dYEhVS^UmMm{*D($Xjq+wIaQ zbpL8->x0FK-2@7_7bz-g*{=JT#@H&VYq~9cLb_W_fEiwyTse&l5K!|S98m#SOrM`6 z9X@j_jSq`BVeQm6StII)Sube$l#{IL{3I#~IdA{lcXTYqr|SNBFMuU^KB2_B_hPRQ ztaSg}={Q!{eqMbH86z9{b=(c|SBHnh^6ogIA_)Z=9RzVwU08_|YXQQJfBo>?`gOrA z#esL|*RuKLo^(6JBGI~hH4%8Ae~7GP2Cptbm%pqr-Mi?FJg#wVDE{qIsYG?b@ACmp z9|mqy@>*iEge<<~7kKPh+E6cBfjljhKWSMyb!F^#y!+Odxc8+#7&iPC`ORHeU4uIF z!S4lxY8}f{Rr9QW|KPGhv$tAi2|kA#k zpy1H3Q*0+4e{J`Q)DLO6P_vQ7ZmK_q*^%|V4Zl9Ksgz7EPk+A3be8d-LEA*GoPI)!&Q%;%#&MGXIF(yNhX@03!hR52rj*Yzn-q~B$GJ&7tV<3cq48M*wYKVJzkXFYlR;9?>e zUL@Ld@#wrDYfv76#35=HE!-p2+uuw5-yP-u%bMl+FKZTZ%?aL8zW;^r3gBe@->pFR z$!x;O>W**kzFqr%Oj*QcL9YwP>j~-@Y7-R`8mMlP{>an{dBmput)VKr@c(f2j?tL~ zUDs|p>6jfmouoUqZL?$Bwy|T|wr!*1j&0kvPu}PGzH!cb{@p+Bv3HHJYgDbOHRrt2 zE>g9s7m;XBGpm#(l?vU<-TPzvCUL%fx>2V*_NaMfc(Z%**dD+I17<=FvDV$eo0E|H zDX=0O9&l-9mw||)qyQI+pCmY6Val|Goy*((>~?WqH{!J$2C72_=Vg?D*l+~YLOPoO z?(SqYFF_YC^k6ja=i?O=?6~ASLGiYmRYZ)a3H%VE>-&yOC zo@6(_rR`x^r9dBNMY;wHr9eqN_apsvAJWx%J@=sf>|;QmL2?7`;T>9!WFYwWo6+PIYW21qtHr|K)sMe{O&LWK$T)JL)P;PShFXJYa?@ z<=FuG!hpA{$L@|#kf%*v{EzD-2k5wJ=0gbl*_voxXCy?igZ+Z9%uJBjhW#`yM{$GYF+`)ZdDf_ZXKU}u7?d_NfH!D0oJG9Punhl z*QZIH6P02gt%fAzkS=lHdIENVgdF*`n^~KK;o(im)R7X)DJ7}t0Q{g3)gKRLoV44h5$q3eg`aYNfS~ajGhRTJtv$?(=MYtuKT&#dE zBuOBQ8G0SjQu~%MSx+@zaZ(m2FW4GGp)@3`?sz42{%sh1!xqLN$(6ulYaqr+IvQ;8 z!ZY{p(}&h<^38dKfM2dAQCYr`b@i=ZVVt@j)PL{op7oBQ*#FS}ZSTQ^o)8h5tcY8; zfbd_Z6#E5DqryiXFJ?phxBK&&^>PDvR|RK-HhTkK{~^cAGeN;s?($=zcv+W15@Q}Z zQe#GOu{2mItw(X>NQT=oA>0(4Y`!IYij)1cxerQ8Gj$rYf;qA#Q9oxo8OO(yu5}naUfS`TTV9O(m8ItQId|{zMP0 zgk{D6**WNYi_e7B7-y3d5I$ED06WUuTpAeg>vXj7SM*Aug*zZIE4la&<0%1uF@ ziDACPP*#XxWvjt1HOcWZ0q_C>X{|`MTZ!JXO6bnk+Hw4OJ4sWtw>{cqO(lEkH)> zD}t`-V5-LAi3G1WtVpU?;*kCdGw#l?X$&#>F~=T#_*#NkM+pbOBN zHHA9L5ZCjK%2h{_A^Vy(xcbD$9IgQnav~n2=zqYvC?hiwP-zICm?aWgVKo_!iM2)B 
zi1_>!7#nuJ5^e=$1N+u+NtlZKEf9{IDf8`wVW^)5S}8kb9IUq!-Rp?9=P0`Asg8_$ zO}au$)ciM5{%wo6c8dUWYyW11mE0vFK1Auekl#<$1AfmoEZN!LFgIQ zUYEKWqnz)NhN>Aiazt-|keG|?Ep21IB6@QiC?iAVEwfI*FS8aIQe1Sg%?JNJ``@p= zPL@%o0aiU1j|N?={Hzr>=S<=`I4}~r_@Y#vK8;=U54WoVux=__W$SSJGX5H#kTg=# z``P$sB0FmRkZSJP``{B}j}iDsk;Ye2Lvh-*hDA-vYGtvP-UQa1mu@hily7B6D1uw; zM)T_;jjYJTuy^2^WV{u199semdJerOdx9%X-8Ho8OBQ5jA<>aqtlHbjzKbRF6kdy% zI*40i52tJfq@ugMtJ={%%8>{igkB8p@3r)>H|)Y}(${R1btCh$z*IEND8)>d{^sY~ zD=pGmGDIz!hd&LlQ_=FGKu~NYTbtE_`WGN+mRafg*fLe`A6rP4?m(ERAt^X677DFX zMUN_N9N?K)?{_wEtiIjNQC`ROY({BFDy{Rg+>c!b_?KkKD)13b#ch>y>PnR>ZhOYv zmdsf4PO-n0kNlQ&f)EO(2U=7K2eV|37JepSmqX{rPU$rULiq+~8^Yvwxdkegz!M!s z>gWEA84fCgG8*;bXIBl$UMr3(*S1vs92%bIopF%z8-=6!QK@YT&O@KXY7aOo)e%=) z)sg@s;B9vSQ+=1A;aZqgA;#7?ViN(Do;tE_Lwt_B#^{e*RQIejQzepwZP{hM@>`D< zUKPAt-AO@rJ>kAgg%w<_ZLiH=wB*sv@yGw26;4-F|7%vr`(M3I^$;@+8bPan1uqN# zcUI8O&7x6$tz$ss$-HRD#VDY;T-Ep(=6J0 z_YW#lfNclQw2CxXx|-hU1xWZ@$@e7u`}(wGFeNd^Fmg$J7d!?UpVZ?J`{&L_MJ~0u z_@p3$M^;C-&xrY-OgIB8~Lx4{K^D@BRMS*;F( zzS>e`x5XsSQqV+UBoA5C0}io4TYq8|a!*NWfJ0j0MGbZOiwv)j+IB#Js^TOu>vR;? 
zbz|98J|wL5w=^tRZJBsLRHgi=G}&R+4@gBrb_a;!g% zexT?D5Q75FJuer>a?*`7Co}i%p&I>lM0l+pNtcoPP!?TpfrcD#|RTF*IYxt!*D;@pPs> zk!XWYINcii^epnUO18-C$g?5oEepU01oXs6yZ>{%wF~Ka!eVxZ;JQAF z!kH+t`MUzQh;^eo#>$AAcx_1nSNpIy$x@^UV9S|5JIK59yAOGAz+b}sHl)fosF;UO z*vM1EAj8|la&T-Zw*Nei2yglqBzf<+lH6@{qf;SS+BLEpXzae`y8HkUXP@D3%m0E| z=K)-#^eG$Wx>40X<&b?%>7EKyJk5SEAWKvbfsS>TW$N#*CL)EF5F3Vl=WPh1+VB;NqTa+PU{O@%^PG?z1=j2(@YN*Az~Y(og?*I}ZO<=Y3!xU$Ln~J2IS2>1t1U6asq8 zu?Jz+yDBBxhYLL&&55v@8^p$v=qV#c5XrBCkc$SXQ$w2mUk z;4cqkmYRRTI-mvr*qnJQ&?;ajCC0k>N0y*pU&{0t7BkUP{4QYltG=r=LK=@gx&xQ; zA!O3z>H9ToBUBl5`GDqxMWyk`3Ik9=sHJrDdqwt?TUxwV*|XYhRBt?v8Rz))#li;H z6jf1UyO8ESmJA$5XlryO&dN=@0{)zRWFPr1S=FZRmGwSL&inD*pzfyfO5VBSx5r9< zm0c!euZB6ZOSn@+_FkS}pk@CQ;i=N}Fy%mV0yVfCyu0o|Ikg5QaaAXs;3UB5o_;;| zP%k3=>R&M^cUj}l+CiW%bMJkP(zFkDA^szjf57@JYxp8y9CFmvc5)u%zD^uZj_FE> zCOGTePDU|neOy_|OW@=hAD`n=>^oS2*p{^ek*TodX^>A~FQHmJLzAzIt)7x6+V@=k z6L8cUoN%o_-)YboaXAyoG#P=y!>nI*c}x+btw89ON-oonU%F{f$hOkm|Ljp!ZSzBa z67H}0z^)tCj^ruv(4z3$Gn$Gu{oM6jU&mzrarVphUV0AKRS0bUvAj!FU###fZ$dL2 zbH--kUvS{$##YTaq-k%^^+$)dLxyM;t@jkt=Sna)mMq@WOjACPzl{v!7V=)JaqASb zg+PU<=Ud#!AN(RDCo4E=@B z;sEcK|0s*Vq5vcP1#R>HRKS4u+8N#Yx*)9Me4~$?nyLPU{V*4>yYupjb=+g^(|n{o z>ouG<8O2kahb~r8Db)u==$EWeT1jd^HmeY@*#wk#`Xe>+JAc$szkwFJMN9qpNa?;m zHM^v~pss?|Iy;jjUPu5*3Q4UFTnZ9>cgMA`+O*hFHIPgSSyt-5T0QtlAGR3;p-RK! 
zxUHc}0Yq!`e;Ge8wP1%5*|Go9kkLGI5P6FT?dj*$F#QcllkC#p?^!q<73V13a8Ala zY%=YRO`t`%9aYF$`jsZPhH$`Q{`mtJ%&ax`00{l6gV7;a|n7^yP z^lbF!>Nc~zf94Wb;@7XNwma<3)wa4`Hpd-sr^SNi7i==-09<}oJZ#~+cXV6`@@&M% zq>-wg;?>nhw```Qnsg(}^f`Q&J;Bqy7$EXypgXM6*-!WDZpxXn&`jaOzHK-fn zyFllR$dB?ZOhPoQzE&Yf(gg`%fdQICyqRq5}lRBFB!VKbhk0L2u2)3WmdI-pZRoDer zv4YgcW+!l~oO%cl7eCQNN#eC`6~VxaO}K{btn00d-|bbf1k$tjrYzLSZ5d_FA}qf( zZ#7?~W!!~;)%`l+U)mZIL5}E?76+{B0tDxthEWsldf6mYUgMQ6iHl#(i0Iz&fjVg& zu4%M&6%EJGTHR3VjCe4f=dJ#y?b^jvm(8@6J^HR_3+&A}lS_Xycus(ig)`siruMtG z8@W4`_?&ZBMara2zd|?rKhC6pcGt4n-W{IdCPM z;Nkk5T`*((l0-c6PJhzs#Mt;)Dv1=q9t zK1gWePtdX=%7#_fGSl`q4n4?z+?=VcPSy|}AK_!fDpBgc3+Yyw8e>|O*WiDzLf>`|L2QN3}$BsIl}nGUAaO<^ff z!2_6_vC&;EyAmfYST)7BGak(nMdW<>%8eVY7pFqQXSZGnG5Hla*m7)BhLr8$EIX## z;HvW0XtyX>i5Cu~;J2UOP?R3R#tcKOhs> z*!j3|IMej~NM6Jf}=owSO>KCbpo1I@Bk{hRRl=$*9VFj*Xb+=pZ(-uk< zjE@2bWOQXHP`>m^&<`ks4j#23%zFrBigS@l^5&O41yqAUSfDzz<&bT_7rIw{^NWrS zV|8P?a&6j=4bSjOIV8>yj(^z5UiH1_$04d5Y>l@F4gjCo!UkWYYTHYGrVIM}pf(W~ zlG0}O?-t337oyhlQ+*p9YRI zrjk2F%-Vi7H-KJ=jNfa__@3}?y=hEk#2vbe!{Q9TUN*e4A{Z+7!x3aIpG8?L8}~(T zDvO1Qf{V`fEz5QV?x&-xkbgzK8H#GIjB2WX;O=y2X>W;z3fG0A{DBgTus~-Ymem`x zqx!$utJeVkIXrws6{M3RDLY@~kDV2Go0~P-YYOh-C4i|>XY=<|d*aQis%HNk@i_+G z@s`}byd7cG4Ac`#4EGi1G{F^1;eVym*IcDJ^BXG)C$J-UY%1mF47?`CG?3`q*{koQ zb8ApZ&gkW8<6iL?4zE!7JX9C5$FHlQ?nv4Q{!Iv?yrkpyriUxbgD-zPE05w4dPZ82 zID<WNcD&8TV;!glTB6VUre|W!Pkz{(njHRKO2fsl`yTVa?Y=N}PMuD{d5{J%md4iV8zny*)v{B|fot2EKVI zeppE8wCmTXo}B0ePR2F=-_72)A5<0eOGQd}1#0`X_x~rqkDi%<^?&tO2h`SWa9H7h z?(Ur|SfL-nas+%^!Byug)&ntATE6fewjC{)G1g*_pJ4f27mhc>QT3qpcGq~q^1pt6 zh-v92ymf+n>@qnHhPWby89kK3t0huTCM1vZwWJ?9dZ3s^Tu-3==F-nx#DtpF zqjGp?Givu}8IDqJJeH*0^zXm^^p_#S3V!xhBGHb6mq^Y8k zn<8)=7=x*GCVud zA-6~q{dyKY>}k}cbwhYW>)h|Ubf7l*62?6cv=u+$fPs^L*2A%frrZmZQ_vO-u^S}r zp*{EvRvicrtHvONE+e7#!GH0h7;bQ}7IC&aZPzeNuzpqsg6V8@yg$9W5E$#&uBQ86 zxD#i1EX7{0$5ydC6cYQtyE(7_SjL%Nt%^8_y33!{?3L@j`eV^5_CDq;*rVz>=vR=EjZ8Ykj=avft=a6@1Yotz;%zVK;p+y zV-ksFh5$T2-z6V&UyH-vcrf?BalPWpSqNj|Dir+X;o0nVrX~2p|9RhiK^9h5c73h< 
zME&E0hpieem%iz9VL1oX$Y5~4n{-shPAf~yY=QSH!wmY}zRTh8!4f)NfkA1zZ2um@LQu2F8HYm#s;!M;b232n^f9_Q2uCZVW9$O)kd&rCc) zeu@@ZgKD0%Mi`pEa5O?-_xKAIrzM#h^<68BXqKs{BjExYV4Bl}y3`z%tI{_N!E}46Am)kU9Ne2)p)rw;Wmf7@=q^)A>Yiq9NdQtg zN@zv=&=FhoB-bhIBj}}R;Te}R$-|_tb%_fHYYIkcTp=p_!IUfI@7Gt1=q__HM`Mb3 z#w`*eF3Q`&FiV!*er0`p-E`h1@9$f%ZYn@p`hyfAO#M&HE?C*cdfQEP6^mmbjrI(D zv~wtxmiOarNS3$5!d6!)nA>`M1VE@h|Fe|0VL46VKPI(c&u8MR5UsY0Y*V%17<6bi zpYPOe6Dpv(OX%Vcen|BAbS8uPowtISKgRsWy~U`Vv`)j~P9t-Jm4aY|=fS@(;k^fVZ{#WzI{(rdkSlVBU zPNx7EExU99<9z((GI-3pHzT4^ZS%vwV$blSkpg*%8C0BlrN^=sT?$~U+C2vvV>HkrsVkhrL*tH|TQ6pQFKUft*miwa4NXOH{LgFR zPHD95sr$Jv^rZM2Fo47J&%%ezB<$+Lty|Ff zA}|xC!6F`MI-i+1BFlLU%4f`^{7^x$k?1~T{4U*UAoPxtGr_wJ6Y57$EG^f`ovrj9WP?J%x`31i-{T24-16{EA3A8miSUl~}(YgZX2 zZsTPt98{aTHmVm3YN=1_))E`l7A)hYT1&fvt~*ZQoieLFo;ZRynv|4x9oal5+X{Jp zRKF|o3Ri4KAOfDW2qwTpvJFDLHJFLY4PliT;Pr94%1ZG4d|Z*1>j_5%&5F@Wc$EvBx$VH)XYw@n&l(QB)S;CqtF5wJ+_(Rn!61>XmMwLSPPh6MS4&$30?g}o;`Nirw`Zf7UOc55mgouz zlQ>engf!GC=0J6*xCJBOpo*FBD98-MA;V&ZW?)D3kTe^52N#pGEDD(9_o#EIDK3 z??+?oNmv>@I{hT`Zx&BYaYfkKc&_%Lcq&8%&yFEy4z#gDx#xOv6vS%@iX0G0`Lnu= zZGA7x_y9iaEanzBc*lTJM)BbxZ)dwJWfL_wa6$!Hr*;^&Cv~Hwo}Vv@$KQ1EmCEge zql4$k0{r;&Tpspj|v2N20?yUncjwN(5zmgT0?P7U%&w z0A?ahi~uO>Co*%F6SB;vRhM^XTX$wN;_NZ+(LwO}dHoy{6Ya?>%_qVE@>Oc=Jcb+b z=##!Tv*+nQk4aX%p~Hmb>rfJVfuqj(Z@CSNAIAgE4|axaT!9TEY8ea&)}!Z&_5Sx? 
zZn@mCVvKu6b-Wv+h`#T2bj7etq`AMl@PSMC2K;cJSJ;AGBu=O`lfO!>8+JAH_Sdx1 znaub9Ezy1mQK2N;Dtr?LV_^S3!H4Ys6MRSx%FLE5djX0KkVvl-{KOZcbw?TkM*?2_13BCvcZ4SLzw0?VJ*@5k3ru$8wzzQ>M{8b~kfocDe_Q;g1Hv5S1CKa8~A zbz40xx1K0=y==|!emXuD#HHyQxQhM-o!gL`-n5xoabOk~_L$8|k|I%lR2{k_mV(Zc z`IDVrJ<5G^2+XgY^%=Bqp#dDm$lB1s(cVbUDw*E!JNo~}TF?HUHcB#*`FHi?8|WCq zm(O6sQ(cjCAw6QyC^05Mhd%(_ zaLYKYf`7`*BDI|?hLXZ{+P}(q&^2lfpI#BJq80vT+%A;7U!Pty!{TAp$~nMiBsZm!cdhRsDAi zK&U{Iu%Y_9MNgZ8FT6C*Pc4+CWTlMXM4TgEY3#}@aE}$kl4+^G5T&Tejzon8d%}@K zMH8yCQ4U-g>5SCef9RWl#pcq|ITahSgGrPO6z}UT>u8Y#>=+Jo3rI<6z}s4k5(%p& zOeek4$=@&>$37#4*G$01pll|{Zoi}Kk(iKI#V~T4FqV=gU?Zi>P}iu}y3NwO`_@Wo zK8?|+bw7_GF{ZSXqhs7~Bya{tf%(G42$!s}ch388HN~7R&%t(r&xqoG9uiK-{hx=V zoq$miPs3A6ojZ3$mZE=G2*bHb{2GX6tR1D1{XpOpjsg%Vsyw5&3-dJ;8v_b^qX{Up zQ!2rKlkjTB_I-t4f2b5xX4BYGNm;q{Y%KMhs81n61p&nwZFCSY$8z`jtPwmI41=@S z7$>0rK>g=K1VSIR%Imq_`FZtBn+?2uqaW!rd)!Ar8KiwYxzJ`rTCo2vD+2?m9E8qo zzJ2q72fTTFUM<#oo~EFZ%{SfvFIUIgmjouyH*|4%QGH2$ooCHQyEOyREoMBpw`|x3 zPhJh5eS#Ty$%n#Ebh0et2(~1J65oWSc{HQ@cmy;?qd4dcfd(*aW_pbs!!D?I5yc43>`SnuErn!O=%O?&1Q z^XOR~p%{U)Gj!Uwi{1D|4E>svxnmQNWvGXj+uSYa<{0zbWy^XpdaaM}t+X2>d?sGK zOK?mQqGcV&!GyTA+gg9K+h7$S)9h8=)z(;HT~j~ihF}Xun04HAR9|bDnb0`EW1brW zph*&3;+oSMIVHHF_VVxj(Y*#R=iJ(Hxo>>1&FPZz=l}TcQd8rdKFH(G8}BvOieelb z2A*ME27t5Mn;v(Fe!ulQi@k|G+W`5S*{B}CE&rS2qTu1@Z~VlLIi~5%<`#{p9%wb? 
zO~MYL_FSdU_b+C`$8Gk9oR!^ra;|&`z-~i6Y}toN6TB9*L3MoBF<`S;`Rk54DXJ0w z#CQva-Jo)ahpvyljp}l=hM#S?g%u{(7?GvI7nJ@!jpIBhfk46W1Y?U&s*)PDrc~zu zYsFi9s(0cKanY56A2$0s%0FbY==v)5#v!|?zqn$%RMU7Qondgik-1m7N795M03?S{ zJ9I=SmI?_TzKPB;g5mHTjC)Bb_bG~30STaNI~dT~s>692H6Eg@h zkT|r0p6HZ^<9Aejn@3{r?hX3{uzJh1-9`)}9)7@nP;jk&`oXLn~aF6jZamJ_rVEU=3#G+5V$%f$;4GlLdSzMYCyV^)D$thWfw9NZ?LOSWxy(B$6@B6T3^Fx|z^&EG_MnI0);(!0yxsT9xR zPvkH}IlvU}r$$WblTF$~BjjV&)SU{vP}uJLm3$WCUU1Lg57l(eYuan&PcIP6#~)A7 zk+1UQi}xNO^xM0X%xM@0LTjt5Ny#YD96Uyvr@swFK*xESXO!Y;4;I(PJ18GAfNxhO z%-($1i(E{{ z1zU#_Q?COAFNOvtB2Xuu7RTTFFuMpJS(`dK*Q(D27QWfJAclzpwi9NB8Ibuc8WW2u zb0p|3+*Xq{HXw&^Dyss*MrZzZ`v=(3_zRGtrB2N!4NvE$kPridnNu=ZhC&6(c zhjF^z-UdJYzxjNC_^rGu#i3^3r0+y&HyuT>T;H)4Ru5b_^N-{eMmBT)-tBv6E6_U{jkZCB3Zz=>u6tE)iSgQ6qenAQ7aDb3kfw}SDd}hp*24eG0 zrJW+=zLj>qwe{PK8gF9iQfQ5jL}ubQmGNOhqZ4h?QPypX^O5@``~7i;Rstbw4Y7~p z(*JA%sXunqi_qKH#u7}N9cacB&6LWsMXWs-t&)cW_Bz^vs-yD2+$4Y8ow3lXsN4=0 zc@RAjlNd%l8R<0+sowCMZsdiXIJ4WJrcvLI@fu#t3|UKjPYS-A*WH_{PKOGzXZhtf zm{c!hi%a-sr+1cyJS#wdelH)87-mKe9x|USKW~krw;3^TG{HHZV(Ey0aqLMst-^Ad zBv=myh|w@G3R zX#N@<=pfp@f8dsVTb>M0$B&kS;l7)~(9dXq6gShhVeX#N68+?cI`oYO;R~^7cJ@4bC)S@am%?Q{+*b+SW+gXzdjF z-MQ!I;)3ZauRW3#Zj{*8A!~hG+Pj(%c-3k9Ry*KzoZk>d4yoLO&(`Ily!`C|rfu+; zS1S6|7b@|>jQ#3ZJ5Pzu<6-v`#1S5jWAXn}vFe(CRjiTyppsyWbo9w`}_XowU5X>^)Ie3j!&LjV^|2E^5GYR zO5OhdKl2A5O8Ou3N1z0Ke&Ws-&ujO~`UN1=cEg6TPh`4?5*8SWeE%PwSJd_47tgEu z0(|$2=jGw*7n)SM5d&JGkCmP?2kZ|A_Pa_eGz6wc_G5T&kKMV=J9aR zwbL0BuM^Uf;O)+o2cFbNf|*mrL2;sYi{laj?gg+?=9QI~Edm3RTD3L<>trRKTN2n{ z`YJGNg8>o;s8=~2RUv~g%n(&6`{6-f_#G|^t5k4mOjn_SNx@pOfj+WE&n-D|9J5HL z5X&DaH?UsO9@wudDU5$io?JA6C1WKEjLT+#Z7=5`&pmKz0&pueWh-y(GE_&FMX5MwOS=D|N7M(U&bjgz&*+HQz zW?@n^m-~>VnA2f$yyY9{DD(-05CmgQSXg%Z4wE zaCo|ox4L%BSKGj&%=oVP-AcG`D^}1e!mBj1?raXV6C6GFv`|9Vzu0{M9bUlWqKlRH zzV)~#dcO61hlocQ!!6(ZQM4g}y1(f|I_R_(rSzZgon1OgKgfA&utA zcHbc84y9=?a$>F~c-gZH<2q4a&u5O@Ez25A<=RnCW}jv2Yul+E7+NjJE`Z>;TEfz{ z9|&l+p8jiCr%2KP#e0WR_@@BLB+bXd>mRI93?1y3EHyxU+!F-DfgR9uOGN%*=5$-& 
zqpX4JG>f0k8cPRkE%-vN+>orXqk*!CBMM=jNs};p*Akzm9hiuXbOih)avcyIQT85` zzdhxfPw}Xm3_TN!eu&+vf2t2R-JLTXu+g47^i3POVT4(>Y}^5b!E4sHE|>jBlEi(C zL4Dp7yieKQT1Q|+8w~U$AwI0@rf&HwIA z7d3T&y_*TXvKYe;I_enTxSuM4_%D=OCPZ^r$}LFz^!Mq3h2S)jmRa6HZR>ZhRQ7UW zw9ou;)sChOCUiUUTy*ua7DX{_o<*12h@u{#x6J{oQXw2VBKch#9Zs!ZMx3655|%@* zj$VB&f(_8oC%zOl!j6exTzTIpq3QskICXpGrFeBOiO{ADr`!f zjbP-x2|5oGrnJh6nNL&gisA*$*2jO`TB~Ur&l^P(_*9owGv2h$6NhnssEf>V9cF{*F!)SowL5&VG5PxKG3O*s4<9bd z_yWFX>acUK-QMV-d`e(X>--2N!#t`8c$g_lsTs z$W(lSrXQ8P^+b)Gz&pYeD2<}3GsgN??u%s@Ekq3`#3_U2y3U^a5e!rU;uz2A?XhJq z9gNjle=Ec1UeS~E+bSx`jB;$X>j1BfMscq-roiw+oM?nc6t?yeCNiV4V8-Ex?SI1WHEydL|{9nRz$k%S_-laKCZ|?7qo9SiPb2mK|TXJ`f*k9 zmI3W9RfcJONYg9J1+LrpP>QJIU9ISwMpVA}V5UydKfnheqp66z zj)M}UYYm|Xy{Mp`o9KHpEez$egx)ilI_8P1UQIx$PVGO9A$&2to&Dpok|L`Jfo#8r z8?zoGw5*-TQ|NNGAh_C(+|35IUqd)Gy>}hZqOcs}S`pj)(8N>fO5x%eiCKu`&i(?B zd?V_GfX$yL=!+())8xBVf2Wb|f3|dTR=XH=)nIHpl*y!$3v-INYH5W^nRWHLG=qkwtub-%G;g z&Fez6tg#nlu&zhd3Ah`3ZYg@zMh@tqN;utp>3bRHpD^^S?&HHbW+Ef;{BvC8x%f1` zo{Jpo?CCY5-m}&f0)PI*>-8GkY}0MUnZ>0X-fmEQVA0yxcXd&=ve7wUf9wI`KV!XA z4-+#1rkStv*t+Qc&GB^0&Uo}{+gL*JLgdP4!?$(|+b=r^2JnENfg30KrI|WB2u*5z zdR|o{1T|9ye~Vcry@`a`*EB4?zBqC$CF^MCZ9?2}Vcn9bV7Pho^(of9a&MHhQo=G- zT>R-cS#h5YN!tZJDWA`~nOH{LY_#D(&aJ_l!o<%N6}BkWaC^`Qt7YW5E16V0bYTYI zOu@1XtOt)40qYJz(b^rXO^H4ndP=ccp8du`dz4Dp^IK+F&lgU~jd|h8+k_fNDTaj) ze|bmozud`X4T@WNIbhqBKXCu33h) zox=n=KA9S++^GB$5lEwNZkJus(rme!$FQ?e#Hu$sfai2idy`!y&}@vBs-2G4%2_ce8)teS`<}U8KIc0Ya}; zhT5COWx2L~qVj@nppx^T?EuGcQ@N@B=g7C?-YBe?#OLl| z8Y7FjlI{=bT{1Lef!`Heo0wZvd&G$|ZQ*vrD(KMS!_?iwpOuXVYMc^TWliHD7|~=5@aB`%+VH0IuwSr;;I$=>>L4M@$pUC+ z#-;fs9t7m<--9IdlObZp$*E1&y-Bm7M3o3#FBN^et=-n@HPC24J}Zkw3@>{qMM#CIg=Ob%gsYjz1qV&E+%ipQ%BaEn1M1Lb_seK#WP{lb?3ow81dm7vx@{*jEJqHYj zXnpj7N!;;$=ML&SXJ>3L-@*L~Uooq;L_2ZrI$@f6Z; z3}5LcffNHE#J)y&8}Eqr@^AV7IUyqbr>XHjKW+5QVHoL{lD7#!mA?*(jLET)pg&<5 z>6rf$8O%t>lAIg-T|aro+QNS#7ZE)W3c5^Q4spFH56*ufi#SMCfDGZkT9-lQTt&@ z-|jEbPST3&HQZVIc-w=-JM^Fq&PH#0*FfYKic+D&V{J^k@VYXo=Jhcp=?YB5jE2O( 
z`#5EBbh3Uyx9q+c+=$sh*TjnLL41IJKAUHdHG{8n@=dAfc#P!O z*&xshS)kr6?{9W4k}Z$P+oKj1+|~+33>;|(mGGqtYEZGO7%vR-wMmx5_XdkH_3%sQ z5KB5$w?=OTJn8SgXR;@+d0v0K{sk}Y2FSlRxa}x-4Zr!XG&Zdam86)meZOC&i^cO? z^~QU=%r*v4@XjMfjO~KzwkXpDeu5L8YYhF05fqKm((oz&`Twx>j?tYod%JLKV`AI3 z?TKyM=0C~AwmGqF&BV5C+s>Q&+0WVg-DjN-UETGmdNo#6UB3&}bb~`y6v*%F;0UH~XB*_kcSQ6|9s=lzsxRRs&9UwoX*Xv+>l^=+ zfs3XXQC4`HU$NpX%T?#>e;oK-6t+Ux&Rn6EW)0=kQg68+7q3k_KABI|lya+S%aQCN zZ3|MUQ`W6Q1y(;n}iuR;7!TX_DoN zcJ-vYS~Ja z&1M32L>wwy>DN@w5X6&5whw-rMpXJdCpTzu%y0G8z_;HXF{n|(=Q{1lM3giun-X^JX&i+^3eK{xg191gfWssb3mRm zJ0js_ADLHK?}KQ=7;VY=##y_b--5G4Mt#_ z&*rkJpqsERB4mk-5jVY;qkkSx2i+L7HOmZMQBz=V6;-=KtQA;m zf5!#@?w$~YQ`f|uTO4YZTk3>{7^*| zCf^fDm*$lEd5+f86T9JHA7$BwDw3)Qfx%7h3noL!P6gUulnvT9jJG_Rk*951lvtNRpG28#LXwHA2SVD*V4`zyqGMt;Dh>iIbd31a=CzEN`)3AERS1my6Epk;iv3-z;qgc=n=h`RWcrenq-Q@djF}J%`NLX+ z)LU@AtWrxVJ!=zeYbHox4sew@K$~`nqDKw4*_61vcs^W)0A!5UP&KhAUB}{pk%`T- zN!=uG6Gq)FJEoa!o4tZp7jo#lM(p=vHTu1b2FF{3d~tYGTx3?whfycG&{X70vf|5%;5W2bR} z3%xqFS)lZf;^*l_=Q*XBjiQH6rg&rUD(j*+&gK4#(7`<_m+H(8F_ z<4_!vw#NsY<~iPZI)Q!jYG!L!@Rkhfiw0Hw)4N$-X_MJqv(gLxX&i2J`3Z2!^XSsE#UvNHovkYAl*>=D z*gSGywg)or@+o|(y})J~9N)>c_$J{NX8L{CpI8``;D%*2z+W$$C0`D@2T3B(%k))J z7tbdn_DPJh89DE^F5DzS)`+=D-E^Iw)6M20MzBQ}?Nb0b+kDK@bD}dYZ?B~TcoxXH zI=nnbJZVmfHg`PE(`}gx9e{iIF*voYh25b&ADd@3eIWi;7jgUKhJLrzK&!yT!5NiT zWsk3^e07iC1v4o|nqH6A{9nxq^b0rxA9Psx+t=i-YKRP_B$v=hq+PQqb0}M7O$?om z=9bEg+EIYw6lh2uO*Xxqw*o%(3!O$3?j$3=?$85}+|;6T0cnBCig9*9iWa4aaaNXY zlb3}rO(=hxUT&hgNX>*(2^*Bnpgptu!1~L!x68!!4|iACWvt=0?8}l=(wZRB)+9f- z8?E@u%j0U!u`b%#j1j2^v{k+*+gi}f}$I*s~Z&MGzi@=qjnRdq;+WQ9+BfsNhLLNcd|JRpgokr zAK12?+#_dR2R>j=Zt9uclEX8Gf`zskangg*C>F7#y;kTP%Z~{zJ&DO-L zeK!DPf2bx#Cp9fU9>(emviWmoSqma*E$EA(_?rjD1$~PR34@AQ|=8gZXmHxdm`7T`jBVA_<1rz%> z<4O~lDOJc`jObrN1vU<*|7H+t%Gj^7BmKj<(jjCG#q9<%MigHYiTv$okS7>{&tA?! 
z8CU3<@D)3$;1h-4yi^yUcRFBJgv@nyMN;;s^yCiaI5#ZMNcQg@r!Vd$L{;G2eX0bl z-R++B>n1#VXDXTZMr3Xh*L>VY^yR|G|$M3GZcD6 zHNwlt7nAGRos}r9H~Po4{uu@EuVL;5=0CMdnQmSSp~lOnBq-z;@niekqzbW) z7|kvYfw8Yx3|@Ur1~D-xa;fjLu>k{zGG`(Yg8kfBm;h3dS5iv>G^l^5p+ymf4z8dL z;nY9hWw!L_^$>m8|LnU;)mkfUDxi|=$?*DhT5*HtTcSD1Y=V_yzC3j2>o;kfue0}< z0k{!e5-XXnT1jDGH*V|JeL?9Um>f4gdH43|^xAR!-OWB`Vcw4fsfoQ*U(@GiToz(!pi3UV5Z z1q`4ps}S*QseA0z@iN?5CAPwZFFzc-B(RU_;Hw)ZX4WgI1n7oDpnEATovqJa@h)C) zc-l3C?7UZT3-*U@m_6>a=4Mu!FJ1oSYUl8PqB=_L?jBMeY9qB7B+L83Uqjh;FhJ`} z_#imKpuV*LXis`Kg<&?GG@?3ow4rQWof&`#BQpy9bn=ZV6?s^yk=)nQT1V>r;L=?> zk|msGifa}o^uNwBv;Vt!ReD4GQr7?u%ouAsaz7@{V)_#_QlFfSWudI5tOH!L!T0oh z(MRa77h;v|6VR^q9VDak5*80Gfd}ZC#`Wpx(9%*5z~x6!T6(SdWHLL0ZO{-Ko9SkV zLtEe~eGjZ;dqUL!f|-;lUNc_@;VPbdtf<@(zL*50eJ}Sp{EPD~jq@n{CiD1Yj=Vo(yjgIvK2PTCgLnc**CJCG!lFJ5 zqhvE3U@aG0J=%FpSo=dmqY=2HIqiaQxNgYw^E6OSJIJr|+GRSwZ|7a@Hy)iuZh2Ho zEkC-aACscQ8b5c~3?f0~{>=GhxZgQJT>tn6YQl7WQSVF8J}qgOORsdiD;Hm7isYXSPt zk@ty&Mqq;-5$*h-9!QrxU^Iaas@QfV4^$K75B9d}3P5qUoF=N-+S zvo+3@DtyzOcgJ1b9!9Kda z0EEHI6*Z>QF`h8dTIbb?weks=jdrQaOask$*`-c~+ssNBo`4u}=Lmm|pvd9@RhiR) zv%RI7mRy18xzwFqg3;a7lextC`E#ArDcw(dh@o)O*EO9=gracHurI6bQ9Q*=n1IDc z)?4p(QSil%D6pZnyD*WpD`!MAl=O>21sooy^_3`9j|w&~x~Ok6Yw z5iAQ6%fG-dCf0wzFvHZpkl%=A2?Zti*Q;}QU%vdo%S?e1@=q7JaOa_fdC_8n8oq-J;!)TK{U5y zAJG`3p*(o+P<|O44E=rv`6|oL=d2l4Q>cQJu8z4DOEX#;LNu-3{t&L0t6*@c#DPFh zI1;dw@xOd+;ifcJumN`c+`g3vpuIco<@RaXrrcn`kiv7aNaQ$|kt|r!U8{L0So}Xe zU`Hg&KMcSsj@$aDBHgL7eK;cB98Zf1o>dJPj)~i!OoRet_#tyYQltkol&DRpgX>!k zhqm-8KGq-brDe9T_~}wO+Ovd%*M|U=zSmj!^WaL+fpOM@L5{a@)mQj-c31EGbL{Wj z5{1b)Z6i0)IjUx&b&<+WaQ5pFYjfu^Sl$(m-S}_ZL-jAKZOdHCFCIKsH=On;2@qy1 zL$3NUULTb=0)r>!tr?#MtU(H^(>FD^>y?Liq^4gMAJ!1cYSQ|tkVi@0>5DjamYovm zPz?O!9>@CvFvou&|MQYlr*r=g1jEGkzYq-5KiDM|fQ$LRsoNJiYj(J8$iBHXX-D9) z-+UM_J`dv|sm|fictgTS-l)Mv;y>Jf?LG-42YtHyA_vI22S zS(03AY?*Z?h~fS;R(N8M(+Po>NPJ2Pe5@c+^$mX>P^&cs1!cU5pOP-QuC{F3%?*zv z;XM;#W^sM|0t+*;qPs)-#jn^WVqRn)uRqQ#X7Q(?j+Frb&8}kGgI?~4egAXK?F@0b 
z%SRA)CHL(lxJN#5Ah*V=hbr(Me$j3e>>^fdJ@L2C)F;5#*~Z&UHx0%U#ct#y>nKD` z{)B(c!u95JDFrgwJNot6EheuXB4F=rId`zv`|n>wF=#^&XZ)?Fy;N8Y%DqKu;(S^yTE)WJL69xSFs{?wZvnjee|$GO+@n{?M2LqWWNoEE3VEsRYi zZ=Elqnu0YxNoR=0kV#%MJgIf-MvmBD`1237F)LJnCGq__#Fr0*s!+6H&Eaz!X!uOA z@zO9$V4gsRQDt!P{V7`)WM1e%=thfACl6tA&{))d;Ik1I<)V>M16h|1?!;3qtVkbI z<-GtI>60VGpNf7t$%{##fgEi|q>Z*k8zv&N21pm}5YY^yYK+AEyeM3tGTdJD-Btb-$P$#qyAPHNZygr+dZgS_S?N zlp}?sCFH$g4|csD`A=hczW4RKxulN*myLh}LVY%1$V8_4GFu2fpFa?X(Q_%QWy1I_ z0?@)*2RWUZS=eyY-(8v=5yPl%TtxlRE!mQ{IlL;;@PX zoR^$e%kFjC=}LwI`6=q8>onl~HN+SZ60{423Fz>Q>0$iBLcX1DU*xOPC(ut_Zn0a( zLgu+nU@X0}h2oI(+h3G95AMS{?|w~lg`Gqr&cm!3oW-i4s?*d=bg0rNII~lyzetDS z)lq9J7sPIw$cji?pZ98^HoN;6Ln2bfz*41J z%WERHyN{|qS*NAPTKJVZCf(rkv|+}v#Ky#IIdgc zHyJuIsg}sTvzmJoR;c-*PxDm>Ft|HOU~hlrH}=YQOx1y0WGwZl6Pjr<6b~g|mmth- zJ@GAqPNiyzW67M`o)|ljv$1~_{QXG8qwhNnZu?08rtHDj!rpvTSoNd&d`NAJSy`2= zZBSeqX}$asXh*}qwx-<`j+uMJd`}xf7hNgW?HIY*_^m;*?zUlQoN$cb!M7lf>?L)1It>m&&a)_11NQ{%j-UuN@dmf?mM&GiG^Uy$XB16 zBXQYzV!NONfiu~zd>&_)oB8r1qR_}baE|iGQu6q8B{%ZuB8;i)Qx*~|(jD-FtEV?W zBTU4dvJg4Mv&hkDVIe^hU?i`sFmd=|C~E_-s#iuUrLM@JFtwg#m)K0%cWesd;tw)! zH13ApI_N#oUTEi6;gORXEHt1?=IWT?`c;vmZ-+4mVb-a@6LjLH3IC|PoLLEv9Z+}t zT20*Mru_5s5>|gr3L$W0aOR4vZK|QAKVVAtCetSGNRFzbU-Tgi@Dyvv>cO69xyRQ; zIPujwcsaV+oLIry0Ia^d--cRF;bup*Wdp&Q6N~C4mvDhMV4_iJZzm2_$P0Rx>E6m(FNI`kP zhO_s=;DzU8OluD(Kx&x*`THZZ18pm%{a($@Ufj4}0X&?a_`FC%e0l*ldk~_ryo!9} zak-?N3_h>^uTg*dK36Q}LpleuD8cTz8w>AkSewBShCgOVd0@}JCmY_Be({Dq`VoX@ ztMa4zRT=5_KRDqAz-(>K{s!J<<6I7H%>K9I{3s;e1oNo?khitK4HDW!n>%fC7o0Oq zm--Cq~-qDyUKR%S=)S%;A`Q;Hfd8J0mHm@R2kDoSY4iUUc|$gHTJKi&26-;(~i&A8Bm7&0XCLT~#5P1;Q`SSSs*Q@Z?Vm z6&wPa`6Vhx6El=V3EUgxctxA@HB`b|ssGWNxX_*Dd@FKO=`JfISld{MF%MoNf_VSFBBA)nrWFYS9zJ z7x^SyM_O~yS|nQ#Yg z(=Yk~R8zQ1NuX(Dla(kX8jM0*OY-u_>I4msOf|Eb5?( z>m->V4ND-4oAwzX&j0i)(n0S2ITNG=IK6GVf6v}`ne*Y^r&IVtMeZ}Ody3vUc_AK4-DQbGT-x9*x2NEFo1v!>a3wEGWD2HwYs0y#1wgf(jIa=e zDjsKN4goWVtGEjdoy(lFr+D%;#ZHrY<@o(rr^&aI4mrkL@Z)=&)4C2K$l8@@3#XA>8zEL69W1%`Hz?Fv_EU9gt+Jb8g! 
zz*)bk)lIwGQD8b6rCpIl1BRi5+gUS`j!knqs0_&HJkTblvdZY zBQbPFYEdg()tcyRUT=iEMf=cL_D5MUa&{J@?Fe_{Vm-oy8o8Q2E1)c+N;XZh9^rc| z;l;AHBsPnmcay(>f3Usyd{|jcSz9eKkPtN$j$+Fj;sUtox|cbOXtZb>i<@ElpcO|q z?k3I>$+DQRJ{}|AnUuwAMU$M1PF@~rN;2)cUoMAI1vHd>J}&R)=mzIE=_v=_jr7BP zyH?d*p90tEFVa@j0k&V~zUy5uPoJ0iM@xXS*y>Vvim_u^l5#R_ad%LsGXl=3U#(9; zv^DR#&wFFO^rW7F!v=yzdvW<$N=nflcTehH??Yqip56Nw%oLZ0bRf{&35}gk&Tdy% zeb@B^p$8q^4EP=;x8?#9&oAxw7;n)&2S%O*L()$P5g~%8fTmn3{ZHvf6K>xx;s*8@ zjI~`ItBf)b-rAZdQ^jX?<%eHuF3;Tz+x*zigDG|4Jx4JX-q9D^b*;2dV(8)Tk)1cZ z7(4BHcdK)lN8=dSbH0_h3=E_8x{wActqX^o9Lf`nMZW(cbCIg>dG(>tIhhZ)NLI+Y)3q0i%4Qk$Tt zBo6HhD#q)bKy$N%gJguZ-+SjD$R@|O9NTXTJNA4JRVRuOwwJ_(l}9bJD0R=Mzkds` z1RxO(0e*N@eMpFEGaWAtTe$QT zfbo*CUq?&7x?}Kf!#A#M##LNvaO(#7ZRM%ky2A2IxjB#Ou=+v6*wJoA_ zFO%B9)9SzzMCwKhg2McX+fiT>R)ROJvR=h7!i+ZldQG}xEeQ3i=kE1xrBSWuw?ZzcD^~bXE7AdM0MOX}iR=wVOy@!h@~!A9EZZAZ__Kkt;xrDw z?%gbzGrU%`mBbMw+a4S~w91i9<|rM9Bu?Kz_mZRPX0~G>W2z~;hwpDSGTM2n1TuBV zM|&kVy+m`jz-muN`zPbAe7%lrxFvhvj6eH8hh*{{#e`N5DT|o~7YoUEU1X6}17INe z6hh0e2n1tG&WUj^MHQsl<(dMFCx_6aa%cr|?qv>FTY5Z}-*gQO?}{77dvisA%|xg< zFNicQ83bLBrJkViR>fxRJYqVqY6x`@NHPX=l|CAfnNy~x60i`I?&;JQucm^+l4UNk zk>hbG_?9dPC$kZZ_`rIXR#yma2mrPJ6VP0?4!sj82FvTX$EdKqlW&s~2*jAGab1S@ zi9EN~q_<3U-micUOIOC}On0HBPNO$bN0>Fg}gqRUN^waSacPMJIpedM-)q^zO7S|VL7QNS`TCA0K{LBsNybi zA`)N4OE+*rCLWFb*6celNT}&MFmmile0RgO&7g5Xj=5rI#xxYS-j??)&3dG5MYrh+n;qL!5%hY&UZnpm5q8VDmRkp&+=YVO3P6fi6n za5SVj@^L@wOX3tI1h9JzL?mOv5)ADK_g9WvTw$pM-miuwSM8Sqdn z2@EO6_QS4h-d}N1&_?5)r%vXlt9wof*q2*4@FpN4udIRkFQ<@(qMRt7%^auUJ={7{ zswX-lh$37A{p!t%E;4~){I|+sVKQ30xOdC!D(-l>7AHrQ0YLb|=$^h%p$)~UGLFY( zL5vG$tfhkDb@%{68?VxjqMW!8jl=c~o4jW9^#WYwl`uERXDag@8v@3Tr~((MK`N)NV$03P2iQ+@SW1K!L*_Lv2>U2mV&;0p z$>E~XuS={9Vy-M>p7+!=z$rbHW)SGidD8Zrd2Q`#9-(V$Pl=JqRuJDB5?;HhL8GAV z6}oHzJjB5JQ^A*u-XAI7%iUobn*db58=pTQ{}>>VZ1aYMK%h^2V6CfMt5RN4i2yNw zZnoN01DrXF9c$ugk92CF&?6&i{nr^8fgfR_oQn+ClO&-oVYrJPZTS&)R`72@1nz4s zs40~qu&PcCGaL;-?Gbay&-j0E*@uXe5JiFyU<)<%v*yz`8fHA~uKnhp>!-H*Meqwc 
zxPof_jZ+E2Ra64X5mM_}0(WdC2a3cY2A7_(7;xF$W=x_g=5L)l0}Ihj(K0Z93!%LL z#eqmH^E!)w9T$OKhH<~o_z@Uq$PCuxMl(qYi}s@%J2_I+7^;EM_N*%XJ2;vf<~+X)>#Aql40_TVoHSLEv5k!XDYEmxBh zGT?`9T9Q0yDo2nMdxKbpv6%*6KVve`>E=Y0JMoliachGSNaXEZ=B1mo7-P)(r87%X z1O*2I4wGCH7M<-lu9#9j8S^~G8#(EM^YMlOzOB@*leI^X0$#~VOhZm&nM((Cn;r!k zDkt;~e(5jkqM3rfd`D9Ee=Ykh6prq3JplDadm)+2zE~FOoIa4Kht|-}@#c+?{NkOW z4S(C(o9Ae()h|Mzl|Kwb8-};&*msE0YRJ_J1TnRCphK3%8jzmLENaBN7I&I>CNn?Geke;VJiTCj-CMla?YC@?}nI7R{2;8|qI@?>WNf zy{}Lgu)*Vzf@4Rg}KtjX0$?f&8HKELMo255-E zBH9G%CEYn1Tkd6PWArohsu~deplOtPogYM@ExLCkWXNwU+kNUic<5OTDQAF{bDYww zH9@wp(>#-cL6)XU!@J-t$|-FFuGK-Kd4yP3R}aoW?CVQdad>0lQaQK|vn~^vHF(NG zyQy;G(S#yOp=x{|uhPepAl}7fUVw)mfQe;GdmZ(uWFwWR&JQiW){~IR2Uy8o6+q!D zwY~$)W}A~f7sV<{h=T=@RtNA10pYAb%dC*|y(m)3>#c%wg5|+iI*9r9y->!d`lAqe z*Ty|~F|bH|Ii!rHEQ-gQ+}C^D0=h6ZEy`KHci&shMHTBd6QBAQumM@u_efqq{^QqSBd*PXU6e@^ zjO>Nqmchy=-qmdOpKXpsxJJivdX`yU{|tc%XfC>usS=VwylA44-hkF^4^ci%NofAD zKD5he6WWae)de{I9&SZBWc4H^t|G&J#l;2EC$tgB(?uYQ2xkdyUa)QUfCSSGK_$++m`p9xUO(EYTCIu8Y)eKVlm@} zRBITkhigJR4JZoSrP?MwPQ@X`NI`mX`0+T&&?O_=`2x7PeY-*=@Z-@J$g%qzn}6AR zXwlsewd;iY+I-73K#g(CDOkXHHWthg82`d}LFJbcsuXCfcQz*={8<)fJ0*FU$|>yO z7rp9ww?we%^LF^=(hw}PBRtk4zSNC2U0&KiO&!4Oro|fR-FVwn)SOkgk7*n)s}g!I zq_qd#;A-6SRanS6o^BG#rRKYA|leDNx2kAj^g%lDUou9UF72G zU@59fa2De(*$<7|2S-_!uz1GB4_3(7&2w-4P4{+Z+J9eU3_JoKQNAGTq8CK;!lI4 zuK=_a(vD&B%=4W1dZA8hPd1kC%$4erhyi>z`JF2sh_8kOWb5pi+s~KO4*e@irik8L z2eBJspLb+Gzb@1e%#zlbr=KX60J?_DO-32a0-ZmZ!boJnd551ZsT~C05r0Ql96ymn z%IXetm?p!Gerg}o7(9Jm*$ZrS0Zf86zuvDpU}YO+lRxQMgRCj*La43V6xO8%qi$M! z9!_EuiGzOLcWdigNzV&26l6G1MISun`n7p~rCtHv&xWoHZ{<@va`_dg0hmP*eN!as zPm*WEthS0e{o*EwQKs|S1syL_iRcyCBUcUQ_T(c#8v2Y2@!J`qo+&-jRY zjPQHIL#CIE_MPON>^H7ikU>b7=)Zpx&5}(B51G+96*2i7-o6_~;YWuq`IZgB-0@L@ zKRLV@WTk>usvakcm!;-10?YiA- z?sDS5hqe;q%}BQ;)9k+dZTmJhAupiL8d27%p@CPZyBpk8LgqcBDiNJYW1cmdd3Ff! 
zvctaN2MsO+3(9XR<9vQBCu$=WrW@2qs8@^*iKnA~4A5kFStV4~@DRpbIKz}C6q!GJ zrh+5=HGv{>>1BPs0)vz_l!8 zibuscEllLuSukej58+5uVvZeVtglBL?tAW{G;CS43opVl5h885MC-Y4y#!dx5LUN{ zNa{}gSSLfU7~X9wkDt80a#Ht!#jTSx31Xy0XaU9Lisp&2dU0%O}%Pd1Pz zHDtR0a@DEn1WidoThWG)-=hof+)>1fP95X7$_((=pvJ`o2}%_~saSXuCBvJhtV8T~hgu1p z35KNVxN|rrxTKBA}$*o;zmsKRVT?M>g-tr;XbG zkf2c(7BkJ;!SZg1?x+NmktgyQN<~`9Jg=m$`P;#Fb%bWEM~^Z)j>+Vm#y@ffkwh*J$~veF_EbJ5renj zng7^)DuMb|lK#|l^_J)l3WOB7(sc^Twi~OilfT|-&=Yl>>yw)D4TMHN;?X`&0TD= zH5v!}2&9v@da2p!7(BX*&1L<7d$O}l>Sv=P<+Pm}SR$7tw*@90e@u{WYFd3J2 z2qOTQP(IyYy!ic0QC+aYm3&>B>jflql{pXh;=Ln^k)j# zjnJ4ek-RgMC)%%jEOkG=z&P3_%oH>mmOJ1D_(U4_TfKg=r(n{M&aaZm`m zRp|5~OM)5I=7-me(mDWFM@BbC2>l+uT#nVmf!$tf5jhQ}#LOGV3RGeH%N78B9;j(q z=Ve}p*-H^ne8&j=n0fmeO-!m~j3>O?H@6&{&6yl8*R%|Sm0mF0FFlf7(aPTiNvD8C z<5^QAiX`XYfiS2(6Z8vHEs-$-Yz5fJ*;72H=hCXQf|3qeY~g~N3l3mR1}$J)Si1Us z9~o4=ZAFU+WKPOZMey#5xhov_kBmAeo2O^1?;;TQA!`_I>EC{@+{s%D?_Ztm-9GR|1B-G2 zNxNA`(?26_5LeZ*b*7dbEeEvrMss>QT1znI zmk|%J>P*fjD`?4KQ_+_c-dl{^Y^%M|*G8Cb-7E7J9j^w)PXDnhh)O~~YT4uyio}$e zQ|d1VmART4ZjgaENv@Y&>b*2i~G5cc-BT4GoVIcHrNy*Wt}V zp^@lgwIerU-l3^L0RgPVHQYf^;UObj>9cce5yP_dR-^7>TkxmoRb58G8nwU{BXRzc z9J~>h5$R1Xfl7xd=tcj#Iu;S}qrJMy`_|Eg+GQKaqgTRWDnOoHuZew^!-p4*sJuZn zn(sGpti5fN{3XRYb8kx~kea;eai0%O=?qX^ka#efYRk9RX%e6#*0_<=NHs>pFIIUp zh|Qgb1rI)*o8)k^3J@~We<@s4Hg>P$Rq_0{QIls|+tCg9HlpMS0Dkl!_Z z;A6lzu`{PCy0Wfhf%mR=Xa=hbR)pFKf?NoSm9WUZpfsR+_vq@Tb^7X?|HC`|8_IZ{ znyWDwG=z>#5vWq}jrLL4$yCDbKH@|~5lQa3bt!OKB|iSbTBFcVDI;QZjmBr0{uk-5 z4zKddJ-Bk&eDTmKl&4E&kZOq?He;diV|~=DBF~jBt7;f7##8V0Km*Q5u3ncw7_t>|P3E*)lco zLUFikwIygwfxvC9GC5W*6Rplr(NCLjb9GFE`>f^zFZTu5=2%CRgbVvE$1Fvmet1$Nai)|aFQQG9=Tua zfqE4}$nV@#*2p?`9!BR2@8W*_es;|B$c=L{7)d`^^KZrCAg&bR-aTs;r<_u`qIN|$W}f7WZ@D*3n}ew4`mk?% z&zzM}8W47cP>Ggtz1A`GEfz-^R((b}tQ{ampr0`Gp-?YK*s_=}6f%*LYPr z#lBCaMa}=I64Oifs)ESN;@l2KPq50av&oT9R$v<9Zt!jV)>H`tBs!B=bZ_V%80Um|%^XLg(KVNex(xNJjNL6MYmLkc5%o17~7Nw5%I69nIeC&sjCJ9>Z#rDYSO&4{92wcTvhb( z$Q*9M`W+1k&8I^hM2}Ev4OyH^SJ96J{HoPys!pVjFk7ea7 
zwFJct25p3kY^&amttwVd0dr7vwe}C(_NdG|ZTvtKlAy%Hg`(flfs%yq1go<*qJT|L z&OdR1Zm&m~EAQ53`-u<$a+V%?=g%wd2034OIFF5hn>{;DvhX{wJ82fXnoj`CAV&f= z&1qSComV^FHru2AQO{`fE4{RC)^Gmu8Vpgsl&d_|8n=R)h3X~PyrsrTG8|h~Ob%_u z($Yi`VBMuGFKl-3!jS%srn@66yI&SD#&+7T$E!NJ)~8iI`ns?D`2+fZ2z~PLKJ^jNMpVGHUUQFamKS7wV-`JX)SV0M0g zxA5UjIMWutG)sqT8gI?Z1!dOxpk$n?T_9%!2E^UlJ~P9-zDlnAo?k=l?*_J^b|LA^ z=@~nbqKEp>&|#B)lRA4xksHSIum*d14+?5X$xbo6hflO>K}$|qTm&hne#1RLX>&CW zGj9Oev3s0U!21#vk^gnmmvKFPe`>FW{t`=q5D>IJE-}0{;Db6chY+eC_Sevsv19m* zv^QV^`|d_y9yMp`KWI-XL*2hk6Pf?lG?Bdb-=>MoOpO1-Ayo{zm>_dZ{#fycg2B+to8~>0D)S?P+{{ zX)=Q)mc7hD%o^Y|3THyxqHdE{7KSIJZ{$SY@nD(~Hhhrr-VVum>N@IhwZtzG)648) z*ar)Z(+4avbLPELFYu(03bv-3)K!d6FX_lHei_%jTjN;EXO=eqiNk=%FlZ$8pD zMdRV`(A$|I%SRJMz&DA^wPY(zepVOJFZ`}-CMTaz5xJeWbgYp*3Z!(P}q02>iKBsv*nfL$)BYMW&18# zUZ%5*Y5TLFu{z#&LH9xR3Ch@N>@%B&`uJP-u<+iHXctF1({kE!bLjJ|cJyV+qe}3Z zddJyD`R7jeQ>+9$#0GV%oRLoPnYA`)_r!EuXKgNv;6t2hh%xOIncB&#J@+)pFIXB3 zOf;X%HM_hhX|@lOeqr!|^mkgW*IR=v2JKdVZ$1xk)r@xSLu9W=-Rujc#hJ$xMF#} z7|^L=(BKF4>aQLRB;pEd?`d8gBOl*2m-+YF^aHom(e5{UW1hdc7Mn41%$nEAdPrL6 z&aWq91BN}75hk4yRdyVMQQGXG7TTp+tWqIWA?7bgkF^k{b050fI_LqfHPIn#gajd&Z>4Y zQoi=t(_wbKoEOxL%AGBBM;*M*n4!i$+iLs7&KWlyd*?4$5IHJCdxw2*HkXf$`-`Ir z-X6C{O$Rwk#XAMY&ah6tO5gnk#d|g~zUs?n_Y9`(zu8JS^4IV^W>151GMgP3??*wHF9d-sj3SRC{Okv-w2_= zhR5zaGl_{N|~t6 zzSXAAe4mfSMJ=`Mm+SM-faIXjDV`Qmt$8o z&Z8pZo;Q={hndIRK8J75n+7H@&;*T;2#no=($_<3GV^)w_y+f&CHuL3?>}0g8KNJ$ z*Rbm|tBC3C39YlA-_*lH7#*vieGD>jydxJGS6Op9X^xBHooSxmbq%WXoy196u zLn~Tug!_q_cQUVgE16&B<-4ws4+&O+)Fd0a10{!sUL;qzyyI?V|5H?8N=j{(dt!al@9Up@`M#$3^0`F)o_;8R#&3a zPTreYbhJID!^&XZ^h6fy*CTEt`v|-S(eWKq29d3EDmo>yVJ5rt%)|yz=efb$0S#ipBk>;D(u!lSTITYy(@356VO?v|g~+ zi?lQP+|`>mD)6K_{*jMl{LB6MY8@B*+Dmi8FSEJbI+9KO^0Jzj>~cyqy-JPu(3~q* zOH|be_0Rbf!9H5JTRh?1N%wOP9>y>!v^Gg9#i#_8?!~KMmEDr1jtF`qJ(mr?Q+i7H zkW(?FO>rmx3^`Q3V$|; zd39mLQWoNnew)-I>#9)GZ>DwM%IC~iZm6Do6njh4?BGi)io4<`Dt-(a&MHq_s~1K~ z^qz42A%6d{``PE!k3xLDTa8vRCe5f!jIJ1}NhR`X-Fn%8HB$;des{`LnuxRTdp6!e 
zKh~|-CI9|nMg!t-(*7wy&SJ;6zxXK(5DV|i?3z74pPAlEmu?(<^UFlCU03y-k_7)7 zbyxC}59eyc44wIQ8M@zidRfa}-t?qPg)HOiPTDbTie`Is!EQqr+KUeip>FrrBlBOM zBv#D+ZgjstRR4yn-a2Nnq$4;BNTS@Ek27WVn)bJfbbVvnw3R@C&K)gu4)Vop@src{1kcE0E4y})o=~J{byRs_o$qWc z2fB|4k2(kRX6yWX=IFQV#;z)tDdqA+{z6Y~L*V`eeqZ)CY9R(4F^8!x9GGeS3aiK8 z;Pdr4tAB`-W>9xmyNO(MMw1tO4!qPOK90hr>M!}OoSDJ0Yu3;DvT5zc7uBkQiaK(4 zc~?UYM!#r>1)8Le-TvxhLa5*SEb4N8%9EN4H)WzN#u4t6IR&~I?xTLG>DGoPgjrC0 z=D!wA&p!@oTFe=kjX`RMJi{6now)ycI3}}F%(k(^W-Mo)OwNfnoEm%MbV$Z7yePwc zJS5E|vgZv7d5c3tr+2OqHa|!PH^2VYdF#T}a9rZ<*8`IT4D0KCng?{F`en{0L=X3{ z2b3&-!!Ru7Kjl0sX7nc8hySko!P?>7p^THl2tM66CQDaJ6J2f`{Ko9ibhn1zt?D>; zj)LXgrY{AYt)gG?7@8yn&C1t##?o;NYR4$PWR)21j{2o@>5l|@SNhS+H)Wz;EFOYSxI$wZ)f8y619!Lg zcS=#S-y);VSq9V4pSsO#sbUy$Veipcf8LbH>K_W%v-2Cw`SvB#o1g#Cp=waaboA0U z_H(m0m_d`dn! zs&^k6uB6W^N{v>;89mh_UG~HoEGWserK$CEh`-&n+BkXI@)iD2wcf?dvE(~~Z%c|q zgnxYQuCo3bP4+EyF)f~1+C?UO`*R^-hLiTMROH?Lb|~@ap6Hnf!E0&qj-P|(i3W@b z^-guNFIHMEeAEbCmHpcA>*Q~nxIXAxj*NB@SDof6rXxJ{FLwopfbBJXPTIaSs?}VU zPQNb&cWZbHZqYiKe-UL?&($Hfbdim_urYW3{(h)qJb6Ml@3lrk&52c+%aYCZ?(aTa zUOMb`*CkGAcS;sq7tcC8IY7Lfn|e1iuICl;?=p>4(c7#4n{57lhaPTX&{%(ttw({` zxD|f54Z#9W3?ZoDw0^`v3H;iRo#~a#Tu!+81c+jZ==Hz%?D>ZQjfW|;ATC&D0D**X zBqSaU+Ycc4P1mmsq*vPKapGivC#P{ugQEFH^VE;4_T!|1S-~8VyeP^8f+| z5%EYo5oQ}i2y$#Pw}A#tTtfqg_9H}L=Rt%f9SV;mDPu5L6nt$A!6mxI!loz~^!C#v zm|_4S1tWeU_94(nv@&e^6T!!}f%KpJ7&QE703iwA`ia0gtP6|5qt{g8NUx;h8wfOn z!U5nDZb3lgP5hf0z@Rpy+|U4aQ|1lUvHy>jVYNZTZur0u0%MFpk_b2)X6;w!|EOU8 zDHgL~5Ty;Y80-d-Eg82|yQyd#{A~^G>=00+G7*VI!^mNTAj>9an?M*i_$N>Vc^HAA z#Ua64#_%X`59FE#w`l#tzWpE;Rvbb|!`veXd0I3QMNn4WJjlMz{QtZNgF(Tch5(J^ z5rm>08mWvY0UH0654)wrEv;=SaZ^#7(r&uZKYq3=RT;J&21Im55l9*w9*IMd(6EaZ z#7l+2;Nae21QxCxMHs_QBM4TOO>f;)0TK2a0g~8`AxuOuo1XiR$aYW?EHsLcfhWcg z`s|o>5B#6A05Tb)fCHa#gdTExJYctb!{&$oXeh(zF>sJ>0&xIbE>JcGc)dD-Fog-@ z2o_4LGR!iH5Qc5W5j?QOFNBH6rVnf#-{D+XxNjV=`1BXTm~H*;g8y*{P}xr)@ZiT! 
z;GMr;ClIWF+VTW&k%mbG>i>3d@XPp32j6n8P1|6%g*a~06E`$U*v=tHBk&1rfK$);Ou zM%oT1*E%Ne7EYgU_4> z5M0z7e8NprK(92@AR$c8AZ+23X#_L)3Q+DeKo~ZQ(56KpQ8*ldu#tc$aVYrGG(rg` z%^)~nmQ@5jEI)@(Lg0`X5`1V5!4HnUoB@_Dm_w+Wpa8x9C$|u`RKF#}rs{E<{<9%5 zVbiz%xdR@yn?>vv!yrLaii&Fc`UZg4*tz(L2Kxq{5Y@7_5Oq0xcrAIsMGL^AzRe-% zP4UXgNHh^mQr?!_Kv-^3+!lzq9Xj4_e7rKe0_d}$kfJ)^?Rf!i>og6tOyCC#2q`)o z28YBdqm`96)Q-c!OH&9D_{JQ9gB6VwwQvgv^jklnv)5>iLos?R0fWR~m9cC0)2`(u z-1hW|*;2=*iHR^_9wEgqs_Wz8dVGVpv5S|R=sw>Q-Yx+izCIgRQFuHOgF}Q4=fZrZF7~uCoi$Fo*zkwSu{XyVqacDG>ge4GR%?Sh_B~bUv z-v~6E@&_SEfk{9KanhskAjAl0A_l)M8hm6C!MX+)wFp=eUPj1L;1Vd9xf57LnP5#; zh!)<%4bduN@SE|s#nxs>Z+IDgr-WEobp^1cu>{!qx{Q#dz`@Fk2p$9xNy5OwE1=G7 zCZ7$n;BoNq(ng?w24n@nlfc2l1$#3?BJB7bRSb`TkNyD?ftV3ngQCNu@klg=NCdz8 z`w2F*Lw^u#pnd6D0^}K%0r|XDguoggH5S+uq=j#*2tHB#4lluPXNL$s15Cqee-WY} z5T~XQTr?;Yl0;M{!rxaBJWTlix&usJ1a$3I0Ij!w5h4jFJQpB zLm|;HiUQ(?kE{Y?0yI2-fi@#3AfALGDIWW^Mr>#Pz!LxEk4;@{vAD_cI*Z`flN(LM zx}C7-?b&24b!V_{Lc)d2#&RFH5&1PvG6h6kWY8%dpk z@V~MIEAxYv@;C+PT`wbm&;O-@xYmwPWAFqda5*Ie#4oa=F#qTH8f5}ZPYH;D8%^gTzO|W7kuL7-kFiR@&Yn7YR0^ z+S1%ZfFhw+o-2VxlzA;++2CUkv=#zys37!)at*1W-RmIxIhbKs5MqV(5D+^6u|sTu zj8H?;6omf3gJCmM*IZM#PR1Hi!$gokBmA*FRt zF4%__5@p+ww*VL?H2`a)g=99s5{T@~u&yG+x(7qt9y7#k7ANl1kO(z#-360gK!Pxt4meR210)JtGC(}k z1Q3_NqN@y$fZ}$MNZZl?X=jwcyzD^bOLTxlFeBh;0~^S5pyhbL2nmR6s|lnXF+e22 zw?XEFgMo-JFB2q8i6SK^3vtl_#Uk+_i;!Tg3DDD!Kzm0IRAIyp?cM@GflPxZ5=hvM zWE#r=v97_TQ38Gn7=Uz|%n&yKhI`onb09&B^6#+hmS`Zv;Yn6-;s_&v{KO22Z5>NU z#W2I%ED*cMrc$$O5r~LmS#!0)b+gflZa+2h4y99~;C+flruX7yn<;Q^IU< zw3!-7|7|8g$FY6BBEtJvAW8Tc8*piSLKB9I4zvL%5|N0-!(wU>Hx&jXo)(r3jY|O# z3Je!5@I7#&J_-$k7Z!7~ZxQ&X5h8q;6&x65hZMj8P`;^A7&PdRU}X-F?fY3FR+dfY z*+|gJXn2klhy(O3K|x$+f|RU z-WlE7$`4S}n>AViT)Cz}+QzAcc(s3>XGlJPL`$C=-dW%N&AB46D37RR4{FCUuo)Mi`Hlw?r$lX7 z4@E!{3Cb7(Xc>X^*p;`Z3V;B9!v!Qh!V8hsjsOp#Lt((^f+K*Q%7qi+5!v33?;wdn z!9m=BBsU+V20{UJI@|~Xl7NQ2`GBdnw>DGEy5{?MERuCvWl)%ntULe&BKLj*4iQu6jfx!q| zg%6v77LNnFg?%$jK$8OZufg9HfW+6D2x=S}EHuE45CR}bF7SaM+3rSoIE4?AhP4DC 
z%vxEbCSa72U@*HX`2Q679sLiuTmr`r7#|aa)Ynd~cNADW3DgTeK8R;Wm0#nliXRk= zY$1pr_~T}BVTGLpK;l8eCj`J}s=|=?hOg412`D512eb@225xw91Bg)&0CfmMVr%s1 z!5D~CRwfWO{DB+32Z}ksR3`|SIwAtFVnGS#vHQOoKFq&8%2pS)X{+^}6=6ZL&VF6TvvVV`a02d^1UIAm3VpZ5eK>pWH}3z`h*9K#)RF zX!kmHE^0guz2=P*q9CBraF#GIvbq=~ONk}@Pn(J&5F6{35Sy5Au%-wo-MwNUk^t-h zoH#5k4zUYwFB=?1ls= zfxZgFAS`UY8{rj zj)+>%DeyOO(B=`~P*F%Yc}=85NzgI?$o2FInpi;YgCvNI?Lmy)TA^+-iiU4X0Ocu3 zt%u;h@sX7SyS29aXLJHAyc;-mqZH^$LA281NFe1X<4CZWIK;!gW3dS6z)ib>3LK@^ zrWv4tjjrmdG{mRA#lxm%G5_rpu{${?2OqosiQurMqIfb_x810y8C?63xj z!ePKN3!@A=nRQ4>>2=6>3DBJBOM?&}lU*z8aKt!*i^l~XSir@A zG~hN$4zyo*;#!#nUBmyi7D8`%-PZbdQ#YVzFjImADZxPEqXe3tI2phIMjp62*i)=? zfB}sH3Zz1~QUl@<-L{m(Ztdc>YzVegtVkdR20V|22joHR#e;7&L)_GBn$}U((ts5oIfye^5jglo1xOw?QH1D`n@MPk z_sy`~h#2gScm!k!YZQd#0WaxFz$6Wd5aSv$ZW=I#6Y(HBuT^ZAgjyqtgRA60lF>#& z0&6g9gBl1aEMdL>=Yi!=AOuFFAqLnId& z-P+1+^1P$)Z`;ZO?ZVv(Agz{x1rYp12@+kW$OVyrMM&_Z65v!v0SI?j5&VNvh7>7@ z>!p5Omph9NP=_!Q2>4DJQdomKAi=x#Rv9)FsD(MQpcZa61RHUSQ--tFpnXt~I(!YZ z>hxgK2c`t@eYOq%7TwyQ{?GLP-3Wj^W~dT4FM)<+Ku-WP4jxggX&ekwJnTD`wpidc zOv(WI2^tK^AeNQ>Pag)u{`ylB0KP$L(+zMKxK|Dm0+;V*D}z|D!2psw!xe{y)leW3 z$1rPo2#ABl;gHHi6dpWgSP#M;6c}j$)YKZ(MJ%YRqO@F~XF`E}&Dy#b^i13^8NC5? 
z4Goy(z=6cG(eA82(E{xb$UK|<;udS$nn~Q|UmGgM@99xMo%b_{S1H#`_U*VGIeOB7go!OCDeO@+k~;DcBYX|s5c zqyVe*I06Al0?#-$yMFj97Gh)B&XqF!3k$TEN`PcHj-b}sMl5)~w5}+D9eZ3r*5s?kgOcGx+B5}d29bbLI_duzNY-H82mqEs1rl82j1CQ)i~wScv{4l2 z2w=bmK-@%Ng9|F_Zo+NAqqIjA5(3`J zZ30pf8V9!4Yw62AV*Nok!^_#rR_9H0VH-m)4{+(zhN!KN64 z2Jm1t4YrYZxCbbe@xP-I%ti!9G=Ov=+R(v`GDW{O{$YtA z5H}K@C}I0ZjopEO2+wN(5yL6K64p=$+Dm~3#g2;xJVL|aKz>#@_pK6>nEK1D?9e z20g-Ld@{-V9lK3ohQZ_AMvu=FWk=0MVf9rF^u>|_nnQkx-4$-;Xy!0CFnv!a9pOy= zRmemfNDKK`vG7-I;qTcdPqM1&z^$i^s>Q{R+4j*6?E2(>KU;^K;}9}6XdX3rw9fG- z~($QAeF<7MLc?9`!g%k|`$^c3rc)U6uGpN&c)Gt$z#K zjiA@z_1`LeM+Qz&R*2_46bfgYz>Wx(J*`^lj?fJK-T&t|NAIe1RNioyH8w?TK$5mG zlykb-Pb0Kh`t#YCcyjec)*cM+iOxSHrIKT&5z`pT`CjGDh!IhB##uwE!m^d}RyqtD z+Tn^>*fCK~p+d6PSsHEbbSF24PO8Eur+$;7=)JVQxBJO(i&QYmM*Ovinty!&w*N$} z{aUKbR5#M+d+0M{#p&_HP7!~xIZ}Um`6!wFeG|=>0fCjH0Vr}HM+Y>AX4N_@;;vJH zwYwB^&pu;WZ$#=^bInH!*AEu%q)g`0-;Ec(FVvm87|}Z8C+cuw=>Fb6>AX|g^^A`M zjv#M-`qbL&qm?alQbYJ>ujFjISSDkzRXk_jU&r{9XSrtYAqtVzvemj}|*a6ciyO{lvqsHz}nTyD-d{=orDO`1zx`)E5 zGsqS*_s_Pp5{4Pl{Zer*G; z`w`c=#go4rFqGk237+k1`WSXrdH-@1@$bj5n0TLMcTKAjX2+NbO($2TLsR>&ihetP zDna&cq=Gmd@rv~41hVwI{QeM6_BMUpyz;zXL;IfJ6h40#BS`qWBYHWkD7r*h)Nj_Ig$_c91<#9wTW`*cgzR}ash`(Ay-}m5}l0;efm`c3f zOLnOXFPcR3DQcD|5yfPW!ZUOQ=ECNo86Zy4s^yN%1Y%d#|)A1)c1Oa8$A zYhPFQv(mUj3&YiO0TDl~!Y!O9-tWy*ph>h&&3#{$yY~ydW8Vw=cjR-HosZvr(Rj@2 zIY^OD39~kxv>cIqb@1gY2^xB(mPADtW;F3vqCxTQVjcy#HyS$Hhb!qUB#Z|O*~)}C_HJ>@w^$$iNjd za>o8i=`jydM|fc8`_3QyxR;rC-WL>_E55&zD`)rO=wjOSXvgQBh3|HkJ4#~YjwLIN zwt3zhz)7Aw=}~2_jJYoUi97nSmT&ODbB9O8h8DLiuFO1I{Z^jGly5CU#!OpZUwq9X zar5B|bi_kpKB6$2%e>^NNG#_K#B~4t6S5DQleqFGKjs_sFiG?{-LFk3v5s55n0q+r zh{(%7;q#B{qu$-oj;kJ{d1ri(@fyQ`=fS@BuSjlvp1hwav-zww7xjJ)S|%H}SY1`S z^rv4BYpwJ!LH@m`_kC5LgNpCSiUr75pPlMWL5^CPg{VX)7y^e4QpSe9g7%BFqZhH$FG__Vyx~=@#zs z?YE21PI_uqMwYNdl-Sa?M?X(P*=l8;tBy<&iRQ8E50-rIc5>ghS0hRHxGZBPGTw4b z^|={6e-dkTf7+mhqQK|trSS^sd3{J0al0Jbj{{A>-%% ztGvO8o|t2CFX-|cimZ&%xiE#pY5SIE>Z}fw-}QKq<{3FHd$>E_S#Z~2qQ$_$zGwZ( zt&08Kr<5c=$Jnj9{a*Vr{X;F 
zBfq8_XVSuMDdpY9x%CYuYcPP5A)vhGX~Y(?yQG#ie&1*tzh-z5;c% zu_{6g#Idq~MQAaz#kzXI`N*pKP3}DsUqv5uc{Mkr|r#kKWA`wPU!KymSfia-12htbw^X%vfzi9cY|#=?vv$A zp3KDD9|$hyF+6(K@u`o&RN5Q7ZUNuX&uPocw!x5O%VGmxTei{jIm|H+>mklKh8T>r z$xYPLvPYpR1h-Q2i*yNE8jk5AKHWw*w_p+9;FVYEtO*m+CektfSj!`F(9mew>Jr z6X}S=_&gizwD(`FgHQ9nJk@WZW;FT2Jmtc%cn6zQg>#)oGEL>-ZIR$>`R{cou96El zlUPh)6FP>-b6G~F1_COQN$t-)_sl5Vs5)u&?x$h`&j%|7$I`urqH-G)>I;%v7LIX# zxSfCr4AKgkzucTQ0xdEI$&cPYA(g~`L_#A`>E;dG$rHg%B{9*K=C9MzjPd!RmFa%; zn5Uif#~xvI-3A+O+*b9Xw?Al`Bsuwj4Bduf_zS81SpM*gg4M*3lmRhki#iKyi(W;}7nw%-y~8QRCMA!e0)LTAR!cH90#Ckx=A6 z2TgP3?9Eb=EMGW(VEdAJ{6IsV&rB<_gkZ^hrvE&9M|I{8-IxxV@yFlqyt9dy-d7tF zbLm!JesRo^0k_8;9SN}m0|N{(#dYURj_j+Y)PXa~zI-Y+d*UjUW8~$g`{r@y^=4g$ zm>5s)vj=IIXN`2-O!6HI#tEMr^3T7FttCtL>+;n@xphC^msBzvWIo{fo%V6^ZuJe4 zXInW@i$^iARq-*wvcbzwVS;r3i2BpL=H-k^Y38V0O|q5uBO}YN&6STzdu%)$)pZ^p z8Cztysr7#OT;DxGt2sHhh<&VajrH;$N4xv|8WfOeqyq7$<@p>=nC1%)ZB@%l;-_86 z_j8Lp$8BaEjO8MF97o0URCx4uELz1*3zOKAbu4q6|4kg9Pwxz)_> z?)@80u^Ju^?&bL$DrkM+ecOptsdX&PQPPoYmn8375}9V_Ts-+G&C_2|emAp9UQoiB zjQgGzrh%~;@2&JNC4A*H>T7nB`1M`#M6A#JGQR>sL`!@2a+x-(E35He%G6_cZ}D4~ zqUK(ORlo9_6*xW7A=~~=C|_qFF7CpVU&%>}pR=i*Ig6y5@25{jpQq8LQ+ayAg-ez+ zsY~`}p0sCiY%{SD;mnh9B*7My22VY@?DspIy6~!J{KiMWnb!+_htpSG3imE@`>2%o zM6kc7FTKYHhjuGh8eF-jg;lXGv@Me*2iXp$P0JqPfC&%p-VNoixToKNb@Gy}v(GTI zjiGg69;SMYN>EX)%pOVjw9++;R~Xdg>J&(Dc55by1EG` zr(E?cAgN+69!?&myj$ua*B<2Qm%7hiUmc8z8C;b#a%fCSYAh_$|GbaCW=}r(R|lDM zn2bH$Bp5u^|G?FDQPB31pHT2gj?^dGK0{beF2ylhg{L3H^J6_dkHPd6_2~(3ahcp^{0i5{6i*fj)Lk}R z%$$hUZL4oMIdB#eEz0mCxh|Uapare#RYr3BpV8ej+$n#+Rf^fsC-WDsbTHo8b@pf6 zV$bgwdx?2zJB4y%d(tx*jC8q~?5zaZrPn+IPjJODC&V#NPOn}LtTtx!XZ%Zfkg@u{ z>+uJFJr7Lf<3hE74mx=`?{P7#U6zf9?0IXGfTA0$i;t}fo<;R7u3r| zlPN8JmQ+8Mom5B;xdnd0Zf6>pj@J$+`WwFa{Q1!~1-beJ)B1*wH&hRY zEAu$L2gP@E1*-4n28vP@;#<^vcgG zYRsmG%pp_e0B7(0>Bbk4eO^}`j+eU~4o7?_XS(5Vew=1Lw)=HiVdNQd$v)EX{gdt2 zZ?`zsLdyGzGPt6YBS#x?m#J6>D8%MOYU4$PlBWpaMr+2!a95to8@MO;b2k^8-h z2FZ;lC3gE)IUaX8aAz1v;?e)BP2*|h5xW?C*ccGF6h#faione*IeRWXxC 
zB3)c^-q)7z+w&?x&sgKnN|?gTpQhhFGodK#SJC@!8|A0<+5FnMYD6^1~x3Mnb8|_SU$WJ*PDhQKwdB8BvNJT(fhMryVU1-(MDg=Ex9ei{+5Q8c}O(X`w3 z&yaNZEq8urIHiTE%&z|=(|6_)Q{&k4P^>v^%XetjA?j@lV#?3@s{N|0vu=miY4 z&?vKOVV{1qXfCH=pNy3G$=AKQ=lnXwK1w#PDpv5j4@at;GaHOr-qlYZdRmM?{qb_P z`{L504OwL@6?-_-sojWZhL9xmVO0|^mznO*xT zYBSEtwlY#MX&uv6)Kb#nkcpdP=gz(yi>6&pjNzY&Il)NzQ2U-{yd{ zB^l4h!I4QUogU)8kpZZt2^ufD0Q0^VbhihHAO;iwxKI!k;lCmm)n5tyekE&(qB^!vj!TMvaszIAZ7={Ndx#`pmbm)F}` zob!cd)XRj-W6S&90~%&l`wrx3U^^N0_lOm+56|lSL46I{m6THFubG(P*6o|6sj+)R zV$TG5>O$??_fBzfNtFTufuByWn)l5hw6kE^Us->@pJVwPQQ{|k0TE$);PmK&Jyvz^ z*;<~&pLbArA>hpV_Vn_5t$V+hF^$+p1d6datM*F8%{$K(x`z0TpZ_?NTT${g$uFbn zZ|xra-w@*dxZ2(Gf3I^pD}Gv*r^f|mRIQRPFlv((OPws-FLlvWeE6%z6qTr6`Lnkt z1I?wj*YAxi&&ml({u`y#&39-uaN!?|Dg*1AiUd6y2wtktuT~rb=5Xw!0E=M<<`CQa zg=b_EL66|ZR;@vvuRoubZGqI@+faTshUjf2c_HLLW)MA3gr@TWXO|b5{n7fI3U3geO^|l5}|Gi-AUpSl=lN0kMa-I*E(? zVW-Y396tN@%y&vu>L<*5xjy^gny(FC^5QO=xU*C|WRqbyYeH#94xr`Ky-!&&)%OVd zAVe+tZU4J7F7E|g$AvmnBL;W%PlQu>`rR?%Pf_U2ZDGd8-h$gyg0E`oCF;oZT$>Uk zykatj$hYVn+!{Y#bNf8yF6Vmcv1)C=+p|)B#-(%xn)?gmdj`J|Lzw)L2=zoOh`f2j^lik>PL?Msa4siowN{@cv( zYC`Ga9?_ae@4lk%S}5+iFfN968CB~G2voe%Ri)}n z^d;X(ll5danT#y5c{ILI^!-sIi|3plB88Q!{c149X|dB3`l4s5MBYY@SH?h3V~d>C z&B&81FPs&)kBeM|zCYt|t;*nYc|b;Sckl65Dd~{9-GvaKt@U-S64@I$Wush*_Y=iF zrf4TH`5mGkzU5c(XHvFX#6~0N$_L?m`F0+1Gj$pB<f6OP< zKd#oDJ2`qs=ggN$fmFhkr2VbE8KuQ6Jy!*TQ}*lkX?RanzUA)UZ@>Jo2%5`PT>boo zQi!}Nq~Uwxkq6cO$75g4Nr^Ksk2fd@pIy}Wa_6Boi)I+pd#+Q%ZF-K~1CF=K-R+rp4jZDPt zBJ$s8+UhS{p|7mBmHeUN|5w90R8M(6C*8Oql`QdtJXoMr`1_^vkMeWqsft&EY=1|8 zu{8`)V3|@EV`~V|vmaeif8XDXJ`|Q1y-yE=>Il zr*nQwY3FIo?{U3lL%wD6H!R@q5Z5awk-H+AdYVcs;l2)9Cs^aIP#^gu>YQ`&IWMDs<; zRW)Xp-KR6=6s_sZX6LLl8(F@b!^hL7b4%EzTd~xMLhHy~U+rt9UIVsccGqg$%diRT zE$hr;r&Xfqjq?Aof8VKH6WrvfmL~OnLDbLn|Bf+yqa9ZKJalxu&Z5?bogaomC;TV@je- z!>VFVug+4&kQRE!vwC)gDdj9^b^S?SJ=Gsp^nuECl}h~9g>V{*N_PLN7yq#82(v$# zy7*-tCG(X*CUMVsGV9bXGja&yReFz4yTA+E#TkAR_s~p>T(J5>QAuMfT&qD(GCQdx zD!jrN&+W;+ui=>3U%GUK^iO>pn%+vm^qurFlgo)Ve?3q?J?bbpPu^y0QKj9N+HYGl 
zSR7votI?>@%IGhprTk6HOZ^#<5BR?5kug4#`QZ*v_G#fIuI9)wrHkZ(L!2R@{t8TK z3^JdBxxT7Dk9~1o=FzklDu*PV2XsSJ5W$b)~r zd53DEf)T8P46m%%OvHo8muP1o=Q}BfBy&Dr91f(^R*I*ppdH5Osu|T3Q*dZIK-d^} zb~XTxL@~`k-{sgK_1)o}_2D7(8T&9JOAenPvbW|EbM`1ac$K0I9P?A_$fa)G9IiC>#k ztMcO93a!a6&eA7oLH96HfjM_qf;=jXM(7P4QTMv!1NE}0CQbA!R?K~kN8*fV`kFP3 z`7ausWA^YPoAEOmzCPFN=fKZoa5&6EK{ie=+u-3>#U~-+=H*VFSE;k-Q-nU0wcbuv z8JmnV5JLW@{vIWGwUy#tUMmITE&=3kmAj@_^+p&k8B4NEyX~icG56>DWzyZ1d-1IU z`udLN#LLAZ4qH2nM5blL*Oi|#8)2psFXQmbxht$1+B-&u&roOIPH*K9xPFG+FHsvx8^+uTDjK)t!-F+EuF=Ax667+lNJGd>+^lB@M!lGk5f%V!_g?XjZ zh)9&L*$9o@YKp)IihCMjb1Vicvcu_LC4>U>X((3G>(kReR3Soj{Ivbe`FD*Y91eKA z8oy4hHYpU2A@627olQ}RWlf~krs|`ny?ebmU0s|%Q{*M8N%#}>adt*#{c{IAl)0}{ zTTC(;>X>@Sh%&u-+sI|0pG6`2g?fye=~?!HGLcB76juGWf-W8nbUo?V!N^~`(oh}^ z5;snp@l)+_orDr}zUE=6-TbTO(?1by#3!!3U1S+F`V=_E|*nchO)d%OUfFJ1=A6dWeErAqu9ktDci%QOfBn(#_ zg4I)olK1{F&^dVX5zP^b(QsqFODU1^*fUOtpEq(R8=P6*Q$R@bTzImt&*NvjUHhp2 zKAo@v#p|Ux)I-Tr5*xtU06-+~@iwInN_Rw&ebVCf-XuJ?So^GLZ~A9^pY zaUvjAf#1iM;XGrbyc_eWTgU^y==Y7_*1v#VIixY*cl zc}iY_+D+}azQ9@2OC8d50q0A;%2gU=C)^a8yKZGE!!~h^{3hVqM47rOKipU%$2YKE>u$DE{?!{iMmgn+4rX79eh9vb=UI{ z*AJTJzQaC-rTif}=Mx)#j(p0U`&x8Ftx5fXcPt}6Wgz{z26EO(m63{((u;?3+oZ?1 zvke~#wvsxl4}~-f>*Kw$&ifdB`r*U85JbbNQf}lCn43uVmHuqq{nn>Bb1@esxcyJ- zsMuzn`sDJ9zct{Lq)w+_O(FL5>+3cp8ebBEM#j#cE^f&^MX2pGt_Uk{{Qg4v?$oRzOvXkeS%y!)by;|fZR<~K4SgkU7J-gUcrceX^cGm=*=9E+^ z+*4SCrOQI+2G5t}eoRb$F^o# zD?U0ZIasO8+P>pgMwZB<L9&5UwK}x36U^a4ZG8I>{N)(yl}!p z#RES*#`Vu<-q*U-(ya1nVq~&5>(-fb{+e#dtsd;(l1`-ZJXW}J@WCD--vgyABMP@| z#G8d4={=Y54st>dj$1#;X&%|*{nNQT`y0=JAZAZ?mpeJ$Zq05@jCSNvxOVPkRL+w< zGXB|@wayvcG&kZrSHSF)*XSZN(H2A_=y6zc^h7rH>u_0V0g26nIi#xqr4w@QV-I&x zbfCwRsvn=c4zqd5jLpc7m)_P2JXMkTw1YRU%u0rl;%9t*2j95Sv*p31{`j0+CAKyk z!YwG_Lz=1FQsB4V)1KMS$xjN}ZW^b&xDqbbntYL^W;8*WDw}5M%o1f#fo9u@9US?TtDZM zy_?PCjdM3)6$*|ux4 z{p9-fJ$;Yf!~GBJd++OB*IMrVuoHs8h6=Iz$MSUUXgab)GR-K>rD^kvmI;GZ_bBbN zm2H}K`uWFKe!BJ|O7fQTI`IIkLUpZ>jmO>bIG>I#aicFHZKUq(bm8wORo~ApIqjIr|U-7Fj< zI-EmYzC)k7Zi8TZcTpxnEQqlpx2&S8B{TkUBR7AhiOg1fMqWl%4W 
z=|r&_^Rz5Np6o)FEbIXp+X@Juaf=_gBjx8xvO&ehvTJ6ziqLanSYb1lcd30bM?s^d zs)*^8T;2Wi7u+eLvH=D34jPOo{EcI3&%aEiQ9P04(0*{)QB=;2aIi_SY^>G_?A#oC z_-^G0+si+a(FG5loi2`=I|TD(3tAvayDRGP5jKrAGU>=$Tt5M!6q{h}$XcC3uO(Xj znIzhPI-e4Qg#v^nFFdqT=aNp+l#@A6+Glk!7qzt~ z_5IM9Wo1_%gPR1Vhz!L+LTd(IK*O4`Ceo7_lEe1btVD`h)wNAQ0>qJd7GYn1JiYbu zuV8P=jdQ$6)+zu=BB~VZ-ly!e3U!@hSZHGyH_QFY164|ysIj?>(O3<{S?NU?)$^-B zYxm;%=XshcsSLnUDhFrK#PN$clFg=Ja8037x15zDr?J@FvRvLl1sCz0X)H+2MztAF zB&12wK;P=_9xm85QdWpUW}vq&bmtdEsq(`pA6gq%pEA%ce_DU%PW#NCXrfgolZS#g zi}pK{PlwIe1qXvE$!ZwGMkNJ3tzqskNe{9zOPeIGX1Q1FTrf0;yz*f=$)GejQK2f_ z-MWt|eAWIeeq=paz1zlF`rLLYEr87lXcM_)OKS|M3ndM2q@K5HI9d&k+o@qXj&*;q zMH__(*#J-x@y;NsTdjQND;vLO*u;jae)W*YMrv!{==-YYR5D>lbkU3(%1D*XY^wAt zqiESVvPvwj;of^b2CGrBcxG6BCs!HofPGGRwaONrI^NQw8WRgMF4V-Tf&cu%<~P`9 zZ4NbG3%l9l@w}+YJ$t4+j%U^VjnDxCfq`to1|O&({`WQ7u~on%?nO^sMvR68H-e)Y z$M&?zCd1)JqGJ3NE*~ETlT@=|rgPZ%s$r*OF;VhyZ_S`W=y+LIO_#_Hie6`pkPV)# zT571SI#x*TS|V~E%jKmF5sS927^-$T&bq8YU8@*%xHzUuwo0}$3XQIL6{%dCVs7ZY zOfP_eJMHN=7aj!EOf43M!yuYO<|~lb>n*_Q$&r|UJ0W0;WUxPpUnA$rBwePIOMwlp zLH^n#NlL^6>6&UDvPJZjis62+VQy2nLW?r=Ilt5-Oph!w-B=KGgEXqziC64dxL%<~ zshb$h2V!Oj=GDJwC?L;OFK4s7l-DDm3I;aK7awRR@ai3%zaQNES~YNNWS?OMm9B>N zl}(m3<}$%d9yDt!j0nkuNuJ`U-{D$BCiV*K?YrMtu_?pcmlvD#jr~tZj?Bv-# z#Y)D7cx&|r{PcJe9qDYACg}Men5$fRsztF}g#_W6M!h(yO?X-b^L2a5cGG#M4~-~o zXPIrAuoCJCu4PDzJGs9&zp-wmI09k!8ogM_51z|t@mU8Z&9tOW%}BkbTgNltg2q~N zWe3^*S(y!2gXP6sMd?t%s|t4e1&<@drktycLPr_B`vaoLj;ZAgS5N3UMJMbdzwgH! zp-Im~Q_hj?`0F}MbCgi7(Um=y&MlnhhOK;-_G;~wV*zMky7V?U%3

q<{xWj)tC| zT4j+mPZ6T{xZpXTc37}c@3ZFY?!psfgm`}u=N$*44%t}aB-ibq0=PjbS==*ryx`h{ zK!>_k-i5Uq0qbu~NPLwZT_qkNe5G#3=tv5yo(a8|98{rb#`wjJb5@RONVZX=m1{f| zn&rmG_07KWO^V)*_6zqODFEzUS7?JA)l4d3pN*R<&oU7rk`6;=XieSMM~8&_q8_Oz zq#Z6jvJ2PLH`sjITo0Qtq+{R4{tNKwdE1}44Y*p_B8}qHgcm^p#{HLaR34sZk5V|^6$_B z{zsG>levaNHB2Tq1>=J8iG79NPVxhGQU)xXxeE543kJ-Nj)?2D+tpyp1qs=Z%P1u! zSk9y+7@BrWycef!-S2liH4rLF;oic{#jz3QQCj9Q+dR$q7_a%l$ctbN#+QycwfE~h zBekf19MabN6*3ewlmhc@hT;7x-1EhH$Kjss0jnjxJc-E2a&2&A&!NLL3wy+ht1oc9 zi;e;PDFmS&_R8ogv&GlbNJWDr6%uN`8VSsC_77o{SefOyI>d5)85Zv13!~D~ zgx2?9*Z`S!mHc{bbs0InTo>*JO3LKihOv?>Fs!_*RXy;R2{?0s*0(hvL-Y8n+p1K$ zq~qT6n-o*|6mNCm!qEi9e83qx>z+aoB3c}ifXi>=SV#t7%T*V7{dSyyDko2P{9 z4xwA{d37Nff6D?Z*-#VTd3X#7n-A72dC8~)dZUyvgYOBk;gR<6fY?wUsCC(-(9oC| zN<(O3cqh4=pIA7Lt$0}x1cv6%nn4-Yy>`NjSQ)RtWMC-m#YlI`hvm)vgW+J7b$bCF zQF+U0Nip3VWpfCze$82GbvIA%O{e)vNcPrC+5UYYvpSyZshDt8>E_d*meqRG*%1%> zexi2LJEeq775LoZv=}$pyc^`}VfPjWvfq8di}`x-v6DElwKZE?o5javw*rQXnBupY z96kgZ@y(p}Fss{{0!HMFK$~&3?VSFFLarL`mC2_V!qtKs2_GuQ2mh*<1JC&*4{9wU zt6%i*UDX22a0B)@N8lU{3fs@lBycI0fV<^)1d8e|aQH!1`oH3v7iJ!#r1q=3u`D0I zoagk+AqW23lI|=!z54~sr@dvt_U^|gbPeF1x*cc%9t>W*2$t`2KB_vallQ5ZjL99yjIE&GmHOI_#eA zma^M~UskQ)n2rIT#V`jrQ>E;?-c5Sd&6v;B=f?cUO1j*OW03_jv#;eWDBJ&V706!| zK4y3>4rsTN+yu-dbrw2S1RplN=PS}L_)Vu|IJuF}faar@e zJ8+u}d+?On>wbe~PK~6zY+yV+`Acj&Qt?O+SLFQ0%!im^7Y$wEs6-bFkB%z z#ox~vdI#ITE!2N~0u1QDrN`N)zl``524@fcNlr@2DdaXwN>b`l@KM)@!Jf1i;7j9T zg243~W0oa(T`H2>_D7k#9Qu$I+cqxKwv&g%qS!9c?0-d|McyJ#-dm~z2ofbf-D^)` zUcDdW7%aD5vuq!-y4N3HiRR(Si;nwrANK=WHW8NKMDQP!<_ArpbMR$9mWz-%zz=tF1uG9&bj`ffDEm$-)xE36XtkrnE3RWy&(qTNTcD6528mgg=NQagele-%8_P zZQ>MP{RTp;QLSBAb$=%RR31|yER#YiLFLS~G#6(q!E7W&x2_`ubiRY=r)8&$kUa93 z=A(U;nNP%|ax|KCfBJ-hT^GG>9 zA&2M~XoM;$seH4)TQOoo`RLQ(M-=XCt6#dj==hK_V16ZmSNQ3YyQN5XkZPcZ_)9L#oSa z{^`skCU@>OxQ)mQv4?!P>EI*MI~HGP!3ZL;*nDd7F@fl&e5+~XI|g55!C1rZ%z2<` zqZ!3JHeZ;|2t%>zeD3M=BgQ+f)sRh*OVK-T;(#C_apGJcc81!T`xzr3R7lDwhj~WJ z8s!-;AoxABOJYmp*Av7W;oEO;9*~ngJ!MV28XG7AHw}+B(3=gQ%A4S^rwkArxWG>K 
zCpcqeMb%$p2w)y=+wfp34f*g8V{hYnQ)Pu9xS%o(QhIYS4On<1GYz?23+Vr#cfrhx zEw6=^b3p1WIrb~G;NrU= zI1V^0|C(qa)T_a){!8xy*R-R!9@6fDaxoC_2;cFodeH7lw*z^t7K-0R*wcg?ybWjYYZY%mPzaAWICfxYXZQN+qqsks5U$hT z;%&8Sn@4wl*v;SEJM7mwkM{n^Lf9WL*+V0+Qy7qeXdxd)Ee@DF> z`1mWllYK4JLHzL(WgAsM_P?HA|3ysyB4yZt2b9>PsZz!3hRx#@ivE;Rgvpb)n)8j2 zKNSRxldz94s7gmjv(l2*%h1x2yGiq=${oagQ`JLK;j!+wqX-;`e!Zjm1WDR?6)sHGCjhHLkDTm^W4WJoKLQ70hnVYt%qMrSBalHiX}xl-{)W++UO z9>#b7xgCDmgRD!{mu4cvPLLbMa;0sJ(gzf)NVMY1hc#U}TSN8bs>qiT@Q0(#@ms^! zNG2U>kNNkc`#y_Od-KK=HW7XnDz zM~e5@Z+M@6`pa~Y`3*1a;oorIFh0fjD+Q7O)WY$dd&GOxH|#4?-~^*#2sTkkT5*#I zWef>5!~s=;0X1B8D$elHJSw%7c??^+Br%12*V1Ekq9i3uW%iQPd4{q-ba@e_XtrD$ zvPmj#C9bwioUvs&bft8*LK+G(GytuJ7-NokVuq@+tgJ*SEmC2jdtw%wGD4|@I%8vs zt|((+vdUN~Us;xI4qho$St^?b_RlwE5jydZQh3ZlQ}qm4<c`jYxmw-UF~ zZskLjLp5yW^iqV<$dbrXxe~_t!g-7N{dvs!qJ7qg1I~6-cI%PT~Iu*|IHgh)%H}f}(g>r-n zgz|)ngmOLAvQZetCN=R!xubRKs}>#XU>lR=s3bB|D*lbV&9IX%ulHajvq-aXnqMn6J7PB=<9mP5Luy(_)5xjVkYy_57s z51RyIa? z`nkar9QL_MZ2*V9{-@4}I}2A)!m(*vDp3*W*eu3aleIjRb6nSw(9DV;D&g2lKPvs2 z1uE)O%@An$=PQc{RsXM4cytR>`3xBr8Oz_ema6h;yz#sdI#UD_1bqZ!1VaQ<8Gkc| zGx{^e?Tr;p6fG3Z6s;zWx!BTE*i-9M>QmiP+)}$)4guCfc5IgPRD{&Xl*m-M6vpww zaf|W&am?|gagA}!1KR_<1Kk6@1K$JK14&DKH*`04Hv~6&H+(mKH#j$HH*7aN9;pS9%1-1cn5rI{tPHcl5s|t&DVx zcl36Qb^wDNlN~)BBOL=B6KAHICYu(UW}8+*#zN*orb3oNCZ6osY}u^Y>_#j`Y(}hp zDb=Y?Dc!7FY+I~b?7}R7RiHVT?ZY_GG|&>v`jTpt0yPfLK1zmF|Eo|t%|bhQUps5U zn0-aY9*aM9!m_wVZH2%SFMUkW>UP<`DJZjN5-_)_4Y5bn8&9sOUr}v}%Ir(7cU$qX zhtV6+ti4^a)$EmB!SjTmH`}lHsZmF#Ibo~KSZ;P-@xx%4$<%IT;0(=^wmr&VadP8?4-t$N%^B@Y>~%C@wqQ16wyHL+Hm^2quG6XqmTH#jmTH&kxof!V)|S(k(^vGC^;XuF*H-wK z`B&PP+gJRS{Z?LTcuiHxO|Rghg$$O#5! 
z2>NOXhOYHtK_d$P>RLwhxwu2pNB5oQTJ#xk5xtT(j63wOBp%!_?^Ah7=BaNj>88-EW{{||zT2~)2`D-QM{8lWGK|1Y!^19T9x z7xQn?P0DS^dB}ChWyp=%nc9`wh1w0jMXg1xb*^QumAxf;o$5mVg5kpcg5W~`f_dF% z{pqtfVtFBYL3Uw!f#!(dMeBv@#p?y*Md^jvoYbDy9@?JV9@C!H{-Zr^ZE)k0O5Nz) z7~L4$nB3^u7}*%u046s2HpVuFHl_ss3JweQ3yur+3XTd63Qh|42#yF22u?U0X6<-7 zPq|LHOt~>TGrKao0Q;V<%x=}r)vnbp)oxDCPOeTaPHycjUM*g&4=oR^C@m+E;a=9e+}z ze(YPySNpC+=H0`q{a5`!BBLIy8;w{0t~Ahg}YC2(4Q@!?9CuL8oo( zo5)AqK-TVGfYB)PF6h?7UvTR$_l<%-yVIWDqh?n$_qAnLX7iP1AWV1s<+g)A_11vb zt)M^i=ZAbx^*^m!yT`^?zRgK~Z|be7c5m*>9XN01%iXtE65{P(|KBLLs6wMdk38Ai z39tVNj~73302Ba5cb0IPaHw#yaEx%4@DJfQ;c($pVC!#oZ}w>RVD@BokJBFawbhN) zwbkw6)!_}WGqpQ)$h^&ci+zKAjeQHc(tDJ98hadjqJN}+Du1keU3leuZG823y?;e| zjeg~Nr+T-1$9mU%=X&>iM}Aj)XL@&hCwe!07kUqThjv^aK?q2C`~o>7kr zy)me1*K362feYFElE4MdTdUdKVa&|`|HB8c#rj;F9o~JO{tSG%C*$z?f8rh>%a=Dok<&3@&Pvg6Uep1arV((~PB38kdg)h%N$#3*sDP z5&?rL#xIGTZ%*;^x_C#h)@vprOTfgETD0Q@Rd)2~^djYBpe@TO=aNjnN67lOu%zM7 zd+#g9bA0W(*SoDJB5we(YCkd*)EWVU4>+y~7iEaj?E6^1R1b*d%aP5y;DNUWz~sr; zLZV{T0@@tRq?g1lt33VhF*uVQe;2i!y@cKV(4spY#*#uF4bnUZ>sp)2sw8aV)W_!B33Gwif0w-EU5#|teM=MuSe(|*hRj~h z%2gozYSNWH+>6e#i}QX@ih9YMw4iI;^YG!caDqcvEz$N+ZMv7SSBB?MI)bHea@WP2gG~K2MrKG311mqth z<%b(%Ny7f^S94EqulbM+!{`EJQP@ogC|`xc5r!BeUZfN0Pf#<_$X#l^BOiUjWoJoN z;P&Ng(5R0)NV06l+^qx9 zEYczrYt<$(TOm(~ybQC}l0aef!0SIl{r{}5Z3PwGSw%|JVESmd|SV_T6Cj&6m@y565SwywS z)v>F^b&r7wJYe&zxW)KIP%7iQWP5ZU$370c$i?@9g0foaH4jW@XBdl_w?>4-p=-6cYn{=rj{ zTi#2U%S0Rd@~nV`?M9GBsaCSh__5I88KDv)CpLGNU(}Pq@`NjJU}<9#7q&Dx1GyY4 zL-IXe6UsUCRY)^D_m2#Xap~-L^pyXoYQwFLqUCZTY$?#QWjQ1i1Mn%eEr zeXvX$ThZ1m&&g;4Xc-KzUB<)A+dAW#+Xg20FC6;lAI{B5L(R!3ih_+#-coO2R!s)B zx5#V&aA6JY@Qxt#xK^dnQ$SH3+5M*!XFOfc_EP`${q}GYX!3m%F61A@U`{wui|}W{DOiq6$QYn+fCO@hn`iQbPmF8+hO7cq~j}F+SiYn~&mn z^8pi~0JDFEk8S2~h0TF+@fy)~DPHn)AxsZnVx07O1NXfkx-t#N{9B#o8aJjmW`3zx zc~z3v^ND(b&fBahcv+K}>FDhHES&to5Ypxht|0&_1==1(uZl4RjBDY~{j7fso2EPJ zeMB@6%b`Hh7YLUjeyTiBn?oKon58;P_-fX!>mZpaX_Av$LexCmZnVL5?UgaMv2)^e 
zs?7jsIh=oj1NkoK%6*fz(@bh~U{zgb69*bq!|)d^5(q5%$TIQ-s$7^3_rM%#ev1LdE3`$^MBlRsUt*5BR7ZY2041(mid8d*3z|KXFOWkAt@l#%a_uQ>iWm0*8Ohc{lx$m zfpO?NE6hE#V~VSc>15s&i=Eu=P8X7@G^g6G(~-2lFS=Jo^3~g=u0wo;b1nZ)v?rOL zxA%)PY2G(tA7)CsG!Da`N^)}YhS4+q8TA0$lM$6wQ$(tyPC%VnY4bmCzgzvU-OsC( zs!O;91}XXddj`JeDw{W_#j9^zeu?UxR*XJu%+Uq;@LowkvY0b_m~#-1GyFIgcpat^ zuYUXB$KF}?>&+(jMVj&RChyhSMK;T__roi8cC{_xmf+!Dhed46EjH7e@dAI;@iWl$ zfSomvzh%#K*7iJ}gkT2JgKgR#np%ILI$Q3UzcmUXw84O!xY23ZtsS#lzC{K*V8Nd+ z!&*lF$Vf+*pwd?d+ClinD zFfbJ8dg-Ly;Eg_gqcWY2WI<+51_P!s-Zb&;{X={5n}*ZfBzcvlBLN9ib_%k+J5ikJ z-=aw3qqb=9&r;W*8W4Wl9jne-PnqdbW@Bxi_WFlyw}>Oyub44>Hy@@i14QtAmk&a` zWSbQxXYBlMF4keiS!OCKW(sS%-(n`xOw4PbR+=tSf1mTGJj5zzqA(ThMU*_BG>Imm z1U!JKJ$}a0KRecjqj@E|Vp@N-2S$3p?Z9qVo!#O~J7*ss#vJ}quWBD)xtWU;@W5gG zaP}jT{^nWotgU7wsJm=<8F2k~^)vn~l>5+1(TT`UfsDoOv#gOoJDjg|W}su#6w~VK zt$5iQ*vOxoRfzddy$i+M<|j6~6as=R!Q?YF;eXm)Aq%ZAe%?Px@slh!yZ$`2cRTyG zx&N~XuUag8Xshvjo`th4o_@8pR5B2O$#m`9r!8hoC~CYpAl#d}l?1SA+5^yAzqQJ8 zmU9bnhQo3p6Hw*;fP#a>bA&>$&t=hM#Khv3z4DMzN~-u4ihLV;rJ?^ z%1O?R92*J6UKf2!op#g{P6bunJsh77*M+PVt)x9-UdX_&G@K3thJUS@cBtAseXN2o(Tq!7>ux^37PD+oktuDTDJQ~D0 z4AFyP5zXXYZRa7#P(mq88!TAq#qqo@G?wNI{}-sn;D>Azp-!&{kqi@=`GV5*$)|;S zqh+q*obyjY$Ue_9aQ`u_Z@jfQ%qI<#&Y(lo`sHvb_kOv#yYVttp#tmHcuRr=ci7}O zg=V=VC^Dg?84~n82?kE$WwXuhY}>_tm8)hr`5y-ldWWej@zJZ!NEGF*o##}5)x@mW zLm&!&R;HT%HY>j>ty!9CUOa7c)mnSAm5u^;IjY~D87L|V=F?29ZFiaoU-)G)?YpbR zr0zlWz;OT%NZ8XM(63AzHG94)t(&heVXo_0*6eYU5@iSX4Rn(P!%BbI+sL?|&Q&7x zvGG{NuH9MRUEFefJ-~cvtJg^PXXO^caEG0<`m5KeFh58!83rFh17w^0Srw=NB_zXe z00Da&Iue?deMg+-3yJbH{}6%^+6}T%g=rQsIGLFp9jj#O&9xiDH+fQ(&)^ZmgeA=P zG>W1QVMlyagvt&ce}_k~(<33|vUZr4!rZiuV(qGsXZ+IWt`7S9QzleLyY_nhnOHS4 zZ0Z3naWMjfZ1tq&Ul(vcu)=$>*LXa~O@mnVaj>LuZo@gT-^YKbh~%EcgEMy*+A)}q z(qY(0Bd>W6(~0lrM+tHS!BE0(mZeRuXMs{dFzgV5pa~F+GC=9{WSc4WeYMBeN=k@o z_Q`g*HKX}Zf1a^Gk*FQN0rtwy9=wlu7;31Zdi}L5LtjqDPUZ)@oL}tBp9-e^a<}|a zZ6kGYb6}5aU~IS1f5AL$grT+TJ+j%A z5SBUV`!P-UpQ+vZ3(A=qjuA=7x}TegInA-(odPwTa;=s96RzYUK8wjGL;&Ra7PsHU%l1b!uTlDt%u(i`2@<5wrXoir_yA 
zDshxItN`%eq$Sj$k7MBMe{`bFDRl>WO$2S28Vua(FpuK8B{HNd*S8NG`%`vb_nsp- z=VGFis^w7VVJpdgXje>Jr#77G+GAC_#rzA-MjvB=8frZJNEZ=K9?fe%QPXDATZryF|fLX;Z}Zn2h@{h>3w zek!#5W|dbtmslZJCpA6$-z}HRmb`31lQeHt4^*7emnD74M?x#G9vrDH8uuggq~Ye> zdWfb`Two!c3PloJ5zZ=}>zqxjPq?mB;W9l6ryo*RST?J!`>cBU7o6dqwBiUTpW9Sk z%y58>ZgWA}VYIe*g)RBtH62JbJrR7`SKkJFk3zJYr^@;}>PxUFoDE608;5liW%(bg zR^MlOT~y^yF*4hw)8sNKeeLcS~wwf7K7cX=|Kl; zH8$~&7XGAC3IO6%_rI58NgOw<-_ct6BNjFs_#NSp7zmx_4B)?qa&R%++|?G~_W`X0 zmE1-1h2J>EEtSK3xvV(%I>9gqby)Y4G-bp3cWd&lV_{wo_Qdak3Hgd9DYnt8Bw=3| z{Rmr2L{c;88kCfiTFFs57d3=WPxJq};3={&2)z7Jp*~G&zc()HUhk-*=u5;VSW{W{T4qofqV!vwq;pQW z-8?~Yw?9bm)a13MQl1;on+hxk{uU6MCS39fVQD)3ssji4e}pHBhcBki8{1~aW$un} zy*oF1J9mUMXFZvY_Rrm_?tP1_(`x56M`#Te>sR+Xsmn}YJsJD!W_aeO%W#yo{spW1 ztw1ylA8{iv2jRE!Z~rG5zYg&Wd+AC$_^0!%$Nn-iV)r6!-?S*#+EEP~pd?KPr=T;Y z)w6PR{v&^ejFVTu#WX>Jl$&9;;_I3KmYe7P(m1sfrlsb2-;Z4LQh`Q;PKrJ7Y>&e@ z0zPA6(PJk9sO*O+igHKN824XQ9_qYs%X)?%+q~Rmt6-~IEDreHNQ%y}Q$p%2 zXPMk%FqkqKZVF~uTw$#wUc0Ch7|}?>Ycv#FjnzeJh&W@nZnWcAJiVVF-4BYlic3Bz zC>lwaY@EuUE)7AX0OuiOUEDFuTw0!3|Iwo)!drvs(K4L*dB+3cisk9-P04M9ZN3?! z!+j*9_tS01U5tTGj*G$z`)v+J3Fs&bn&bLb!=w5u&<+p;uVIb0QR+Y3;8hS_su5A< ze?GT<*eqKi78eSGKUkh##lLs`jJFZlHbn}X+*r! 
z!k?A7=Q*w6k$M}UD}$1jlS{9-Ag9hcp;ob-$!Bwm;TeCLinHfbVjnPz-&u_3i zGqp^G#j)oCL3W&Zx+Nf&{Lq|AD9y4UAHteRNKjt(XY+yxG?e|>13LvBX2WPgeW8Z< z6!TM{LviqYRtW#6sQrxDd=}{br>KQy{a-94E(Zn+kgE%^SczZXBh%5Wy*#78EOXH5 z+I-2Z`ZBoWH?wh!_viZ&#V;&0>Rb?ps0>NUw5aH}+^}DNM887AQ~W1N@@KkWtBZOB z>zC8V`}IpeV3!xalc$%br=_Q-*XBv)7w9h)1wlcMVNxVQnnxhWFDSFKBi2ItWZ!PE zzc?oyFn=y<_i5C494kQ_z3@-qqYsEy+%EeKebMfO3n~OrxlQyB5?2@S_&^3tZmJ6q z_%*vOG$wAr$}-E%{A?1=BcA?qm`H~Jmec_|jiV8*Fyki^fPG9`sA$CmsP5@|Iufa~B?CmZ%rwS7nLOws3|I`AqjMunFuc7UbzPvJYJN{h_ zEgJGlA5zq=J#<)()oJ}pI?*l(ekX{svfN77*rhxYNi+ENU&_lg*W}C?_b0#(a6{Uk`C0H9<&HSdFC(Ngk6z6e)D)iHH^pSyOotx?ZFGM9ETPy z#IF$tYlkcEnG0S-MsLiW@6AGqO5PAs{{I!(&@15Eta8N0&h$@nyb@oFHm*D%?{Y&< zPFioQzZqEzYsT0TkKI}cKUSaoTf3c4QP(NtOWFy>xx z7!ze(NBTTpdMtUktUMl&eMUwckRbs6`mO%31j-0Vd#m;N=-w_$Aw)uhs4g0h}W&N}r-PLq(6($G$HJ z=$xkVWsPK-cuh*}R($2IKD;6U?*u=B9>u_98|#g6(8HyBOc%bcBTbZ=zJ%LVZl%c; z!ke&cXJ@83$I8p66XQv}ByIT9m<7eLIwKVXC%rrTR4HJQI9M4k|3@YFX63f7OjXCi zL_qA}$N3jTS-1z&Cqss$GjTD!wrM=T(wy0}KV8|#NlTCab!|M{q4(=~TkhAH!Jdtn zOxf%_V#U8nP=j(aqL7|%i${rQ+ zILfiQ?xWW`E-zezmFof!e$D5&gnyij+5_?HHR%ZPwps%dytk%3Ct^6ewP3-BcVh%- z8`qg7W91mu-T`8}hSw#sHGu;;*4D}`J|XT4HU8$_OWID|%41c$=`rk9P`>f4A!qr~tRLqup0>*pt{bOalQ!mQrV*Joa^Vg}; zp_7G0A-dw^!6^AA?c5{ib-oMK@ZgiU9+z(Wb~nLQhyivjJB!neUX{ii;CwP*t?6Q8 z<%7Let;@x_cS<_=rO9z6%5a6#Z3M?DSXz)TFTv5UZ>?`sP_nW;O4kd(p($kLDLtE@9U&EFmEtI&L44>=`EYuTfu^kx{96C>Uqz;9Bcr5VycdESPdHUFL z{bkTbm}^PgA&>J<(6HCMZPwFod)Bo zv;+as!f)=nMH3qDblqYg@_wp^X?;?8DuL~aLrs-?y~siBDHdP6`QnD)b#}jJQA|s{ z=`0T)38zT6QLWN92qT3itcmvr_Bf5vRr=hWP3L!agJsE?L|w$rNC`%iMS#UirSS_~zShMarL zb!=j<4=s&=BRO-j&gD-Gf@%^O zGEs$C)l6kPa}f?KzPYkh?s$i5J=`}TiHzRW^aWf@g?|4Fu-qb?w~Mx$&QXOJz`Hq@ z&)9SIzBN$0bkEaa=oY?xsq@1{Q*m@x>RCS-kRB0kyv&31Qh{MwmUzL>skeijcoCV( zE1lmA!Cu#&bp9>Rm7#IMZL{?T{$isOw`O4r+=fyI)*bgY)$~3NRaMZs_U%<$Ap802Oem)sWk? z54BsTvVS#Nb)SFg3%A51B`qttO&4x^db4?cQsieo1zs?C>$8(;s=Tn58sjUa@+(oQ zE^MoU@t<^6wY)zePe`QWXh36%Pg!uAVrt;yt6l2`?D|3t-Z$_udUq_~FD_S_oWit+ zPLFqv`}x5RrzUXJWNQQU;`=uxAzO_uFR(mZ`8aN8goGQXIO*MUwHu-iPjrp@(>qC! 
z_Y-Z$6d&fbcixJ#PG{=zSS9KG!UIamt> zxuK4K4i5GoR|lzj!)@p8Cl@PSmIs>NM7s0<${8UF-SZ1(&$V=umuZMWv)LAyh=;Af zgv-ftUPNb=5^I?Zk$&LEU!vV%CiF(u=(5k#sWW@CQ0x}0PoCNAx%J=S{(scdH%O=a zTFgeZO?Ki9CORZ65X1==M{BEF(Q$~m3uX}_Kinn7#puhv;%8=x`86!`C0DglqUG$Oo>|^}B4b z=H;zzd+u+KKjeB8r>^8+Kp#pbUU&$)YvoKSU74k)`o{&=s(q_6bY2R1`6)cLwNsBE z)b0#oYT5Ddw1RFM)qJo7ydE>y{O1xQ$8^dJY3|* z*Krxg@O-3kre9@@ljn;`S$-THDR!{z@61E5_FGFfA7O};kN!)5QK`=Al9yz7xO6mo#DfEo&X%ZE1==_m-|f4^d5!D}^%2$qbUpN%Vstl3)K$Hd zBB}}e?^y@Y81%F85W0FWm@#@k{Z_|jFp4Z(VjhN`&T|-d6RD(5^BM${iia?$&7$%xD8IT@>hSI+(yxix8^#M%MymR1 zW<3fFu~dFlb8yuMWI|M_X=3?`>Gk4MmTJXG$86GEjn}AcbUeD8E;}s0#sC=!$&Dh3 z2{~+Aeo0NlIf)XZ!Hu|b1FMByzvuLsDHX@)ilOMIzF5_A6ida7jxJL1@_f(E8_ST_ zQEVS2d;69_i4-a6W0HY2HfXE0GZ^qAVt+0J6n$SYY@){qG(nW3ui5$<3ctVNsMBb~ z?A9o!9j+GePCe3CB&uSetXWM?YD`XPkDz6Q?mw;aQ3piS8+ybI{LASX8PM(Qok`*y zJ?VhyZSYjQz&phKz{kqG?fCGR?{tnzi*fV^~C??{9vI`;n(4e30@xDUmNh#aW=X=inH>V&KMwo&*fiekq zpY)*i>ev=0B!3Afbb)ntG;v;eHsM289=z8>E_AyYPOQKFHXK3q{ga1j@!rSQ8n$`5 z4$J%oklh){;N1EDwEeGn!jAj0_-@DNTEe1w+yvnPIEB1be=Mb}_UM?uRt32pzuCte zR2x2Hs_NX}DHt{#>|&Sx8J(@J(HiP8Bhp!HKMPenuk%XIT6d|rEmdR`s)k}%+j4Pk zZB|=*ny)iVbJSj3yGE0n@mN*unSB9$vnDnKUNIa{ruhlMg+E)xLuns^j|g=o$3TAk z$etbI$1rlqja#A@7d*>ZC;nw`$zXNAh$bPYaFBzN-{tu{lVIv;XWV2gBQ z@a>YEL--RKOYszIY{es|)Z6zZ8})1q#(*Hh28oxk%+mdi7(+DDOUADRW`>ii=Oo1d zoZiRZ*|1e2C%=yL)lam%H0dXk1>6y=oS~V+$ZEygK6rliWd>;?N-FJQ-t^8&$t>;>(qf71)74}Y{h{x4o2VEgLS{*S%D{t+)w{s&%g zHCxKs;*-6g`(W2;pWp?8Px6B9m#7B_*hH=KF)yIH(d*ywg1Pk|2Efdp=vu|l^cuw< zr}slg(#(|reE7y(n%zwEXU!!JSJHcm-ix&Uyt%}-2kVZc*@x&oGDpkuVDE?1pFWyj zpnv1~SbSF=>7V2A_`IB(k0h?I&2#c_>Chs&Mfxn%v3-3+&yM-NGDmde+Arw)!pF)M z`e%8W7un>jkMd>f4)^gfxexcVZr+xMI2;`vlmF|+1RVLhGO8}x;^|01c9&=Wp81=@ zCjjHKk5agI_I(QXQ4R}d0{ai242*%hW`D1zQDzT!KJc%n1mhgs%a!b-Px^D`1`KU= zIgMd^xYL6pX9!9f8Mje5&cVGL+&B9cg$KCPgC6e0pqKWtaN01)!7cx-lZWFteE;E7 zh;dGfamxPz3i~*GkV-X9`Ll40D>rh`%0VZUaGY{x@gUV}obqHLYni>2+aU`3IDa_E zm2BbQ7zd4%axZtjv6qu@FL%DNm-1oZaTM;O9L}Jym&&$}^0|z{K`Qe;%7=xl#r9D? 
zENdCpp~D)do2HI1WFa$~kw^Vgq+^Cn`PMc?+xWf9C8ZNV%~mFsxp4=P>&z z{s4u26dvHX&7I8L3GSRdhxWnR`F}2OENyic9r1JKsYRkoX4G&Is5`j z@=mx^%26LZ6M*hqB%P!Fx#~@J!;gmf(F#A>FH=BH!) zbcLU;@^e*wzQWH}`Gp9-kmDB${8Elzs`AT4ekIGV)cDmhzm^Us_~CMaAAhcdX{z$W zEq)}-kHqOWPZP~0`H?a|Lc63L$0Ph`GRcn?IT>mTVuWj~D6vX}A8Qu*@yIkkp5r7d zvckoy{P;8{Vmil96j@0V(=;{t$uK_|<0o_cWPzWo)cMKj7C%|zr^0nk7s`*-C(Nmo zk3!7tN%oqv9*{lioIN~FZE zMCrG{uf*v$#jhl}vB~i(wBHK-(y^j+Wqzf?r=vp$jhd-S?OILpt66@vz^@kBajcf< za8~%$Dt|i5r&g@R>Cp0P(*=G#Ouun{y~uB*_>E$cPe-UVz2JpwS+EOe!33CO(W2cb z=Lz!4lH5?7vc~EmQ-8iz@cWD2a5Yv9^L+XIbUGKA64y%QNGp~IS3*;fRBha_&) z8{Hzo#Cve}4)Ek0SRk}>FwL4(?w$WB^<5luQ*Xc!{C zkl4jos)ejrHYIO*;w?$8p-QH84aTNdYZ!CNscSUL5s_9WwrPA*78~R9XRI1~BkH$m zT#QgqD>tVzC4ZY;C^2W8T2WM&R!`<4fhln#AdNM%Oha3cOKWi^B}y1$m0Uork4?rb zQ;f_VHivSBK(V3G2x}>aAYzYtLk^)d?$YW+m5M@Y%V^`_e6HmY>85NNxvpL{70lwM zw(5&`9d27opj0VUIk#delyU?Zn@(AiD_vEIUQn@RtAF}f)@Le3vXanr%n;X3$t{+= zxg1O6QhHZ8<%=53&5%tcHM)Gfj4`K3Hz&Q*Vp+TpD^Bw&T2CW4H8nY9sQW`|bsnhS zlZdPIld7uBQw$mPnV3{9EBKn?>AYJL&Nl=Rv)x|K>C+xxHJLLjy)kiE7z>wTZgEH( z2=GGDU{<7dGyamG=5~5qqPospGHIGkCZ-UY?0@-a)DvjRTPC4KYxQRh!Aw(A@Y-WF zwJPWjF;$_}tO#ZpyHp(VL@ctZPcE+dQt52jZ+Eu>&XzaW3Z=3pRncZ^CMHd#DRn{< z2wSwKmN@P7x=R6{QR2f}rm zO@AVk`seiS>R7_lFY%y2b;d1e8c|EV8t!pf{Xws1o ztFzv0R$*t9s#wr4X{d{>p=8}1i%c~%0)yWt4SIccNvKrHgmmVtSDy2G@{``lDNi;Z zXk?-xi#}EK`21#bdAin$Sd>XorKCw@r8y?6^SG-4Wg};gIXuR&v!zlwjIlygWq;vi z{MoQLAlA5Q!kjP{$(ahJw8tIxTcbW16D{Rjd0F0-^NRzCkk@OfGDe%*70_3$QA^e9 zRA*)N$(&M`%$swed?GR}QMV!`u_Ngd)nbgf8e^LAs9EfES9}^{Mv@Ul>vT?=rqo)A zG!$_tU15DZ70~CRW^=M)DA}f_41bdup(fxD>P0>=&l?qL)#{)%uJSeINpT~WF@y}6 z%A}Q!j8GtMq}*nQPvptu1Q~^>8gQh2dUq#DYSSAbSg!-GIAdD-Mg|sJP$R)jcbJedCmztBFc&=;~R)577xh)&A`m*Zj zXrbjxD6NS`Mj~-*12vl`=qW|YcB?vAmurfyqRl(0kGXONwb5m(YIO~b)@#vNy;fJ* zFV0xZK|fC&^y$k%VJR4-%XCoUh@>J_CLa?TM0N>d7TN_V#$XV}TCzq$BK1y%qUFhW zFdXntCWCr;aZ+82GJlhHZ#f-siKNDCpkc`PrtJBw+FUHrsTmMgiYA%aWl&8yT6AwL zYoe8aLo5;mrkv@hFd~`^P8CJTf;^Q9i9N2QFK#Rd4YedQ2=CB14V!H3dT+WEB8kN#@ubMY#Y=7o-vFcE%8Y-`p7m}#7 
zjKD=zFJ?_fzc)>H8Ie+DQa7Wbyi}65@Csp5%Q$Jyi}L1X(yD4D4UBeDmZzJzwq+~Y zV--3#(}_mQ>TR`J%7%b9S;$Fvis?$--ptTtPGm5t>K3N%qKj`xmv*_VhFDUm^4hZb zVx{1$2V6E~Mt^H&8VaXYAj~`EE{#gvipd3fXH6j$D^1NhUHW3NT0*Cl>os<#zhKJ5 zTMci(rYyFYpt|H}CT(@CupzEZd(xhu!0NIZg?1Mcl4;dqK~!T?c!CTQ(g>82DYZQi z64a_vw@8xGOxJX3fkN#JOsQ1%Y_y>W1l8JUS+(VKOTcr(aye*?Mtq|vmIi|>R_%BjRjoVSSz4z$-~)TmgjK#L}HCf zA~a1Z)HY#I+?onnmC|G`mT?(035~N=w*&*Wye2J?iCh_vQ#xg5Dg_0#dEQoaOlC{E zjIEJ0@?1uN$n3EswL+mjX^q$fa#ysZ5;2;L&VLbWMNCyqaMGh|393n@Q|T*&CE2>h z88E0k379*@jKZoYA58ifYttxo z+HCnqNGHoVYnr+%Do~k2C3~}_FgIPbx>7lvwAmb5rLAFC`BYVjO|NO1>ZWQ%uA)1E zw|`nQ@iJO=zE`~=i`$;g`ZU^t-q}jWC+!NJ(JKzx?2%C17i?*gin>5hFgmP$orm_L zu&WHhj7=ZUhlMSdydf481ma988!IIgmP8;ZR_a7{Y1*1^1)DrgAnLHIGHJhCEwT#& zYH21Uba{NyOr=sA`Bb1-0GD_)J+* ztf5TER8fUY94pleuA;zdcd9K~i7{>zW~^zsN|iRc>Q?(y!l@|BVga4AVl?K(nt!mp zUavXo8JA7!rcZVPtx}=Tm=rEU%ykXR#u^YRqJJt|F=z6|ibAC;+OXNZlZB8gOZT!kT@Qk4rs;1e zT@Inrs}y^uG(kfls!-CnMWLwMvL%IBCE(dJT7$}EvAdI|vesKPnPNdE81hW2yoxZB zsVOz~rYTx2HndTr(j0Y!^E&yY(#0giF{6_SiDT-f&2LXy^&(HGkqk+gR)0+)HzhL~ zMy_$Fql$<+Bk-uTYDYd>76@ZLxvE+-3WZI-!=|tZDkh^)>6Up)MWt1lR5CHGE)fW5 z9iA*>awUQ`v&SvUIA!LTN~W^KM4_nGY-vRjHf=zj%0=`-rA@?S>KRdf(kb>-?Dlk0 zZT6;GX>TNCRB2s#ZM2ayGJgR}qmrk4yU{7q+iG=f$mr7QoisAYiB!^Ntrj-hZ3?x2 z+AFfvtu}9+ae5}p5vkl~abz{ls4-E`O9jbvuqBUW3?`Q*oD5k!4p){&+5s8?iLy~o zLaV4oB5H4)7c-Zdre@NjN=8CrrC*z}*-FX`6B216?ug&uGgXSyB7ZuYRT-Wo)H0bg z^+>>!$tE)~o7S2Vs?=Vm&MD5foZd*)q|~JanK)gu3NA<0W)N2uMYB}nS1XE%Ml7Ng zt329#OIPQ4oH|=Im<^hoZn51Nskbs7amZqH*fQe0N|(2`s>yIV5SCX0G)6D$r%Kwe z+8NQ)xoQwd@txIG6ibj(Ps>L2%oiQ|+ zl%QUc(Psi%HK2>;Z1hOT_f8Y|80!wPF&PUtO~b%}SF{oSD|-GG&KW>uTmT zwnivm2#C@Zzq-|m*tL`CidXBGdFfsiOLz*pN-UsYPK)nO6x1#pTI}I}j74Oll@%niQmS)3RhR@5o6SG>ENc zD#AwDTTTgbex)#94H~3%Uv{#Ta+a+Tm&M>3&wn%j_;WWr(-Z1{yE*j+bL*zr>zuEj z{tHuE;ri<_AKY{Plb`<=~Glvo-tIbUxF3@dwMkv+1Uo_Tl#w2iJ$qyk`o7Rztk!)b)I`qkau9q_=&xT26?${S3LK@ zUGn>+gS>qkpLz3zZwJ<1>C-vV=tA?&zZie>z*SMx3+J42`O^~*yb{`Xaj@5`*tPBS zM}ME%Vfpmv%E~>LkT-Vb!l}n@dusnpVDbdlnZhfRS8Tg&{}-Rxaqur!pZ3DzhR@zn 
z0C(#P)~$OkSo*WVp`Ytj_az@&amTOTt{#8+i*IdhKRfdD8G9eak3V#oMEJcW51#(; zj@v&qbkgZ}D~vmrt-bS$XCC|GcV?NVu75jy*F|%Gr<^}=Y6D<^eNSx}O-tt9`vZtO z7lm^t^A!DX;a5czpP@MSn;?#6<>B&|DbAL6Q2ehHXMZEqP4Rz#evT*M==%@ze1Flo zE8Bg?09f=3F;)&t^Gj%N?VtlJrSf%xW59CI1y+D=%6}E;0llCPtOjeyd0-t_Pk(uD z02{$3uo(=1L9hi3fvsQ|jDS%v2DX9ifCuY8*QXm6z?j5;ZU^mzU z#=%}N0rmkEPy-Fn0v*r;126&;Fary)0voUc2XF!xa03tU0w0(Jeh>gb5CX@72Y>S1+g2qo#l4MH1=FAg>YxFdpao6@CxMf}r@*Jd zDd1G_8E_gn9h?Eq1ZRP>!8zbu@LBLVa31(PI3HX9E(8~Wi@_JbCE$zTOW+{*GWZJk zD!3F}1}+CzfGfdO;A(IUxE5Roz6QPyz5%`ot_R-&H-H<#P2gtmZSWm%3xBv3d>7mX z4uRXj_rM+CPH-2v8{7lF5AFp&06zrxfggb%gP(w(g8RYGzyshx@DO+yJOUmCkAa_q z$H6baFToSwSKvwTYw#QJTkt#Z6nGju1D*xH2hV}$!5_d2;6?Bf_#=23yaN6N{tR9P zuYtdS*TG-G8{lu?P4E`@JAZf^yaWCL{t4a%?}7Kh2jD|617;!n`VN8^61W7m!FJdI zm%?SR6CMMX!!Ec2cEgo$73_h%un(?=Yv5YA4z7p&a0A>3H^I$t01m<}a0qUN!*B$S z!ZElFZihU`hdba-D1bsJf?~)(36w$^ltTqn!d-AT+ylqqUN`~wL4OrgLk-kI9n?bu zG(rQ?LvxunMPP4c1`;Hem~%2v341!%x9a!&Bg?@H6lYNSD0q(gdSKt^OjW@JHDWJ7l3Ku+XB zZsb8;@+p%c+b=w$RM^l5YoIu(5eorX?FXP`6DS?FwZ4muZo7JUw#hdz(a zM;D+A(M9ND^aXSY`Xc%gI*7iEzJk7rE=8B2%h46+N^}*v8eM~~Mc1LPp|7KFpl_n< z(YMeI=zm6Z6S^6F8+`}ef^J3MMYo|t=yvoybO*W<-G%N(_n_~id(jWj57B+-N9f1s zC+Mf>e)Kc+0D2HTgdRqZphwYT=;!Eh^b7P$^aT19dJ_E_{RaIO{SG~ao<`50XVLG` zbLe^W2lN7Z5xs=|h+amopg*BMqgT;u=r8DX^nX|M2KpO%6TOB0j^0M^pnsr$qIc1I z=za77`Vh^aSqw152xCm}65NK{aR*+Cm*Gx)3|@}A@Cw|GSK?K;2lwJWyc(~;Yw5%J$?>9kN<#Qz%SyL@E`HZ z_!ayo{Ac_sehvQxzmETk-@t#vZ{oM`-|^e{9sCdcPy8-^55JE;z#rlnJWBw91QARK zSwh-KJLw=x$uiPOjv>oQ7g<5N$$v_+iu90P(nnU4HDoPWN7j>mvVm+Qo5*G|KnBSc zGDNnLVKPES$r#y2wi6!VlO1Fy5fC8}5iwzigh+{u$ccg|$u6>+>>=Z1FPR|wh>EC* zhG>b7=!tW65#kc(R|IKz|O9Fo}>T ziIF%-kR(ZwG|7-G$&oxMkRmCODN-gCQYF)*M(U(NnxsWeBqx!R$*0Jt$tmPi@)>d( zIh~wA&Ln4%v&lK+T=H4+IdUHPJUO3SKrSQ~k&DR}$R*^9}vd@)UWRJVTx(zbDU;=gA+)3*<%e68R%}nY=>&M1THFUL~)QzmV6- zU&$NfZ{$t#7Wq4Qo4iB*LHvghpgbXgt<7|M&d44+4un3lT5m-Wp}wk%g*#T>UAF+FVJ~#O&<1 z@132!eV&MoOWAk`fPY8lC1l%o%uBe3rjuy;8cny*^nFf3LL*g{a!$fFDq%Z*1T6V? 
zq&$*8BI%s0?0pFb+veYdJ|a8*xa{-q)NnendQ8lIAg8)~Z;>wZ+8mC%JLYTtQ*k#r zpN+f0+K-jIW;i~K0XR_baG{3Z3=jt&3g%W6h+!_SN9w_Cz(4lIc7RDJu^vW zCUazFa$n>k2LTdtKsa&-5+EU5;fx$YxDNpZPWM0R0begW?l-VTCr zq(kep@`^-9=lYtB1c8d+c}nB-`p#vfC5|9?7vcG0YkhYo5kUmPcr!eQw|4ZjOvwA$ zP7p#KLBut+HP<(N*Z1~|(02`ryA2vdcLZrLAM_LYw&}Cy_m(Lx!1ED;pky608|!ym z-TxRt@aKO++q&uX^E-*RiFHsn!FcVA`svNu57VX+gslzQ_IJ+go;|21ngD(X?k6xU zd~)NbpA{rb4om-MfGU8%fBXL$eFfidpyp|Vjch@HgntPh1G(||5AH&m%_ph@h{47| zTY%(l&(PmhuH`B*p5PNUf_$JK!9`InvI_swJqK|y;llsiz&P%5Bkp)^3Lfl>-( z5tM&1F!mIb4R{XTa)QBkYCTbh@BG(^aDEjL3#Az9I^JK2cwRR_L%j$;13V`cO~gYz z7W(3Z(J=P)!Ow6%jEm*QRl$4RL@)Gb!gDYbiEup;>7QRp_~<<1zn4&#F|C8&`tcYE z^?tY)0Iqp(FNHDXP|tu83V5fJDT9qr7ejwN19&5bdI*$!c<(3t4vY^49GjqD2G1lO z3vff>9*M`ooN>b?#j7M_#Q%qX4UHV*8)vHQjoH+I};zOnK@JjZ|kiv#R7 z5`RU7=q!2*eNQUM4RAd}zD!;xKc`};B5DzJmHG=$#jD}X;=RNh;OqFcaCN|S3a&2% zX@Yrz6M_L@mT-gc+W<{KWx)A>zXyLh13Lo`1%4is5VRoZ%^+`ZcJTb*_k-ULiGs@p z*NBkXkXE=}3i)@aCA2H_K>elCT5en(%4iJHx*fJH#`^=f!`OL`y0q zyCvUCv!zr0*XIAl>&XAo^~td7w)7`ink)~l8UMp|2DIqb;4SnTza3V>2=IU1I?CWm zayuOgxlBmmUx6+bx1csdjbKD#Vh(SHH7Cak1+`M~E_GIEX-P!FfGx*@n=Lt5FIQ4J z@*>O_N~mE*$~bIBdieBYwp&3aTN6^GDQtsB;j)-h)ox0M^gSw7R%)t!TqFB2GA%G? zr>5DB8`wYKxtYOR)W4b_2XiR;@3 zCh9R*Ga0ZF7ePp!3WZW^;7IbgoN|Q{9(4wTQLMnc63TThctB!-KrVk%NZlT{+m&Q6 zc9?^=Z@XFII)5yCY;yDgW9Zto0~_CcK6}#s2Gr10U-NW{DLJS72{fv2i-;^*+g>_- z;d9cuI=)Dl$$qeBtBB>DdUDa4_riO7c{&rNM(>8SjvBdiU{h#>t}8otZU?NrMT2h$ z8i6kqAtqu8J6JP;kPd&*Ln6coyOptk7GSJ!Iy``w(SRI^`}>95fT3=F=YYPJ`qRL; zRojltEniVSdToKTTp`xWHMUrXT}vG;Twiu<_Q<~S^1ecw&SH(VS@kv>&2RsCBLBdR z%_lBZKQ(pg>FVV9FRvZxiIF6goxN1%^v)RnRO{&%#y{CU^+bP_%U$rwGlfpi$TJs9 zaL7QE4Bq7T108noNh5&tZ

@i5$(Y}>W=_@|YAPelVy~=et0);6x&GnWX*0&1TF~>|Lv1TFj*l-` z-#TsYP&F#m9U`+f)b#xbDTYR=9G>2NF9?N%mP zENPtPn-(U3vfG%=pzMgKWQ)l~{0%r7KyuJI(iKj4Fh74lMlmG0Xh-v=$C2Y-3wMvA z)y0e1nM`TRX0+yg@w_m+}a?O)FVJ0igROOpV>H_&o`?ixTlO(l`P^n#h0{fVds6o{+2~EnEczQ{pts&)*Rl6qrJ`KXsQ!t8Ve7G zdzLK_VqJgtSFtdL^a>;b4_uua?e^gQAqnCO5An~VK;Q5pV(og3)g|zl^KxTpOYVv? zXHrQ|i6tgxWM*6jDc9Q~nHae>gr8;}$*7}ZZE-sB@3f1mKeHt22Yls*T4WI zhh<-$&QWf#XqeP!u_98W$Th~8<44*PQbx>8FnNEXNw05 zgVzoke6G)&g*v}}f%=d_+5wtG9iKdK1Jxt#?io#c42H7pQ*` zpY3N*E_(FnLx<;dU3l&NO|7l_pJnePhubY!BEYLEfa3t1-5j4_ZUqN`_RANvnBy4d zSbz>4CJAFBY)d`8;y`uo^NV{sx3IVS8WU-iT0CFL#JB9wsWl1PM`_FV6g@O$R~xUm zZ`-u;@sI4bpIdn9p(A<5XmbEROAvp&yQ91$IogyH6EtZ>dF#@{SbAEZ`~qMU1S=<4 zyiTkW(=OzM8=@|pJLJ7W&Y5wD)uSIauscypFSTr79l6gt8E9z$T6!Q~qTxLuE#z?) zCm5!veloDFA*yE-+g6JLcde{mF{Zm`!Az@8ZA>gFn{#^i+UXY&k6(K1oN<5m>gU?e z^%^}Dj;MriI_dP%g&(9^gk%`Tvj(7@20SAO6UI|S35O(uwM?8O(Yu`pyWJtEAUo;v)d#!cRwYBw^wxFAWYas*Hg;{7t_RNVnx%f_< z^0^~xj#rxNtL0*y*Z`jLIqEdbrn`SO>R0?GQu}9~^q>Q!PG|cj&i?u5|8%7|21Fn8 z>ZopztI+37@x|~w{lCG5>K^;USQu3|_!*T3>uoGy#c{3MAB=zbWP2AqACtjU1j6BD zzZNI7m!<1hHB>jHTj}`xD~I2%FUpy)IY;j21Ik`|%EW&P@0VjzOfx?J3cYr{+^9(uGE7*6Z7Z*?|IyWB_zc6d(&ou1l+=6w zgd?a}$TmL*`-mYDh-851({F)}c5z`H_Q>GKXoqi!X1LJG#~7j5Z?BxcrOvD`sPFyr zXWm}tI7S(5EOU~@2OGEVUgTZKOitOdb<~yTn?}#>diH-()s>BzHId}mnB2OR%@@Wo zZrv=ZV+n0$BJ}6ywd@NM3bU4#%{!v#pBZ^ze)*P49v`f@7~;Gz;0ml$NUGx?LRcg5 z$9X)R7Mr6)kHv%0_VC`-Xfi+Fx9X3J=YIX@ir@BVTG%@;uqQ99Ift@d+`2Jd5-C#$ z^V?bHJLiA;*!TbZ9Q*yIt|PK%kNo`nz?-P@g(8JC(uSo0+R4YBQh|M?^M00Sc@P(Y z_csDaLL5#2M|93Ay&?+JZbCMcarvps?3|C6R(*jS?Av$7cQYQko9bAqH8X42OTTA7 zyYzlT6e>VUq(XVoKxI70;w(VJK1WPrI>4KAO9_9C7fC0#8hapl;Sjlp3FZjY&{HUW zwlPV-Fd~s;>^Sy%xGDMb?lxOyj%iN+ZJRAYt5oYNZ9I9HQSNk@n)#&n3!QZ~Yif+r znb@51Mx{0}b20lAqYTe(q`Dr8F){3i?W5&kIJN=$c^bgYf^ox$0BTlJrxW`!1jwWA zac6%BE9gfDejHaG$}q^7pZ8;Ekk)Lod!M&e8X`i2GzoSa^kSP`O<)lXaCB zGi+5Wt&&7Aj9t5c>hRUs>yvBo;sJTfP5v>^gCw6G03a>45T7E5u`0S~IrbfL$fgPXUVp56bsvx<>(9;l_GS{okXSl;;fk~TC?$ZleiwX>;q?0py%pWQR?RE0(^ 
zv(EAHO7J~E0hdQ}?75#g*r*U1CjDXEh$`^@5Xp?Tu_hb%KAETlQWx|Ux`lcZtHBpC zqZ7R^+p2X6?>~8S2Iel-#4~?Pl6Dp~cf2Xu$n<~A;~odr-0fJ?zm`CQpAG&U?7S35 zITWCjbf5m|9>kIJevzn?%E0i@M;H_Rz_Lg~IiQ8nNXtlzPUDvj2r1DJp#DBy-oT4@ z!Jf=FO9QdT#~x-rmv=`3&$ZaG)A-lJZFh*doJb-LhxGbu6o8hq3)p|#>=*3vKfUzh zxfy*Mra$x2Pkl4MZOmlfXWwMo&<2!_MqE2Ra@7&`B763k)hHh2pt|F$0TNz$6F6Hl zW1spTZ0DBUyNR*l-eN062crySN!}Mg*0M;DHTv${dz(If!#*p=H8#3!N70H(=V)>v zJEvCzvQE~_raEw~1!R z2y~ZdUmj?n9&Y)1#~SuE_HQklDi^FmYfuCVLMwp_3(n14w_(P!7rR##r@R<;>Trmb z-~3E-YEC_hybP=17Ir%O)=%te-rtuUU{A5<&-C@}N9q4K+}nTSUuWBZ7bfi4X&P}P z|4%FtVw;A*aVTRD^4r+$tjnnljR}u3l)5y`M!{N9Qbl&cH5wxF&>r1RYtnbPwK4A$kXI^7z0f93OG3o_}L3hV-Wb zzKm1sj3I~eCcS@QC|tzL4t1Q7Onj=YZFNHCmzUO-e0wo1N%M+Y6%DbOy5`xA#hX2; zM)u&=(i=~A^du|QbP#Az!m7QKmX67EmMm_W{>Ye}e-7kl#U!G4w``uWV!Wfp9P`TT zb(LFw?^0d< zc~6&Ln^k{2``M331{@cbg#_&Lgr8D5-W-CI4%2IN;k`c)3qt4=iK+Sg5Pm5WO52I84NFS z@TPDvz)QjKV*LR9Fn#oLK&3{b!FLU7^kbrP2^W96a${r(?87E-3UBGz>6Q>#ej55$ zge%#Mf^V9%a&@RwrIN;x;riqx3*sTt9IY^90FH(v-HQGfsK$XOcr8nm23TBevAQK* zA-QVS1cyNyDhYta#TMsW>eZ097ul4i01<~bC@%k~Us@bOfcS&H+wcqMg1CN%{-9Pj zjZJ@NFGLx}n!$a8hOp~$6N_aAGA|~vJOf3dp!8_B8_=^>)qB0{Q$y;4a>(Q+hc1X= z%x3*0wgl~;WQ{Vb0G}i5G;#{qkdTXwAOjQAAh!1Syox}s9G?0? 
zZo~!RI5e00d=$9JK;)y1g%S8U`!gqh~{;;h#HI;hy@gjA&uG4Gs{>kBd zqdZBd?%7w6;Ljf+(Z(`EqCq}a6I0xs(^&hW(Jq*GKdgZ@=;HS9;2UM0G+a5?jJ(MWiK|y<-)mHzwBPPdL>vp|v2R z|LRp#Sa$RYj?(^5_7z5nbzQ$fALVtXl()U{+Yd1wc*m%MIz`BcXwX4{^4?(r$>F1r zGql0y^Z$sNnpTZpY0=1k`{;k6+uxnr`I>hXI>HZEHM%R7lWEsx&u*MA>${22A8sSz zo5xaX^vT%|VJ$C*xMl%=9pv2it^C+y>Re8|@$KJfz-KL($Dm|#ryL!QVlpedpTwDR zRcFrBJli$3CN(KW=`7Z04A$(Zuc%V*;oexYUT?~4AjcP__g$KkXGwpKanaMIQhV$5 z+#;OuXRrm-#~{%(VkA*}Z+A<{1>abm1Uj8xz5?I2oYJ>JFZ6GF`S#(tjgPx~Ox&KA zVE7X}yDG=OwRicHD+wY>z^B3z<|V&+D6i10p>0u}Z)Z%LIqmWO%PUKQ#V+CGq=XbC zFK)_98eQ6u?_~d-XiI->dhzUWXVT7_XjI(R+SRXS^96xQbr4@r)Y*GZW=N5VwL%`n z4-D-b+ts+G+To6fU~&T+HFk||61i&bg5A}*vli?bpF8l7vxc$hGnW=6DHJ^HFz|jU z^$*ZAH|D*?^>ElrSHnUb0rtbtm_&Xo8k-rh3#r+4gVZKk{d;q8^ zAw4|-=I(uC?}QpVt&SAe(-Bt1J)BeDIxR6d11?lxj zE>${FEBm|J}j7HJ~X zCNUE8H}rq!R3%4qT&J>k6><{hs21432CK0b_Z@{Pe?bxbbaDnE$6hBY*|(*n zL#q%4+jH48X|^+nms2)xZmilCh>bUlK9<{_POsYeP==I;%gPI z_S`+o>X%F{q)zT$b#mcA@1eC%FZpR+PS);4Z?K>4edULBQ?P~o#1>P}!~8}ZMN*7Y ze)j;0gJK+#pi?o-$?$zV@Sm-7r*BAYDBOQozNW5gb?32Sx5=SQDPgrLk5MWQ*TqCI zC@D}hy`m{|OkH-3Em2RU%(~uF-?8HTuXin#hgsM!CplyA)TxxB_29tz@C9>k8l>V4ry|X9qoMY2~?rWjIP%Z*K0oqG>-r`1s>`&()pQ4A9voC12%QU3j#Um`}UjnaD)(FqXO zrOOvycKZD@c^aItXVIsVXB^&&XOy_@H=Jq1Q zPC4Xs93-c}_YnpJ3vTXUp!oWAr-{xK8+E;T)|&Xuo^FdWj(3^;eS!BxZDyRQq0u?H zk!+(CQ;Q7EFfR!j@1-8WUn-(uLDkT-z*M1&_5%}RhfFm>K&3H?*l#Rz7v-0AnWH>K zD5o|nVR}i*cPmlaZtc2W^jMA~5;5Lgy`$Zwq;Ql6QYr9x0P|~rspU># ze4Ld^Iql}MkfCG_XHjBwGRhby|KPeZAUHM&B_zpoQS1wG>;;8MBX&@aFb1v8#tO*L zAE+VV{%3!rqkOPKQ7=&b z>eZwOqaq!hEsBu^{O+>Mn1bl)IQH$h&)9!5>>s0((*XxwOq9m#{SH07DqSg}7={9` zD3g1?NBwTC6rWUwP93;Sj`W_V$dS%aoJ)fB{|#_T=5QKPE#Fq7p8>Gu4J~oN7~_4% zKOW4eNOUre)&(naN{WAC4ajZR+s7@wRaun6j<%?z*$;2cvsl;<^pS?~FP|tLlL1&p zDI*-=v2AUQYI!u^7!f<`N%pxOJEhmlL`vnvS6`hVjxdsXJzo|*Z*ZW)gS|9Fh(7?H zj$e}$Cn~|P-~j812l+DK5OiE_vCw~hODqhCO+fLCDI!qCu62K(9Ftb+vC%1}pqN5^ z4tqX~R)s5_fT1zkn9n-U&v7P6U~nkNGJ+Om4a``%I?o*MRD@;L?jfJiSal)cA-)|^ zssnJ7`_B6j&E@1e4=qPDcRWJX1rK}#spb3i>C#A^Nk9?aooLk55@mQWQn7#2Q}HUD 
zqnKp|-q)G6t@wWn01>XHBh{Tx31Yb?US831yf{Z!Rk7* zwGCC|m$Z-vmTlFH&#tOpwNVtac?#CNPIQcHq4wev=i$*5YH!=RwXJRI7HTiP;ew;_ z&j)V_)!g?SO1uj;{F@H?SL$e-|63385B@{Zp-T4MuRnhtyh9%R4Z6w`lcKwMuo&mT z=qmnfYVd1xH9$g!4fO>`e*8FqL060MnlOTDr79pJS8!)P^1DI4a~Mj@FE^utn$n=a z&``4^E~D5pqGKgFp*cG^I3(Dth|4HT&Yku!zdg>{l%@+6g=Ls+`LoA0J#8?g)@1-~ zK$5>kiA3RP3HG9YS>vWYO$-j+#epJTg?RA*aQQOTjJ}5!HTSKJ=l(-u&roLkO)H}g z4iN=Q)M8VnCO&mUdus^GrgN&NLMc=*54 z-rbxP6dW3)P$=Tl%RIU5t59QgSx`_&s6r_Q>fL!AE7@0n<5MPP0Br#Q=?S)?S=Ccd z=;JNTX*yA8KtN`KtzZt&PY{HV7>~cwlB z!^ZH57!=^I3y4@`AdZ6Xbr3eV+)%3|+K6Vjmk~3G8Bk^uJwzwhG6J4vK^_0rL;F;2 zgcW*nhz_`aw8U6wYlZh`6W!cXGu)eD^j!FDf{~TbKb>pU5~JWgj~g`;+Uj8@Jie8f z12fgbI}gH7{V#88A9z2NsN!IE`|;EgF2KMBYq%C7h%#txgx;CZKa*$yC~?0!-U@N) z{9(|2uvC`t#NRAW`G$-E@$m0XHKGoU@MW1xKzJ;A(6QGbjQ+7B*h zU$Tgbqyi~Q`&Vd-7WA*|j`?E4GK%62Q&hruPgi^A^-JIU1;#ExvGqVlc$y#t*5g52 z(=#xHGVIk*=YjrZGENa*_n%JsBT6(A@76SsXBrkDOT#is)J5A!WBLF>CS z2dQp40`X}&fD!WfG($LNraxF$;ZjoMdWzqNi;+dhqF3@MELFNJy7lRy< z>Eqrvp_6v1(-obVb7l||)Hc~k?{M72>P)IsVgvV*X9dVeaFO81WzaHH+MQSaAx zygIxcchIjpe~hr+cK`JR^$ztDxljyBM0woRjdr0A(T{ZMjjNWsy5V{Pu5TF&Gr*i; z{?2pq?&E#OkLEYM?*ggOAMP8wkho8a3;JZ{Al<^PNFBh z-=Il;A4m|i8mV=9z0R6pvzTSlNTE=`kh?&wB7LYoRjAJ}wUo|iGh1U-uC_LptF0BC zS$z1D3$rU)+h&$u{$v=x+g8Ml%!9R0(eE-Z!`dOl0)LR2xw-PKjk)PPkqtS;l~HJo zxhtwEx1=nFeiyARbL2M6u^L+OTC}DyEvvCLUEKq#CVH4e>5p>G#lc91pi~;7D5-IXkzcLJkvFmt+?-wAc)D$P=@0 zH$4tdfYl(WKnRyu6d{+%r8-J1qUiCS^denaLs`b$>-ZT|$?r236*S*}68{!|gim)B zTg^4-2ULXe(f6=cG_2L+TT5cK+2In4XuVEvm4A`7qI=R^df!s03O|FpjqZZxt*1~H zXk7`n6b{9|#%HmF;3~v>(QlB2l5=a>tQmTpS|#*vF2|Ihdw8J`c0D96#o5qVQa|z( z-kWMZP%91$5QGoTY^=mq~bo+wi^M6Xa@RLZ}Uy!WQ$j}SOxP9S$4~M>fdfw5Q z8#bT8FGQrc30l_sHM0fQPl1Ltm!7^nFe)T8C|2b+AUSYj1}W>1X>nuz%I2-}wKKlj za{o)S=PVt?|AFz7RgS_GHcoVYc4_bH^f5K-SejkC{K3$p$DUkLcAwR8boKlALw`qN zp0OY-;K0)6&EFDhC2{O2KoCTQ!Y(3<7<66Cidj1=w$NiEci)Y_$I^DR1$mi;le^!+ zmr>d~G!ZW42CI1vVwj5T8N&*Ba)Ozso`Nd_4fO%muqq+Sq({3vf3l#am-lYN_~FN! 
zatl)Q)$KW{3Hn(}@Im~e!p1Mhczh724a2{Mx~sYxikKyJzuDJpr3Xuki+%O`vf?;G30lS0b-?;$>2p zP3zuR2eVdrzhUkNYf*tWCcbQHiv-e0`Awq!-4Y0d(~*)!|g4$UuIFbPwL*J}k7 zB8Zu%V2@aeI1tuHT@HE;s)sB{=4)8YcFqL#dWK=@FzbjlDtd9w%vLx0MuNU8HmGtv zixgE#nShS%efu(6H&&S9P=AO5wA$c~eY~uRV~-{!@U>c*C_X|GSoj0Ff(;-Y&`f?9 z?6LxArvr7FoYQ--h8s3zWjSLI@JSDJ2*qMyq|D-PF%O)rTr|2Hf9a`ebtx1@bLlPL zom;fc^VPPEGfKwBk=<6kaYp0A^gD+9IXm`tuGFeC2bo2K4wbg{v46Sq_eSIoH#aZI zMxIA-h1q5=*;w1QI~%mG#`_h-vp!BDVl}aB0g+1tp=*~4U=uX3R}mUwHl&N+pilkl zo_pR;&uh8x$eIt9E&lnjkMYTueutXQJ#e5UT4@qOik5u(!uYb?XI{h~>|L~Z>GHm( zQSs?>sO9xsjmZN0pnm{a+{MWPq+%gn#qJjYk%3$mA&m^9!T&MfcnDSETVX$R3WGB* zJW%Fx80fYi9(>`ImhsoNQ{mui|_qWA_ z@VpbB*Xe@77e5T{oBzm2Y67vlB*>8d0Vv3BhzSOZSZ$=$K7VINnv72uIBbMGK}SRc zHQK0HJ1o-aLMpo3B^g;YuYP0J=1lY(oSK-hV1AH7S7_i3>Eag7%Y2xJJs%x@)RQ&* zparj6WLBPf9@l84VXCNZ=I$1$S`w?pLp#^RM*t$Q4c_B`DAkXMgG8h<%>mn|%W4XZ zMc=%*DnzZ8Jb%%rmxFjbP9G8?D&C5uuq6Z8hXKbG+nNDS!`V^XP6RXsg4FTo<}pP$ zkxH%iOMwP*e~V8aG@uXklsZh21+@Sj5fdLp=vze;>vyVl{syfJ|9D`}+jTAfaew*( zo6McArgxS(MS<(^x4X}J&)bU;XhT;`>KhTUX_25Ss(%ZYPT>DMbRK`QMH-1>s!ckr zmY<-Jl;S^TITxPj+j0V#(c_|kvLpw=2O1-Y1R1$*jZ3$PMM^viLCF51=k_P)wA{uxjr>mLmk~%e-GPKf?OSBnnPjPcYrTf_o)y?`9}RYWm&OY)y1{aOIm@ zPh6?b&8Zdg>@m8WluS7gUA8pWGK{e2J z0&U{lESy^fKZ=ALLwDfU4-fggtxv6(+32&bnSWNNC)cHK!57U@MP;74X@dBb8{`mz zeqNX&YoNs?HRFuib7HkYsnrVNWy!;6V=~^>n{k_vBE#6|1-KdQo^8>|f*H`Sq-8{x z;0VtT1liyGrn3Um48lP}RFBPm%l zL`HShyouesg_DPu&H{4H{mgGPHWjTU8ALTu`zu7CP%0kiX&`+T(yUUigEX42A}P8r z1VmUxw2qO32Y+byTX-iPf9p}S4`sgGUNtg%?*ez@{LX#+wh%ms-^2IlEIIM6P94i7#*f7WRI?me?d zE9Ql4xHlk2_u{DxG=c;u4eiAJ_+9)rJi_O{vLAnrj~rXNI|z4Tts)0Pc=`yM0~(d+$^$AV;tdzbh!5H2s{E@>nQGI}m8 zQHw28nY&JJq^ITbcII4gQry>L-OHtM`KII7YG>h}FXb<;PfLi&iqL9PCw45_VCfk< zbGYH!3x&C+jf$A~PzXKQ$A8}$C^4u_Mz(HgPtU!_e~QsWCMMC;M_-TBq&3$R%vtkj z+u^ICkOE~6;4B5Xg@D|Y{!B@Kr@+kFvs%m#(I_j6bZuR7SC+w(<@4V2H|I9Ub&#Cz zxMzrsnE2C8YvjqYYIdO^B8@cfm;56@#mHsTz_#Bx`1ePz^LeJ5j(;7jklQ$68vSc^ zl{4DDdfwo^3SFFG7WyF8Q7#V6`|--DdF%UQ^6UBKS~hFg)AQo8%8sW$qLb!ShRd~D 
zqfuGw@%-ieXHEHUJwfkT>QEuTAp)y>*6#zjvod%j9p|NeX}e605T_Ib5&zlJtmIm3 zV6myrQAKZ&H7eyIBY$F1NSaKUkWvDKLtN%F6F>57=ZgX%Hx7@BNbmE^rJDvyW8#e= zV&Zioyk7~g`SUR@!yqJYQ01Z)lEA)m35YL>NhCf9R^c^WMx?-j8}#@T-loNcMYepp zyu_4_fzd371FUvu%|H^p-fMsrsiM=ftSy0ItCJCl(~f#Oh``^JXu;G`L}(JS-?OG)56` z?klQp4qEBdv(X0gmfZ`h`c7x=IyOx}$j%4bE|s8Uzi zL&#B5W3wC83gVGcyjOV&kgA;06Wj8?6!0MdW6JOa8ecp#PrV;OCzoxVcYNN+rS;{@ z9Q#zlpcD%l69knzEi=So%@(i^Ww-MtdU{PwjSC&!x|MU|a-(b1NQ17p{nDMsM43ixybpYeXr3;f8X ze5=ojr?!G)S8bJWWT)ubnfC+%hD^M)WT^PLo`1TY7kXSh&cKkg!i}YU+9<6l!yr#= ztmKzZTpEaEl{~iM!TQ{T>wdHQFC!TRD5g&q7n|(4cUxq_BZrVPWP1-%`$> zu|Dp6+fT4W02E=y6!TJMuE!hL>~7DVpQ$Q8z4CHjz32G0cdx0nQgsIFL_fXUU+Zd+ z?SET)aP4bN9SNn#kR^wF0SJ zYLTKZB9--?kMpdNn>L{<&yOxI&B@3S@IplLI6b`u^1kJ*QDDa!q)02LH_vyQw!hwz zX(+U@fwAInX;65Y_2lyTU>g;%NWOS7u0Y}wTY<J z8J)k_y)G^N;KI!pbC;feXlU@ja|LM=*To4|OKqtuS{yGUaxI1&CUfev=zo(&a#fO@ zO{I$vC&W|lIDCCzNarioxEog`CFWqAIXWUDMUhyZ8ZL2SoikdjhqTS}b&acY<3YT$ z&nnbtgltR~IyCHL?QR_08LbM?XnzDsSs(LaZ$O%gn0rq>{e9A{Xn?Y= zpm|Pyesl9BqYDdj<0FlQ{C=(@KYtE(dK4XYo<>8fwO2&K34fA(V+o0^Y)mEF9)`_m24stg>bj7vU~otTT`{KB6c`=B68q0j_qwTxv= zCw}!plnR6r6(-L`;m%{+mVZo_KX)RgHGoIt%jRyS_}ooNmrpFaAz$XL`J8+au?WFZ zu#RF9(R?20-|G@FUwh>iDJ#gkwL`0^c|stg6OA3oFIJmUPz)jtoK7( zSy|Fm<~WQW?B}dO8{L5pF3Dy`aqz-S;Vlrr7D~aZz62OTxxC4rNOLhILay>1yk)*b zfy?C(9(iw|YvjW#SAV~I$1+nM7Cggfh}VZk>a;P;`PKim<;{B!qr~$UQA$b0*KhaD zEh&vw<+LI8_&sq_LQRiLc`%VuQ6`99Qf zQB};55l7vB|4);1oS>-$G^Kzhj#!44bEY?=n%y#`J6s|wo(e5upUKi^a;(MWr@CuK zKU`70%CKK0imCEk&CQgmO;OtN?wJ1KA zm9Zj~VSjO2PIk{;L*YaBmzOBS5wa-eEIzxf$F5OCC%t)JedVs|)zU`(PVutS|v$RXyOtxiIsUY#Ivocp4qd|mV*BgOF!+IML)jJqznVijS$bqJJCQ?SrT#f z(SLGT88oYB#OO6VfMmVf6Ag&KvY9UcQ5Ml9+0@S!V8eP&k8hmb?RLh8PU!zxLvEi2 zkRCi-m##2*e$I(g*0@cA012Cl$BXXjuQ*gynMMl~83BAIdZw;gZ9ru?y@hlRzH}%> zPJ?R8B(d3hy4<0XaCt(wMpJfRF-jgU1b>7mLMRFs2~C@9gQ?OuKKO`|Zcc`fZ+#Dl zP24{HGl|jDe&3Y)-}2kh|KRtE3XJG_dZPK@fHOT8IgD9F_@}OPcTeqtk_>YWLIDBc zQHn&Hj(%bP43HwNiqb8@<4CdFsZE8gHTfP*?`p5hh*dKZOy|FTJSA4f)Tt{vkVrB4{NT9nzXU?6 
znf$|n7YOxU?mrKtjv%A&_19@CfZF7*GgLpd-CyTXiPWUO&Zpv$#$Ojusi=^80xX%_Q_lJ~&na&_nMKue^p^Os zDJd&ZO@r)B2@!5^ayjH&3jH50qQGmHL7)W`mywAD6a_LkFgY@pK@S5Te>FHZF$z9D z3UhRFWnpa!c%1EgcYISvy6DWDY6AuX##HwLmO%YT!p6o8t6IXAZAn!1V)bTA2??pB zk=~Z{-bf>jWYZhz+4SCf!e&XY*l&(3z_7XZ?%j9)xbOFJ=1iaY=4C4SLD8S!d?br_- z@#C5e(_4Okvxv`e3AnS9uLj_K=n0PhIxZs4L~r425kO$#!F`CG;{vddxER;}Oq`jA zH2HDnOPrt+^R>|9e~<^k2jJ;|!l`Gl1F&L$ECs*fYC!Ql5g-rkZP50b^4RAJIP3(k z><3Drr}A|Oeh*~(O5vP;8vVchz~g`v!|BhDA_Ps?4`DwS`$gC<#J(2$2JEHSFUG!R zU;Bf|76L9GpD_MwK7J7h(1V~Cxj=6|{}ax?2zns_`X=@1eG<_!uq*Q!%VMJl;z1F6hp;V{nYq!~j^y zm*?wozdTQjUu}gpdUeLzV3U#!uJ8s`GS^VyhuFH09XiSV6B*nrL+tz z2W_AokDvtue_#j%Kn#q4-1s=)Ww;cG^R=Cz2N-az0}O)%$c~SXe>DEtAG<%$vw!cI z2IgV60i?ifkb!m34R^q2(GrwI$L)8IVD~8Ib&budxPl<6-O|B&` zA>X3})Cy`Xc8C9gdySq;8|YK$UG!H>E8}N!*qy~(f6BbWBDRG+9J_PbPm0*0WknrD zPV7!Bx;W2GDfk-p!&ZKF%_^%_NMSW>HJkYwUgI7HQNBi5CR6lB@x#xfrtu#TLXUxR zeqPLyw~{hx1z9$`1l86oLyfa5aK4pP$d{3|H6`S+u&A_JT308nE-ix8f)fOXb}g;%;%X`Q{Mgxaqq3vA zx>dZWwq^A)m9=>xe5$HVS~;^uw5VPs7A>43?Xb1!t7XE**=5Vr8j;i>ThP$C7LUGY z{0BIOM{ni1%wKZ?fT~MW5Ja=A` zf2vYzKm)M4)yhe29gS`6O{(hUre%uN+#)nEqgqzOY39^R7R;n55}uhZ~B*YY$&G6y@)^9bqTjrI@H<~)vDCbrvSId^$DiAEG@8D#uxjL?{ zbXJ|BwpLj`dsYL7W?0v)44$Y}jk;GZ?Y1;ZEUmRd30ks#wPN`r^99Y-m1^Of! 
ztD6P$@^cL2c~g?-O<6ToIawzmNG#=2MiI)3S?1I_V%jU^DJ7n{h)y%eYOIudf zw_9bB!M6I=l}i<2pvZz8@U#7*9s43`rC2t>;wR7gk zr2A2uK$ufv+%aZwk`Q==j~`$&wxiOG9agDqu(f89+QP}La}n9wE7QS}*$rHErL4Ye zdbOgVj;o(_!&>`eJ3Yr3e=2KM49Mix)>_3K4($`Ae7cHY;z- z5QVwQU`{G(%Sy3IKOBw1bEDC*Hzw>FUQOYbcz;5GrU{D&3v;l1P6^52JkCur5^qMw zz8;uSE3f0)NHnKj(6|5%OrKXdV@BoN>6I#@NN!nPi7h5PytYL+f4@~(FBdc}Yi<%X z&Rv~@6ml2bh%-h5L+?|8EHn8$ux2o^R0pXIq z!~0LXxMTxSq0ov}f39s_xUhN6N_`l9ryg2f(d<33xAz3M`rIu-gR)Y*I=n*VUoAFZ z4&i+T+J=WM!7Cb2mKJo3tR+edN?3d2r5$$lyai1PPCR4p54NK%i+5aQMoslSo|PTz zTI*&}D7TlBZaCS#{aTDW53d!&<7qC%M!4X5;hwQ5Opa}Zf2X3S_HyQ*%paKxl-`Xg z6=KU&E-S@GY0kmwQ+_c0$UU!RY}d!AqIpo&vZl3aVe`87#XYJ9W<#-ZwQu>rkpqHd z)+5Yqhuds$)GQfjs}q@gDs^~3KC7X5NtAioxmdse|yI6Mo*1B3uU-HzvsF&FVTGaz72c}oS%Tyh*MO7>e62L#@O+BN>L8i z^++9`qvS=`7H=&%#VHf7%CG&k}Up!j|>z?dw|>?u$1F z1~(53Y#tQsi=l@LSY$FwfZG>qxA<4A@Nd!LxV6>41r)T%iLqqys63u|o7M<1(2d7VG5hA9PH&(VcC%uBl zcXzfdSlW`;B}rMWX#RXo)G%eHNY2flFK%2&f1{bs_96G!UB7g3jgm#hSmO0qelE-f z^+B_r-oLul9uOg2umkWbJ6h7SUMX3p6*ji_4fM4)R>|9|78}&HcG2qL7KzI&E0ix- zy1YWS0shiFpqf8VX5u96OXkg*xoBB^qjb^K*-hM^lrG|q%CJc1fLjFZFIRd^6_cMoXGdaXA^vyd%Ln7Nb&pGU&kilOLPjuS5IIJk8YdTL!FA zd9;*`CDi^k7ZuNGtZuAb$JMqiTTmfstFCPmFD$EGGMm#71E{XWD6H*P)iATCOh0Lc zT)lkRqS6I*vzuBPW}<2JO-=PPYLtyaRqafgVW!QSyRc>^y+keUSTeJ?QnhsKf5(c2 z)UK&hn4;PBRb`82vgNf)X7X5fVyu^9s^{`n5OY=`J9uAJTYBi4+BtwqmMjy^YPK~m zs%W;gaP~3;n`hKD3x%zXGfNv<1WQ&l&f-U;fl2@Dk@j3X?7(`hS;9A#mnnT zXVkYY<5t2A%Q}T+0=;MXa>uF$|HhV%Dn(aaqgGYJ735QbDV@brsyxIpQM_F{cyL+S z5i{yq1e1w^r{`&3HhVJdV~5c6Cbrl=mRHVV1$oMK!Un;refmLQI*UOAf2Op0LJP_Z zBr2=9f?eU+&%tZPUPE&l6xIA5)?%5`%{8?yo(bVGs7$%0TVZRjLzS&-TCFK4=XY4; zjoeK#=|a(p8j;_sUaWC8H@h{9tp%-yOYjN2eBZpORmeGh3!>mRv~*cnZJBGp?XIbu zGJnd{%BicgL-3xlRq!6Yf2L+GLF!1dz0Ii4V=uycuu`xXv|#Nntu3paydmbfbAm)ho_(uUzNnvCE;Q zsav|RxU70EEam0K-!*K@ue*hv9uIHZzyNak{p zqeU~bvS}_(9ky6qSGuf8yx3~k&%M%px^f8j@&ircT~aXtfB6u`t?w)7`g{!OU)lv6 zF;Ccx4>tIKQ;6+VE7z}RUAMBTYUR4t73)`4K_t>j=g*gFMIxrH5KF1@-S)G74))@xi8S)uEFfb8iKI>x5<%c3Jr`6b<~a 
zD+Xo@HL`Ade`TIupGu9~LOW_{3*$bO*!I1OabC`!YYw1`7y3Wgg6_W(N%4z8ub|P? z*0@NtqN<`rTsga{aaP%qnmK5Is8Q>LHQmZusotuW&aD^Do-b*fdxf}j$;{~ut)hBq z^=!O8%vv;O_PptI(d_DZqPF_!rM0R?sj9NHtfqQyf8`9ixV(|q(IOlyLYL+4D*$AEGX}KtFa@op5fEo+zF<9k%Ovt->_!#xO=i*sVxgIqsHEzP%F58Y&%w~1xv+!7#4rt{S?Du_OH>i;F9}n zThSRCe;&krR>Pb3{>Ed@Pux#%yFXgXS6*dJq0l-%O0U0qtsez&KO45U*kfhBV9dk! zXU8RZoaX>Q@n@cfMn(SAsZ>~rAle`9yTmazz|Yj4-X&CB%@cad-*4wr%l z@ov5sTifRGD{god^ng7ZTVVzKD{=dUY}2I#9=Fjb0-d~ ze=+1%4CiwkYVhc-7{V?b4(3B!zIH7RjlhUQ8^+;D9F}2dHasFTg%Iw>uilvd-E14Mwke^8<>)EY3<2p%DYM~DGWp~Q(tbMQC6INt-p zIIqE}C@!hOwJDIyL-S%603qxhxXhD>eJH%8xTgoV3E?^yE)U~U9*+PnO@d{(w7pP! zfWBlt$JcvsoXJC8hW8i6z=G>}9Nqc0JSL8OZyrw%hRToNE?^SIbuQeV$H!BEf9Sw9 z4opobUy{VFy%@$wo>soZTc9z9LmpBZL*V=Sa9OG_wxvLsHxJ*F$Cl@TE0004FnS92 zufi>(7zWQXz7@}xkx7syMv3LAOyz0d@u|vlERd%ogmL9-efh8obm5T4X*f?^auR)! zJSTf5NqL`~Cga9am(0t3Un?F?f3|=duUXFhyqe4do+iFmxAcG`s70N@{R~pXu z3FI~7;Fd1jGP<8X6MRnP^^C{No2M{Qs2#~;@6U5DHHl*$Fq+4U=Xx^VE|G7UDsaGo zYbWr|P->3e;1M?PUUz*Q}aRLE4|JO$OW&mlt z((vh)&3rY2xHZK4LC6$h{@j}XyU$F+Oar!J{Aio=HXh7aS)%cTGM-ex<*4fQq&;B# z|FZ`(gT$;G*w2Q4iwk&7f9*L4!qSdt$o<~|1Ax|YFa&MN5em5ebr=9ht^bU}e+SA8 ztRcK|OyBnVF<@lodYt~4|FbrJHXo{%;}a#sgl@JB@Y<>D;a5kkg)f&{3k`AouioF! zDpgKz2Jn~9e&15sHhmjF`LZ3;&4Bpjk>9t>oPo;#UTya&IwHvbe~xFsw1Y@T6pbi` zplAjzC3!Z`1wIt!5_^!tDv+E=?Th~-2MVpodin+S6ZVrCpm;W)BkQTli?)$TT1_S? 
z3tw}n!>y7<+jz%*GWFTw^~FC<**c}GXjhSFYWx)EFwifLwd7)?*e@^ehV_3*~+#e8I+4GAwO?dL&A`$xuu@#r~5?iNT&3;lOD%w=E z31b1MVmvaKTFvTMz}!Sxh^>@`?~g}c&qLereq4Mww?f^Bkw~zu8^h|wr z`fK^tPnNuZVfu0Dli1-F%x6>l(**2n(n(LH>!<3b?dIX+f9c@spDcQs@iYDzn5R5e zQUo$F4#P&_2u~d86UaSRY6tv(+JU$+qA8X262c$gNVTgIR4_@AFnsAe} zfGpZww2RjQEJt2OPfp)~VeZ$0qD=?1fI5nLZ>p~3f0&|OJU1q3!2)JMUJEc*Q&Iva)iQSbvUA)5-)c>=~YXSP7 zvjrtD{F}C5`+r~ycH{Y3kLN`F!M0#R+b7wA-hZ|Q%nob|Es+=V0KKcXLew> zV3IAse_Bf3^h&WWJB92Q0uOtFUQ83$PYqEx;7ywcrnJ!LEaB!H&Ew*o2|q`v+PuVGDK@ zYysBXNw#4BiZC&L-WKebIUD!cp7+~%8Sw59Tkr>#pkNCo_6AM>#-@z8<&RyJsTa@3$P1pIkPnj_=2hm5;k7 z`O1R7vk?0WzJT*VcwAhlD=+v$`QBZHF?JoGE8lm^B-+%I%3e4yCjWKaem-nDgyyXi z^^Hw??Gbt+LQhiY4G6snp&!3# zVZbd6B!z*LFq9I8lfrOH7;y?CAz>sUjD>`;lrRw$CWnQ|j4+iDW&#ec(BX>A-JN==9(>j6E*#37t5V5IQje7!418okEw-Cv-*gNM-in<;d{3 zxr5jzokDjuD)czhLQg1|0ol#*h>=Fhu!eIw~gV=|K!!Dn27^5`o z!Darqa5x|w9u^LV@EZ{hV_3sUFZO=na9TJ#A{@>NLk^!XH%jAffk?^Eizd|(C}KpPn3 zlM3CCD5I1qRUGNAv+f z!tE8O6@lKAO6Hbo6UwkF-LCNTYf|xu-tUxYMl+mKB+96RD!EZ+FlpkB&Ri%Kx226v z&!|D4O6xtMxIUWhWd z;dIa~N*MgZB9UKVbEHH%i)5r%lk>^LUV+N$%EbG7)ZUEMf2uR9)ftC4V>4;EbfRa( z7|{A-{VI&BLlSd_1!}L%Cy4Yalim)WNG}*Rqy=fWq1*5FjEsr`_N15~0bBpjNYrV}1$$OU5>ly5J(SHj0{Vc{lo=N2 zLt&e&e>fVD#N+|3Ol1-Kwb^heY3Yx~MLk*bkTjM`JGi8w$FESPhvhv|l{!A?k9)gg zZim=sGY|AQ!qR|qC@xNSt{&#Hk(ALhtW;VY*?7P^e{7I)iBxB=INliHmlDK#60wLV5{rZ+wjPJd-RT?=1zf`pPfTHS zwyOncf4d^2?1~G*LrSw#7D;=o!CuZLFv)u4X|p)c>+B1tq&bBME0I0uk;XhaaVFe3 zsxaAt>YOjE4}1JtuPUSp>W1w@YPCEf88I3xDS=Yr&L~52le5$3)acYIhmF$-H2$Qg zQ;;!)1KNNk;tI%OMzblUN_zq#=PI#d$gNH&e|;g9GdHTx^oi1fOwKIOi#+{(5|K!k zQJ8fhMXzVXk@be15k*L0?o_w~CV6@&HLCK)1eriGBDZ!&&CXEPoCswM5oOL142xXB zPIbI96u=sXX^cwbCcEFRw+UiONyHfM_2~o3V8G-Kn=~Pjwo4W;^jM9Ds ze{kBZ5ewXg1Sj&kCGjCaGL^H5GtQ2L+7JzL0k7K54RIM67Dren)yf8ChO|}I+vy8L z6PZrCL!V4>{^6uK=2Mz`eaeI$@shhSt#jTC5>l;sa``o zCYq#(gB%Kkco1Rp@4m;FX(dGMs$(Bus3c__J&20m?iBk}f+OorvlvJ7Wa$%8PDvG+DE?GPm%ql!`iC)%|jGG1iUYSB2e~ml* zTBSf_bDFqhr&Hl{7&8ftKBFJfM{VYwbj&JWWz!hrwy-hB#gi$W-X!&yx^k+{XvAo7 
zgl$2^usyDqr8_%@ZJL}vk#5I~w)o?+Zm}p5O}d6MQI|d`PV_|l=JY^PAGF2vmQ2v0 za*n2CT|ToY5K+Ykf=;Vkpi~+;e{)8X5|}%a3d5i{nH%y&1*?KerJZw_oZ&>LJklG` z$K2APxYKNmxy_ES(UOb^f-$kYGZYdiJpoD1uZl^NDd*7YgrRfT@3eNNOkv+>!l{es z!$zrFDoQ$s3}R)lCn~qPLdirbmCQ&4W~pwNGf8uCQMbYqwsNXOg7bJZe@dM`8B~ip zHF9q};PR$osz@xV*7x~6oiU{@Y|i$YM*Q-0BJS=S45pmjDVv~2oCx#f4kM*Y);bKA5S=x zdY?J$8djPTVNuvC@`@ZIwyN5tc`jd+|gNi-6S*kYbk z%sc3hd9CuaCKvHqG-)oQQ<;e?c9BADG7sdm0#QT|?+D0yoDo}ml~n7M zi#<7iPG{D{wC1ojAeX3uIkm$}QQxA+G1fq|US zDOMW|dbQ4KjVPQ_e{?96R0y0Bqh6QOc^pwA7x25dQME!Tf0N6H)ake=8Zc!fc%2%Q zJFr!7#r+{&P{A2Q-C2`aFSGewa<6mMNV2PE)L?S^60(l0Rv^e~!-1^aylO<%)$3Pe zq)}Vgl{HJ-N1USeuqWf;Tqd_jVeJ?Zm=)PrPVE;dd&EkSYt*jk5M&}czeK5s$op-a zAZ{4RnB^h2f5)FR3+!T1*c#HA1Ic)oK_GW}Bc7~#)HtM0qz!SgmvaWRUZ1}=A{$N^ zjB>BFJ>bN+r9D2mC=nag1_E7z9>-{mvkUB;CK1cYM_mzFR?wvpxioG;GGkU~9eSn7 z5y@!1!H|!5xAJ7MGm>!QI_GxVtYd z8{8$hLlzcUV7a{SmG9pFzN)S2o$k{;Q`0pwb^7%AJ=R?1hZ>O{UOdkB0|ZI7y8?IUqg1hIz|YtKWdpQU1sk7C}-x(3xT|Q zb5J6_I-UQn!2X+Co@z%ScZ`c07Be1agDAj7C6|(D5-;&i&fC|fL#<3%=03MxaFD*w z!(cMp*-Iy3nqFfr!LB=czI?J?knLRHZW;mk{wY!J*Y+HRGV3{d+*|>B5~sOlhEP$= zPJ*GT!wlQ(Xodk85K0++qbTX`vG-fQh*5xM(=OjS{)5G_0c&`F$Dz4Q&^(Ft*S`Q~ zNS@bfNd-6YB&u(2#C*<`l)0(if@t}PnTtKTfW~Ky<%gUu6auvO&0%#28sE)AQLfa+Us0gd? zrbJasdw9A{5TK%866PG&U-CjVnIwR2=08`q>ZcTeOQkj6l+q@u`RAD?EnEdPlbxhu zV!v$bMCxWYGDV=s@J3xv7EF_(sc*>iVLNtKd&s1^8)g>Q9=ad<8=i zH-jSW5{KdO16wQ=mCECr9pD?R4_h2M-G7>@=n;lwDjy z1lpnVs3NrbR2+F11^-HdLM9Ww9Qf0I)7B0&2vyEzv%>*3ckqQ=*JRfG;>?Tbe=!lm zhp+TeppbV&u5K+i>t`k@Ma`f$pq^JQz0Gy!EEg|jPUJZ=U~ENaO+NMm+kURkS|&c} zwl?uETuILe|IbmLeow&GQ>^%hO@y^DAs$ zbX)t2ehKE+eY~#;5n{V`VJ3fWrFEKU-XO2X7yso>(+|6)f|2xxw~I;i48RuV73@1u z&uZ?jTN{@_XS)}{)Rr%68CHd7OwsSU5M|98RE;~jd`kSUEdbq*wu}_c;D7i_`xlf4 zEC^629GY=~q_Sb=c>XTR`04|N&m95m<-O-+g5|{HmpD-#6&~nuhyE}RZy5q8y2e8* zF37`0BXL;TRgyw^@8nV#7(X$w|5NVf$|NiDSDz+n;LgJl8Zv$e9k}eEj;1I4Y7T#s zTp?ckW1qhAi&PFqTl_?`l>K`(DHkWlxkCOyR~j?rw`3Gb109=&?JCY8rp{*3!emWu z(J3rmfqc1G>0Uu7a_UBGW|JTForJ{ERkonBzW$|Y&}mm?^@+OIYD5FDk;=s_7FqkUz*o@! 
z6y>ID#8}V@duQm!yU(?+_M)9~Y#T3s_{)pwQ`Nz0)>+Sb>WU}9Mbi8H)Ug~7jkDLn zqM07zv|+@GeJ`~2$r`QDP}YncMerw$_2^@lF*ggSX*Z4y8-UMQxK=ah;~`(nD4bB~ z-?WX9CAk;M9xHp&(Yji~{T!r6s2bARxf<4%DU`jpbTo^#jj;#m*O16x+6=qL2h^!r zl-O36A5|G>DQb!}#T=FK`Ge5}VnHb&=_M4034f@cOe|v_>0}s}NgF@z6r$b~bq&4L zxoq1lTp(561(4D3vg$7QHyoMuT-i9*DslnayH%rBN(zsQ_)<)pu2MM| z{>+|^sgAGAWn|_}I9qZ;ns0!?^H{Aj!bQ})48cyJdRLqo>o&v~GH%y?|}uiVF*HVi+_teFym zgKC;oKFnZLbV(XMx-Wmn$t%^FrA{Uyrg=i9QK+ShxDRW730et+Hw7xQkwpK6s}bTA z+zQ9MCEBDjksc5~ej_X?6#4VQCQc%`mbrxMmVTJR@pGw;Xt_&B!H7N=YqxF9v|t*K z+pB4&Xgzx%Ik5NU<;bF_L_<%WtC+Xj;#j6SLSL^Jv}VNXc(xSW>BVewH&G2C{5B2T zG+j7Oe0^NFYHz+TE7IiV+jkpf&NkZn=$)?53O=5!W5ZmjsW9XSe2A+#X$MCil^d;dJ?m6^r{5*9Rh0eDqr{5Ieb>l;Eq3$KgI<<;UBH_nz6gHV22( z;(%DEqaHl3lW$)O}o2yBwrnzq1<*tP7eUKmkUF*FG&5#67g^O-4=wqe?)_>0igH z7SCa6kn&>=x$pHN?;Z&haekAmJob|Ch~iDzuG=Vkptes<%{C;yEB!XkbG*%HARss! zIgN<5wW3I+qDyXf5IilF&342~T(0!W`L&9_+;LtK9)MwpW$3dr%nv-Qpe_Tun#dyI zt()Y|zw)VVtwlaEDN1w2(>u~8F#pQd(N*qmJ=d)XS z(W6I-4?dw^o89uCe;WgWH4{|!0M}Pvkr?a(|KNDar#pHCQ{p>%&%1Y*6HeJF`ws3O zlZKMnhx({74t#%r(_Mhb>{MXtBgcVw*tr?LfXZK7;%?K3u%34u1d1s1#zHB6f1xky zy6xP^Q&2{F^f_?G2CIqurE^|r_d$M!0QwyC*FWE~ppO209pd-$ ziy!8Y--{ogQXfw}1;4*yHAel8YHv*c9gRE(-^I_N`pxCD|IIP=2Mmp0SXRHk^aC3| zM=^ej`JB1>x$rlXIe%AU(S>JxyImA+J6&|}-L4_bpH#%JsNYTi5~L{q?ui8yYrdWB_81P?y=?{!% zg5zgRPvFx3VoMj$PhVaCe98px@!bHQKl6Ai@{;0J^iSCyvkj+e43XzqFID`Cd$>1r z&dEHUG_szcn=je@l6#mpG@sa-BINPv2$Eim1LFq(Y_dDys*^oWQf8z7K0Hb#Kt@jvmyc(c>Jow=Ml8zeH zdgv1&VUMW*s&e48F?t5JRkxY|{)B&>2}uUZ=wE#SoQYtJjnB=mwYufMp_Z9ol?S?3 zQ}A`mGy}wJeo9}T1^!^tenjK=TfvO>m+dp# zs5uw#5LtPl@{_SQeq;FjT{A(}8J5z#dMBolUsdn?#?d*}4TBFx zC)U%X^j>e=ouk}P-@gW9iu+~vLO1r#F>dJZ-<$>k!A5;Rx9B21vw1=q(eJ;~>P95xFwlMaT>p+$l7>zOk4~D4PWBCi znqG~XS2rq66+_ogSCv#ZbV(KMP|Bc$^a+IeC7}LG@a53Yh!UdI7?KJJ8Q&60oj=ds zOGyre*$qX_BqPhu~yeu~*lC^0OCsAylv9a$ctyzfSr<6Bfz@(J+0l`dN|D@@*kR(Ug!g{R8dp z+Z~9$DFtWtw@)l|gt!D!^s3St_`FiAsh#Dc?Tx;J*0$g@rx|m*%tLm)A_x zwA;V@H(t}{(#$}d^HRT4YrU99$$Sq15U_(C2a zD3Ab%B%}{Q3(1F=LAD@lo9LTzn@pS5n8VfQ&+zA!QI-$Pt7T 
zlKvFBTZRZj+92MLI|v#i7$OT9gfK#iAy$w*2oWS1q78A_JcGw znHcB`5)xva)g+t?;xuAyoCOyZ8)KMX`>mNVi;o> z;}~NZ8n!0Pdqh+Qj3HT zJas&I`QZ{m}g}w3YJQ-f5#u@wU%mX;1uT8CqzxDzZ&@h zS7R7atSCa1fJZ)KeGQm-Yf)&Ikf|$KQ8=f1REjktGa@0#M6^>Rl8_8j1%Ld6T59r9 zDzNl5K_OtOks<$8?xS>JDJJjdNHv|0vg9)4%Jz9tIZ*|C>awb`>Vsbg)dp1u)%8m7 zOYlqMOX5q_ODbkcXRK!qW=LjIXLM#3X9Qrb}f57^alHYrZLB-q+K|lXwC8}`3n07 zmzb>yS#r=kt7(f%vet|&1!(@>G{%15B~7b;WkK1#O>0nP;nBWVtDi#w(X_%ogYGAc zgThNLU8!(ubk;(P`V6fl!r}&nXt8(Y3j`rb~ue@kaax&|~-KHs)6z;x@ zljj6na**fsx%`Tn;uKP5+^5UTn|2AJ$dx@{$;`*e$0_EYl>H++m41-FpAQ_E?VBBJ z?Qb2hUZP)yUdmk#KVh`B5E=*`gbM-_p@?8X*dfRe`Unw(4}$%Y5Ag!=`3aqEioipt zA-E9Eh&Kp%#7Be;;ypqaA%yTkyz&zErS--4<@Uw!W$p;*jO@(p4CsvS3}2I5S6Y{R zOzceVjPA?^{{+W?L%=`^_!l?<90AT)lNOf{mlKy3R}z;LR}hyKS9y#C`+;M@q2N?- zAUF{m3C;uufaAg8;B;^>I2jxb&PM)3#vntGDac>Q1Y`s<1L=>9Lx!y>J!T@)kZ2vj z$RK1AG76c6^h3rXLy@V-Kx85^5}CQC5IwntX-pMB4ER%%gyoV1nWWMbrKI*MS`=~8 zkGWXBQXd&Jx8$r#<+8+9pBS^)<-ABm9s6vFtKOr*rK!lP-Z#VLswg_$&BGO*O0S4d zYy4|W&60%H6cX_(mWxYf;sbG%D%VGuaTcqzR65+O)=2W+$fCKZ^6bDO!>Avs8 ztode_5r7{jS4TuBCwEz@SW$Kj2g(=~H{=@$shQOrhi$6sn3p9=U91y_@<4wx3C@;< zPee94hlFJcI;ROkuT&~Az7?KgoE)b$Ltg|(zGBiaXE0|lcX|qK3T^86nCjT%80T2c znA6z5F~Z%KyHdM!yXL#^ESW(cLGM9ApjRLU5I_jx17U;MKolUc3nEKW&~p$q2p7Z& zLI*K{NI-%hR1iIg0K^Nz0#v0W5u%(6Eo zUF<)`YYZi*{_Muo7C}&Bs?{s6L&9uDz%uwzhoHfvAmFIC%oL_R%>9# z-xhXa!dsp+TjMp?{eIDK4wAMwYe%^Z)m?b*7DH}YQ6AY)XS>jFEaWEI#_G2^B=n8( z$aJ+l7+xzj2Yt0bB|H|iz(Hz~Zx<0+ht4-#w7`>YBD zmGCpHFQ1+fej+2S;YWFMiQ={C9v+D1Hw^8(<3LVe?B2!RraY=Jy8KP9z7ubI3E7o< z%eH#WD$e$Iba`OVu?Ja+HUJE|6AJBkU+l?cWa#Al?7B9^cvo)>P+y&yKHC4iZWMXN z4P#~eki&_JnTs+5eD3$Ve=d(*WI@T>Zg%sCs1_f5Ji`!UK{DEuK1M8wklzV>-kIKh z&UO#RiE`h(F&vBZW&mwHlkJ9U0xar`DL3@{=nL780INFJ8N+lSHc*JM<+UvScleQr zO$n)m`BS?L`OPgjz&8!;$LJ%|V(g>eF~jt^k@cp1%JsEL8q({75?aX;fH6v_d#}(` zofFX&Xptp!Nl2*i=!tQ+$9kOIR1K{4BL9CL;5^wq-m@N4ianyW>G%l#CqMZE8Yc$_ zk03W!vTDIgswcv=!2g?^eC&(nLDqkZoUlvA!$Cr{V?)6(`3N5th&%k?OhSRlfP<> z?Vwm|HFurA!Xc5Zo?j;!2x%f@PZv+76#qES(fDXOyi&P?d>`OPZ6NWekfsF%GShqo 
zEFQ8Qh;=ZMSo9X*#rN8P3Gogksm|4TAt;$AR_IRa3R%((zOB_P<6pCnu&65YpD){G zJ#O9jT_r=J6OTqR*XCKKJz*fl#_z!KDvHhQjB=^xe6j5q%<8>LzHzj1vs*ns|LW!i zT@Uiw>hxxV5e4TV%{*Nar9S#;ajJ&x`#OzMc;x;rsm7OMV$i+}V@ZD6!uZ)69&oY- zT|1O7zEfg?+paI~dYXITsLn%S&3OInj_xR>qPfx`jMHhmB$D)?_<7KR3YF3G$*)wx zGzRPsdXIBm504$gW!g`F;Ew-2hCAxoJzVc1D}bYQMALr@;D43>Efm%e4?F*ND}vv* z5m}>i_T)m4l;6}Yi2Y17PN!LG#4_9d(22iJWwiEdH7A>NyNtAkbmHOF?XLZCP*=1B zvW?NL?@Z&L@Bcmc|2gIVS82~|_q4lajRVURRRn6kc=QwvnKZ2@4UcRYkiWuijC6l_ zKm8}4el%X}S0qw+XTnHV ztkG+{pcb~dvZz_edcjOY)G(_Zkt%HF6ig;VZG@{K@E3u)OZ=mDwk-NN6(mrtPm)rP0Ffl-%D4hu}q~Frhu1aZf(b~F4?XR=koyZVrYp7&(S6fSn znrpR=440kftIX&-XRS&RwH~Ow-_e{N*x4!1&~5%Mvo@19nW#{dlvHIv^(vs4N7Fb02NJCeMl7pkKYdqj_O)fjTq@Qm9U5d6B#2(LHrA|3Pc)}It?sUxj)pv<0G?yCv}A5W$fQFn`%p% zeRL(jedT$)WzL7dGJ~s|a@EI_HL=1X@XC^B;b>VgFZwIZ5Ba{l?xf`aq0U zLM_%0CJzb9%zE+j3j!_9_DTlIwuWHof4|%1Y>N{2>gNgecvJk?Vzb6WB$9^ETr!2N?<#)%df9q+e&dhRkmN*~#dumz$3K)b*EZHKi){+L+s|#|| zZ;B(?ne@0DKwzcerP>|T7B~nb!#}6h$g>l5ls4Z`H?n9 zU<+FiGNWc%3tGyYt*55P?*LChEh~b4C&`>wo#b>oIi9uEwfQ|dKas6A%~#gZSm&eW zsQ{c__BQU$=BFoBl(Ml6rKhSJ=qzeBIxEhXulOU{M_`+p;d_y`h6(eN)gE&Z|CB70 z$ZrWw6r79L*>!k|JB;n69ekIYgKjfa zB@$6HElqpLy_?#&#l*t#%t8w#*3lw2*5AXWPfz=*VWM!Sh1YmWt){vxKdWJ9Ylp?* zObd7T9$RWdeO~;-_`GsOI-iBBt+(e^UYvENdbq;Ce1j(8c%==qoo%*InVY`ZUi1Cj zt!Ci#?%r-6TbuOBni<7mwFJP2F) z%~W_n`Z0a4(lATBk++MdimBMca5BQeOs0v5h^=JIkwi~hsH>>Fw7R9F(@sv+z!R)3 zd%8c_!b@QLegrgnMA)FQ!xNpl^i2%7Obcq?-AyX;=cET&?p3Dom6n!Q00o|7Q<~w< zgqrip!%)Y}hE|iSj7ClSS?=)S1a>yIw9fJ}zNXyY7S>>$liJcu19&*;k#q7c;cSEA zX-X}fmFw(2Y_m3z*MRpTY?*mpSx-;Y6SmnujzKhEuUXIX-hj6?x8#GB=D}}Hc=#yk zE)mg;N`>yXMNK^sdocVEh}(^8C@gy;JMUU+t0V0?yo7*FDzOr{^485qoqp35=mfcd zJO_Wn+G~4z+oKK?EZH(e8%j!D^p}tG5UmsKaj>gJjHITqLzds)*+1Bq8mawRP;2z@ znM`Y;&m1R+PbQd67*8{5Y;LWuFEh4N9xVUeeq~W7qhi3v+~v9m9B-yAT5J_3My5|q zX(g;z*LRltYFa1{9#?V}_S@4p*l)exn{(N5Z=4(dty|k(Tk7FuF>#X!B5azU7}F6F z>Z)t6c5*g3xEbhehiyueYgQ=eB{!5Gm)n%vR*l;qv)CEVeC2esxwr|uAkvy`h|J2; z?un|%j!Wh)WOY0RQYvp-!aphq53-aT*;O(bO}CU6l$O_3=bI@yc&kc-o%Rlg1tX;i 
z=vxd3#x1z8g*!EY;RpM0$e4wB{?xwObjQd}oj8vFhF|C27ykC2P(mMwMBA<8gp* z-*SI!4cmLitEo}1r9^Ht*ohAENUy-$M#yScs=u71-pc3LQ}0Ay!CgGV zUiv*-th1mIC=n}mSlo>**K8z^89rN!M?DDxo>{^42?k|Uc=du$yL~G*Pfd^ae?VGLk**4q* zEEB+HyCkSe?`kXNA}cf5cRRHh?7Dumx8ez-=n}Svk}E%s@(NdFyN<6moU-q=J+Ax< zhNf4FuYGJj>Bp&CtIG6RY2s!$`M1h6&fd{_UbQ&E+(dEL8(K)Ya(9`2hq(%Z->zL1 z8i@eT>r7S-0tKgex!D9USngszeGS}Ef?IRkr`6cV!z^hPgV^iwm@cFCL8wEU*;IUV zQ#*3dPsJW&bsaU@>9bd4!u&D2eQ$k_kc^}M4$cUz&xBVAp8>b$O@`}x2YJ_yEhdQH zQS-rAWbvZr8|to<;$By@lpU8R+!#j(EdUaM&U)sCJU?$~x@8vykQv2|B=DXW*MxV~ zXvvxFIk3|CBcqH)SWxl)jV#!%4fJQQDJ!#K9Nqn-ZYx2@Tl{DZIYfEoac~ZZ0XG%m z$A8XG&)3)ftkodXN&(xh72oQ=i`k-gJxbBOJ`ls(d}d{vyMOzFw9DG?#>Z_>!3e88zSVR4qxCgW+pCRsq3iU) zy%P?^oFVR{i%nPHpf*~irK$K9Zn@>4C6pakH9of?J<>1IP{8q(IX*}6^ofz zfvZI>f|;M=7M;QC^CMC2qgI3l`assNF4N7&2vLqtg@4X^#3Mt+-vdtnlrm;pe5gqG z8?C&5Zhg7>mt?YFaO@j>;u>TjV7E??gp$g$vXGr98Hw^ukSv=c*4m)YY@K>^i0>8g za<%I1C$qQ06MT*Fl!L)z40R2Q!vC()&G3D?>{apSgynQz<*uK~4o zmtw8$WOCQo?ZxlH`4}U|pO@e$E<+{q$TJ5Rh~O#p4-o_>%{+XbC4VN<8;YzKGxlQX6%0rY|7n<;k_xtWi26zf%Epk0Xff^dY|opByvi* zcjA9!zEMP$rvn$y6_I)Az_OnrvNav(J4wA?ZQTIlt=^nbJBwoTA;q^IS&_izBQ>(? 
z?;`>7afq*oSUplVf1-rHhy+U}pK~?EWQ@mVLGLWg8&IxB112}JxWirHAB0mH7B%gF z*Vkn6e;LcMP6Z_w7CX9Pqua)Fk6mFMmcl^{&FgmTT@vPDmY`FCbP#m-zOv5qI&-}3 zLh4L3b={Zm3|Z&tp0h9d;iB!&?rJdKnew|1A`3A8Qv2QH?rLn_b-~z~e7d{QQoFGF zI*`OmpIfuW!#Eu_^0-#0O&k*7kFOK!dnEt{LbQ%-jFn==Xd6M{z5DtnU>Hk9FyqV(E6vLTMtRy**5(})B z(OyX|JiLFuk0!Z>0?Ud)lcbHGCiVx*eow1(`*4{Dl{Gog%hz9eE!TISEQ_pVf{^ffPG%(_4 z&2i^zC8}^v`1&F;+L)--aRI5zQA7xA9U8NAP!ns{J!Bf65q1$XbPZnN_;;i~ezao% zh$Bk-n}u$mR;XJx|8ySb#~<5J`iaGE${ky8fgiFAkJ>(SK@LcxCHv1Dvb2p3$n4ih zg`O(W8w)fmwexvwB;t9|1`+AKjBIF^&G``~IJC#nF(5&F zBtIM<$6m<7R7kRwt^yfjMzxYT&m>I^D^e3CAJD44unG zgIDSA=dJjP_O@=vEMsp3#OgRjD7&oqydi@psgaAf9vypiGN$PvkB0oaUnELDrXse zf)(<->v)MxaPJ;b9_X#G3^-nKO|aRpd<%S^+HgTc?0+g%+VZ*kdOq-Rao=nH$8x!Xn_J}kDtA)V zjN?6_JL#S;FgIydo*upcJ<^@qqjFQKUs&h%qFe}scQV)EoW5$Sc|6Q=R`&3=Y(u%r zmO>Jdd0tY)1qx45SrUCafq1PZ!rzK;!gU-`bf9f@Vojs_3mceW`nmj?y%U`qBOOFn z=Xh5R+Y1|EVwm((GDfk)1QO3_K@58D24(A zRg6wR)oo_Qv+ITzIw31tN8kTeVMrqJ($l=75;l~F&isRO-l`6EGHmjOdS|aw^YgN2 z{{98kR*8ZC;<;GpVR5VZpvv?yUclZdA_;1roEMXl))eY}EG&U;*`9x~y2I&Oo7JRh z&w*nYDtIo>)SooMQ}x?)8zKlB&*WZ@!%H;+vT1lt>JAR976NYJkW}~*PZf>y)2GU) z=NlEsC^=f^<|aH^{pLeNw9?JDh-lfHkceoBo8|~88*Gv!TXh>0nsy82q-`*utuxX_ zt;?H?UD_BTzx!=Whf7|HML^r|X-(L&fKP-=sQF{cctq5UOlYhw)fpwXn((KHnHRS} zPRi|qJ||^(ixOw6f7R-^m1c0=)z>+SDe@nL3;RJ0tmOvXO9bU7=Jqirhdgu0xP~sv z6W8(w6kA8L6VY<4IdI3&lKVH(%SXw#!UfAzn7k&B61{KZeRW3l-ML~L%)WUG*p7Z1 z6^kYtUm{o?I+C6+J$YilMl~FRhE~ggj(`T7x+~>5v0+_47>(U8xit>|Sdu_A3i2aq zEBrhpqVcBc{P9(GUUz^-HR4Q1G)rtm;=?z#}nQzEF9 za_ZZUGRo=pgB7&m-KHdj-S z?=vzaCg`kU>xQ31P@O3Rv-Ll*(RERE(GkUl57Td}!I*=t=C`?=1zY^}yD#KRWAQ-TX0i(YS^HJ)g zUXME)0*0^6QEvV2zaVA#)B^D2hnHsx2pdWz4GvYaEhp`SemY1tS)5@P1nj%Hc(1nvQLDC*~qRE^%E*d+?!^ay5JR56ZsZC6!Rx z?i8mB3qU^T(0zv;Iq?<^N{Eu}D=3VrY$^UQx-fh}(`Ir;#xL1UDa;tuot_p%vMFjufkCI>+;bu7NkbYgn^mmAhzIT>oZ? 
zapH;IOFd>kJ(F|;^D5%OnU%jh?{I;x*WRjO?*6@2ozqnU3I^B&Cng_iU?xsNH0$BM zK))~2x9Pkc&egGLEFE<9qt^RP{P(3MTktR z8dm6{VRyC0oOQ4{F)TRe^*NY3D`f5gmO6(o>@wwivgR77G}p&dAMC_03$;nkGo0Uhnn1WtEh zFT)9}4J)KNeg|iF2%Tzr_qK&r;VYmWPEdf|w44UF_jYTdj-#4>5!oKn{ICRdGN8U0Nst?lLi6Rl9w%P(#o z=$OMUU()XgsCy39TKFGR$6km2HQkMFET~8Ec((txC)|ih-aL!dxOL zEkt!U5~H2ZO{}m|hn9pI_qbbrH8j{|kC_L>*VlFg4l{rA=2})Uh3S-A056;iQ)aue zPu#gu|55`McV>!UAg!UQ`hk5_{~CMQ$c(G+OQ+eLwj%bNkKJtqpt5@(Nil&S)e|^# zw3{63iZwl5-=940igiY=FN`rhlS#9BHb5wb%HefM*1i8X{#kx7^aZ8}*8bwfGZdAV ze2=L=C;N(D$lzpS5R3Y78QV?PJvYNwLAy!(4$68i9FFnk|&o9BWbtN5dAY5O3Xf$p$7$@Z4k)Ypts9P*10NcwV)>xrk zR4byS><6s}t_%JGPy{C#e@Obg_>1zS0jXYtT=4j(+$i3EC0_TS)7bU4{)i9xXLt$i z7t*3p99PPI!`2lLh-!$geIRMT#r7}mDC0e?!A{NkiNfwqRJ#6)s{a$25-qXizrR=%f8RLNS7nYj#CrHK@5lZvH zgh5#VT9PTbzPG2b+47Dxw$A4I-Py&3&5w~S{A_M)IgjydUTh`L`|7Xu_wP4e*U^^$ zsu4BShQX}B`Dzr|H<;{ByljAj`=^mDbx}}w0t8+gO3(Y`(cS78mIN*^Y`&i&2NmfP zXrx0;lwEnZv=zi8`>ZUcj28Klq3z>@=czzH3+^^#F=|!PQW&C$w_WdJ=~Rey)2f0o zak{k`hVbcjd8@t1zlb#Lp107RGSj*H@)k{&4FBBqkx8Hpit6-Z0qO7`n{|~1POIuG zN4rP;%ZES#B=bK6X;1Tw@))5HG^%A`0mf>_GRE(z2}Meed3@OYeOg`C+9iGmT$7L6 zjo?)s?iL=0JusX7ytOqWb$eK!YhNM$u1OlDY1=&K(pM|SFVZzVG{MGm|EJ8WxhWm- zu#s#ZZsdWeQTc7_=5iUI_`^5q9XNB1Vdq~DT*%o1n-s0zC_i4o{9V0S7&CvDO|Vf- z03Hc?VXMPMncIK*@bEMTAAJI7=8sIQtDvRx&Qg{0rOh7cx=)UQ;Th3L)Ix0;OoIB% z@l?~Ze0#P(*+qLNhtByJR&#;Z9%Ymo)4ukzie5lKj*@A(31`cMFc26jr3&9UP4R7lH%`k7pXD6wIdK28a2$$1|{j2PoPyo(_kfHGUf~{Ni zqjRf25^`W{QQA`i04Bn`^DorNY}a^?-h=${@@DAC1b;I>{xUZ1{^dY}UTFKHU6c*3ZXTa`XDv`t)sG&N#{6b56P6 zPS})8_MK+~4-Fc$0GXHrI_2JR!e;5jM#2`EnEYbxKI>h9M`Il@R7d{L4$B#_&w4+M z3G@zRC!OzEPg;qYHB$W!_i$s3W?hleAo}Nn?I2w=mvhuWwOPuLK<=f>!X~rzxUFcLxy$`-N#*7o^_Dj zSEQaQUl|(r0iE;nJpk;}m0|ZO@wf|4&Gn0P*+uUjqvn{7Yjj&2%T~rkZ@=-n8Sxf?Fl7xC)eCT%*mt#4Y&iNoDSm=t=mA z!imvh@}l;F_EJ|}Rg>co()TKI5g0<6U1irgj&uqFcS*;3b&jK*qIXfp!gXgOox*on z$KG{kqn+Y+e#dBa^A_R>9b$JTS5RFtyt-XtHzsRy_T)PCtd5RRs)*sHo)jP1tMH~p z_En26o9Kr$-!0TWuF-{NpCHbd$XRkeEvTs(+>TF+vU}igx%p%uM`@x_DvQ!?gr-^S 
zg0lr6^*Y(5chf%&p2(Dv-5KU)Xs4U_DwB00#mdLaU3;Kw8AZpWfx43_5g zQiBVBk39KBN1`I?aFon&+mKi1e-fJ}H5>W?Cd8MBS2_a&>jX|_$4at}E0-TQWJikzrvBw0H2z5pnT&P$T@spZDA!Epy7{JOp_WU=FtNHfI(fgfGwL1S zD>Zj1J-1)MIrm;1ONZB8|2>(HufF^HaQv^?DFZZanajfiOw@$M@(1 zblyX}Q@m51lR`v)MTw(Az?R#LdqGVzKHR{X*LcXt14@&FbwJ5u(yn@z zc*q11KEl3AClHIzTp+YQgtvdn`OBE%=y`MM zQ*+$Ceke3aDw}y{V7&eu5KP@JR`4;fap~Oo(|k)Prk7~*)F~2Ec6^WXhD-rwhiR1g zKGU@{EZz0*#Yf59&`fW1mg0f(Ci@6I$5AWoI4k29qk^1e4$-0Met$tM!;`Kkqq`{$ zP2pR+CO3)-H+EwqDFn?1PIa6y7^YB zc6$c|RlQ}8NwVUoqNd3n6bUqD{xwd0Kb_08`QF+Schm02E!QsT%jMka)b7d8AmaHi zqqaBbZ*INHk>7VVttcd`6MKr@yQ1>!#4uB9f6DT2#df7V z)F~-5Rv-3`5nT@lG>i4YA-6<5eCT|&DXR(!FeKCDy0& z?BQnq8htd*lB!k;7{C9jUjm)?X z<`XMxaw_+@x*K^3u|t{&0#|J{KVC%7-xFs4yUWdNIO7H${z@bW|6{okSA3i{Zj3${ zgah;Xu5F9NU0*=`)cE~mXECx+9E}m&0XmC4#f9^Ox55xO}R;UKUo$$DHZUrw+0FR3b0>H`6`pMLlb znIP_Tyx&fvJ-pg;`Y{)6f(X`0AH3|D{U3x~c_5Ts+piE(*2=zQO|pk%Ukce%N!CIV zrR)sukSO~WvP@;IWDVJ7Xt6Ivg)Eb@WF7m&V7_zDjQ8#Np67kvf1byjbDitDe!puu z=RWs{)XC)#kMg@yyiRkC2E5>J)@n7f`>^$@z5Dh6EPfEOZ1*gfy_q-5=_VDt7(MhI zXWO|#u$zMp<}q{;zALb8s=B19rVeBXZ#W-B*Gw7kcI0$1Z*?%C9|@$Yrp_r}Uh&vJ z7<(0OmB;Y4Lc(rim#qh1d41SaDEmiwpklgZdP~TF_x+$v*UD~G^4@0$*j=vIYE@znm`{cvmuF)Z^6P zfZJsAcgUx!CYo@dh-tEP%e4~Oz0ydSXEor>_|oV$!Rm@moeFhXFHTiGDI1FE$aO_$ zOohH&x?sS&NnbpgAG64r$FS{&w%_>f<6GZoZh&c6Y4loTfTsxuTU^o3dkD2dZO}?1 zcELNe_!Y_?{@l5uAN~FEJBLb4Gk`&zrh3VCJFd~V&;#Jz(Q zgKm0wMrK;Vr~5N&_G5+MZ>FE&VH9v<5pd%aXl^p*Jt*MDD&WQ|fMpcGvI$@h3ShYe zu*?Ej4gu_80W6OImPvpi28zc;#fQIdL40V@vLCx5y)+`dq$<6%z~-kNRp%C^oG`rG zM&3r&#<35l5LFdj)nBn?d#%F4(MVb(y<+0L$72gOBL&V*feyuqPZOpSR?BQ2!HO7H z>2&a7nge$U=X9$IQHMxCJVU%dJk(IIVV3Q)d50ExF8fQ*n9Vb=9MT}v*J~(PMOLrf zVBX}oA6e}kZe4FBe{nH2zn%`CJkQoq%}J2H|FN~nD9`ctL|2o#N1=4faoI{)C$_q~<*+Xw+N7zQsbKp;PHtFWKvyE4B;;V6NwSE>&+aq=Y%}OmN8^q-3 zS7M~-mmaZY*Uv;-VfE~za5d6j@t@fyeK>@+tIuQ+?n#e;-w(o}rS_E_&z5pw%|7bl z^YeQlMU+m1aD+PPt$gIX>~;UBO#pf0A93%B6QQ z_qImvmtnq+q~duRgG2=eGKDX3Cxlx33YGHjaLj&R>N=JZ&OW+FK18gC&Eh-K@vzu4 zNqx^Qe94I!-SbH;ToC^K{*7Ph=b~x^vsE|p7DVVA*;hX)2+0S9+X!uJ=H5t@R 
zPJd46=U^pfYXO+SaFhpMZNW-j}_ zB>$4_x0g>FxHa>rvpqZinXApb>LH=>ODcgD&$|0rg$s2qSo3$UK<~`ozc?PC(s*wC zWCERSdx@1|%oww^R%u!DV&X=g_E%TeW7|DnCDTp^Ot-%^e^J<45Q`FZ$vub;ITQFm zyZUU^LmflCs8?TZX4qw1&v3{nm%l259uyAN>MAC@f7{1slVHjlX!d*K zgRN+Dywx)o5lQ{pv+gq~Ej1lt$pU)E1EPO@X}crHic(!w0RK<<^{pS1XH8YRs_!s7 zx7rpjco?zG2&8u%A{}7;>yt(5vmIKIK`K7Uvb}Y?T0?!#oc2bix9rsQ32H z15B1^%Evsjp1GsM+waF5L*%t06FOGMdNtpzHA?!|e-tDaJNZ9c%#SI7n zyY*lxb2~m^I47^}*tfnU0IfJXNDBmFNq8DM2C$DL^b9sBwE?%G2W!^jy3Fl}0t`%T zz}*cD44v>~+XR-6Pg0dvHH{Ij3^?|!8&-tO-c|#VZD%b&pNHkcHeBG33h7stUoB?t zaexSi=5YH|l>!IuZcxKBu?2*DfPR4O^LvvRf#-3ZRXLXIX%Sc>=ASiUl9Zo5>Q1*T zO~)OP=hWlow&0e5A`8uQhOvPT{QTT{hhmK^jzrwkIp@S^67Kr_FmL>^d|fvI7R9r= z(=@{m_tCb;CUgZkl?e%wevdrMV^BO}8c$eRiKl&Xs58$M?n@ zKR&%U!PPg(#HCw$y&|$zoiRT6I$M0O;6%{C(9aF!`xHz}?7HqK_77pMcLHbvZG=jL8r?MHDx!!ynr3FIdt6J?VGw8r+iU(DND z*)5}8%y-4&!FhqiMX?q??UxY{llJ?Ee5^0dB;4B{FV-R{JD1N8cf_BHw*6EtM_QDv zhJ?`N(@}i<7b&X^J0~T{V9%T7v9tGUp*a%+0o;b zw$@AC!dMTGN1H_(G!^qHFu&qI!^k8SvZ+VbbE^yJfryHvuio1m){=35%sW8>G5S@< zJOBC@7>2jc+ZjYl{g|q|nxDF&jCK*v&&Ew2>2=TL)i(zz-38L~#ynaYJB;pd&*v&7x32{fd5P_8&YZbI*9B z3?dNfa#09%Yf(SXNd5yWiZV>CcdxyKBf3ccOa8^3dwg2&DQZ4Q@2%V;!~Ws!EuD&o zQtGEYch_X!5QOwvyfy2~;aq0444!`hES3%DyjB|4l6!p8A@p=@K1?d}vkM>Vf0>+$ znm4RE$)i~;pyf$*N)sj|;=Ov+$&JQ~>cZJ#AGZoFmNHzA0W}$*&V;+W=65iZhfiyW z;)V5Riu=w{NliUGXE=TdIPeGU<}T*YW0rZZ1U;c5JNWG#8%5wlgC@x*;U=&6_kVn+ z{n9#M_rqU%UU%D5t?aUpzA<-|PRbzn+_7wxv1J^S43p5^i^P00rTTSPcD-tk%=>*$ z5--4nX7BsRaq0q$pyJu^zQsM2j23}!XPjRL%lAxq-Ul&o170^ZTvWdgvF-JYfzdnE zQTPiEA2X$p=S+)`P_KxTP-n8f+jRks)HAw|s4tywj;5DnP1qexHe^kZvD^7*VfwW1bG>TB zKnw-?C&FAdjO14pz$sgXo4UE;{pI?PINA}_dNa82X@QVJefWqzHX8>}Kt-y7CS>E` zuz^aOvkbjD=V6Y0@hjbMwNm&XRf}M1OK?-Ru#7N(D!TES?BMnVSnv1wUNl^N`)30; z>R=}X5lED-?K!f8DHjBYNCQukjrEZfLHLye8+l|+s9c!mDpPbe*&6sG829)RfkxFW zbvTVKdeIayR~FVfkHh^YxYmuH*gpF46P0B5JmRV| zNf0nY8YElkN@OxTSmC_VTSHZ+SJ#q-xC%)aP>S!Np?0nt^s=~%G5f6oJ>HN&DB$3< z2RWFY-LXXu#kCl?5i|@dFr0@=0$9IJR;<{>^^(JoO9o(AK2HLXMr_U{xK2&7j9-ag 
z1lhPUaUc0UpUwhD0NT5TvLMz?>xhDWrwoZQl*Ln0(sdpqRzxyGafbW4&SHKWl1EX6fH?4yYXD%c~15(_9EQY-{2>tVdl~!{j(ras$i__ zf|eK(pEaobVg` zHRNOwb~i{}!#Ve1S%ZXnHy$@{ryjFn0VR#smmQ%RYW~~gxfXA?EMf!hH9->dogLGl z{Ouv%7L4p`o^tT_(KOhK;0+byKt?MCX{NYcQ3r@rBC>u-2qzcu<0DvY84tuM%r9)6LWbN2j&eMhZQQ?z1{eMd@Kxp{&XNQ_Zv?2vJFAGbO@jHy`XIYM*8@bnFF|R|vV2C8wo?TuHH8Z3 zp|oAlk;`*G=u&Nk>bAduUrPSE*|k~h4*Kf<$A3our7P}Y&KKY<)fy@nkV-cjs1Lk- z1ibZo1jrF@9Iko|$N^xTK}hx>Fg+jwLgmAjgBAd66{QcR=dY_F^76S`&LB3-LxK7r zHiyfGEfH}2x*narLagz{1BHRdUtQgQNr1R3x;VU@G5s3!b^BtB*In3EOAxB6hLNfh zz>`*2+}BI6tB=0VTYdvp$>U_F5(eg9X&nc7VhQrZ83KtycJo5iWwJBL|G`=E^p{J2 zUf2TI{zfTW;r6r!M$kYP+qA;Zt2+S!%xflrE6Wz9kP7%*LAysaf#619X z6|WetBCjkjlvgm^R{kUyRJqrKTu>_d3v1$_R4f6{fA9}aSNzxMRRjs}oaHwNj?XYL zZaeXr4V;nx*F0%@9^MRK>gvfIeEs++; zm!wn2D@k~hcmzPB%21UMsuWd$GJ|&?xRMU{;~#;pB;Gnwwe18~#NoO?OGV=LJ_u9X zI8p_-JpDD)+&9=Hl^fY( zQkG@7x#68@h&<2pjI`y?J%t(O6wa-m3ncG+Yc%kNAaoly7r%BCp@x8zbQEQ7uigY* zbI=Y;bV~?<;jwoiy9)|f{tdnoz_?k0BT9h`<8}SV={e{sd2_M8@s9#K>y1k*+PvN9 zMo_P;0nVYoTH+xLm_IXI@9%S6u9GHeiqDstd^c z)i-=SHdrqX`Qj_Rfv2}gp8gw;&DHpQWD^43JZ9B@j~2xIP9Pes8{#8Kz)*0|EHe^X$U^ zjuhqw=Fwke{x0H#`HLD;;7m}9jhmXQ+Euj|M6Q>H*5GsSPPT64Y@H_~CIS9FKlp9D z#wiv0LndVF>Oac-AyTt-=KqVh!MhqbFz#eNb^uE}Ua)}5d1-)C2d(UtbrIq|?Y~8k z-;j>*2?U5hM42Q|jp*0tGIS-n6nz0MMoYh2c=*|3C%v~nZQGJ9`@DuDR=2}`HYhxu zG8}jF9a(aGvdQuJM2-(xA~U$_iGlyL7dS;@uP0COL^KThyoTDnx}}2avDZNxik}U1 zGIy%_o6P?cNiFyJizgs=oNS$PWZMS12W;WWOtS`6;a=ZG+$O< ze>wxFzGA!LpklLPpQ7!tG=}ktibnUlkb?XEMVxb^gMC3}~0!ts_3$Ka|t{M9lPeQk1cE|0_au6Lo*# zZAS}~S7qu#%Kb^}$@>3H6p1I6$=<(+Fu%cfv38|&nRoSdDS$)vOYG6^o4jy4fU_3n zP3@%GpQLEqX&m-RCO(FuUV_Sn%*$}rgI+tW^(DDyuQAgil$u zZ61T+@X>5X>2(DKaz)jWE6SQ&QMGVIfqhrpGxC2qf3-^RKPrJpS~ACsx^nVubikcM z5qa-0C+{6aAP=lT9&ipd?NEOM?p}cU`Qtv7O3f;7RnEbQZ@trY;HR-nv0T6T&A%)F zm0m$^Wv~SJH5Z@wTIl3|bdBVh^<3pn$1o?C#!GTzG6x|pa=_L$*EH`WHBrQK&yD{_ zlFAnq@jvZ61DgUO5Ag_52_a$-MgK^>hUH2=s(N6?f*)Q$)QPT2f%;IeDmSH|D*!ap&#s~HrFgx+h=5%SM;MZXh9rlJhl-Qo&VPa2`FAo- z;f>l(%i9TkJ{j1(kAO#NV{n%-^05Qo@@pCVDyh!x-ztrxxcdl!y1@GQk)st6?98`URfy@o8ds0y 
z{y5nJp7VPpF!kZCxU7q_fx)f@46!pFU1$ZP*WXHis{jQD6gO=&7-Au@S@2Hd*9c61h;-zXSm;jr{((c zx^noK>m74FS%*be^Zwtxl2myGeBcM>tp9C}3@QOEF-R|IJJcA9m8@$d5kf6m`B_6-d8$@fX|sq!)I-k3S<@O=((dAynx+!x|r!#xk& zNsOGr9)TZOU$MSgy1oazs39ta{>3V=ig^2{*1uat{12}XU;cUh$RP0{2_T33E(1K; zASSgSdepSV5Mges8UGRAf`8+*waPEEu%_~AX^0M7CnJaz>`;lo(qZ*s#{-2fkAI>B zZ+!k1#;V5uXSWZq+3f;S={E?n+d6C$yl=4uHdUX|{f3hRq!cV92?3eG-~UAb%q!vl znBShKEygWS;}IKL0plHhyas+yD)C`E2}j&r0MpwvkS*+R^ud2I4cm(SwI>MKyzkfU zGKyT!IJL|0g*HD_bQhx~0rB!{CGCPTU`31<8hAvS z3w}Hu`RW-%yi6{^ZCxZkSYsw6$u9x$`eYfj0qG?GUX1kqZ2-T045}deh$UMKN+HrZ zg6rGCfn0AUzd70Z312yaI}Duhg7J#+lJT1H6!=pE;5|;D?Mr3c9&mg|5uY$xM{wOM z;CY<*mIOap0Kb$Q3M43#Zosg%Payc(0Z2ikgma`f3|L$Eb{@RC+0rN9WD{J-P@rlk zjO-=)EPT&zo{c&~hVm~jG3$Ngo9r!4GF~W~424iYHs`kFMvU8cXbxWmzq>gOIt*qW zGm4qQjAMq&xBZ#GjRESS-#it#x(+1nD&Q$e?N{**-!r5aDkwt4j&AZTK8_cMG$8{v zCchRUIFkVrE|4zev4l>@2*wM6he0&?UDZ|!{6+vg^ApO*@r6D>#J6~`P!X9Gg&kHN z!9pXCdX%=&;wBg@7c$Tte`XmOvyEEW^NWt-#@# zphRYzP>F>cIi2&j|RzAG~5KzwZ=HCfq4{M>n4I`e&<EoXfPrJ0Nqy3Lf@BLhE!uw!@?X!ysGJK<|0DF4t__U8HL7||v zvH+)H9RbzsT~+=pUI5+S2>6WF3gn$eKMi77#c3E)KjVq1p^>}YvtCeZf{fna`0VpE zh+}oI08M*CmUGqQ^G{P_&A@}x$pyw*f$Y&I@br zs}KDa(sa*NtMYg4($T@&xfzbWjMn|TUX5z~)aVL1JGmZr1^c+9aJE4WF3NQ7-y)jj+~C`=f&C)dW2Q|;j6U}|CAf~9 zP`VTSTKQxDg*N*E5$eIBl7Uc2;JWuu4rdFy{g0NOHf1qVzuQZ&xPPjD?Oq-0*pawJ zmlJ26b!k8M;^e&gIJl?aZLv!60ev=*6e*O8ketGGzw;* zTUXI}5dpQP45jq)(FOS$W(?K4mriUKDNpmPiAAMHQvQ*+3fSHe!bpuY4;sPf=!&cx{c6HRfUUMRv*xEV_)Kwayt9Y zh`gu&RmF~s)BZ_)P^9|;E23S}M#Iex`L zk-r`|axpBP+!l?C)$s^Dh)byYqTjoR3t+N(LYD7!ly>m$I z>{?~jo!}%L4pz;&UGnHL6?KmzAYc^z6_5yjthB^^UnWIbw+Q) zeU-M26YepLk8Qzs!ZWU09T|83c{aklVdBPi^polYka~hYIYj$7(81sjg9VqS-4Q>5 z4&JnE>C;5*Ugb5luX7k`j7_MQo4`@^V_QterIa*D<4q}pr zegMO~Z_Zq4_V-2a?{Oo^A3IA>}n6l`z*OpcFqA8p=(1z z<1A6NN3M=J+i|rQp&Gpv#t=s^6zdwF8M2%Ddwdf>RVjUWW83D$!JuBpD?j7Q@KLz| zocUM>f305JTk>HGmL02pPshGNO=g>_Qz;G;wkg>{ zII|^v6)bMIq7lUz@aEGlkEzA1$feYdl3lT%|^2t;8n+AXIpa#38{ z7up-3_fzRQDs0oX1*5SoT|ldGfDm(AZ3ZzgXlEaq6wa=uKC~;Hu*madJs2QC&DwCp 
zd)yO4F>RS`KL`$C19W>l5FKBnv=Izu)q}WV7&#(=T%-0zg6E!KNxkp-3CBT#ErNu2 z#55(_b7K!Oz5yq=z(T96v!^@0!AWp|oi@OD4<-?eYW+4Xt9lNWz7B3k5nK>zr3YMB zg0`y|scW0SbNahJ^X~))RseVw0CwkzM+x3MvLHkopjSq%L(_T73K9@!Wtr0NIYfyP z1jg5XU}zqzRG+!*9ryIywrFs-?sr7OXu+sr!j-sDS?1qSlqiv4tR5J=1Gu(SU_UZ$ zMiZd7$2KAA2k6_9fGXLD6``qWk~;eDAhoyQ1k=co)RPnw;`b63y88AoM0FD1NAnH!6;xh z70Cc^Y0u74zO{Y0&XJ{_KHG!V61~B zfgS8u&SN0qxzj|)d1;jm_ZTO3jM6+!`yj`ppO_$!Gt2xtopM`P@i7E;$rvE1cz08~ zZ7|;6j)FI_K>-Ak_L>X`l1vI(EcRR4@wg?P99nrgI|_#QhHOESU(h31(1v#NWDi_% z2ZZQyl|veO1s4KZuYxvn6R?gG>A{@PVIkVG=u%^PoC^k*TlR^vGWljw+K5qhv2M$SW6NMv^IIV#FVrQ+@sNeUIL97zg59Ik1`(kJ3EDdSJ=lZ>d!Ys6*4Bd2 z-uQAj`JA==Gbar86el*!2#PH5JWf*<=MZ1L-R6w4(?;~hmHv+%CGhkqpyu`nU zA*sPdSPj&wOl9x8j40i-oFIdvK_p6rmcz*7!EoL$}@MK%~P#4R5P&bH$5mlDMD+8(A{|cxp`Y zT$eCx+GObj;SBUq8DzdYqu-e}C8izJFx}Z8KN@8v;fPxbE^N10!o78V%uf_c@^DND zB$*^Q32_;1ON?*PY?=U%>cEcjI0cyI(Y8rmI!W@;?e@Kob)d19#np+9zatBFg8hU5 z5+AbA9Eo=qA_kG!H=c6jXs|+?viWtsceR2GhiU87mgGRS%ZCN-Ryyp+GiPKMoKZm!k}V z;0S&{4dVr-`}BU7pZ85UIKm%>)2UVvsW+WDxfGgY%unR&t%{*<_C3U#9?l9Q*7<9E zNW;~Y!Y+wYo2~U9S)xAJrc;j9h zLD()7ea<)Ypz&$ZI{kfa@o(|jesG+XJZ(iGb&^Ik5N81ZCddGnvP3Zb{S+uwi3815 z5tuAvE`AJFroVLp11rtk3CUtr5ID~zJpGepZPw}ItwcaUM4BD#LOru4I`)yHA7srQIwirz zee}#1vmpmJ>(_d1&F6mSAKVtbIj*AK3x`zuSiLF!#6Gtq;Be%+h-NRSpQYpQU;t^b z9Xy9T0zA}3oOS!WEeD4(x(D9Ki0%BsdH?J+H<)6ep_a@x`0;0+uZ`ZQbx)!xvJ0fx zR#qF7k6beZmcyX4vB3DU8Q`qSY%2O2&crtx(G=heba>wv}Pieu* z8E&Kv6(?*|XbTz&q^=V>{b}pGS86rZ+WoR!Fx8{Fh6LH#h6In8x6mH_xF-?XyM^dz zB6J^RFqtrF3+gh)We83d+>JQOH!41ho?bD5kO6(z!9aoKiCctQ#`Rj5>I*nSpT4Pi zpIw}eC8m2h$LZ|s)sh+|*k==<bXyPjs@IHSBghtn0J zxLB{qo@)LhU)>>kFgQsXv=wFuw#sC5Q4EQWZaU58n$&n^z2+cOoD0iU@e~L9>^q8k ziPmZzU>&gCpVwx?W%`9et(SOvYG-d{uUB0wZu_3UKd7xLM`>5JC@>%n^!GUtjSd2% z>(;;#VOQ4^w_5G81E{}2L&?gN?oME^GN5WeNv8j3X!KIOC_f)g@crfawx%;bv-Zok z*=6td2J)nznmYR#982`~){e2lMiI;Ju3@{0Mq_i8cKHb&`MP8Wj5630jf%iV?ScMJ z!02{F;~93>B%~qF9=}sSPEa0_=P)KQ$ZDpoo@E?Sp&SX&FB8!NHf?wBA)5cUQFOk~ zz_7$bUneT0(U^TRiI6nEYb^TSrFPqr(oeqJq0s;XI#HFW#gex4kwO_56C;hKC{r%o 
z0*++Djsz1OA;d%cdzOO0U?><=Z6ZMgfA<1L4E{5e(e*XN`jmm4M5(_0^F`6^_p_70 zWTSDl zg_@UHp3<%~Wf0OEPOc}Ajlu9rg_|4Nd}bo=FVlN9omn0r>Cj3DSI+}XW(=}(cCy~K z%cghze87-tcN&}zIkNJPB}a+o(z)gOA=t@xcAnCv71@=~cdO{yb4pk!-&_hUPrC^ES*RztLec*DdNuv=FRwC3s5m!24-{Pw zil<=kn#o1H-HY_%yqw<7%~mid70B;ea@lGB3G}z%;2OgOwUS1a%sbti=JZ3oH?!)( z4$M!vGMpepQ_OhUwb2_41S0?l>iMY#mEpC5Mm(2q#-MC{`@DA~2mofZ=|{a?;jM5_ zh?>tHV8q_3RCuiRz2Ox1+PUGbDVE?U{fLTy&jZebn|jn=ry3lFZvl2|h2LLRo%B|~ zJHlfD!&T?B)z@@^;Jd)V*(&Z!`4nc}!1hotAn>*^Y@3VQ7fBK85f0bP2+Sh`%%i;* zK3{3qfC8vM@K+5H+3S<^g}qk@pShDj*#y(BmpL|e8I2sShM|`Jn!oDuxDND?TK?4*}FEz{>nMY^4~sf|{QO-B=8S3q1)2 z7b#%LB<|EQ3Y_Qne3*8`DJdQ5k;#UiL)>sS3uKml`f&-aj!VM_Kp7Rm?d{XR$n`}t zu(Sl4zI7G?J);Q}l59q1i+I-^;E}=CEzmT^9)Ur+OfV?nQI&llwYXOHH09VD7<>s! z7J0#v1Hq!TwZTJFoPRNG2Ak#th56$e$utEpJrDA!ZWx%pQw5;(t9ixd7*j5pXhMbK zTU|zDhhtBc7*Pw)Y^Q=Ic?r}P3q06;3JgxnWEnB-_LBk0BIG3E9@+O_@(j|$metme zM@1io;6ji3Ql9{tb1v{2f*Du!w#%Hq@Y+uxkUy>uWI%JR0)&2TIR-KywGbf&P#c5b zz_GqaCVUtW?gJ$AJ-zv>t;~shkY)iHAkE@Hn!WnK1%aYT^g@a-1h0T#OU!||x=<1GOa*ih15jL61A`BS zzE1NL_J)cgq#R?b{0!cyHmsF2a;f+tsbiOWb3tu=U2HmI$?0EW%Fr~8=;6M55ZF!k z8|1usbFT4}%9`=k7_j>qWKOfcQvBez)=)2hDGN2PB}S+v35eGEfzgdh>X{WgRL};% z>} zM*^dZUli}PHCo`ZJ>heA8Yt>nU_=A~jGP+o4wHZP=~@=(nbm+;sCx~z5==533{0Io+~OY5HB{@F=Rt`-(*q6GT{yV z>V=EdXuY4asSJwqV=$}|d1I0Xdw^cI~W)Vk|H)~>1zBD~f zS!*lh7}yq?EEjutmZ>qRQ?&fT*V-3$-8Ay2^q~VUGQ7nbZ4Me#7nK<}KAE9gdtujh zC0XTI`;c~t8g1UJnZ;9?CUGi+j+A3^+j7#OTVnkfO0&v>{WlBiybsP&G$u`oPM*78 z3B9yK(&Tj*S|rI7s!(xCrn*ZzYPTWRc;rSsm<{}r-Ps+L+U!@@dk|turV`m5WB3r! 
zzFr;k>Iir6EfF&)S>tH0$;j;}yJ_s5rNUn=h3o{{kg1xsSRKRPn$?ddz%TUYi3#N= zp&2^cj9CrL^}4v^M$Nd#Pjsu|D`|g@L+d=n8IxAk)00_}{o7Z?BC9o#mep;`GTXNU z?{hCspEpoCw-SxI5Hz|9$@S{tL zf!ZadA#taPheyNj9m#hx0VTfw$|Fq2KBBetVr|8dsE(CyfalT7+|!+X$$AJOlg zuv6lz?cRp3xCyNki(W6VLo*scZ1e}(SBj6xKDI8Ip=7zGDRXujb1Lrr%DNjROXXAO z!4s%dosPyk((WzqO7XrGEdBKyyDBhA2@T_5@phDmBwEv^bO2*lnje>(;S$_GS%S0XNM}+8Q73R0D)+x^M^{Y`P zc9?gE&p5k(J6#e>m1uR**n4Kc?VDDKYNMd!m-27^W$7r;Xju$o&Z-2y5-E39YrGty zC;xo%`Rb4yw_yKRh+a^E2=sI{=>+$&{?8$LMfv%USNEUb7V6(0qPLbenY1b>t7p4; z^;)AKwkNUK-ze?1Bt1|ksv=uB88vmV@jjh!b$r&Iuf1;t0~aIL$Nb`zb&OZ0(`*xh zet!rF(Yyc5$#Yd((ps?pS%_ZO(|ngzZ3%0Mes|zS?xf?Yr?|CvKXkn@2RlW3NrhV< z&FmUw*MnO*@iHqg7eiB-2Y(27k!6=_Noi>=HHGjmJKz1Zhdbn^VCssBir9-u-6@_% zc@_irT`F78^R=gTH&z}tn4nQ}eIKhiov_EVET!H=XJvyD7b?pm@APC;e40FH^L`d}Jn-LZVEkZlGh|q> zfBNdoWJ_HK9i4Z&os7t|qPWVt50P|hh(`I2+Y@JvuP@4k42z|LC6~{3n=L*JQF+v) ztwPAj&-Fdj2xXnMf0lLq>J+~2zFl{&v}yOzDGOm6^OBU_)PnrTl`$qa!TuR%gU^YY z{zVZAth3^cD`+=yNgy9>hutUaQI<9Fsa&nD-VS0lAk=om`g@1_95!gala{#Z5S`U9 zI!uK(klMn^P){pzjOb73nl}l16lW!j22zPp_I4{nQtw1rx3lbSHm=Az&VOMM+oWjB zV>VD*j@U0^^jo(vj|Kdkx_?sd_oc>b@HnNP9%hn-D*b}gmI3?nsw*|&e2F4eZ+DWk zZTGbW_K;42)S$emb^oQ1ZYI}zb{lCj6Eo|yD3cN)%0y|SZnU7p`g{AV zA(VB=0a%^Sl$SBmQ59M8+^#uNe7SnQwY+1JrShOb)5&4!K5m~!>j7^oLo$UcJ0@$? 
zx*kJV{?w@S8sw{5%<<**dn%NPcOWyL0xyPiIl_p(6uW|#n}tuox>w_xVRhL2;Ta$p+Ts*l4K!|mg-RQM}E#P))Zjm0vS*ZEfb zIk#bRFTvD_Ju1iaTN@9*^Gu`_{+T_mKFyPx4xJl8#M|`{ zJdI6#_=Y`0^f1`O?flw%HKD(*G0z@r%ppvnkn;>R1C)s`a`9JgIX7HmdG}wc5rc+`$#y^&M$<=es4CfvIB^pCB|+uQ>E5{FG{sQ!^X?YK6h3gH ze1&>gy1}L)DP*dDl zKYVE;2pJ_-!%-;q;Sz<7g;Z;|7q@v|aH+o-prYxJb>f!`mW9nl;{6}kFIspR9u_ME z?l@R|@-nkF`|x;5IAlw679m@hdbPrgr0M?ineP$u3d`#{6)X;^gLfGu<*9P zCGL{-khN6wD3za{>Rm4l2;k)i?O3m2j!0G={Q-U+bE@eIzo4`;emUIhjaFNb=z3bw zhbvTb3uU%#8O35wmsnUNM)IX~gYsssUK&q8Ve+M0GKyy|U-D;`7{U2o4)3`jR;WSC z@C$M372gyG%f;AxKDc4z*L$8+v@;s4`_>|grg9GP6+#O%tXPC*pHt5U{Y961ma8f+ z#l)QD_RbuC`vGrr_N7S8*N|iu;8=0~Dx9ki>j?*n>5zAxRfF$usE^5zGK~H z17&++u?)=Abxheb2^70fg7t#lk{&fFC~Pu9Ey3Lr{&}%wW`3DPR0k)MkC?*SRO297 z(g*g~x_L@;DoF}nfqwk26YyhGO_qFo=%`9TL~MN1vKfWCTY#c!I)V1i4M zYpDIwcR4Ilv&8)UBSnlp2G=v`%+UxIy$!36qOw`uJ`z$dP)UTEqH3%~m8jUPk6Dp9 z?AwWVVx840&2{q!h&Gfvg;F~dZ8q{!N*i28)X;Xe5#oWpBz4k_W9(}^3M6&8REbXg z`VeLXc(KBJd0T$LO){{T=!C7_g!sCD-HIXb-L^QxP|ELqJ@{ZB+*b#QlXa|sFNuxc z4r!I0#U`Z?{fmYbwr5~-v3NvsJFRec31nhJ^e=g^U3PXA-#2Ny!7wM7y0&|3+wu6q zV;ojE6$##Fu!ool!Gf6##*HHX6wWOkdi)o)< zD~>HGn-(-Uj8>eYnl6T+?Nb-nwcI16D(c1%``gyN4yXq)XiRMK1*+{g?GjEMJA9?! 
zZ!;ZFxs0Sn9j@H+YrVvnv^>LccatZi0~`f{*q&NRL~C8isdG3^)3)wr{L_SF24D+x z&Z2E^({BoSl0g_f(M_4Q9J;dg#DzN+q7&}6ViS;Lrwei4&hJg-cIaa0!G#uiDI=R^ zOR`9+Z1wEDNvhJ1hUyEU*w$YhQvwR*waVXx(3lb^dXFsjDQof^;1RaiYP}9Jr zIO)3^J(tgr7M#F#!#@+D%j_*Nc4608g6{L#YkaJ;?c3#GL`Nd5Y1daSqbz%suEPMw zV;lh3;Y=c>g{`3%_@ z+dtW+4@o}8Nj`3B{nrwqUg#vdL`urG%?YfnM06s_%%-NIC5Onn;;~#bYbU-r=Z;KE z2Jebet*K%LIN#j2B#rkSnq?_<a~k644IT_X~& zLMhXzL!ziOmbdRXa|>P%*vM>UYpBihihHsM0tlVJVq}3#v@yEeJgGdFJPbx9iK81q zzB<5cPjg6<2$`sySgL5_jiBEq)ui$Kpc^B_F1v^#fogywH9dRl&%fVy7i17#pZIz@ zZ8>{o>!|%MWO7qY|D%-O?8$J_tb989(>e(S@#bFynby%~+EjZq=*;*gATIDkn=k7s zN-Oytt?;r}(xSveq^TmhXtyqJw9LrqdhAf!IA!AcDInx{Jc5|D8`puke^QK*oGN8JS0oaJW#`!MbF6V<+*gkv=1m`)Jz(|tE2P0l# zQ&7BpX7nO|8CQxVj#rsT{B;}w^ZDP9GBsvTSVgr{n$z zFis>%X0JApj)EZ#-v(cHiXRYeEZ3ZvNlkZ;dITk*iaFzqeD4-LXcid%>jW@y`z?j1 zq4K`iX#X$7jNKn0GpRF$H*MOLuZT8iGxc=^xNnT8@xL0~Vf`*=+Si3=9df2kAq9Q} zX6FNvZB{I~SRHnXAsP4sWW@qVe2gnjC5deWB}q5D%J^f=^v;@Q%cS;Pxw@kshD0l9 zjmy0cW%{&9_5*CQ`gUfDcRs+h+)h}IO_`KC+r>$=10t_nY&x-dG*C@7A9Pm8&nA1U zj_4hTaLY1>V^?es?k8v4D5$1?T7}97$^MUvvZHp->p0eJD8<9UR}=k^K7-ZX2$HbB zt0eSlbbLvv)$w>s@-h+-;=Z0a)`MHFbgzV7-%n}B3Z|6unRh`~F28FZ_es#lbmyDX zr;x;C4v)317aO?vR&N@)PvO_4#@zuSth^rUw>-u-`|{=LtOAoz$ac2xw+t62rn3X8 zs@=Dh%<8H)0K^pO2E8sdn+>NT)i$4Wu}Mf>Y6?l5u{Bjpx9d^!fL2XWgI0>@g}aO3 z)2iu45bD1B$;KQAs0w=I9yGf9XDe4ArecP@5PX0PvA7u)LF5Wz}K ze|bCgE6P1>*<%V{9km-J@+xrr?Un~SoqH!fUl&MqtXHpWSvBoaGX(3y@RNX($2f*h z6~(6gTJ6Sca-(!xT|ppcmJ)l*kzmuUvfDuQf*%1XLX*6b)tW)x{%anccvfbEkrdmw zELc~u3tLhQ&|NzvQ*PUF#Og(o`gXKfh+)l40B8bjY;YieqNqsA1T9v!j!cnpm_~ zSjT!;z{Ml>chN`3_lJ!!hec6^xo@|db(tJ3LJEQ5V_^7)%;AF|6Acx$p1ukCdVAbe z)amHKl+q_AU65HD`@RpkSXMgGaD8~1xEzz8@zey+R{Gk#I!X56!}d!iM<@HN)K3OI zeY3KqndL-q_@NU9k<=pfHw)qv(Qzo-7PjxUXV=dh8A8Xwx(WVm3j5NQ>89`>uC)!E zLz&XxBpr?)1$%G$Qncp8rshVF^hHH0AjHnHkDYGc_$F^|Kz5w;Q=n=#&q`vPGc1ah zghvvrZb>!K#Yj5rr#XdfzhnkJK`oa|u8$Niqi)A&Hr)f-j@YZG%8*vA%xErePyK6E zj$}By8BqmsjhYL|vsQ|IQ|(t`lH3ly-J$~?bmAxVfd3vrNBq=}1S*2>jzw$wUOHm0 z@zjsd0;FUnSre!a%Iu}g#C^bNLgXnBw;u?2Q6M@_ny)aU>FK`~O*?ZPbLf)H0skg{ 
z@>ipuCP$PD0W}}%pwJWahk&*o9NqZY^qe~dj64^Ko}nE%!bzNkx#zmk2%}hcJd=`njAg=!Lj@4B+T1n^DOCQg56<6OKQx|vs2p(7x zr1^f;NkSMWeP2CrFay0N6E}``ZOy>^Yx1Ax9$x^hYbmhuUdn5SM;+_p zQ}|bY@SY8py@5z7JE_KYR9-$0&I|A|d=+V#l{#1k!RrRa*(pN0@Jop6v^Y1CpUbGI zR`IlD%bqGrVyMP(ADQPd38hbRGi)aV*%K{gCaZGXs^m?;_Pxn-`R$ga%;DGIG+HNV z@2U8$9#By1c{=6WJEwkG72M6vScJPK#M3SQ6+;UQG$Sk z0f#&y0)j*t2gylt4nq#Nx{>#s^L}^Td+u8Qzm^NCH(k4){p?V?3+$OJ=f{mjEmubc zlktMxxi7lk3DN4K1h(m=O4d!wThcbSAghOc{7XAyi)cIevfsX*;dpu*#>iOd&kv<< zv^;siyY8d30Z$awg0DOvr43C`ayyAUF~43zbJ%xtSX0X9P7^4B{AZikchW^y=8_Cs zo|qYKE1Kt{ntAN+30#VcjK3T6^Kw>QwgINgT-luP+g$kpA;MhQjDR;+HX#(6D_anj z&6|t}$1&zttS@5Bg{?DCn3$_sC`|0t#_Z0fxa#cPT}c`dYd*}Vur(`Yv}uRfAB}^; z*LM`1eRogjx4wMV#Q2c#mpQK$!PuO4N4n4P1rRs9G;6$1$R26D-wn4)x4|K+J}^BFUOTVq#u_FP<17-}=FC<(P0@A4Q$5l587P+y)h!cYrNZJKX? z&X#KuYs{{068n=RXzCH> zqrlXdx&A}KU2}a~0)OMidsIh^OAZQmc}fiPQDmyayw`|e-&op~-5=)?juMG?2|{7x zh~gMKp($6)tiY5C2J*2d=$rT25+u#HtFqBer7hW;O`e~#DVjXLWd}5Qe$7^F@@&km zYuZRfsm2iBp;Tjug{YPoVm7KJmROGBk0HKA@y8MiP`NS052)N&Vkyc#hM0zeE>E?X zSC|pH%_~d@hK-&b*|<1jA_}}*(i^j5N04sZ_?%7Cv;lp~27dydk4lOmW}=c}e_9aS zF;XH^$1oSY9&YU}hc%x6PFjCiw&2c7o|FGy4H?a&%$}IWo<&`}83i4A{r4CQp({94 z)TVdlp(HcYST68Avfe?b8B|HKw- z8IJ$%r<(tg-Tyli(7*fYb?H@}|NE4Gp8*`bAyY8Iq4)jN#~IIFI=e?AMez;FV`(lz zWwAZkcBe(km`44}uJuaT*9rJT$QiolnpFbb?LBSh<}KaNZwX4UCQ`LLrT?-tI5Mx{&RfpdM``~Q z7qD6#Fz7e$xMs1&19>q@(F_keJ9delZ$54jDpZ3S=5bsR<>6zBesSsR$ndx*_eWao z(5#xf!(xsz#Xfrcc61`a@wLiBryZAy(LKCAmuAo4POb)Seg&VXGwieQ(%nJg%FbEV z6EtCpb*V%73o%~1UO3}G<3;01;}6EA#$CoY1?2>t1T_WGg4qy*K_jYBvr)g%uu+>w zCz$i}rPI7oJW-rcTv2>c{85(_PF*@}O_xP)O>cc_>eSR}rqfJx@pMe|CP6cSEI}-R z2|)>g_NU8Eqv^`%(e(XXE4(XQ)SN5))ZCTYUO3Tr#ihj`iqqE29Q7BnhWv**YV3oU zf)oP-g6e|m0y%;pjzEjR^q{ezu|Vx0?Ld*hjv#4zh`W!gk8kIGxvrPmR_dC?g8IV4 zg*=|=5eWr#9(z;1SQnucGjQvn}(Z%Ir*4c+dI?0*DzC0pxo;gkHl?5E zc4O{a-!nA*lzyRGecuI7sK3m0POM^TvywK$+pSxOwYKl>OR4Zir>_*Re4NqmD#N;2L+KUT z=%f|5?oI5fHI(aZwE1$SYKFZl02^k<_EYMU_npnPm9Q>pyQE)IMc%hJgI6YIl8Rca zlAvy>bZ^}=cAJGgw(D(2M{>xtz(nfNV= 
zYR>$74Hsr)n6~q%BhjX&*h!e1^<_vXjrmedK*9Rbg#xa{_XT8&BbXEGm7TZOEjpnF zc~$W7#HN*w)x90817(j87)t?njfuu zUm(w(*7mU9yfnAgzoUE+x64()g~=?)#5^o`h?yvu7{N-Bq8WtDgub7=q9mLJNt~=M z=)_zrxP}?*)a&f&l;|w)^je(ec^UGO!{VHWm4fj$)Mk@!vtaYWrrsvgX2K@crqiYb zLcO1ha=+PdG@~e+^BJR|LS#o<5n4kTb%fY3qMBuDQg9dZ(`wb~$M;F|oOAE(?8yxX zJh|3CXK!6Kv&9fzX<$VT@lQo{Kt!w6cX0)vS-qLr9q$`W^?gbK*32yS z%Egmeqi3E^sdP4X@^yYJLSLO2iD#MbMQ)!ppV`B-0`cVbBdV}XT}rom`zXu3(#vP{ z%Q${U%z-X9qO{_SdndMhRvVUVH;Kt0_b}@i;1G#lqnOT~M8g1Q2;alz{Fx@F7jx@U zK&*?q>x~(pk@>~O_%x>;6vGnE63?PtrX6nJgFh%;>DqEJ2YO>fc%d=5Vm>fZH$r>| z8@ly1JN!LjI{4!!E|`pKKeOBS$e0)*2E+Hfc}PI*&9WFFP9qW%Ldl+_e9_ZqCQ(b1 zZ~U>LPMJmcJ@@l4PI?alwB_S;F^08s?h5_xGO)CsC<7NbkBQE zu>L-k3%lh}0;)p^af@Me2NNt=WWv}*Ail%#=8C}H95Kg$-}u7b#71@WBNAbZT{I$b zi|7!yhG(HMv#<9g(H&?+4+VZTO1d|?7fOIbf{ zSYk2afOQiO@3*i-HI5j8NIXi*r8Hr>Et))45C=#Y8W?84F%e{5aUO{`1HRl`JdE%z zJ~vXjBC+CCMeKGau(@KZGaI|dXE!uLOh8Dl$#&Z#-SGPq8YX=`9yPaTJTAHeLKGds zNbMQV7TEz2VAaFBPTy65s+nB_Dc;;9*!=go6kgW>w;FA`#MRw= zvzf}Br1IY{Z006c)}V!!=ngdq%W4xVYfm-ZA4rfistp zWT&cfxfmDg|AWT`ef6=sdBWG#CSk`B>mUmqkA&^1jjy|pB%JOHZ9=1OiWiG&gv!gC zi7wtl(6z)8js@?fJ9_I~L$TQM=G8snIuIU1glE(Z=(kFx7I4fh*TdH;pK*NjvRw$~ z$qn+-Pg*k!wP>SWGj>4>zgXardNHT_)~-c-t?2yZ`-fb5(J5vRuidY-%OyYFauiHG z1y6+*B0RD93!d1*ALlFWM_ybq)RoAJ=nypQ;Mj>CM!C@TW4pda=JFsUbW)7;)R|d6s|R}t6ND<$2zPmo z3x11dPfMGm*+omFDc(V=(T|_d%v8Li|5ou1e69X(*Znh(MYQ5$R<2r0cR^>}7@40X z%iif{A7A?+>ny->or(YjzNg;E!>f7gz)#E*^Q9G;XA?Dsqe8R0!vlw|foJE5aYV)z z)nS4l3%xoeYE?kJ5Un2J`4z9VJqU%>EkB>QF-an6%uRfrskHpO9$cIL+w}yV+Fh>b z#e_4aIa!F}QF@j7VaBh{!m@SI%}|QdgoIVzJs$nmgZ6t|=-;95m{BHMh+xDzKkQA^ zqH8fT*H?)~WP0ibkO54F(|eS*`u`-O;~!*nX#N`+xCw*|A8UfL^ODT$715)zeo%~$ zV(Y^D?s3;rKgR4+O1L?2&!wnY;VIThk`SE@IgbiAqt!D!&t`?Dd`fY4_T10Md>6Y+ zoBE)Tx@GjlgGrKY18yQ3n2oLn*N*?V=K8nmiB6aXuL%!~K{EXw4RKJ#@=@8#!r!Xz^+fLvT|Aj5R%^0XBYM0SzApR6_2xgW1O9Q%fhU#h-UYEc7gc&= zE^0$;fa+)2h4z{E{23{X*xmaKo>-{yjnAX$YYy4nSqHzi%Zfw>y(nuj;JJ%XhteY-nGl{nbaN< zl)Rgm2;MX@d80;1&Vxk*c9u{oN@(E@65vA79Vsqo4Rr~YLQfA`7YAsH&!(YNBG@7q zj&UN>I?@6fMGu+X-y 
zaD^^woj9UFW-Q9hZ2aq@h=)jD0#mkJ1KRa7(+BVwn9>bsNBZ%wM_^g~x6<(Sri0v7 z#mySc$*@Hqgz>bL28#KEh!-fLI_7@D;Fuv(LY0>TLd?05qAEu0_K;wOu2clm2gh#y z#OelXdduFwLfPTS!I^=-c}-mpKs21jo3*ozgqIv! zRxH7@jqtUx!Po7@DRHUWLtI3~^_lR!W7TwT2EDuH+Ow3|{fxph|Qd&DU&-(!lhrvMq+ zlbNM_ts0O#^QD-gxBC+R$#g(%WaItL96cC_rq!2HK_q*!;)>#QbKv;2ATE|FAVCsm zo-@Lj-3@xD^!UUNIA4kO#USTn#{ z*9SXm%qK^EC^-{h0FDn6a1stMX}N$d00F|agN zSAlxcn=Ps5=Yr-9)WCII)yiGHwBQ97DPF@>7R-ukKhlA~NH(tlB-_0QXJud{ZfX<| z3}yB>rgH8-yz{`EWp-bXvXN={qHbBMw{3Q>70@_IV9`W2VQ`>cGK@s3kCs7LF48ho zWuMglPbgdO_Mm8DR2GTkY8#lZP34q_*t5WKX)Q>3}sk79W zS?5S@UjiMd;*+H}AbjjjMp@leEvG_IH^2}Jhi2Ust={d~9mF{l4O75mu&dF;67$f- zNELhar;5e&<)C}6{;BIZQzSXMUl&Y)tsB^8fD00u?SxgZj`x((Gdmmi2F+erEwIj` z&O{d>yNm_EL)AeBkm1=}Kxv=61K}%17(ZO=;!o57L&Om#j2~w8SM8884nIF2Sm2}n zRUbsw|F1n}R<2{eq=H1WgHF`d0cybC{I{36B$ZU|W8sIfkS2L7QW4G(KO#u%H^3(( z^0l%N0R?lk+v?!DvQA{56!5HoY<3@#dQ=UN9G3S;C_r+U8)80#y~b~fVDKcM(Lwa*x}#JS(zR>!Hq*%|gQW8;;NDPsR$BQ9-7sMIvx@8L(_ zVQwIy0wJ-&KeM;rY0MFjjaLV@cw5)r6YC={{Q-6-g&5Yy3`hGhYfaGSZod(W2F?@s z(DPm2vf}prO0eO1gHS00p%TB7Vc7CFEB(oTLSvLr6tg z`d6&?X9}XO*zeYqS01sjI9pppP4??99 z&qjnpMHvFS1=D`0%<0293qr+*4X29GQaR!R;SgTH$--wgH6(?M{*^*()Bj1!nk*8x zI|3N$RYhd8kQpPe%>WNU<6#SR8=w!^U~_>R5_svP@iG|*9}VCrEh|<`gl&X-?{_2{ zn#iNX9l4)S(x@u(m~*Y7JW(AfDrqQ-bSyr`eGAJ-5lEpEb#rj6}eq@@<5<3r9B;KJ(BmL0vNglvWU(H-R0fQ+%b7=tAQifX3Y6w(ltcft*9j!1>> zZWL7ZCvtc6X%cjHwRaPDo(v?U_TUy+ZM`(G< zpY!rrHhie|5a`Pr$#ou z*xu}GUzqp$gQz`H0FvsS6!3flx14rzNdLNy^siKDZcGYK$&jrx*@;PkeEh3FB>&~@ zgZx$_mLGXH5LT*|6{~-wfP|LIlx76AAI7F5b>&Z)v((A|LrV3yKq->D?-xrSA*67d zha3Tq$Y5W-$D2z<009y<(d&Qpx*XiyYfkq1^R<@BGh4%L8^m;HPB@#MklPI*WzO~& zrwoU%4YTl(cz+Nq{!K_TO_e6i_~|GJ7RKA#ZLJ_!U~HdK_G6{`7m~U&O0DTE0}i3cPgcDi!?3rg=lpuo=#&>oiQQ51lgKme?vsWDS2RE)>&e zyC(k!%s!Sbq~!Vm?@d+b?uUyVW$Ozn9F@qkcmzuhvJ=69iVXmpA{97m1kXD#w%Mb9 zW0}p|$6{Z+_iR(S=m{ZM4w*|bux2Cv_sICWv@o$!{bv$w<-!|OX7xSFcuWaj90!rfuSQ&6 zA?YaJbf1S0d5F=pBGyuuVwErD`>ELLSq;q!>D<23)K`3Gnxv`wa7B0V^``_|NKY^{ zFm?F$K3BDOFV0H&y}!|}7Lrwa7m2;wEI*4kJ|nY1Y?$j|{s&zy!=;Bx+6OotEB5#@ zIfM}oWzjZ*h2S0_a3m^ 
zKfvrUl}tEYxYA%$J$gYO4S{DoY0OI4=H8Lt{fd0QS=;v8)5+~jG;zglh|v*Z9~Kn<1en(Fk%g!G{&j6V zXM6ueT0A2}3tfIGP8CWEsq*bTXQ&8H;oYr}I`wdbxQzHxs8LKP5e1|D4_``H?EBK) z0QRN(!O5V%3hjn#7GAqQEYIF&%)Mrxv4sYAvwg;LS%3XO3!Q;bSO``=nNdqE_D=ay zo3rMy%R;Gd&qId@cW;jw65hLC9sN+a7r4C^B)@#;6I^J=E@JFLMgAva_VN3S^_}>` zSRV^uZ0-g0e6@CSa>eSvf^(~S2+f^+#@L`h8l=sDDSPD$XR{^ahlTt#oqfE>nc9tZ z;G$x~IZt(#U(Ee6qKhpZ53BA}?o&Y-PD@o;WNByH`S#c?I z5tR2PrR}zIu_m<8qok5_ObLR>nPePPet)sl6kibLz>*;HK zdVP8?2m5UM;C&A=aYuUUl8N5YrQhJl6F&GB!>(^3;p;&W((QGlH&NMGg2mZx=^0WR zey2Ichc%YTunPxB*;Kky+y3%;;Z^UI={4coiy^p2!Kd6oR}7o8N0vqqO!qi*FCp{< zU$%CZ+V5JrWH~-Pya0*lcYEEDGKWuZF7RKqTl%@znU4t@vKyNGEiL+6%3)~nx0DE^ zPc(Lzaw4U*rnuBpLp&KKpQ-VOdAi`4Qu>&Q!Wme(3W~`?>bu8@nHl`oy?H0Q)uf+p zc;M|*cXX_eW~ASMB&jAl)%k-x$?(>xeY8Zrj8%DW(#fP`DH`I>40{FeLg;jL+>4Dv zIk9T&R*7Upl8k8C5O)tvym)foG2*H^Ti6Bz6^q{wl0i#4~(d(;;X zt>&>zKQEQ(^(ze#`-S#C{a~o9;ZgN5E}6$zN_z`Q0o~7Nh$=~A=h*Rq@H0}j!q*z> zAFCcjKl-J4KmJ^&mcT-M!+d;08u{f@(-Uefk3ALeLaB)tim;oMk*b?NSfi9FobDIK z_RSA=QVOI~NPV66z?^wNeNZZVki8)GK{KgW#vnTeWkgLTl{FY-fvI|PN;FYqL4LQ1 z9Ib+PN=c+3dO;cVH)m#~lq{TLD!ha_8n3mQ7Z0XlN{px}q_Be~1s+DtR9*(v9p6U% zuJO;WQyFsT9vI#FtjJI)`{``siO+4;1}5@74n}B zH@>pOWDItj236_Ntqu8yHAOux5uLw4#TLy=<^*1~$^=zff1+%B@5^t!#@hKvAeyXF zVtT|V_qpel$w>7u*@`C>Zn-w{F${tQ>i%=#{-ri_^O2O&XLZkv-u|pDxMj57bl_m+ zhrs~ztSY@zYm<4}eiaw3g4H`qGddJ2{3}+!*(k?YA29qrZ&Fb7;3JtAOLbTa;aLgw zJdskGOP6gF^873!uoYn=7sGU*|L}@`o``HkevyQnPm9cBd#kn1PICdlS+n@+K1yK) zeUHB?Z9IHFU1;QeY;U$!(@7A(l2lSn=RAO_yYv!mk1a8M#`;z8PS2>%J@@3mGmK&E z-?Cb}6?1p9j_M4EGx#WfV!spoYq7`sg<;YRsT1>j0z`!q>?Rv+{_J*jD{4Akz>Taj zeh_O>R^3Cj#+v6Au+58)VK1mqpY!vS#-qDfW*aKWoKy$;g|`em9{C>F4IQL0M^|rA z&(AULWJt8hJf5>&JK5Aw6oLt=?&en1F-QU*=ce*BvI+by!&hZx+e$FE?AzBKzR~%dquN4J;LA^=^FGfLzV2&(P<$y_V#bh3f_E^OPc7i zmcq^nbDCE>IlC%~@j_H=aS3EgcLt<|ykeHoXFs`vREtUdR70@+Kiq- z>aVinqhz1?6}(TpJodmaka1mO#R;#SvLg?SJrk>MyfW1JUVvXV{yU}NeT5TeKj#W~ ztxu0Z0=Wl#-VdglH69?@vZ}5##XWuzEV%i8(E6JW3F%s{n&GkEjK4Zk6$sJ2&o?gO zvG0JXE(KGKeKNJ@w%u5|wz}q(3z&>{7}xA6`>)q>oc#=0uL*a8|5U}aE+@|Qy__Sr*P2X+xRhv 
zbPeO5lpsm5gc{&a_F?_zlxKLIiuF~6{{|Hvx(#x9%z{78+i``qk-lxzteQW%jq&ij zHWiK=O*NYJ*-&JQ@l1{H0jPa`3L{-b8s4yDvleo>^w7Z_>Pf6`IoYN@elEp_jVP@K z#dLLh+tu$FugqK)8pq+1n+&ONmmoCFsG;v}(>&U&7ITL6pCf@}%V?S&8)bPhJ$h{bu%LS&L9-x;gHpH~Erg;e^oEsaTK4 z2X=erMGE+he5k#i4BAKHy-B2AQ9{w51z$WCukPn?xI^aCcJBOTyQL_SdB%+*i~rC9_x5MUkG9q8EV-dC!=ChHi{;7y=C;A z^z<#AtA7DYVM9s2r<%d(#c==Y^i>$lUFe};b;mg{3H|;8W&dNK*()mQvKP#PfZqMu zd0zM~kB8T)vG7fRnmZ(^=d-;}tX=fA|G|Ja@+wq-^Gm5##N&-A^M+xSNd4Nyx!VKr zg|rUC@#Pf0_OxpkbF9`3NGopvVd>!JarX-k5&{WVwhoZwp3j5SNu16jOgbo#$eqs~ zJyCqlHVT+bo#xUc74cc}NOlxKWa}o$>iK!SuGV>z!#&}jCF^xK*i?-YNC~%U#n#W0 zOr6hD>FSvWZ^91G>$_6D7(6*F-P7+e7;pL%9W%#@QzgF zcJ~>`C*SlgDE0PhBVbndq5ih{ti##ipC3E~{{i+~o(f6Uh56fHaR*h{?o`_1^S(+y z`e1#Y8)=;)v@u-1EbX)Y{nw4sNX>88O0K&iH(#EucRNwc;S1i=ty&oktmp|3Mlbr2 z&fXeGB=uJ^N+xYq0ysl&K?9Mb2DbvzKvhw!U*nRN9Y`njxlO^iL+OjAB}CO1bKqpa zl-XNA^{IW;_D05Zq{WV^K0%))CjGVYlJ&*u;i?qW@CpeWz|7V2dHN*(&?gS`(V1@Hi;iqX zd{&*%s!j)ld=ZmAv+CSV`^QX?r+>^WO+m~w?P;fC|4)7Ig4F%AY27buOTax0$8frx z0YoDG{#fhiU)74oi#Z-(pYREKod=c|a{$#rC;zC%UfCCYM?|#0_I33t57_mz&&VRKabv2a-`BVKzf%VQO2TPOeReo~)`&#< z!n~&EH}Fbh;1Xz8zE-mL)jne^a(|rNyRa`xxPgd_%1Fs?58y}@ZhU9UB6!m?46%Ej35G}yBBPggA5O2zK0^J!MN0pBgc0r|0j^0D^=p}E5H-LnQo>tD zNLJq!rdsv1OYT_;0o}*;6;)i{*Uj{h38eWMo9HT;Gh5M;R(bN|BBlbec5tJC{RI?8 zBkvyd8=Mnz=QPO;x6N(${x*<8xzg~dV?ye|yc&6uS@)Fl@EM7SDu>P$G4ERN!{+b5 zn*O}HRf)Xc)Zq?!-ESyX16>woG5n#@$l)%zHkh<*OHN}p9aXTPCVjy#;Ica>6AiH) zeDe!{MNg~bfX!ti$LRRSfhvV$4Nz@ zVs5{zf*F?$L9qBQUbEp8A1VNLUQL*FvI91n1{NLK7m4hN1PsB;Y8@VsmzmBhlh*;W zS;K*&KN$^w@L?i{w9Opue)3k8jsnm&6Ky2$VZ&{otK>S}Gx{T|6dv@m2MCQEXJ(Oi=XNq!a>8>StkYF@L;DIJ+}=}IJ^v6U^gk7P9kg(6;NI)g2FjpSsOJ{=%^j;(FTTvd%fbLyIhMrW~qln^tDviL8oSzgUemn%5+k zHZw_nzyzD8vy(7r0&6y1cdf1y1e&4l#RUI3*khl*55yp030}4QtIjmLOCQ9*VU`T;7I!4Aq;{$2L_)n)5B*F zgj`Uys_`F)gLDW)pXPZX*izF@SLy0bc#%(AuLsAF;9ktiFg&>U2)rM(W-G2U8ZI65 zLax`qE1kjXl($}0oj%j^vWj%w9$tl8n3eG(TmiyIu#~t&K)qL#EzOl4xJ>$gpJVhn z1~OR`;>THlCEA&L#@MB#TFMljkAzMH=J}SEL zNEd})@Pusb!x8$f+$X{9vI_c5qeZ!-XSDsG;K;a|DGbYU;^m!(7js*zUoP-}bAOL2 
zkn)zTeS3CT>!rFP`d;xpTaWDzUbtWpSp}LV_646aY)_WgckBAJKbP2Eu@B?PGrje| z+}zyewf{A$D~&zB`6yPdJ!G`$iujS%=sfbyv7slev9U3j%fz!kHNV_Z7dMA{Xk2eE z-2`p9j<$WvN{B3U$KFp*`nI4x`m^lCrf;)&`dj8KPPb-gRc!86;G^`nJlD3R#&Xou zRkR|`ow|VMvj5;j6+utDwz}coa#yczVq%`SF#Dl6v^{)qYS>%LktLn}OGjPz`ypEg z&of@$dF??CeYB@kb$XS1q#gRaiPof0@JfQzlO?GvhM6peD)E`zCD#uORap#? z;=K7w!q*O&QKed@Uv=#}t}Org+pmWyZ$u2Lzv)z4c0BZbBcfF;s$D&ffB4`HR;7Ae ztNMBSx~ln@@(+O_ynv6h)o!a>{n(jWN$S*3_gaIfhYbP*Umr{DyVpuXEiR15KES^_ zx&eK8AhDE}s%~*D4VuZ@?_9TkXbs`KWnmE=^wB_9~>^a}nM)W*;0-$KUF z4vh{=(N~WdM+6v4gn3EC4<%456xW7?xfV0nT34xwNHv%WHaw`hDI%4qESPxfV6Eah z!#R5Kby=yS0z36}8CV9h4-6w22U3%~f=z=KX6aKU=u#6J>b5z3x~dLa9$G|;GahvD zJg9JBecM+(ojx^@PliUCjlI?w-(Hh}J_a3i2tHvT*jHtYKSwt1J!XQx7i^&s9G82b zV#+J{Yv4ljX;|Y11>QeA65bJX?~%ZDGiFtXDhJ|SuVCz32DwvRV36{bVTtaZ^KfcspfR)Ncm#hiM=C?z4MAb9>=!1`SShEydPM%0 zQqfw!P~$hp<>OCE{`Bs%5M>y=v$}4?b6lQD^H;OC)OeXaIqa>P*Vf8}6XT3dR zep0S<-a}suL|qr3ES{D>b4lWGUFh6+wL;tI4fWlTVW}JTt^!|p?Yx#{Q!P!7)|D=_ z%lDqQ-DRDOW@ymP zCLw9iHZU2tqE^V5tyM$?{j8lkY3CJD#IVIv-W(EEaN0W11YS{95Xico!3N+J;cf6)cZ_#%bu$T%<-Y zcIQ6^=Do2_$*!33;7qE7N@NP!%#y`Yr#T8c`)SMge~gB`~|Fw*ai?O=TPc$%&F6LYL&*8 z)y5X3J8y=Ko;vlefho&lfnp^K>I7rUu%Obt0WJ^*?)FuU02z>1a)}jI% zPOw>u0}Mnv%I%V0b{1z2gtjGqRqF$6%f~xRmotqUer@%y@p0Vl1y=RBxZ9JW}w_R_9g{v5m+xf{Gi*1% zzIr&LPKi+3(LG@FG(^G-O{(ZvgGhCH%ResMGt+jc)~I^_P6iETI?RzOj< zdeVEHb~!#Mpm_D;wT$ z5>Ekh6vefkRC>Q?mv(NT?@FO9G`rA0^oz^TGT^p=IoEpqMCvfZjM|X~-6IV;3r9p1 z%1$co=8<;12zWhXsOKW^B+6!YXq>+`!DoBDeudxeuVe_)@p3HK4oc+>VmDYYt6#Zf zaH?t|+Az`vH^gc07?D530%WsaWznbY-+;+Y%)zZaA#+a&Dr~(WbEL~OL31v{;whIx z+3tKYbmUlmv@u@4!97a~CUS%mO_;Z98j zg^+lS%KDe$bD^JjLOm;iDGVZC+tPzn-8RNSOdMDA2uck;1wAt`N7G&}BB`xqqJe;2al8LWQ zkToc2Zn@1d*zH>A;%UV*L9TbA#zZ#)91ATLKr}u&-al&ebjHqWjCIa=V;pREbwet< z4$qnT7mlo675fXt_LU`R7NOpMNZxU`gr7nsjbmSOrY7zbI=$!A z6>lZCmxD4OI${Gg{+0N^F7?)qaYQwXsu;t;?Ocuew!_~#KFNzJbawlSxl=3_%?N<< zIwO;SJ>+96XHi0DW=}C0$nvd2I9TMi`ig6Lz=0%einLZ>0rrsX0yUmqT*@tXHPY`y z>n?YQC}_dHergg`DCs+x=KKSIJjT{|d^)t{+V)O~|Y 
zUDqfqJAyxTq_C=6ppR3&EMBHx#8=pzX+!S4rQ69+Dxy5eaJm!rTM_kg|zd#qKvUEcq4XK&K;^?g~%uS58JC zg)S&?Z5%nW>e<$I%L5+Y^1UwvLnqa#`lDS*xI_a8Lgg-mQ_ zoB-eEIXf@B>=C?pcf#UG4-+6930+=sPpC3BOM^zYpEo(jS(MM`KYC5w^#JelDuGs``JkH)TL>DCK1 zky@QoyX4z-7_X8{P2xhE5hT+~I(bfw-*@A%^Glg+GDs{2UTIgM{`KW=l8i%EZe(7Q zSyr?3KfiY1Np8o_e3N=6apzL<|%NngaApYBB~I$hU(m?hVgMeyvNrONggy ztRbgUn^gxtXf=RmAyh5KBYGBJBj1jWf7aw{togWdZl>p*5ub4K<40le4_5xDBcprS z59@*GUPkZflc7j_^nh&gf?k6U{fod>w+M~$&% zb*t+Ga&{U58~|;JV$~ZP88lZ+I2Gc)Z}vw=O%EOYri6UsOg^MqBEW&h(L5Zxh^UQ# z(D3So(R-l5oUJqjZTTK(60!M+V@~B-T)xOc3v4iFa!)D@q2doM2g!bmAXtB2*khvAH(f6#$o92F#71!` ze+lOG4_Oi-{kV}Y`H?GML#{RctgeYYK@vi187wnQ$~ag1W_>OHB_rNF?n~Wagc=NTDx>@&MY$O@lnd~O^Hl$ z5GnVmaSdNSHxq-2$*ixjwj}zzYS)vkNlR6dnMg}-CF_!wDkqCWJ>c!+L_Y0)ULVIW znbjl9k7a0%EI(&j64aLFYnC|Is!1Zr-s(hBF;MGO|C2vRDEIJkoQ(b9>r_jk$Uix^LF+C)<;47$mciZFGjLXJvs;d0IKF6?OgGt+S}J zZmdGTo2C7o9czz&qk}-Il*^pzjfvFPmw%JVt>)rubyQ0?xzw(4Yye~5#xouL$fm-HqEA>@>@?%%D z&Z_VFD;&Mj6>^R6^X)7Z%U`XezBjg+;&@uML`I#pgGxoNXeL9QBri5Zt{5givjf?3 z+Qr^7Q|3DuGJfxBn~x_`-alp#N$$$NLZ$7CHi3f zwkO8V!}`VUCb##-Sd`w!SEOLb#I#`;e01Z+BBK{z{o|K?KOTpt&XCMFn9M@XMHy(h zj-$>MG9pKvF*4XvL3=kM%TQwi$qemHeC0XyO@CGg6RF>m0X4CTq@H)q0>O6PXhVDU*NyAbaq4%Z(ty4Kwe z)A!pm#C|#SZs3;Njwd~&Cezo}zpm?8vtOe)i5zRDE1fG@D@ERZIM=3J1q0WyQ ze_j|0{uQQjdhdRZ5LmiMu5C|e@zuw<;LlI$j{cs#$zktEeDUu$uRHsKPv!cJ(f1A& zW#7{ZCO|5cI%nM!#nvxt8}c1sCEU zd6c}-yY-P%J2LCr)!)$MBioWFJ=KqwL?W2JDgUm08P z%bKydJa_ML3Yrxa$aZGisH@yHoMk^E?ORH!$tMh5_X@9);QPWO+R)bYNQNuvZK(kL>N~2QE{L-;Vv)d`O z+5}ET?mBJ`uJlV|+#vq6ImIKUB6TD68&VtE8-g098p;~#8loGR&0hU4s@^&-%IA9@ zmJpOwDM0}h5$OhLBn1Q{m!-p{OOTFRK;SJBQj4@mNp~+HtANx}ONU5GcklAM0YA_8 z_53yGp1J0lGv~~`uxHNBlpC^Y>ay9zt9HKW)LkH15YN`G(^PsY{IrJEgFT8B^^}{{ z>FFkG`cspk5BMMss#@v*jh(=EV=u*-BJ> z7*}5Z;;wc2jj#YY#JeKrmgN0e{|o$^;yABBV|Ads1zp3^=u~Vu1InKAv!)ekrNK}Q( znNMZ+rbtfA&>*|0KeBJwZncuBDq_5UTw}a!oOc$91PWU1S{2`qgpLG9kvNes(o3?P z6*&IQF*fGTg$%H-=^46d$_?h|-ZG?q;pI_1<17_2z#3waYZHcmq~ zvpfbHRwVWrycrv7rq!ihFcy*zg#MM?k3PBK7cQwUCC8YnKE$%jzfXH|->+KIBt4gV 
zfM!KyA7DOl@k?xyL5+Oelaa_#+>iG*G$~>%ELD)uY1vVC&j4d$EId@F4w-;0c_|*h^1$udUd3%L=g?stA__zcZ1Q~eKco>A! zgwpuZ_|pW91&n!&d5wjp1*dr>d8UOWg(UeT`6UIL1e$o7gxv4V(9Y0F(MjEHzAJUl zCw%77>p1!O7f-*+5B+z$pGo&ecwAK6N9{1}aP0`~4@2QY9d}pcY!z1&SLEsB=>UZ! zg(NwBxgMSb~k`EdnN1yQ+rIZ?%W#d>*fd2fXig%df3F!8%y0y{6}KXlAR=Q63w zr+pN^=f%4tGauj46+OaKB0n95p!E{oQJW9yIE{{CVoWTjh`uYs>m2?u?NhBgp!kcx^*p8DvHM1)=aB~#gbFI9W5cQ@5fO0 z?IQ)qd0CSAdyW@1U4v=)A<6f;dlsp8wRi1zWp?>?advsZ)N|%t(p`yNii&2DANOIA z!nvO-y00&qSq4DO8O$67qIlH>)cH}o+4GLgGXgWhQi6n^Wr3dlMSRzM zkqJXyevtB-6D&R zToKlDp@+ixz1Yy)5&*vX4uTvyKtR`N!4A8OL=ZefJ$B1z8dYd+HX=`B5A~ zki^yQ%sFAY)0J<7u3FI}Ej-b+D2PUP+uBxD>gzca4(s|9q@sVVSi4o#>q!)D=sFc7 zxP3y8(3$Sn5J2&YGKO#2y%(IMbBHK!gIW~b9QIs&WfvoePBQN)1#%23ilD>dcEweF z`lg&^)COIJ$U)+>)0Wm6X@z5zlfu>HP~fj$&bW@vWT zQgi@T2fs0gV25<*g?~g+$EGGf?3tTJnAo@DSMU}VG|J|) zZa|2gkM-E|rJtxOde=J1pK^Zyl{I}5FJJ`3SuU3hzQlm1 zzNW}kM1Fhddj-!6M4t4*l0rQ zH}lN;caGoFDY_57>lk@k%G$44!AEZ?lnt{a*LTFq9P0wi`euR|xCQs9^pW2>k`#Up zyM*c6v06H0-zkXuGYqR7@Q%rc2RxMr&bWhyW;Ce8-6C2yW*(7-{k|#2pZLp5%BmNY z2zz6x!S?XkX#^w#km=Ryq#YZ|($~J{q-m5NdYF$R_@hqYMz8Ul>Ni%w%*7#GCu4R5 z%wdbG8tDU!28e31th_Y(#_q->&a$G^*fBFRm+APs2qzG-pnzCz5=*ZijP`cEAS*&B(~bVJb@lvt3aM!`2D^md4O)M3F-xbDcHXF+sY9Sm(W=zp z0A$agy@6b^@kYEJT)qk0a zp*>1~@?W|1rR=F2KE_yv^yu&IIh)3?Sd{1&h}bD(>4=CVOVGL@s&Oj@B?eZVUsN?c z2wIo9sN(LlZoIO@077`PD!YbY+xzy+H2cIs;8Qu4pK&Gb? 
zXTa00cC4w-hO*yL0a3ti7vY|5$?MX_6(5F*UnQ{0CbCP4d$jgWC*V>zE(#y60oN3q(;qaY z(S=`{6Z%giFFnin8YuN!zEC@gPhX1QQteohYV`}7oIw>b^-l?+-ZykI~7+$Tha%{15tAp2$Yrx|0IV7rS#!6R0Xsp@_Kt$osE)oi$~(Ew<0I}sti(MSG1@#y=> zv_TY*S%TED(*KKSzzXxEz_l4^aB5kTJ`>9sCmLT^u06fV>{5*SD{H+I@au3A+J zeWE+M7-*6ws3`hUp9p^a9JsH~ghbhM6eN zm-zE42*n}i3;CKTl4|3ztw#{OmDK)m#g)0sNz9N`2VHeY=pQ#(*}SOSeg{@!E~d-y zxnJ}J-TYNO zgJ2HueLW%5vHxHc@E*Nt)3GqvJ@~qwyXn{m7!7<%@4$5IBa9JFShj}RF@*;~$zUQW z3oqcA&`6lqjjo#j2TNH>w+ma>UkH&H)-D*wu3oVzw4?kpEF6}y zzy>dX7Ms;YE)c*K%f-VMEaC0t;%N)iaF#NX+k^6vpJ+oyi*Gc%WRneE*VhcIpbNe( z^up@y;>_JvM~(Lx-`%xFyXHs?MjeTLnAzI zox8f0K00P4is-w#FdsKFr~lZ?g4YFOHB%g9s_-2n?ZmV{0$MDrKgV>nA-}6%^kmeZ zC7-lT2hi~pq7Fh@rxWRT?K=+ITBk|>Rs^rB1~;+@T8zf6_XIbRSDB8ctaAo8G9oO9 zCk%*BzYm1~W_HyzR0rROBFzk{YiJJ4I&cg|r!__uHCaOF$EKm9?XPG;QpcusM#-P| zj`g#Uiq!R=P#_dadZ)lqj2 z?nYQX3tP8%@5dYgdls@j_ufx%gV|tI(s*=a06%1IOj2W1xbJZY_$#k(rB1C9;Ly1@ zPA!upLoc6`_0pex8lgTQUAcw=6AyXff{Ozn&iaZO>CA8j?jK`a7$6$-_Z-|Eb{E}IFEJ^110~0(T9Zv!JsU)OrlRYD( z1_)J1ezml_ubmm3dE31tcZL0a#1{RQp(4O^hA)olztN1N{#JnQDS`-nN+E3IM)jK@ zQb9VsNIwzWdABM!sJbe?UKKoaUF-q`jbeyk^DjjvUlWl9c-TJKx!t}q4=e1ZD z<$o2*+keR8?WmIr=Eh0XLqo!8`*b$;_ybAWfizf!h(sL1r?OxL@$|Hjkg_+sbE!ROZ` z#l|$geFXS)Zf1YwQZ^&r6JI@W6TtOOa~r-;8710vp|VwKVx&ULsg_kCIQ88^{uWSg zui?LIpxWZA0$2EaFvSkUhg>2s1(!vH2foDVt&sGXZZ)A znd9JA)zzL3aItHJgAsN)nIJWs>OiX+PK6KnjBpy-O-SiYGz@c1H!!@CKG&L$i=7vD zbI}zQVN6Z=pw|2RaC@S?mPoXv!v1PtrY1vt4Kfhv;>^A4$H?Tcp7#~;Z zQ?*``B_@Bd?Q@Alo6((~j~Bao51KpN&u3O=wu}--4ZoN^0k^hrPtKFUr+2vE z=GR35Zhpm{^OyB{;HQXm$UtZsL>E_T3m|pgRRtrp-l+w`wI?r>&(HTW4ebCIW-pg1 zPnVxZA2t$W4gJas)ZO;~7E2DItr|`YA?ced#72H)1>A4;VZ}|YXpp0!z@yeXx1hvr zf4{h?A1&N)q6?X6+7v!9QU2cTCXKVeurScQ4VNUaWmpE3Dyc^Nua9^UzcK!@^w;$yuqms= zTG&r^X24@l0wXQekJ=-~C^z?SYA^co7!zFug@FPW(z^PKfG>HxVn^n}n;m?w|HSpu z*ifIWx_0M-cPPEe5%Q86(`+UWXP!25HRryBEV6x!7thv%W*2HjYDPlTbbGbcb=7rx zAzT{8n(l1EY&GoUY*FA>cy4wlwoUeQHmRr0c@K3UU3oe~`nGsgiQ@|ES;krLe2JkT zVCebE4Yw5UZ}l&XimCJ1!2rXP{bz2uNv}3FHgz{4n_4E?Ca;>coBP;osH!&fg&k^z 
zUK)Jm93~sl4Tgk3LPtV`-wOxN&Bknv%%&roSKy8-;OP+;Q8`IH#w^s3)iV2@uV1o% zJhm458JmL*X)-wPd^R>}D{d=pj<5p|7QR-#_xc~%W?5z_WT|DjXBlNV%x822MMEBO zCxcNWF!0g6Xvx3(!1WV&)R7f~Z7x!;0LPm_2P>xb+rmY(NlD0q&1d$gGFFy<06ouR;E_XRmBC~zgx=1QeXW% zbl$LhHzl=Mdg%MC+}DaNNh1V&neWW_{6rad?Xnn(Rse9M(G1J#jL(jSDm?Kdi+ z&9(O+J^_~hfL&XoSRsC1R~xz^r!)UtTSqc3|Cd!PTe@tLFIVuw?u!Hzi>!Zoepk9C z>G8dM)^oY4`I)@xudnEt0U5V=B5n0Nh5dNZA!!>q(s=5j7dDC!@v}p6v?{9cN<%Nl zRC~}xLo!neoapr-<;Eoa0iU1y=+VTU(gSs@-k1v+y*FD3gy3j)D5V& zjrf*4oK$aTlou)#=WVN%mmn1CZTIn8RCUaWEz98Zap;hG!-tbl2+;S=Ih>4)Q5_Fy z)%Rt~OW>KIvh3as@L0({o<@4o5yF8Res>~#KlAfmZg{heuXYjlWi0e%gRb?@;rl$6ZXcZ9@iVA8%ZK)*waiM4myh zrcs$q@^~e8uUYpQK&W+0d?rfk8TFFkdOT7u1J~O)egKQRd?ry$jcyZzCah@q*-De< z-4@JwG8C7j;fnaknMYNXBQyjb*YH`9rq82r%i$V|uh;Ncc`lJhv!5e5ltA{?e<+=x z@4LV!f__sv8Ce_PE@|# z=?r%MPWlg*0x;n4i&-*pfff9s%#v`V4bKJPAbJp*_V5yodhG?j0wphqT!9SXOIM&> zsPPpg_iL}{OHk8iSl*TJej)n`%?D2)1}-_-^^93BIZMpzR4+Afvho500-y%d-}$bF z^C(9sTsE0>?TCK&KaExKmFT*Mf4Z*DwH?9XbZ4zbs_uty1w#H3G}E~wbOjRY61xJW za%^6K8uX?)FGt%n^8!k)l!{uwTq%VECnvhpDO69A;}XSacKq!UPcQvrJEfWUV$PPcCSF!;oMiC1ihv!CKI#c{3}qz!s-#(oL!j==-Lsxw8oi4 z7vq&RqBwvHYXEgWq$y|%e(V@`(EQC|3{Ogjx0{7;I8W&e{B0a*M$ zF&lmh$sPl|yR z{F9Pk$(IsPHUx(?{HsiZh5nPGVY&aLL|DQ2P&K71Neil`awQEydtY2h{Lo(IE2#ns^!@|7 zLVN!KccEPWfW%O)f54AWu75ZcCCo+%M8=5q#eD*f67pC5ML>%`LOHjl|8h( z`I4?+<{T>j8dDZJuNo_Xu!Y%wHWKXm-_+^gcK@s;3Sa#v5y0>LlcG6<|E;@BJ%-dP zYM~18Cs#|5zH6uUiuI0|i4JsW4KwAavLqnhc|dyFrI7py@C?I4M-f zeC#tA{7{7oS&XH?h~b>jYV)xqFbd)Tt+p6TgAu~%q3-5miC{Ry2OF$3I z$CAOg2;c{0_)!in6b`~t@y`q3>d+&z6nVb@t_VFcE0Ms9P>EiorAWvE=zBeeRu}Ou z(Bb7NKXzWhK`)z$#4iZILD!m!#4d2aE1oPFb z($69L<83t4>j2rT8-<_PN(t1=Qd@Bqo#R@mX>k*fiIm0CIW6&91(d}y)h*~-Ou=@q zyaMbVx)Y6LgyD`L<27q>bf{}@7bmrv^`6NSB^JN9MM#h#Em<+Kcnj~Y*btcT` zbY}64ZZ-N=+%olBgXr+f3GQK_2kT@g9g*)Xan)Juklyd0SvMl`uQ|Bn&&f1B?+Vg* z;=9DQW`4_VvG`O$$)H$togvCFXG_kOeJi8m4Uu}r0j|32hL$IR8>J`ph8z(pqZb#2 zG*X(qKBV(ozE+kM0lcp#_Px8IOT?XRWIv^`5(ZyV ze2kJuGLYZ~g4#ngcw|sIK=@B_?$#4Jj;(%ATvlHaQmUh8;t`aR0U^Y`kJ4TeIb}!V 
zrU&N6wdVF)2cTLgRAwuz3tN_c|IOa=di!4s&2OcgEqQ_Gzcm4IG-XrVlbgOGY0g9@ zO0n0cLvmAFA%ofh6rMY*zND%&N1}-9tiE_Xlt)hxw^@COd#I0u5cI6RggsP8TnJWH zU*R5_BV&XhtFII%<5a zi9h@bS5t2jaxn472nGw4U3z}_jw{5Gps^FZ3SJvs5H$nnR9i{z{pxf0f_lGk!%y<9 z*0Rc%0$G#mMpWJ`l{kKL#pN>x(lccAK~3#w^5-dhM2H|8JB&+Vr)0sq~Syn zvZ=HpQC>j04DDMmGD4%PJmN86eyQy8L0?aXoor38A{jz zC$!68{?NlcyuO6zCWMXjC;H2F8K{sZFUS~*QD)}v%IO!A_x||fAI{?nOq}=a`q;nB z1hlzM?%v*ez0Cb3^f^F0MR;NbNqFwD3^`~Zn!*Rh-^0JKZ?K*I#amwKE0G=v_Q$Q~ z-Q(=z%$VKWT+fIK+92{HdF!;i--l-?F@(-ElK76+GLo1;6B{*>&~2`62KNzX%oQ8Y ztuPt+6&9$w?Emf4%0Rj8|Ln`mD0#@#`J9e`!<7TqfWjUs)<_X}+XndqaDfw;S=E^B41 z$~uKO5;fA*3ksx}KozaUo69%)JpEyQ8HzIG@7rA2?==;yogRK^ulJ5A$d#Rwc$&Pr z^0Tv+TWMq}6ObiQCTc&w@ta$(RpUciu_bVLOiG3fkJU{Z+GLm4?W z=$tMf{(zKR3CT3LRVyIQN~*4ctQg!16hH`&F6GYt5kT;fmM9_b4Q?$8_{z{pijby9 zhoDrEG}*IV16$Ps2!7I;Pa#gKNRRB<(}69d&Cv&eGkZFJR?_CQkQpT;W6msq8ra$p zIP;^!GLt5(|BQ(Vi4ZX4B88|S1qYm`1SX%73acX>2AtakCOJtD)sUFMt=|ITk4U3b zkrablLjvOLq$qWy%7C*m9hWk4TfkS2Zu(P5lp3-c)( zz}JtiiJ5dJF=SH}NuE91J_tDLOE5Kl4lz+dI_1ui4{YVy92E$h8PQ=yNY9&SLgxhv zbIrs^A<9VfpmVCgBoC>u3bJU>IaFX$kn~U)i5b}XA|NhK8l{A!7}$yv5a%aFsUSZN zZtVydehyI?*jf=lNRl$fheW9$S+ZvbUm+d7KV5koo@Sf{d1ZnK2AcMl_O5%knPdnZ zg*3^ZANuV10AJHJh|j{N*EhAFL_b!_=#$L|m5qAOKq~QMGrRPDv_oB>A+(rvBzm>N#%yrLnU71sTD0%h6kvH>}pd;zT_cFuTRWxMQ%0bQGtx35Im zQsa6%Z@{wnlxd5?HQHE1pU%bK?37!01uN&1Pt1R*)uh`5Xf)|GX*EG=h|)58wXO0h z&?21@osFG-otRF%6+^c)Q4^ytT-qfXB@ib~C!Nh#9yk4^{q+1^`f2(p_$m46_{sVi z_^D%FVDvC@7;Vf;j53%dWDI5p>2MCdh^_qe#4bO_#!PjDq-ThuXSma5p{i3~uqa1Y zv7kh7ILR7Nt+Ld2Es7Y{wacv<5w(`6oF0EZe$C;Nkj%ZJh`}e;p|*pSf5%B2a)eZp z3wj0@?T9Ob$2A@5s%6**%*SmVimIPaWH}70^{b)P2GvG-`+A3a`+L#7gS{h;!;bxq zXvaZEa+>$VNcq`6&fp7!5JdnM?_P@?91`{w%)3ZP17%&@@x7GR_XV-ygeEdrXh@9$ww6bTI_G1 zWPm@k4wKYL&p{0=tq|}1JTdbQFfL43F4$w*+dm=j%k+27n_+&9^iQ24<^+Ep6iqM-9VDExcdEsJlw-FIQ(LTz^~ zBHpwKSk&_(KihQkE#7sF@M`n52q^2&v*+N2(}mJ@MTj%a=$TR8l~*XcEoiRrp=>}8 zcHgm3D3WKGZZ&+rjld$a?ANo-XXB&x_bpNY^AyJvSc;vYsl9Nqko(=67P)VV-lV>1 zd=vC0^G(H@k8cXzB)@5Rllvy2tgCjSw!IcrJLB2n+2`5e+3h*t+2%Rz`OCA%bIKFx 
zIq2Eti8|^#`f)UQ)Oj?0^y>(HB=JbTHTuPmNbUc$BZnvrg$EuCJR9H`;2V$}U>y(~ zU;+ljH1B*+)K$=xCySb-tK)R>nUCy9jW&vIU^!~mlK7iy8L!YZrvxAOclXn}#z3Mp;J;XAi)~F~MdPU&E$HRg|wwT2QTnvb7eh(T6$| zk?C3$jvll*Mw=AmEHMm!(I&8W6%gSi z%cv21CTPv~%(YUDdpyy^b&;o7t|vbuK|3fVac0<*DzB31I_ukqMI>@cnv?9WMV0rb zV&{8rQlJ=jsb2M0z^CROEBT-X(@}sJrxoYG><3Hkn$0q+iQF-}H4D=Khlynf#XmJ!3neMZQkAbVw0^g8r%1(${vMD2Mm4%b>-#8nZR@crIsz3grG^PyQ=IDyB(U% zeBzA1k>OyPqQh{NVkqCLC1PZFdt~ zby$y|JejfX;lv3mKo&>V)&~{Hbt{^O5h~siTyvbyj9E_Ae2V?5{9+`g(yRa}^K@$|2gXC- z$FrYJ(`hy{W;A1{(bYAJb=rfQ--OBJd+NHT?WJtf%}kIGXFDB3vpa&{sob>)i&ajx z+(O0-)(qCJA4D9Oxi|S`NT8c0)68cqxyx8a#nxTdPuF(WNP$(+_6eGa*fpC2*#i!r zs>WZ_W;3=4qa$M<*JS{oM!$N$Aiv?6`d;M0^#i$6)`k|zi8Lpt8dT-nLF~cnQ!bz6 z#{Q-m$@WNW(JDuI&G^VT`+CE=$~w!M$btO9`T>!cS-LHE)yR0)8sR#7>Y;VlH?m*N z!lF(VsCkOFU-)s#v94cxHYrk_335lM)OE5yJWVyr4VLLCdbkegzo;Wyv7xVe>)?Zn zu44H*mGm-1B6wv?E3Zi+b$r4#FGwOvbbOc9ocjyG!5v#`MumtqUt4>cPanjg?kV<@ zt4|IT*g|t>eof1$u2bYy_}y_ULuKt8jA?(D3}qW%(&RrCw{#>Wn|1W(QK9i?QZ)>-mGFqG)Z z^c)78Y|XQMG^eYY7jfqN-KU&SrWNjGe-&4jkw;+S>m2jK>T5*P6jUq4s@|Nhe3keX zRp0n<8?Kyn;78P3{+WWyAr`&%MpGi2cVu=SO~!ZSzF^a(XLbl9}{d zBP`Z%eF~BW91$zeFC@esHOS2`9R8P+Yv$CnkDTdy$laWZG|9=RyWuLX%)1|)aGbWl zdps2voR-a^v19l6qr^*I%soaG2_@d+g>V?dBJZ&T(NY(&7ejV!D|*_m|6Mb+w6Q<) zyOH0q33i(nHlxXI>j>i%gdt0BLFq53(;Av+ZOC>J5CnPa6g*R{{wh_bEfA1vy);32 zXMvW0d|+5GV~DlmPSpuXmQdc-gZR{VM?u=66WITFHqv_4^{!3Q&6-QC4N*6BT=pMMD?r7;-#~=_F@jDY;F(GD5u&|G`*We}cdd1>E}_ zKvY5*ViH8w2QI{eNFBQAMWwI_sPVcPgGcG)hfiBif7%)P6_Q$y0XAOEz1N-mi(q+1<7Y9(gK zK8h(YIpByYDfLBzN_vBFQ-+))1v>x1Sjrjm#&5r3D7c=uch$KivsMUAlLhWhRTapfNanM2QtZmEnRDdPnSy<@(%@vWILxWRAZ zBQq&gAqHs+{g%+)g1t|=UT`Y$mu2R3#q&yG@laiGy@8P<#cv>$prO%Vg*X^Inz+D$ z@gs?sWyW;Hn}AXnsh-dtPgKN!vxT~*eQSAMsFyuFB-tm%91JB%bXyb%OpeD=PL$fO znlVgd&mN>@jRwFJ$#+LtNqx!*l>eaBV}w`JVp!c(kZFYhF`oOe^XN_%mLg-D+C!=4 zUdgPE)xxYMaDh5x#}A=mDZR4e1zM2eIYnhL1l}>gqms%P0-w+Al{&fuuhcc?_jO}D z3W^@v$k83VM#ukHEqY8oVxVLLmg>U>N{DIY9p}b)0ZDM|KgvlRE(2I&D@M(i24nG) zjgqUZJLARkj#~^1ybcZt0+@6Yux(z)=vQi&M5o0#*jD@-svbS!#_Tb6@HEQC$5RDJ 
zE8H0$NiEC97!$)Mo%DB{gGZV+|8T4?iI&b!cf^FUBM#i!&8*aoZJLJznGd()b=OdW z2fRQ1j8)*1&)uao=h3f!HR?)qGL<}uH9z%9^?MOL5-lPY@Z^wS|G`h4;sg88o}rC+ z<7`E9^%vmlwNY7%W7eq1TF;Jn?4%2Lh_G%GasNV68ULEve_W#LA!4(g=GMDmKTvk+ zQ`DRYrjZ`lM?h>9c}oVE%G47=S$BxI|7zn$>3S(`)N}oPi`wY`nZS#alIN>dAprRe(v4;bY@HW`WYre{8K<7$# zh?~-c#LMUrccq)}>A??^LG;MljN8FOdE+r=qa_{h7azeUVb9NxO4ef>6>4pOl6_$< z#7}fuQEiN8WBfc?pPi*hOmDibd1Ne<(hHXGR=AZbN)yrk{~>ym3tor)bL0ha6FePG#?Ya1~RR)WjeR9GEW_g*WN< zOhHIY!+^54#d&cBIqMD``(MtI0a<+nSk(^BIX-?*vekznd%3tGMiTsh0_#sk3-$nz zrZPryYG!p!HVo0Ge1bdc8l`zQfHp4ji^-beXr8`m=~X@%23fM>_wZT|FIfCz5YXfK ztRDsGkI5lYN{ADs9ovQ2*kJ}Z>_+qEW)+qpv;z%(wt@f|hGKuB?Ume5EURgglwSK< zFBG?|uWNT0?NWvTo^VBD4CRjfI>i9^Dm?_bf<5=OzC8+5Wd9-C?}2No_J1H)+z>0x_eru$#$v!#{Ugb|9?aLPuGrE zH+9FbQ$&}xwG>48Li?*4#0e|-qEjAvh35777e`-OAQZF)SU>-(o<;bl+39@?;D23^ zQ#qF{vn#oMpZy}s((SS?pV2EU2`6HY*6dA{g}OR9?8*yK<*Z%99Af2bz2)3p0UV(f z_q$x~fcbk%VA>vE)WTm5l%Q}W7LjerkrGeDim8uSrcBz@lg&Nr^0v?GdRCYIR-%pc2b!+oR9@KB#|2mp z36DK}E^LkngSpTww}mJ3Ikwg&*6OoQqiLx@72i1aKC82HC2lJKHwRQy+I}@A8t^(Y z6%PZ^<9znW1NZO=K1Wn7{hT0&SZTJz+&uOh{aueckk#j@wC?6UG;OK098K71QJwKQ|29H*t(!gEdGr#zA`3>|J1w^LUiT1`f6 zUu(nAGXB~zttK%Q|Fr!*obW?IM9y~-!ZJVL#@zStP(Fs%qo;7>C|}&Ra~tw*)GbKz z3ntn~)6=JIOtev!;$hpCaEnpCq;0)6i@Q-2kYu$`p8M?w+u`J9;sM*9ZSCbL9q^ms zEWBwQN3Y&gc?l%WX4t26j0+17QJvF+$JylbpH{0+n0Lip}a^zZ;t$gw^5tpio}4iFH!{ge{+MWhnl0POdS$PImQ_<^WM7 z_5+Id7}$907Jha5(>137B@*@5r@Cf9x|(He{5qH^nfsOvsWs|PH>X9L-g^WHv|0T( z1ISzPi3hD)@k+*SkA@W^zJmfx$2iw+Ua&UKzMfsb@cqU9iPOH zEwCVv=-zr*E#7)$))p0}=eo=r0v5y?i}$_5In{amQ?_Ft4QqOb&Bi{`m6Yh&VyzZO z9@(^^!t~_#?jx<_3!h1#wnVBCEk|B$++lhJf0;wTVpXG)-unY*36Fo`cEO|JC*BdW zU{SUwiW~t(cL?T2Xq#Eh9WrX}HnEzg$~f`+e&pQb@lW1vcr;AA$AI)0!%EH_-US}` zyOQ~JY~fV{KaOPE7(dP!N~iOjeO^!J!@l3P`XijZCxS$cVz1AkXaQD#R}ovL>QX=9 zxstm8r+-C6li88rL^JG+bZ+y9)15sDZHh!C7J9%Q+BgALaxb4V;@Q|I|C&Oe6rLmlF%lI$k;1tqV0&X5GiY|}5 zaTMH3zTq@CN#3{lds4M;b9Pc;nf#VyYn?7CEq6)3_e$n}vTxoK`~ z>bFzut~vafl-br$IbuGRV}LNPOnIFNuaA5=Tf+t?eCO9mXg0mH;wKsrOb)Z?b1gaSpjO 
zGuM|=jBOw(uKJ%gz#5fY#%YkwAI6yCIvx|`+{>zkxlPt4l56$s)>4YlYo1<_?AE`; z=iOMVx8|qv%6j)tc9C36oPe_j;+YeBanBeX_}RI!qh-GI+`hSj|Ch z?0vRu-8d4v=f1?5MBFflmqgycUkx2CS);M4Y`E*5t5`wlZY5Af;%+sWMA$GW3&m-0 z-6S^iQ26q*R7@iB?pp6CD_O(teO-|Jfo{VH6t97w?F?9JyeY0TGb2?$lBTF7@ZdIc zQf9dP!(X58(8;mh)L?rot05N##L3-}Vaw8fBr6y6Y4Jr8zxJaH2!^iZPDv_3X^<2a4GIXQnv!e`)v_wXM4QOMusNxulNW zz-LcCEL~qU=0g#m=n#SMhY?HHF-)mT*Wb820sF*qq;b#fx;7u#r_spK)`@fLsqc6i z-;y%<2w=KY4PiwF;IDW4q_$+gtxX1sq<`D(BEf^Ig%}|?(;o?NqY@9P14g*j z`i2q0H>$yEd7SCf>w)fzjpD!CUC(^L6a3+af26ND?egCG-KhIu>y_6g$=a+)lN8=_ zgWYMG(e?@7W;{gAO>mGX`CGqSeeYk#H-0=*^8MlT;@smFpAXW&@7p_bg!gAfBp=}h zRxe^`Z=oi93-Rzbb^3Y{p|~-gkx^dnT1@8r$Un7$WQ1qWa0B1;_)t6uS~}vz$9L#C zz4@ENX;*{h_Y&>{Yb*aA!n$VT%oe;q?Hsi~CVZ2xP1oSyN`;uDKgU6BM?bz*DcHOd zxQgge410Gd`|e9BaLh~Wcy?ldiwZPhe!fuB{PUDV`Zw*?Dno6TtM6MvaD)=;F9#z& zzaj1y{MoqVUXf9I?@WspA&<7=YL4%cZ58m`Dy%&t!ZSY0XVQZlQ{f{{bV5yd156oy zLkPCgmX4m?x}huM-yNts?LP17&vorApWqgQ2{^TlYd~`#PM{)<(=LW!i=FZ}gsEj` zc|oLU^P2wx&e`YdQl18V9!K%l!Al7~-I=_0+Cau}M08h*1-U%qX6moDF*kwp#48!V z@JMkVZm(afK=hp6FTLTkn@+>gHgRS}uVF(0lLBE z-eHk=XUM>Fvxn!UeSqGm6vr7O^)~s(vru^q^6R-7?|JDcpf@SS5e?{lJum%%HT!U` z0YAzV{}WX{&j4xXO(S*mjY0ODZSr(IYLhEql8M$}HNROb#5M}>VF zb)H#x)R2XLGwQs1yCh9&RygyFJ$)}(<2b$mYx(57Gxy9g2iuv8vCKK|v^ul2#CBR? zEQkCTR+GHRm!nZ3PeMk9IN_BxpptoVNRsP%NkHMEZeX+ye^TJ=3wg>fK z;LQGE7mCE?jN<_+>S+6HS@+NBf?SiL+Mm-eY%{AlTNrz~nF`A-KO5>9A7JPh%NM%& zFv&N)Z_y`!Yi48j2tK?t;f^zt6ykfzId5Qpdq&~ou{-bFL~G#rSdEI zQl5y+vn=bIWCh?x(d6bvQA}b?CYRT`jf@GT0AB#gvNg1F=an9+u z<5YqX2FzpPbS1qVmoTv?U+u~7iY=+Wm8TI=_RYH*_S}qR2fLCT0crgIaCsk)J#M-2;AU3K3m&3y0*iM7pK!`eIX>ady%T?R zZ7$%^D_n{exi2J7aHzfzJqcnpW&2E!6GUtJG#wB@Ye)J3{rbX!!iCO^VC!&EC8{iWepY)Swc;zinXR;K+u?K|nfy58U4e=ACC2L8F z-xYkhW{4O3<%S{Q&oB6Kfqc$*aY01R6xxAo&cu@h9xX<%NH+rLUJE^#sMS0Nj-_D0}g-V9o^8EX1B&$|4ng3()5lD*rIwH z$hAfKHfZw;;a;F*A2~q_`4$C9;6ok~S)O-hcLOx;2EF3Jq+KugzOrTSNmhts*QyVc3Kin5wz|%Gl`<5Pqe5v~2udH?<8!}{n3XTR ziMT5i++*rGidzjHDZ^YUh@dnkm}u!TGZ-bS4u!O^&+2U1j^Y^OiM6V2>3h%HmgfQ5&69zya@;^7k>Bw-*@&x`}S4_0(g%wO?M&Ic?! 
zB*GB1eN+k_oLJ$k6k6 zVfp~;^PT=zbx)83iVr@0u=GEr7Wl5uDSh8BWLr?H0Dt?Q=1rap-4>iK=*7Q{4){KV zRUw3bkv7FIj2~cL(EXqULU8|yJwUEObO&Z^s$W?CsG(2H%w{5$_y1JW`e9~LQ5PHDZGOMX~LTyN=bwk8y3pU z45KKh5;ro8ZbHqBkQP1u(~P(tnN0*gEoxkl6^D z{QMI4gA{8y>O@c|R>6d!4hcrMH||ve0yPZpXMSwJFcB$Em>?oa?J&v?C29zs@K)Tm z#Fuy=H1lVCY=Q){6Rf15QXK0JYkaiHkF;TIQaG-NOyNnAWeLRi2%Vo-u@S>$q=c=J zI)YWiki%?+$W%gABm)vKO5uq_9I;M?Us}Udh*gK_cG#VuTO(9R?qhKZG0KF`NN$I} z?hsvl^Ni#b%qE_S-Iw?}8`|+RJvK#xdSa_HbHt$oAB7~?S{D9&YGDX-TMJQ86 z#P5QlicBhaNk7GN<4e)GQSpPK#fEZcOW~;_qD6P79ZPxt{E8O;u1Mzcg&Rdxj3SS^ z6rkqD&hnQNEy)ATU?0f1VEhUGQ&2y{RSNYd%qnlC6!A~Qr09O0S1CU?q3$o7+~87_ zHe@z2%slb~qd9ao;R!{!In+j>l-z{_B^PX~0D5s-MV>hV-QQIr>Urgg&@Ko^q1<`a zGp?m9Z3N(7ZMnFL>Mm$U!EFTu2WE3I-e-#+ zIDt?F?K6B9-*CduMDOzqEjZfLLLgot;D_}Y>WX0g~ClKA9Hn!mP!jt`p zHiKq?>V+yRCZ9)TL7g6)Ee4u_XGP-+Fv~NaaX#>5#hnmzo`JI<@IvVf+$%_#p#ct5 z8$NXgW8{k-xHe$X2};i}-#}?atL1XcNHxHA2Ca*o=eo~W-4JL6EabZ$_%;yu{^A!Q z%!`?!I8eU%fce<|#GS#kKzl;971PhFp5Zvq{$TbHy#DeN(VkXoK(G}T$jg~wuz-I0 zek#&FZEwNxgnugNJ;UEXi2oDF6#!-gUB2ND{(v`N3^t$)HslmDAQdxY9WeU!;5G zj7FBIXd$HsW@|ti+{wR9jm&Fk*8>DXvWDtv@n|vIf9)D#)=F#eyJ5Hc>kh!ySZffe z1?mok*P>RS)AZRH>}o)|p_TXlsnK4tJwbHCEFU~GWYeHi3jq%u)dJ8#Lw_dV5qK7qbN!|e~QF~sZrjJW+RdJ8Lh8$xaiU2YqGatn{%KVr+Mg_Hhw z%wIA`Dj!mQSi-27o+7{zTL*(MVr+BU!BgimVGPZd)B}VMaaE*zud0Kp4>LbxPPlwu z$u`SH+5@c)g8Gp8wzC6*4%%XGqa(f!W=_OZ->rkM4}aGuj=upt6Od@d|hAk!>W7Lw-Fz3)=&WV-o>qB2Xi03uFvj0 zAFAR9@-+naA-;==7K*c=kAKtF`UATUPuJJ`9$!ZRABwY>^WLu)KY70)4Z3f8Um)|M zZ$zB;^<6ml2oU@h--5oNeEH$`EoRX8qR)fY4?-Y(XmjYo<2S7#IUV?QSR~C4}qaS;^NqY zNzXrJ%k%-E1F!n8{Z^94cR#r9sEwZUtq+Y;2UPXI@x`+T-`gi3RG^n|TmGWtf#rp3 z{TIg;!v*;Z*?MGmul~0EMe_sC3*X!4_t55ll^(e3(LgUCu&sZA_(HQD=DxXcVfVtb z9&o?q>mk7Z>-{gV74U$}FY^=Phrt&jg)bxy{y!xAi7otzH2g_Kz9|YtB2$otq6Cr> ziAIu&p%jEM1+o$e52G5xa2eqd$x|Y4!-xunrT&y5O$?I^gw2zZAumQ&4zuicF@j=B zNEIzbIqLu5Pa)iw-D%!w-3uVsY4DN{kgj!fS#)|<2> z(t>gp2wj_YLD~^Y*RR%_u_R_g;fBl~$gnAXLi!R0=!$$ok$^zy>9;dNLJNnJjVvCLyVUKCsb?; zaz>zXN?mvMkgTZ)?Ky81^FL_fe&{+Js_R)Q_ZnlS^6&Sb?4J# 
zz>6eeyzmYqq~b4ssz4#C$X+UcVyf_bs$e6kXw*CrDkiDS_*^lC_(XKJcpTBF;z5Pk zM0hgksG^-x$3)(upHU^~RAg3P*nl{tVsb_5L~6FQbRpTIVnt9H_MD6rMv+*Va(x+B zB2)=zAO;6#+hSmt63Me;eL(M0BgaVogc_`3XLg@rjK zD{NC?x)K{Ip3!VA!OB84#d0cWD}-GME=B7y*F=`a?3JIb3b<72R%p9ot$;GYoY`o? zReqC#HkG{Fk+3o0s>Dt4v&xU}B9O}MWqc~%zDs=|A{7l)I2&`qbBW8o zs-U~Za*K#6GtC)NCvywo7j2aqtMIyJO68)Kp{by{#!D5;DN?CWCy8Yh&z8YcCvXd! zDw>x$&v{a3jOIC&!Ko0q#sO_2yUHnLG;^wdezl2xR}!6b{gXujD%C`zPD$Cto3fVoVvuobnO&ZKhQ&P8p^O+HJg5v7Tae z8ONOV5&muV(@)<*tx~l=QC21ViaBKrD$ut{M}=*r_9`5=Sx0$ZW&FUO9GqM~1%Wa_ zt5BR`BAjW)sAdiN;_~d-?*@ATsBKO zt&D{{*TdEuUagpgl6?RKXYq9N|L%yN*Vmtj=oriBp;YHUO?N+=9!nMHkA~}_^&j(w$uLP{ic+cwHV0h7- z%Gwt+gLo_mS5oc@+-Kiz=(&F(76u#`cyV-wAm#}lcHZ!FM*@f?_aGD(Qr}vJm1LZk{A$-3$d@iT)zD5$PE>DkX#Yy0OZ1*C1^ften9&~>l@84 zZw~s{p!5AbS->!xa^UmC>>Jc2g*m-=Ak-GxC5H(jbD`1|+sGX|gnc5{l~kW;{s2qj zKhf*TZWLG_09tL)UGj?{q=S$fk|(CuKtCygf*cTn3(2d$UB3S8#0||8+iS3&ba%l9 zi0~lB1r99LJac+O@x<~Pbe7vao8^M}8g*9O4dQcQ0E^#)zTO}`QSeD1&GfQ2W7KmcnkL4)lkKH4|tTZkGn4zn~;eGj3uA%aW8fC}VEM$daZRiBm8+ZnnpW6G>h$V8Oy2Nzam! zHgv%BH11>e+4wIOv)H)MV5(++Dq{q;@?a#BIW3m-V7O+l28(o>X(U2@X07Q7mbV#X zeZ1xXP+^9b$`(no-Ctq4z%=>6h5WkRYxA*1FB^%wJ?X)ifh|86vD|B8+L2~E?q&wP z-S@!kH!e4rvpfL7_*(`UZT!USj*&=)vOZ;bD3ED$+>hKp%mk59aa?c@8EvAUF(gf6 z@0)XCm~k}I<+vmHZ%Gq5M#*tW@-(i2LdG%~0LM99(wsR}W)D7LAk%b`X+O<|JY>X_ zp3!q$bPqLQO2@21Zmh43~1xnhA9p^&W7O)j z83g6AIi~wGuf5OLv7Y_BX4#DLY3Ae3#lJ00^cZEwrHj)%2Rh8w8ST^D$G!KyUdLG) z0$OS8#aWO2K4x8v_~Yuu35P~qOyF_%;*dkrr&K?)UynU6CQxZaW~o6Me|u?r^JxdS zXnW#m2WDye>S%|;Yw|0wXeOmccBg zd46XNQ`aYGd{;}#8kaLHsRJz?p1`}213VF`#`DX|CwOWB^CpcNwrU~sru%i*8hmOw zSyS`|2pYl*p}NC3^${9qZcNsr>9ueH2O^l>*F7&WvS)&)#A@PBKC_2^=)jE%ma*;_amNS@W9l4F znAo%VCdq5UPGH!>_(sUkn<9GDflu>zC?3IAMS%y)#nzIti#1ow{aIofy1&p@D#&W2`nu&F+0 zY3c;?g8??`R;PQy(UkEzc~{eEL%2GHSR=5MbMk|yAJG{Znln|HGqDfg%p~EAFW^i! 
z=1kt^M3GBG+wSLrv4fx(k#rMgQu7naj09E-XeEE<1)2F zE$Jc6*zHWug{A@mq z?Fh$ZY=>grOSdP(#WD$G*8ijBqOpT9pD1!7$V6z)Tpc*w0m0H|X%AHH;kD3C#xZPc zKVsSOHUDyP&hfsA(Qc{CvWJ;yTpb**@BZ?T z)MpsLg!h_mRXR|ySxsDX^saCsk!M7qauOJxrBI7T?ItuJSGolAo#($Z0{^Y`e;VK5 z(A$yisG1|Cx00QkfKwysEj1^Zq?iomHe%|2k{!Jfc# zwdfjs8RPHk=HKZgxh6@U2B@K?JS}d>?K3}krRgl_dtXexyAJ5QaqaqyWB}`?1VlQY z?Z01{ceEN+?Nrxuc&IpVJUXYnG}<}4d#L3uoxWP9Z1_%|BMl(F2ox+KT!;KWMgsW% zjL-i$JRdwAw=V;ed(B48{@Rb$F<_j@%VwkddxG|;Oq%8}=jWm&PbazJ%5 zLhq9=ey6q1+O^M%1nmxX0+^Q{=kD+Az-jHrI}@}U>i{p^iYISt#wnK{-ZsLs3}ST8 zKDyDrHx5<(Y9IXm);J`Wy99H$^eX+K$BfDP`Jb*nTzDM_02(lUY}o#>tM43DM7(b_ z(RGG9?;W$i?q&GHedc<1$=inK3$PdUG8FOx{Qdp@>-qf{Woj6E;O9g8zmzA6oyC|c z;L~*VyB*cTz%3PWVcciI&rHZFtw>*Hy@`->A%6R5f4kd&AtJwxE&QjRMbYq$C=glb zC5EX??LS)|hlle(s`d}#e_j8tRwxj0X#2kzU)+DMs1d!X3mKRE%P`W*(A~M$*2Hpd zUJGDnDot@%^XfAv2OmA)Etee(y&!tXO&Lmu8j^%%>F30F&@EwJg8F}d1zptj$j z|FIgAPl;(+YQ&AGiQ5?$uDqyAWR>3<7ueb~5_PIO*& zt;itr5#KxBk|N}TR`dJ7NN2ROsb1irIX1N!st zw$j5aovxm*aPiHy=w<7Gfz7IcT?IEw;c4-K!1k}v^z@mufcg5$Sv!=TJFS?##Ye2p z_J$%!kXz@TI>}=e9Z7+=qxskVk7@P9GJ&%O&ztHiC?KZeMqwV&w7O3#e@VSt9hWxO z>)>yi!M6qxX}Yc~AO08fiNuvzKy#{l+0Qh%^>KJ{ow(pT`=71iLmVhHt!j!%W)h_L z*r|ZN>jQ(7f2KDQ;vYeItE_S(Whdr_TXFDt9*Mrfl zqM5tj$xY@N+WWh2Iwn-{3XX#ABUb^6jiW^)Gj{dRI_J2ys&aVOSJ8HNKrUz2GR0ZMplER@NTa7{=JVBd(y+E%k4Bw?UTsf%Rx8~Hbr|5dQv99K|iw##h1Dk zE@|7I(I6;A^?DR|U(~66nm0d>cBiVPihP>IZy0nBj(5FTv%?&q6I8?QKRyfNDLA}< z*Ct_6xN{Xgi&d2Xf~@}m&r}cbXkIH0jBNp z6G($Ovgs15!>*K*Qy`9REcQK5nXGe!dtXOP6YajqRX>kAvnSQZD5~alXsjImjm0eb zgJp6tj9@M}y!#jZgWb@fjm{Tmu+mT=A<0a&;F)AjF~f~T$5t)CEPrvJ=s8g77$RqX zJKhwZG9jQ=ik+x}d&!`&x5QBF3L-5DNf{GLO%+V9fp+gW*n$H}kG+L8tfWw{ zVoNRQ*aqjn`fgbUH;ZTGu=I6taymqZcT_ZuUXP>b-j$tm|I>yxTCa7=P#afQ6j0(b zzD7B&QjAM@+Mom~1*{m6dPs~iXp&gEad^6`=Ot?xq(h_Dv*MYC>|~UhZ;M_sDGt@g zh1*U+@>zAGQodIeA%gsRypmam3xa%MqK zW}R2@HEVF&Nf*Pl41@Ex(>(JkiTv={Q7M@q9GV>54OT$bm4l=+e|I&_XF>G!Dw(2r zW_{qYYpmja$#IyXxm7WMp8`Ll>%E3Yvw6ySb7Y@0PtlW3j&!F_DvS31bVHx!OQf$8!Y9n&2OfaO{s5xSe6aOds28(OFNFtK!Pm+XPkImJ4Be|j6rW( zNTXRFw*wh5_>~q}D;@$YH9fhPH<{ 
z?|o0F_y~QP#;H3vpUY*O9)-k@==%0JiWTd*Qv*d+cDH&??cB2t1etN-Gi+>3mQ;!n zNlCu-UNT7u;wo}6P1(o%PZcwP)iZ3W-*s=|c$zmjJ(M0d^a=VTlLkG zz9g(@Rqx_Y99Q#LM;)iAB=cw%xZa<=$ILrN=_-i`WSo{qT?t^3Ol-av$z5d;Dtsd$F zO;sQSoJ3C2``&-H>usxtVW)eNzw_FyM1bTu^BA9NGp`lzvbx0nmT#ar1pCZ+p6pX_ z3fRN=gg0!oufA*aPl(lHgRSyGdfQrm;okC1nKWNXnMfQt_2fO0XR{igzBlLE?j3?~ zNSZaFSP7or{(08|Y_j4qE9b1AA>m)px*YS$dYm7*ms`KlIS-ESwpk@W_{7wP1UyYe zFO&f>3k9_j9Tg)7z5;8w()98jTQeRdN8tuF)an&B71XJ%)8xaVCeHG|bjuL56L?6C z;#JWdotw_`Ibie?xvjm7+ga4;>zbXG`ybAyl-L^FiHv1f5CvK;T!6uCw<75(BWZ%M z^kH49nnVlxQOd|SgT9BoJBwmF;G`R)`1t7{Gw*It({Cy<+Ln<3mv@OioUyAYgRfm~ zk#g#Jy_LR@Y@%0bT+K7{xvse{-}jLns*KK}k)8H!ke!edz$D3x~Mpg2bPN1c`eZtpjCI+H% z-4rssPhfGb3cqd3ozdlWhUQV$`lWlf$UUX2>0ns)L;kphc5m6AUPULXV)#63FJ{vfCvKnF>s5j``c`x7deD#uxzETdE->H+iVoC=WbN^T`n#w zKeeGB?sORVhg^;uj}D1Ki0?Cx_sm_Ma~ziaYCGSH@PYt>U7;D@<@Ia zMw^odnw*!e=3SPh?=wxks`l51PnD+q>c!t6>+RiO%^%!Umy-uP%-(vm&0+JAm+2H? z8AR0H4{o2T>cpOaTH8bMjs11+ineT6!eAQ0{r+VCzoJNi_1gsFYT2-G7YPT3nN=CWp8`~m`!Y_&hxl97AH53Sk4vu!ad1M z6Meo&+B76FV4C&I);wRiEfvQ2N)&a1^UHBy~5W?~<3Fvz8Eg%y+evXYmijEXSB{kBE zk5Lb$_L;s#=qHO|V!Ov6R)yZZTdXdRS65a}2!uM*q*4=?M~=zHr|ljRNk@+xY39># z>(K^|!Sb#Q3BE(gO-K|zSb7^-rHsVu7cl!@>m7WI-7dC$eqO{qc2x?Z%){Duoum^i$xokdp^VxN_3(q(mw2}Z$4+t{WJ zOC5_%`Ti!6t+$yB>hwgSvxH`3d%F{h6_EGy@vAP2%fX1K&d}#3|4mbyvrU&%9$EgJ z*kG8yvuAN{r=OF>sgnSibuE?EXPF)%5F>>J+vosMlfX~yR(vL|jodIU+Thb#gd z75n~v68|dAL5SuSS7a$Mkg`hJCkmEmVpzI)O^}_Iug5DuX?E=jqrbz?+_)%EqXWF( zX-|LMlN40EessnemD+#~~1Y7~iZ)|YqNLfI>&dOS}lCQR6tFm2|`_-av zW^rfb2QU8K=he}BoTZ^{G?R59&lxQ;qYF%PMSU8_O_*9oNsVe^8$VysnwP@i^WIo% z8=VD8@7aHQT;EW|{nAcmEq6pJ-@tikx4z)s%y4Ttera*^NSlslfpPc`_Z={_kf?mn z?sR&q8V%j6wjPh)dukZbIHI4-x<^ZIa};$mSJ;@Y-9s`PvEr9@qm6;1@034-zmQ<_ ziHm*6!3#k?V&3rKhuFn8^5Zt_KChO!bLqx;jg^ zxs1oB2V-6F|? 
z&M6l}v8vH|Js+h=!e4ctR-nwir06Z5U+zhnr{UhT9Z|Gp`~;=$lkjvkSpxVyCfzlR z8*grAkH|qY{9&!)=GharJ<4}U(EKB-*NA_4jY?rJz(Q?UvdTpFOdciyYE71}N9xca z4AGRcWq;IEYLpZqb^vzAE|?~wW%JR>%wdkz?R6p>mwRETS<<_32tl$~ocma;R1dGQ zn?dZduZ|%*{L-Z97{~1x>oblF*qbRzJ31OUc0lt#W>r4*?l#Uf-9=PaPK0?ra8?#F z`;bX+h01!6vHmeW>F?T oGEI01Is%UNlmxv3NYuhMJO5~wP`A6`RuH*GwlpS7=B zyV8yixzodm0a~pW!R&R!+ahz{HRQC(SaZb(6r(-_q8+^4xZa($8kst+qO(I%lu5&r|u-39`Y5me4iI(mHqzU1wM1p<3_qvwSzH^Rq|Q3 z+NCu-Us?=ifc8XVkpb)SuN>ii;r5m7C-~Mzd!&_4awT+DA)QOqW2nBJ;FHnhho)`r zeIf0m`rkM6b+jSV)S$emHdnE@sq~N)0uglz%IwhljsjPone5xDlF9%%EWClSsQA3o zT@V8;nr~B^MWN;{eS3j6-g%cRFHUmZgwpxp{*+Vzkalhyai1z%ci^kwWiC@Xyez&< zAv=eP;4JOV@SxD8!{hmL?`^j5A`;W+h@vgB$^3zLy}>{|*Z0KcIrb*5>S`9$d6W`4 z2o#!*Ec;kqck=6Q%>^D(kmCs1$?bp%E*sc~O$=NUyGN4xpb5~CcZQbo+EYeXe z{b*@{0WtqchZMJ%gSAr4-)u#YdZDBtITP$n_=;Xahc!tX^@HND_WFywUGGlm3oSO? zxJPl8x7sDirWPAIS41RFcOB*1Q@k3qcJCAJYgw`+NL=EUDiEMtB0f_`w0R#oHor_j z5*(f8z05`7m2JM*khtJ&>#?hDnBzlkp|J;0wAx`}bs4BT>=R!9Vr@9+_7ji!2%=nW z5GpUg{ztgwKO!sdS-E?Z|96z-McV=s=D+Uwk3Nu3P(m-&fr>m3uY`UZ$`@ei!|ty0 zw8o_h41Wi@o<(X*Fm*JGyS^!nE6u_7*m}x6;8VWtb)@sCCU9wR^4b#mj|D}|=;Cd{ z{c>8U1e}M$QFL#N>X6*-v9m>G6lql=G?v7&{KriSg$ASS4|(Z17Vp!^zi;pIS@%91 z9Ube>re}#MQd2r9ryBEGN{1VcP8!w^+K1xHWbDG*9Fnd2nW6F$Lbv1|l(tER+WALfkA&)n zSe&G;$Gc@eEhHyCKCY{Gxux=I3o5;BbF6mTIHGTAv2`NMLC%LQHU*P*BJDw$PM4~(t#yIn6VYzxO1ohLJUND-x=@=ph~Bt zSMrz9S8e!rTf?BC2GokoD5-#(g248Zs?AJH+v@C@CNz8J57aN^-K$M>o3vba%CkR@Qum|vU++O1!wcmL6#p769?KK&k4qK1?v$Wh|4UU#}ZC%P+| zc-m!%);?{FU%9>=zvV$)<*j8)cCHvo3a_U)10Odbs7%6)$hX*I;_SNR0V~lgDBGx! z@jEo-E;Rk-mgf!AJte`x>!0Otuo_*9Y1i3r+eV^eprGNWcx7$sHQbzmAWh+2k!H9z z2@o|e#~Z5}Dp>(`PVTa&YrF&s)k;3k!3Gg1&7I zi1j1(j2U&Q#kmiBfsq21QIg+CNE406D3e545Z|6l{xgCbRi*O_myRd z{A78vlXxViEzUjrudv(H<9uGtm?~{GD?g%s(6;O41J`$aU)jk4a6*n8F2+>)?88qg zVc$5D=KDiBLTUBMb1+`Jm&H*ER2tl3g~-L9;U{rF=&SrJo`os5mggdAa!4@EWpQ4v zMPH-zZ+p~J-8P`eXp|v-lYi4S;VVAzT2=ND$~&v=i+fo_zOWglw+LaW`a~CVHDB#( zviSO!_CoN&lHhpNQ=?%=YYYtAw! 
z@`Nr5ao}THsXn7}aS5bKm~X5@=!3O!@#y&L&;BLW#tJZoQp{Byg3;}^^fEhHL%uh` zWW3L3NK?eW>nXi(h(BtUrNWRAyue5!hPzhzO}1VKN1(7x;cjno9sz|%39Se$17~iB zn`h?)!0HjQb1pB^Ixid=)v#LL+@Ot!ny&){uH1mQ>R3^U_l8A#8!I9F=w+-UkGGt$ zXV>`4I3S&Y(@ZnrONw1%zkddqnpVA&pV2e#-Q>E?XSA%y{iJ7aFpr;7FE!vQ5PkIO z^SESm&{LmSp3{+(Qhh$zmNI7R)~Tice(>T^4a$0!N1Z!3s%*1pq8o#9Lrl+*SWqPH$=XrICNUdCSdUof6-k-8+j;X`BtbR$G2MVYlCY=z z;D&pv$FfY#%8UF7>w`Ou{N)CXymIC-@Vj86=#PJ}Z@8B3G4D!pK#i%iDd3XO4RdnV8S6f7+o1*i8S` zO197rtL&De-Cl&??E*1aW423Ws)kY~uJAf)pGCmjDUTy=|Kf8bZ0J+NmgQt}eX{I@ zwrb+!bY$hE#Q^qR)!g+_wh}enNRPA^S2~`DV-*+=MFIibXyBHPqN67$_oTu?kbD0m zq>Non*Q;YxbJbKOBg}VSt1e!^s*}2l!leBRcQ{jRdWYx;uoDQQ{nfiMC!Wfk!bCOu zf&;8w?pkZVB1#oE zHL|73B-z$*)i_LOIu(;b9B-TGYQ=RYM5uw1h|1?N8(H^g7iL>)$2YpCI-IDAbV}no z->>;^Y%bBsHpT5NgIPecDs0sQU_BS@qAN&7HG=h zYX3Uo-gY8e<|5N=suCz&C@?lwQTkU~0UPi_A)ggJbbEgi^HCJJtaFRUkVu(t=0=nA>LU035SAU-h3VCH zXTa!s9YveDEz+;cEuDkdX;{^H;vf(Q?l$ObP`WrXRQ5nRvKDv>XXCT>;9>N%4s&d` z5Z`-F$duK>#*n9RSCw39sVofd5ybQuR6#yaJvwZ5>&Xk zsW~YLi;9v@5IeSN+cfahyNK(H7(9_jG1o9im{H7$mDH{xwr8ufg+_CV zPtm6S+;MnT+XkFgv^Gu-&A+XL4ReVxE$T4PXZhYeYO{ijB8K=XX`W;flFvp+%OJ1v zo~mYT2hNlu>-hHY7%RYMS`W_FrNWn{f^h`O_;C=o>%?lc>r^43E1FUvQ}Cb>ejY@P4q{PDJY@$ zh%Wd2LD4e(^Imuv6)-dZyEK(Qaoz61+Mz8amf7v6T%)^nTZ%8x)s6*NzUg`Qndj{i zm7ff;teEt7RAQDw?mQfyuLRl+dMF6=QP|i z-@cUXS1?-XHk_MFe%e6q*?O)tEvAb@=ydVfn~KL5_FL_>FT_fHeCQADTn^db#$upX zb|kY`ze@0yLmV10A zbYO*5w&lhKSED{q%2Oo+DG$F5pTqQ;b+k?Ri`Ta3jPtk{7H=hkyHUCP^C@?{@y`?a z^kxovlg5?45Z~1@mNBvCM$VvOou{_<kN^}aKOk_)e1LU~JJ9(Uqt23lMumht;)ha$hO<)6zuM?h4iJm9;?TcEs z&xg*>x*6U)Spg*^ZJK&YUG+C~!kD~(9H-9&ef7`h3!{>e%l4PV%CV}9=Q;fobS^%{ zdRB?^17Xn#d+y5L;6de+U|V7*>HT|?)>4%sf@%g@nP|AuvD1wEH`UOVm&wbmRUtSf z^#@ZK!ev%tFHrVUA&ACKprgk5ZfVc3butC9eYDHoYWe3E^PIfx>U!_W#;>S=8?LC| zjOSIwc_KRZ*)`vSbF)e8^ zMZ3*`Y{&f}1!%fj+tAJq8J+Nh*1O}uF)y{itAE&IB3a)tQuj+9eHCz8IdLOed zWmm>RpXqDje1vZH#i>x((Iyp0@7z3dk&Xwd*gPajtMJ;3Sq zS-)utvHIN`n-8b?>SsIx;onwQc=A_`_f5UaKKzGb#$y-A3~UBs`ty+c&}=P6{$P#5 zbt+kKW`%+)Y+)3^5Iy5seJ-sIt6fi!FAAzyN#RW7P4LmzoaS9peD&ugwCR^RZkw`M 
z8Q&ooflR5XYES0aj3oeb#}e;?=yYa>tz>XXNDsBvb(^^w@ed)}g}N4+Juklw`I?9h z*Mmy9;lqdUlKRAEF*q)UA{=RvX`(gG&P!@k)R;EDoIOF=OtYiYGO=4@f6}s&sd@mj zYOdYUFB}G&zg_qV%pP(WPUG7*r3dFNjCt_H-^>~>?HubYw*lY~n{P0T`Dkz-S4_cg zuxrcCR5Z(8!jel!r3^{{CA`empC4hh8#fJv$E+4kKxNB|bV*<@Z9AL$4bH1@jkcG+ ztgnA8jTR{$RdMTE%II~U*`V0f1=dQLsOm5*+0-dn7NPg#0?qXrxt;SAZc z*V~w(>vPt6vjZBKFVjM=*Fw)J`fws53z^`n84F?}_d?g^Xd%Y=&-jXQx-H>a;Vb!J z?HmrXP~`>X{SER_uiW+j%)3IJ5ntaNi$=w^M1)1&011oyY7DI1G?x5(tz`GE;AnJ&>QxAultLhrt-Z=(^a zGH7S+_uS#SE?tq4Sw2D=xf=ySb65;#hvt0hfrnFWF?Y4wNiY4(^wY}NW{EcHeGMc7 zIz~l!m|RxNU!TaM(A6MGBW-o_GUF34Qv$K%IZJu3b9%_0U-ffKSLHg3)0MoZ+h+QL z?AN|4MIe`rcRQn3S^C(o^ygE!uBk_;M(8zyu((UgW_~&BbtD7nwJu|Ivt{1hD4bO? zLkaJ>&*}xMg~~{C5XJDO$0_W+^cU^9Ei{^VRhl8_&Rei*%1X zo6$z4u&jjLMlp^%{jTc6o(F+**5)h-E*gW)wQchM95NZOt%=*?N$v8y$`PQL3my@dVgVf8Evw4tsQH! z1y-A8Ojn!Q{`*G16&+{>IhGpp|w{{-06E1fWP><4}FjfT; z*?m6p{rkggGMo*$V?3zCO~(;DB89g`)#x8?{5RMNn-rusUVU;{na9|u8R;=@v4@S5 z=I99{tOoEWU!}D{|LSoi4O-?{R%e?l0KHl*8VL%zI;>+w{q3yZ^46jBvpddDDVw@d z1Mb^i)w{=Yt40#YO=t%$RYXYi>npXY#k_Eu)Ovqgn)KUF3OTGD4j^go?`Cti;c+37 zGx2f%J>tLO;(SKDNE6*F^=2&!W#f^ttWTIgzsWiq;f>+5OX%g8#yt?tXv-FY2f~Q2 ziJ)S{{m||&RvHFwKR0as1y1B|b*4w0&&8j8n*MnNlzhz|j~WmbHm?BJjWO*fUvfB* zKdua!HngW^*avSEDvEORHzvUx=U8*->wm?|9t!Wg8qUIAAJ#i3Jgx9e-SToOLpQX_^m`pk#2*t7{{>K%q~w9z%Nep zh`3&lcA>Ks?;*~lP9iS;(7ClK z=79(MOa(iz`V4<7UM;tCJ1%4nfmBKhr7$$RtNoh!cL`#yJy2BuA1IpTU+?eqDbRKG zIi#s1Pmz2I7XCUM7zdp%VPjZ;yxT>fTV~@koam8LIjg<>UNHx{uZe+Psd`jPqd!TQ|{K1%VQGw1oN#=^B z#Cj`i7Tt`2H4vcC4}^xIOFCZp+Lg%Q4uWe0MfH3ju7m1N({E`Ji2JDm= z@l2TC@rdZXm>DlPdld2pPHZi#t-TopTxw>bjlCK126rsy7eU7+C7oI_Oe0rTwh~;i z-bM!bUAPEp8Rp0&(&@sRZ~x21(rw*<{HKbLP-A;}X1v@l=}eHo+h~^1~()USbXa zW~FOdr&Jf~MqOI=Oo0xH^h?jh{2{}=kdI`K9PLnh`{7JLJ7319AvbcO)UPW%G-%?V z6>PtdE?i_@&kngG%E}6>DLB`Rb4&VauL4-zrwg)JrD%&Yt5-lM%UES9Gm}+X zyp~u=k#9}zgh#- zE7f*r?Q#|Yt0IE{*%|W&7DUN{B*XJHN`;mEk=a7O(IYoM!wgd|r=p~Qqy%l&x5~Dq zqY~Bq_^IMdbx!bdb3DKJ=Scc^-$Q_#X-Rs27o)_}rS>Yrq#iX{_+E=n97vBtc0xKS z9}_&D$IE 
zFM9~-w-gGSZzUxTU#fl^`m>%X7Ze!7qmfyiK7#|UdQEXd@W8#au|Wf=1WpF z*;SCPK z0K-0Tqku9T8Sy9>lwDeBb$$@@DBDWmeo@z^7DUuJYQtYt$eaz#do$^+1lg@m**mAaT^izAIS`wHee_zW?QmGGwf5c+dYI#P;34!4}5N!v1zusvvKbWzj zr|{evo-ul${`h_td7Aq=T%IMnKHS{u*mc7bikh`=j=fJFsq8W1If8lsosh4(Cq6WL z(t3YFZoLzax<`-NqZEm>uJ%FYgbQ1Zf&^`_3I~8SLy9d}0qe{$L+LR~?2*)quLrx0#wln z&_uL|OYK6`z@UQqQADvqL3IulGT1OS(&$X;XNkj%$5E8xp`ZG%!+-4`nPGO;yG?-J zpIhf41#U^Ge4@I3IM0Y5_EPH*f1r~HK;E(t;_CKNyHl2iXg#KJ%@Ak4{MN=b)Hmcf z?5+7s!T?(EB3QhE_xqWUAaPeC9&bn$t;iuM5eF6&M+ke2YJuTk1w%-?I~vzJwoV!l zONP<}q439h@O?kCd&lj|=}T{Bb$maROS=sHm<8t%X`Bht;190+m?$Mef40c87R_^} znmul?QuVGUS`fUtk5Tjp0u|~>>GeW(>ow1Tme=(OfI@P+b2z$No9tj6G>}}+vrQP?KM_cqNUT8bBfgLE) zw<82rmT-%$2UCI65~!oJ{L}&4-67joMo}fRj%gRt>b^#v)oe_$%9`5jcgUX8kDoyM zciVgMgVmlDo$61f`LgKnc#ct{Goz?RES%}H-JCl3w%`~e_w#N%f4ye}q)G(H#+Q5} znYx>>lZS_ZQRi^kqqGUQ z!^2qYqZN5eTC^crFwL3#K3o$^kvJ$-c>yLgz}oBUrPmYJL+nMYg^@+-1t~@AXzP(L zAIH(=Tg?2|+>E`t|D)#77p#BH#(t!gNX<%ymHAW*A6Rkg4LY=PTnyoJh`YEh#x2rB9Grpz@ue z2lvz6FrA8Zz+n-NJ$E6H<$}`wsPoji4Pzw(ZdMY|TEZc~i+#f6=HG%`d&SQp^BZ9?=ilrgOWAW$UV#sP(^5oGd^vncmqZi9F7^XCz8{TU% z#@*kAl8@*#*Z7TUhGD6%4(vP(+$@OSj@Z%(f7F5ruZd1w8X*qF!!!;0GHaxO`Nb4N z?@)O9BvxSjz|eJRlxhaTP;nl=vDuWrU_X_o#p~+V8G4{|2e2c_e!13KkUp1S zuUl}SHdGHuW48hSkBxOTM1tiS^(zzFNlY768*EWaxAmuSHtM~Cw6=qz7e3ZsXBzO1 z%(y9Mf(&dygjG)Zycc%Ibg*()FTA!#HaIe+rCU#4!RBjBY!_vDMtq=e{N&95rtPUn=FL`yYy>3QQtSeR8wd!X1W&G z#?N(leDc`_5xBzOReDPnSZ`UcoLESLGE|>6;%2ov>(R}JAc=e*4icUkK21x|c#w^0 z0(QAqyN>~oUnHyR(V7$_3QlvOa&j}?ZDQQ{6Bul@LyL>2+x15oEIq; z@=0ds!F;T%*yc@J>fl%!!cG1We`CamT2X>b!C_Vf#p#GN*d(XYyP^xoAi`KpNLUO0 z5yV_9n=>#{^D}cS;-rw!;4;K305V%lB=$RBWs@#HdOcQ)Qz*h%og!xUa5gG7cTU(kYmzVXywwHfXwY3Y!HV$cj*gNgX)AvT~ zvuIogJ3b#}1&X~pxZ|@ke|ZQJ$6_?Ottxa6sp038JjnidOL~OYp~h(+9jE<~ltN26 zVx@nFAAW(C0h6s`=wv$GRN@;Pi3T#nRjWye zv+43u!G7jAthEqX)G~QvU0eHh2%Q;v)6zx^lC3ane||(CI5&ysA}tlx z+5~ErrPDmj_32@m@^XO2nQk^6N>G)8eqnxsxyi}?OAbHAD7pjfZg;nrFDC%4^cE>C z9<=+Mdt#a%4C*-WE3A9cl&i_!JrK`%*!0l+>3(QtP$~@*Z!l7t+bg8EkGSEspYUh1 
zAmg%VR-2WZahCk=e~5CWU7<<26;D4@MkD@#Xo8*GqzJdYL^}uq+4rTZ84MDb#CbpH ziK%&7UI)=uP985%kvj}4t4&9Exml66F6p+*v~EPBz`(q8?@!!*6~T?DH(WM^Aps@L zkXi@?ey6j2i3N6cBQ@6_thyH>(%s;UB!%j3Ur^6Bn~=n#e{$4F_ok^Zt3+=IXTd{w zCbtOqjox=V3Pz_QkGIyY+IuVfGSGQdehU@glZlu34Q~+ns?$Kok~oS!Bn#$e0+y4a z2vnvZ{JD}rGRZ0{(gVv+aD*L&7*d*?NNG;^Vz-gtq;Qkf3w}m{!DRJwqZWS>Cv+@~ ze_f7H&P4I~e@4*&3u}9UD?5EKxbqUkdU@K*;y zuOTYa3iCT+KOX7gQdO2e6n|p0V?`xr_^OMHS^z< zHDf%jFJMCCS*RBJcHw{nP#u}rulZ8obAQ)g%ZcZ6e-XT@GUJNNgnEPa|F+`gm9J+v zXJsJor2&%pItnE$q`bde1I+;5^-tO_Tw6hn{*+?y77Flv!B@63C?HA!E_Ua7#FZ58 zd(1WpPS*flaE`Y;6R(-bj8uh3fZpxd+~A(MPn?mE0R9M<$Wx%#g0HQ`BE{7b$G|MK zg@FJCf88wGym8ZRs2hyk-tN075L8ZQLOMF^O3(O#98qEQcq`c635p$lz~=S^$08&$ zs06R=^#wZPJY+4>SWUF*N3;KyMlwABt%so_*j#{qB<`wQUjBSJ^+cG8Qkt!OR(_%= zT0)r4fr595Z_dOb`c?p>?}$8oxfR?ugzs1lf0qMqalP~ElHdyuNcs^Z%pDsRA!k~k zs~ButhgPa8BDCcI)|xLB*I)0|!bKS*B@>FGxwbATEj`<$51h2$_cH-&J9Y6_$kqq; zZr`%ik|ALAWQ*63kxz4+pa@ap>r~b=rl1V?T2D_+3d*XXJ9KpOhdUt@hx{1hzp92z z=DthjdBjir;t_GvS;1CbPx)8Y_|+vJf1n9*^4i3iN|)ymrsV)Z2ROwJpEz+4ANLcj z-Ua6u=a5gXRpC7E7#!uPALgVI)og~+cmx?YrE;@EF|k3t!eAeG9%MzapnfWq;Kgc1a$B&#kwIRoSc2Mcz6pF55aRo@Nh+8oI>EQ-QolEf86I+ zVtSsO3&yx~XKZZ;Dc9wrv5x z6`h?k!gG3Z@UhBr1eOWmUNV!I5XTI%A2Go0;QFXN^4oAMvBf3)J^D9F2E5vYzlyui&c(APR5V6U>-qU%i;i1F6S zhz>|d)6R}3I(0qUNv~DES$@K;S{N6Fo1_sAj$sQrNhtzw8cA0T6@$WpWW{e41{nhV zH$VcQPz2Fa!`6wWhkV_urn&V_pOAZvr`9G|QakL11}OGLi%{|viGMUTe}qJxw3RQq|jwf&> z)V%5yPl^1S(G70=ujp<}fAO+MSwQ~2BP(7q7uK|I@xi~35MHzv?l zsn#SvUizvq&+{>{h=V(sUh;WOw;r(_`EF-NzuCn>4YGw>OlB6@e_Z@57{{Y{rI0E5 z*`DZJ#Un?~+96$%96Pi*NoW&csF=$nqlxB6Mv^dTOnRh-l-4gyQA2U+oWVjFSoCzm z{7WXs7bC;$5T#05tSu4c^XfD2-C(Za6;3g8kC2*7T+cOnCxvth-=F$|&|=J+#!~mG zhJbhy2O&;M!(c8Be+j+1hMf=`eWKIJ|8+q~IJchc@I9sqO1QJ+2M9>5(ke|`k%k+1 zp6namEYHjCM2+n{ji3!W(t}li#GvmrlwLazkj&D&aBllvlVwe{9L7(JkC}boz{*PD zn)6U*z9J4e!Hk?&I*VMRo1Qv6_3w3!qxk>3(0%z@?;UyK%-xo)Mu(4$+g1XDq4=`we#B(Q2hF>`X| zu$PbG4}W=Gp$ana+{KiQNAUk{2%-wN21o2x7u1Bt56cYRiEftyOgchl%iRZ^Gz8ZI zH`sQGF<7^mf3X4yZbZMeY*po6TSAH34Vg^EyM3MWZVOR#;a$G$q*OzJFFFNgpEY#2 
zVvQV6OPN-+GAK52{`SqMH(vRuH*fWQ+C~PYqcSxV57}9J(w2tvMqlK<>baTdx$egA zC~tL`rtWo5f``WqcpA%vHsYAJ<51sOCjE79orNs8e~ckUckPPCx3~0;tgWWNxBd2+ z+3{ldjxqVBW)dFwMinLtDD!j&pR$c~u{`g|LGJcMdai9AE0EJaRxYS_;=3rM7idrg zD%oSjMDYF<&zFgq@z4BEM7!3hS;eIBPhIVvStQN8J~Q6IOJx(Lvxmd?o90n~*2M&V zw(Y4De*+Ns!vKLYIY9rFP??f$*u$URysU^8`?J@Y6BnD|XJsg+$35Sjy1Q!0-vZqO z*#pqW1%jLT3Sn1vRL`o2TGgd}dkmEAuP|t01;|?^^_K_j(E}Do zAcO}UfLxgZ1{Dz!bo(1KGn1c&&yaeu2^vKWe+3*^z$M=Om|fG9F|tJwb^a`kITd)! z3;9g#zav6e1lCvOKLeo;yHg#IOB*QHJ&9_*`k>4L6suI~tT!saPZO}+;x?~F9wHVA zAG`q5^YEL0CMlMo5$4w$rSO#np*`@BCfQY3D|JMT<;wc(rA{SB26@c~) z=!`(1jO2lPospiLE~={<9vk)gOethFXTwDgKi*AJHn~QhR>XmU;0t}i4F6ZLCz7=b zOm7ptkoz@M5V6ut^9a(pCY<3a{Bs2Ge-|HNUFn!`^&NzQjqQ&P#M6l&y+-bVW5q}B zRqE>A_L9lP7iXh@CErWmXKeJ|N)3S6qTx#A{xxROjwm!C z`=j8!H{`4iCt+Rg!8Wvi;r0u+w+xit1yf5!(IBJ72(yiJF*TX&TX;>shc9q#e`R8O z6@irc*@-bb{rHZ}w4nJt7~~Y6FkuWNv6AMG*Ks8~WVY=OA3==h>kBG@C5I6j8BF}3r z{}6P+SAUj5F537XsYf$(AZ4jWf1E@JuZ*SoA~O54kxu15wEs+UU2mZ{PJbJ0{)9MW z#y-(~{XJp&Vde-B1{p3k(bt^&d*z3|@ob0K^o~EE32Eqy_TDqUbI;^HHQ2A3GO+Ta zX!6`{1Lc~#?+@#sOy8-b_Qb*{!uoWHDF6PSoP<7As(?EuDs5^o%xGNce@;|s8IY3k z)E-rAgbHn4k;*xO$jr&>X1>obF%qvy2Llrll@$CxInu!oMYs!M<0Ld#OLHTfM4&F5 zHXGFmG`p9o&#!1to^44~)rS+=kC9juHfUIQ3#C}5>#Y;4TFG@Iz8of_Uyfg(j~V8| z9aEQpe>bGw|0AT-`>Rs4f2>iLv4;JQCTz?e4;vZvVc{`A(E{Xcspu5VsT+&X0RTYQ zLIiQL=q?d5lFH(8mx>LxilmBr-WVtxFjP5d8CA4mu|zV?vOP1Vbztw1uJu*j*5+z< zcKUqi&xMV?jQhkG(;|+$FW)1sud4}2%P60>N0ARgZ_dndy2*XVe>>6J;L#VsT8T3G z*bGrWrg%*AjJbu`l@)hy%e#g8@f13|3K2EGjiBbItK3Yt&B;-T#ASH1x;uJ&F55$J z+uegY0EtG>!if~C{tA4yj?P3n4;Kqu{oPz~OyQ{UF5dJVua$)b6hSdP{)7J2R_DBz z=bP?$+d*y#ee?{nR1V~g;L)J(PdSdk0Ktu$c-`+(WL4--jMF5#fat!2r3rMP2CX3%+ zxV;`v^un^fQK}PxB}75|QLC%+0f>Y_@;}`K5CMXT2@J4T9Xfc5grZl;CPI*&PhjWP ze4_n;G~UY73dSkuWDqdlVnr_;%Zw5zV8Q6!ZG{dce=s7%1LT5TqzN5ihe7K5C9oYe z{!KHx!DGR)P7pyznZ#S(T1$gqM96J1GR35>2imP)SA^t03#DO@LJ%5~6^RTmiHl)} zfq@hymGkg}%G?2T^;F(=KglSRqhHiHC}S#&7mK^A-v zGeT(he`jiRg%6qZF$o(Phm@YQy|+u1sga^ZJ2QY;h;;JLlq;fta*CoHgFCWEJKft9 
zh;8*9+d}jUYaq$1Y}L+p)```slNyRe`~+PfL*a17Q!_^lDU8J0BkC7UN4mF4wU6lt zzWdahee>d9uGC>7hYsc@TPBMaH?{)*Bh6eTe_}1llE?r_2z%A|(|?QM0soIDYi9B8+knwcPKL~}@Nb+F6c?xXw2K!H+G0$e!ASSY9QLb}wiMHslV-DLMaoBHu>Ze|yqDwVFUt#J-L(3Qmw^G)F(wMA3<=F4w-QX&1a z43L2d5)SGG)pH{P5ZTvuW?Pd>jW<{jfACUIlK>;v9`g~ql6vVz&gRnpg65y^tz+bz zMp^$7n{di4l&<8wR2GMXRoWA^<2#zlA;OQ{&SAA#i+2F+Xt0>q(3;=swBXG8A=PJy z?{xYH;p{-}8BFH&PcF!A=R5kp$@4T_{I^MSz>z*zh*LSZg#Jf&fr=v=NriKW;GN5OgW)rqMA zTd6e~HhZXD;qvV_mQ6yvyufWk11?v2(h`>^TDt~f$`xXhu3M!w>*5r-OCmR&%5u;@ zTdK!u3qmS2MDUQRljX4|1v*tbCH=(4sf8%7P*7ar% z-*c(c1PR01XE9&I_j2@$44|ft_qwCx4IP2e&XjD9SS1dOAGsn=+Ce2q0N_%&?7SyW zgkV7;JiuplG!853ryUT`*|J^BPV1nKDea}(!q3@!0QYo~blQcE&yU{o1 zF)%ct-KwZF3W*q8n}9=bJ6WvF-{e`kMzq!b;94GhB< z{F~wV-`{42@PBJF^B5y}7PHs!M#;9wmX*mXE?2EL4QF?`E`l!1VM0#=wI!eeG% zR?MfPzpCB0gkoHfwq`N=YI7s&P`G;<2o+|Q*agZPJdb?NG+dN3CXEg}f{z?=TuY;k zKbaTS?VU~@gO5M+_V=v2V=KZXSjOJXL68zQHhp#Vf2Y@A#nXto1e4|d~Gd*Mk1 zXi}5Hd5DpJ(C7a$Ml=*b7m8@auY(0wanfaLibf zTb+cUggsG87{(*d<(ztw5}ug^UR1#;QbE34f2*4K>7|T6ao&pO#c&fm^-(}^-Y@vv zi(^(O@y*lHRvY|E@l+X#xZ#a^cRNR1g)zIzVw!lSj5Mopk$Sb~_mi9>cky}0alCF_ zO!=v-D6LA3*yum(;Arb~AH7k$s;eLcy+!>2gWk$HctI0Yo|Gj~_KHwEH|(_-*t>1= ze=bP=m|RI<=^_qT14B-R9CzI92|vkw`sJS!c`Jy2zd)Q$AZa2dfzh4ufoN2G$N<#$ z7u%?ql~pQe%~0{1{H@h+l5+p-U{&)j;y(pWfDP(?v_wt*{PY1LWC`sEKrgtV*xfx-OG$v04sG(f9cAAKeQs0%&em|mW1WVCFd7!#}zhE*L*72 zrb8^FIS&45(fm3X$RXz2r>V$>45317%XnlRgW;(%M3fp*O~-=fR0-WxlFBBkQ{;9BSD! ziwZT2rdU;yh@O3fuDy3O+f&%Ehc*oi#Q=|2Gd1^uQi%+yQ^B_#Pak(X6RZ_07}cDV z*2CuCFP$iUFKQ$Nr+fSf^5DiYf8Z*>Y*8`gxQi#-@^{K7KVD|IGj3{R(=1{L0I?-C z@HC(yC;z^ zwS)%RYCK4T*bR~rDz0^?7^5;(`qlL+5j=x~ZTW1FdN1s8RrzALzzl4a@Z8q(UWvP(b$AoU1f6W;RR-Hzyp~~g z`phnKo|}=*Y@|wSkaZWxseT8$)Vht1ZpTx!D-;f#jY;Xnc7|F-mz!e;34h?0jfl80 z*X_0cM`}_6iU0RE6Z!V-+y8vDUEjWa`}XbIxBp4lioSjO_U+rZZ{NOs`}XbIw{QOy z|H8L#-@bkO_U+rZ|0$%vzy1HD{PlmSj;8z%|5+U!_1R(r1O%S^uOj5dG%f2aF|^?? 
zv#vAUiw94iNIOOu?F1HyM1RTryrTRfDmD#7VPOF{3>9c)N;EkUXrDqs?qp)khkVr8 zpb}y-OJc|J)-Z7Bptu1Cym59%6p9$TvvE(i&h2Kd?TNoc2=dBCUY$^eMOQY5-rXC8vCJ6|rxcQtYN$LL>HF<`*%ZKV+%@hNC_4PSthNWMbHtezk^~|>?lrtD&xkTB4-+ua1l-zw+qs+UKEwihR-LEB--&{td;{0CTPWy}+A^o1& zh?#jn23bGOM$@e8UN; z_U27W_g%7nZn{6gKjKiQb8a>D6D7f*8$G(8&+FTa9ezM1ai z1l=i3S6hk^n|v`6WwD}Zy~wikY@^wzvo&2^cbh~YiU*)&0XXLA4uE`j$jU#5b|g4- zJ#m~w?A?!GOn-WT38m}=2Ecb!K;o}6B0{i|7yb0<;SI=Yl8jpJ*FPlcq}CCmKuwX7 z%C^-N_@_p3X!n%Pi{?ypy4B}dB}r8Uj)3BWHB>&r++HhX4}f5}$LeY!^4ClVYPh`! zZG)mq6-IRGzB%-d zghifI%06Ign6zB;bhti!27)BowIXSWxh$5~RDXr}jp>50@FBfSpsLsdu;8Gu49xn- zu*=}LybI>|y$voB9yhjl17Cs#o^gHQP)t;IZ3ZWopTMbb&$&jQjbd6L1)a&>bevX{}J1P*I^p_4!H zTz@oU1M8dfB9IZG$x(k+<}NY_N&6lt9_fuZ zU*ybi(`|rI0K^{BD3wLO$!hfu_H=IY4>RIR_ow<3{tJ=e4UX|37)|bo#lpg$zj{CI z6?BEDz>{cuV5lL=LZ_kjLxo;V<#@b;V3>#;jp92gJEM(%)Dg{TxzKCo&tQ_uTRf!wpwW zEoJYM3z(w*QKWo8=*H0I@q^`bo!i&nFl;Ae-3$Q*3c?<0VUIVG!=~NPLgO(6`a|$l z48j1nv$EiX6WdOI{i*o;IfGIJ`s_0=){)ct@pHTN*-k|H>CH)!D?xNNLq9i6#%(v| z>2z+|PA+ANhr1erS7{>Gy~9Vspx*0BYYPX#MXihN4w&R&fuJ-n{v9kkWw}r%w3@!W z9c=*Ix%WkEuw4sgnYv|)(QVX@9pVv4N^IjN|31UOpt-?+Nn`1{oasV`mA@$AD+9e) z%5QyAkL0rV4yPA69rn*39F0dQoO*+5a>ptJIPF$CZeRCq5;r0_GZfYtt+0Gj`E1YStu{P^XHCH@M|6G2u8+nm4 zkEX9kv^P9|EpIO8crVdUN8HpjF4xz+5L|wJbbnDi(>t%N6I@n?z_wHEuxmA>94Y}v zb`$<^e?s}BaZW48GqHF+HEDXdak*4usXx=N$^Q> zcF;+Uul6~SH{`R_~e(h z$vyZ@WxMiD*lqND%66AK7F0 zS30+bahFl|g!Nv`zv>;X_pAn^$#&v=oQA!BWb%x!q4yeo5d`@3Xu7b<7A60ecOQuu z3N>AERX(s?OO(Lhx}5O2PN@wZH!8o1>kg%-?dr-W51p|;RcGua5~gWBLLrEl%;0hw zn8l=~Rya3plSLUB{8StAWu>!6t@ZsbR}cw6ftM}y4Z|#Q$JBIu#r>H|XRrhjNRYdK z<2Und=1^K)Zy9lX{DhbT=-3~#kf`YS9^C4@e1Se)whj|qxwl>~7`dHqXw|O+DNOoU zoVF|>ejDr+zMf#gZx@mAxzo9hC{VA5pZsUzH8U@(N8(iB5KbwL@OX~Cw;OjQE?{?l zn>mSE`6l?Zi2QfhMIHESXPFkC*E*tqq`Sh-ofOn60l_o(S-4TZeoc5mO*XMEt}OlB zO$yB+?eBWYxQ$F!&$8oEq3Z7mAEsLI;}hk+!4x$hGN#(KS~h5Oi{l{vn#q5j!<#%# zXUmOV))li}qQ#cwT+Q~oJ!ykQ+o7WzpoAv6b39&KlNIE5p2QAe`RO>2rjXiy&26_; zxmltTH6sXOVxEx_SG|6*+(6DB0b0`?9o3)X{T#ljw8Vi#6xS5(HbY46{U!4l&P047 
zySQyn5^plDkP?lWcXXi8GOeqe>-Gaosnk%D9OuUi$20=I=#xPx;t15PCpRfB1T%fA zxD!oyG7Y2{Nfwjuxl|9;JIWA$NBh;#+b}+_ks-s=Z{<1R_aFL`i)x=l+QT$8btehq z_H-~{<|q0&8`@dwQHMvmc@I@?lzbn`q8r{gDEOR#JL|dgA@uD*C?yiK=)-FrSjq6J z8=Hd;C_}zgrHV&&a8dVcif0J#B|Uqy36drlX8QM(&D^LW?zcdfaVdQJPur zpH(WQy;omigePsL+F2LFXS#1lNbm_NAxaj^AnzSbob~2qTFdc)FRekCBZ%$TV;z&@ z35MlL?z{FVmvb6F!IKZ|5jUNLG1&(fl|NW^zntHZn*t z&MG`LkPZn+dzw-0IXasz?X}Y(o~Ic(}8+ z|GA+n%IG<_BpKv?WhK&dmD$;*i}ll5V36G1{O8G&wQqC$E;BV&>#*!^&wHd>Dak1v zbp;1{L_B$ZHT_{Dhg(ZaL_uSr!R0CvDM-hm=;EihD~TevR1n9Zm_bU~o`@lS4Clr6 zd*_tIQoACf69m)?_VA9`{u8~fxT%B48@a47R!2Z9U6l%dnBpafH&dQJmCUv<>94(q zJdZgtz>P{JBTnY^qOy|XNN)r89qsJ>=a~%s-vS-C;?usn)%!C&l3zrQT29wk39Mj~ z7kELpx5w}YbM=g}`kx?@Pg|K?W0J-~phWW`XQ!1;;?+1@y-xh1^ktMJR+#$KPXBX)TI->d2Y+MR6=05EFEwWddgY z1%)rMd&FJ(Xa4QwctkL=Y0C|cQp|MO1IxBq<)1Wvy5HW7+Gq+3NYOM+XMSLoBW8pY zqDrsAp>jLQ;5_)GooHI?eZ@xXhtd5zW9f@ z-T4BLNW=x&aJAP->x{9^raHsaonSUaM3griNdoE|eLIeHUri?kYRc3phNE+V#GB$< zE{^wq;;%Yge3SRMu`xM=6iz(CYjk5Ls_~;G2z>cJqfRJMUAkZ4__lm0kTL99h`rKB z%Cn=+euh=+hz*Al7>N*%7)R7e{TOh%4Zknyw{5${{#_N_@^{XpCkEEP#pPX?^Nclk zXywsTZ8QUk*(`2*D|~S?(n!O7d_3)b76LGT97R<4FqpdHeep1GdKTwD*D2oLvj`~K zmRu+D9x#aVdFDkH{PWovtNs|Nmyd&QSWHF$63PU$+a|G|I9qS9d9t2)SZ~TOGCO*Z`<*{mo5c@kf^AFqd*n`ysxe|UB4=fiAanN>FJ2SpcUv?PuH$$j0 z1MOzFzcXiPBCP+Cz|9iQlJ^8u(07Fu_&oJO9wg@b zsxfIL9H0KQZarvn4&@V(3Y||jbkTTe?}`6sW*EJVzL&RVvdLswmhcYO(Y%X@ME#&5 z=R+20LoV-;A+NiF5QU{+@!{-++GFe^qf|@SSQG!cJ-DCL@<)prl|v zhOsvE9|$-h`IPE{J0pj4*l6ai=2GAZ85gFC6Xb70_0KPS z0bPY7M7+1^`)q5tSKr~mWOsOkPkhnBK0BkX2!1S6tLWD*hAg!TQdY*q@6bh1oR=3Q zBzjYgkN3^=L2414AryX-T<3&;c#iWUbC7Mx@*}-mF$4cAS-!v^(}ubRj|MHt63yMu zFW0>|cVH?MKn2ig8}iigGYYi%KsMjm=08V)znf0thoq9b@1eq8pf2tRNqR2sqY-Xm z!jrMoBrZ5}@`U^F-9h=p3lplS#DeUv6Fn2mJYO70bAIYmgfyUDOn2UYY-~!p3!r=F zdc`nde8Cc+qP}nwr#t*?CP?OU)i>6-~Y^9 z?9Ro;#>7U7#qD#C?7Ra>yyRtaeKc_MA*wim$jxa>^j55t1i8`O61TT{sU*i7`QbS38 zM4P@bH7hgSe-p^p1`Mr+M;(LNC*bz3bUp+tIf4m7JPTt^M)h)kC8C!rK<=!5Wh89_tSXK#tm}CD&E=Z>v7CVLHVINs)GVQNqbNBNGwS 
z!s&c5fnJ)RMG$8OYOcAUyR~-BZHTMvc0Ix-`BMr?GdlHq{QGjOuJt5l&CoBs@!mI>cm<@s0NMC<(&%Q?6w=!av zAM)Hm^gZ9;+ zAC&baO0E;nrLKJ%`>lj)IQ82ErsBZBc>PEIMHySngQ@LXqO+!upv2ik+#YY~8r<(Q z5ypmBuZ<6Z06xOCz5sP)Y8fAqAXCD4SAu9$Q2|jw&j5HMP-Mz@fgAlW*_M;o{#cu! zv&RGO5BY7+aCJPn^=0mDD(*aIFa_X$goPzS57lSC>6)K=d^uhBcBxCC{ea@c?o*5V>(q;_b<*_r*Hv`>E$U=)hShm^FR$lWtsq4tN@ z;s7!mF#it{H|Q0gBfI)J%w(b@rYnkTL>}Yp4P8&NHl_S~!v9QdhE*6%$r(=2suR1z zTnc(5M&*KJNJoK<11|#%M1IJBtK%4@7ZIUL*F9;mB|6(QjmLb{Uk}3!Drc7@;fYpT z2H|kG%y-7K(+s_rpk@20SsBq5c5pP86&f6;Sc zh*n8eh+u5ulfcmp+5j7mH6%T)+2K>qhZwB>=YjURd`1M~-+0w*VKy2=P7PNwt#gce zuu-PG7cy+LuN$>5GSw4RT0Wr8Bv2b4eWKp(5i%#(?qcqKB&yB{sgTB^{8Ylmx2Or) zO@m#l6uf*yh|t!yrv~MJ4|(so%>qEm+;iDb)TLYz88Mpj(!fRKy8V9J7nt1M!*I_> zye0{1)UgLcj|27us(;FP3%z^5O#R5WAk_^O+}mXTwt&84?Nk}0Li^zzZLIEWK)LE) zTSlXJ5ijMJBXxbS*5z+YwdzlS7KZZ8g_1U`joJPkpQ+I0_gY(jI->47%>ZaXm%oe? zIo#m3ae-)PR7>sRA#j%0ayWMh?>P(;w#pk522k9#8FF?>qGx z6l%6hDCz%8-f(c%nxrow5in~?ZdA^@her6?AA7{j=bt1+7Mb_FEghgg_fjYQ1s_ZJ zP=1C~9+RrFCW%OaJtWCte?X?Ks0TVBWtHNk6624lHsq8Ohv((Wax_0Vn8@u8HMXjZ zL!&gsV#0F~&g^sGV@B=z<7=KFUjSe~iCy*mpN!_AjS7 z`}n!k`P|&$Jb>SsQ>D4tEW)VKO-)RfIV2f>sH&hRF*S(V(qNQ`f*qPE)v!fH3gZY$ zC)VzX^ecF%%BruRQX=8s+d<(qEtInUBI|f7;~45$}f95#>K+1vxwuv!ilLnw%2pgPLGCBEca6z{NngcQt<1 zWe)#|zX36ywuo(|66P(kn2*<9T5c=JYVsOsIQ=NIG7)3J8YAEgpYX2?;w;N&UkSu2 z|BHDE`tHAf>zdB+w$WM69pgXQTAh08wviu=CpE|(9S0dOe`!Y&A`WeG4WteeDT;0@ zLTgpo_%dJC4P2`F>Z`UbZTm^7QkB*YLL(!xP^@gT(o!KH9vE0x5^|8qle3A}XUY4+ z;(pz8a!JVIW`mn6}Mu}lhj~1o=Az0g` zLhVXj8soy8e;GfLbNAf#e5WIBN7`<;ebK1lbKkbfSD4jGl9kzQ59p2Uy#XZ4m|tt& zc3X#w$34l&$!{(7oJ;&$%vEinOkGGI`1KFc1kOse%s*u5Oy&8*D z!gfI_Pz^cwZL{&n)VW$uQL$WAsq#J|L;fYa*y7i2I(h6c9U#x&!|{E6IzBc=LH;8> zx<4+vtqbSUlAM*Vz2V z@WHc1f33q2bF)qV@&@p@*5BWspAEs|VDPh|uIKr5QCvR5lcAk{Fq&azzEH-I{L4^E z#53^h!8{`?+pw)C`;3l5@$wXSs9+?8G+A=tjI}mXP3era%~tV==l+dKOdMcpS#{r%#EcWf1KG86l=_-f*3f6l&lf41Pshi>VV`;}lDN<>=aWg*Zd z`I&uPwBn5KWrw3l6+90Tpq3>eDxw%-+3DhTqyb1hsH*nH!seXV6As71^{!`u8^Z55 z1wOJ02(yH;%_w3qb4xm)3JgNG#$T zf4UMf$VjlMGQ)Cr24cB*cXrjH 
zLs3N_Bo4N^9n)t^;RfJ0Xzf4}r8&|n%NHwd6$6bss3`l-{Ur}Y67mzE(^pZnJuPlJ zBJa+#o?jTQR6|M=8P0j(bj7Y;u_y(Wt;G+e?bw033hlP)xl^`r~3{u3K?RFNXvimy~)7t(IN4_ z(l#ch0|!R67ldj6J6qOjbXKc~AT2=+C0x(Y_gP{xphclmg~Xd8pO>b1=f___8|U!` zdcT7Wj7~2>@CEL)pqp~0BW{l{V}vXP;e@d=$~qTXf-jz_jKq#rZ?xPdf1Zl|zDsAS zFs3VDd)^dx@P9+yO7{;{q$;MvZJY?yuU@Y3K!;mYNi&`1fVbe58(@wNonyQ!jZ~{b zs2nj>5)C#i+5&r3%?X}O@HzKypwAJ~Mo6zFPK^pzr#HqCtL*8a1GGfjPUc0fKihz% zl!Qk8Dp8{emg<`glp-Dkf6BEYJ6#WQK`kg6DOz}4dhjyy3Nvbqu-#l#M3b#4M96x} zu+3#4&S=~RD-5qy85)k-UxA_~RIEnPYlXgvBVOuvoAQ9efOQ46alHD^^qn$6wz07> z%N6I`Xelt}NPckl+Ex13xULn|{+ZORqv>r~C_qi39*ha|=in!%f6o=N(I(Dg1XMZ5 z;i7!B35@0gSow`=kdZXo_GfW-*P!wY%jdK(d{BkibBdkxs0|O?Qn3IWs*qA!dXbZ2}!CSh_admc+c=!T$^Gc}@GY!@CmF#M7{DtTrX+4Llxo3@}>% zcF5)A-+gl3C0~n;M3gGUYi>32rV;hx6I!9B2*n5|Qk=>xe;&Wb_hX#Xju32GY+DZe zM@A4hEW|yI@MM1kRwGT+UjBx~fBIV=yoL6HBMr%3RVLf`*UaUX&Gt}PD5FdQ#D?rI zjN`~3H^MsrzJQvP1&89Q>}s~sBIEeU$#1St2oQ;KmZ9x!T?QYMen?X&|snsNrYUoxPE>#pdS+?b1`Q51}FF$rrgzL(?97K;N- zw4Ooao+X`jf7_jeno@>O&(1D&F?H`yi`|d> z;6zbKsmz^h(S5!N1wE*hLW7Klu_WQ^9fqLeCP`1YD?cWyb-JlE5Ni@?hH(30#!5&?;k0_Sw=7WZI{^XhioU4n&`Axu#f+9EqzW^ z-`~9=M~oiI47a#hSsypTTdA^veO_Vpf9F_Oz8x>?<{sMhB&$ylhbl!n?5at5_XhbN z{3pZJtpJ8R^fHI)35!VD<96~0iWom{%yzRNf50L}jO5WyPVy$(T9q!h2XULp23^uB zy2|~cmOe{K>`V@2?~_qG>Lw2lmu~1L-}RcGW6Urw0e$-Xw|{11rw+@*supD8)vd7V zKf_keK~k^zT1*czAk1g;_GbOx`HLv~9dp*>;^G((;IFtAYkJDi!Pc71XQ4OSe&`Ib zf64lKl8e{*%JkpaB7;8~KVeW8J>z;fWm?Y-wbbV3=kIRvzb<>kSBxeNVh{QQL5scJ zZt^la91K^xub2siKDfo_luxj!)G`@T6%B{``{4nf6UFg-jcVN15MUnPZzth|pXrcS z%ElD-3=~>a)B~Y^JV#R{>7dpfC!YyFe^1o=-dp31gzu5X{r!J9>UKKrHVs<`|2)IT z9KGKL1!&u^e-D4XY`+kG_QZ@4cz@j~;*O@=oFIT<3UFGH3nTD`eF*brw5wH=#U=Gks-4?6^U>w7A zQKOgwBw)c&IFgJwA&`{VO9s?A&Il>(ZGS=9t(m=k41gf-PjtEC247SnaP_){0Ke;ETF`WDBPBml&WRVT6Dx z0TDYIb4q)~@`vZn*niaS=x6o34lMTdkHP}?`qLjR0>+P$vnNkds7E$AbhUoo{DIxB zvwjzMyp;erQ|U-D_9Ehlf2<9YF)Y`}E}tqHSfM|{u1<1dxxL^{flzA;4-kMB?h@>< z_u_(`2Hc1dBoF3DzYLhJ+5B5N{O|HwL2YDa8Y)$$^Q&}Tn(iv^*O%8dTa;yu`us)D zYG1x?H3dGDRerY~#0^e8OE;n?pwxg@6F)SLb6KvD+O%t|X`3I1e;j1|{$`z{;?nqK 
zQ@@~NM-(DNE!e13#KRP+Ttlpa9RhL|*pkq8R)=Q;KJ5yiITWL59gm`x52OvG4LB(r znh{&#>S+FK>*EJ2sPnivb^OzKhnq8~L7)k=7ldTMx`rkbMO3P6zvsbOcH%}PBZfhr z94fm}EqhO5N^V9zf1WNSA}Ceg3!#Zgg;D}WjJ6v@n~0lBnNqzhRmhc`K2p1@0s-5A zv4C_|Dq7?>z24Mq?MHB7lEhLPK|{VJa7_BUH1I_RM2*A*;VeynI%Mh80lk`~iL?lC zy^jH-(mqYB3LR3Sz!H*6K3$Hu%YmR!$403_?r@I^RnGHQe_0jyupioj@s3E>g)j)o z}O@5wexj3g2}7^+b-z1quw8B`%sRm26S4Q&m|OQ(omykA#% zGK~`NyNZe-%WqQ!Q3dcVqf{kZ7RihSEiRFzi-2pxW_k-kL!lbkevtxSn-xI{-KGw4 z49MT4a%iThe+Ob8T}xx`QSlvlYk8$q-F@ReMxR}sME0I1= z;{4vZ%(Sad?imoXBttOTCmO(tO#(TSC`p1Mh8(BQIf0Q}I|+n04d);h^dRC5QAnv` zv(heWe_Uy3nfG*Qx=k65=2@ld@gF)C)BgI(^0~d_@aW9KJxN zOwigUYeDD2eHDY7ztTMkcbNsY@4=Can_&0)e-p3TlG=^h5WkdFNehf9L`(u!hzLsv z`psqrULfy5Z=rfo*9XkdI7bIT#jXLu_?Bl#W19F9qK0P~w?q}|puO16xAdoHxqN6g z-!-=9XodIn{B3iBXN{Y1KM=Nku_d!Z!XN}M2-+HXkf=mdiZjF|tbk|M8|BhKoyk(* ze?Pm|*Y7t2w(E^h0Lgx4jEq0AWLE=0pfd^SiBaSJ*wU_JTit%9P|G(|VrUcm**7HU zc#VyLC7D6N!m|vU!3opSJZv^h`4k6<3gie?@E~EkH)i=A%KY^tss;P;cb_jW+h`$y zR#GB3;ytQx-Ygz)GU?7I^X`N3q@+7ce|0eB%~^)FFAEZz0zn0?6rdD<0#%7Lh#iJg z6F2__K@OWR!}wAXeJ@6(X08x-G`HhR`{@jBXylUgpD~Pf7EZ7^S*re>Gz!b9hvM)} z)$~w3WgTy8Yp-+)Mf%0awQtm6GG>G2m}N4iJ(u5qh7;1`ukIWn zDo)B{^9vE;9z~dgY^vDUdHWSjk59>_2H8d|5*I|&cE*kJgjKLyzG>H3CHAzz9hV0? 
z0v{%b69;(XuE|J2hO!(|4cHzvf1$#Z0@wVYw!a1PbKC-TzlHv^L7&C$b|A29x-+tK zjt+>wG(WrK);cGrj6IzGE?d2&>In0i(50C?=EmF_&P#8qM}YL$Y0R-HGAWn!*|;E< zmd^5{AlFwpjH1^}N+J&v{!qKkzWB{NIH?JS(w<;$)CtCJd^Qfxj^KLBf6@7v4P2WR zi$5|p^Mm2nZp-Nh~?H zQgCJT&8v|zMUEr|v$^rof4C{7l1`P!*a0uyt!I1@@hycy46733PDgO=Qenc9+W7Om z$liIIhHtqi$4z$z``}=UpXEX!7oQA{43~!2dw)|NW9L;ZjqnXpbIsIglhv>*vNfmy zwZWrd@tpmsFJLTW5|g1L@%_hY@y#Eoiz+_GqW9qAz4%P1ys@q5e}|pzs%N$=u;Ggh z4_lf0nX)^P$t5X4RFGL|8b9NbGyiL1*=v@^r2bv!v(I8yb!r`rpGC)Q(Cx;$c+nW@ zsKKXUnu)@;lu^m-NI!hAltPW3YPI7gR(-c`!MOZEiahrP{uyLSf5cbN=;DniOUkI( z+ml!Wx1QEF!YWU1fBR=WQ~e=ZF!*)is5@)?#4i{C_*s7B!DOO1IE?>-PYV9{f|M2o z#KO1tX?2ir6|Dw;!+5SI*A0v>&|Ngd$|NQgM|M}=8{`u#hfByOBpZ}Ba zB>(fzKmYvm&p-eC^Upv3{PWL0|1$}dfByOBpMU=O=bwN6e@~&B^UwdEjPn0q3j7)W z%YQ5IR|k0u2LginZ*}w*JKLGo33#gLV?QU)#$IM#X6EMX9}|4zpseFzDmp|O8A$<+ zA{9!?f~ZVtfl;r(#Yf>p8YvhP6qAFH7*dK7(A1$g{UI;~f+!MjiNiIAJ=wBm?iAU+ zGcxioE-yV}e~8g4iWw#x+(T96omG8PSA2cW?>XMr{sBMk%0I8XUmPD4^v|G9&g}Uc zwLkJ8A)CE5&CbU-9)C$}l;Lk(p#?Q@(__2c@>rEYsiVP=TbZdzis=0vTb0a~H z$mT+I6@eH4rEB)}Tmg^UlralVA;njzkNJ}`LTF*Qe^vRwvZPd1u0gpgQXoj7bcg6C zs)R(y_ClIdx@x0`K&wY91E1ysverXb6PdPtEc{*$9rS6z={edQec_65v=Y4yQkxOkt zhH0DtoOGNC2DdP~pW0uh_H$avvJK(^_ecflfg*5da%JP*2Yh_o4uqJSZqhw_k_(Ho zmcuMg0U@YcRKykA;0@{r`;0?3ue|J~Hg_|!aewTA*)_Ki}CDuS)5VyVp zX~$g9|h&Hr3jQD|iZm$MC^i$~Xa8 ze`+e!{Eroz97rG-ST++1Ed^~v(nTVg1;y>$-wb(UBG3u4nuhH+Sx*pVV&};1IK+5F zD8Qn%h^puUo*R6rh`?5VlQ3}eU1y_Ld;(lJ!1*B+pnolPY^`_B)ZqYiQI|(SA$MKU z6~DpcxkA9X!u}!_^aP3*C}Ga+wA5AYe{tkWLDiWHCpwp(@d{b<_HS3yZbnc9W(4)a zi1<^+SMM6#Zr!X#22@EQlZR&<+MA$ zdnB9Q!lM7!eZkjqT>VaQ?Z2(zu^oub`86dgBIG9Q%rE@p+Fo2s0!jz_S`OTefA4?} z^b0=bRlv=M-+NxaN$;imxw3XW3eFiwz~(#E<%1TeVkb99?nc5vab@B27)&gFy+H|D z=21-X)>WjKOR89MX)QM*Y<1A7D{*Exff*PNENEeg1|8HOz$JhpAO!)+nJl)0rZS@| zJiq%LsN~7r!wD&i4+0!A(u{D%f21E@mkbAsvS1WH-L{&70inTZ$Wf|&va$hU_(@$X zg)1lR>kGR{$#TLp8+$%9pifG{GSmW$QW=c-0Z1u;3-WbfQ*pIFYGCT-Fkb>F3uGBb zU=T=W9cIj4)bY^lH%tyiMG-LDDZcskH}1Q4e18PqVu~+Tp%XQECYpZ_e=#)oA04bv 
zZB}moVzqP~`h{A9iQpkW0SutZ{(cqYU_fu#H7Sr`rU22v5Ng7~E~9-9IqwG>`mjE^ zDSnyAkbu+I>|u=6ZjwRXs|QM=L-9_=x{ZrYZ$uI@r6&wjc)uPp9t2Psb0J!wOOPG5 z08;=U`z6TmN>lWz&!F^&f2`*Wo3r{?Uh3=ZW9OTe$!Ff*ptP>xPd7&R;BCJOC1hGM zG!_X+7vXRm+Wrez{rlV{P}laG4b4V+7rqBVEt=t=YE+}U^CFFw;~+(+Jnfzl9fcQP zTR#$&-*EJK!H=K;zNb)y5cnxGA_NGc2OQ_@y{D=wncO!SidgKPf4BM7e;6GU1F8KU zX!xO=MoD;_U`n(D?1CbQRj_k#gSpq=RLdumhh5_45gP_<{$jL{pL|F=`pzEt+As0U-S4f<84R&LKWaDD}WOg z8rw?)KC8c92nDcJJ4-ED6I!op9wnfF71G6v3qV@5`&I7Ei$ERAJXqrLp9; zTQD=v2;7#iLQzVfE~B8$y3Fl>el;bd9AORal`iTH{hp+U7i5-zrc8I5I{w26jD-Cl zwgjD!*k|~judA>tkRvFtD*(#PEIs4#?F+8J;Om~}wfF4Ze~NIYSyvA%I$NXW-Of{k z@MSkJvZ9sm6Eu(*3`HVP02+8__I|yps#+rIYK2Ig)8b)ZkFua6V0TR-g1X({U2pnj zQXB;u(JGjcEM(&EOfhq%H|xz#Xrk#Kf_zpu2N0Is9_u*Z63;m1F6TsONrvJfN-?O! zugzko=BhJvf4!gW#UaE_54OjLMuQ#xaw!?@l(Me_$F?=-!H9q*nukc`%|?jpC0^2a zq%$OYiEMI~qL@UFUcK!?xA%t9gVv!B9-mNy2Lc)nx*!9BHh}A=2kH{@Z@COlTYWyT zQ1`*lhL8XjtoQD8De!PVkK^_Cc|mRKYfsIp#OvZLe;^ee88uK&Bx{~v!K{Gam$0?hwtM4Jwp zLblo&(AQ(Fj-T&_xG>{E&g_ApW2vyJe4Iu1`Qsu*mE|GLlnic&#mEXpwGr@Xc=tDQUF19@ zeTYvgYUTS>h9SJ!+HR{`XBY zC_3)xL4K&lAD`U__U4x7fz{WIv3eC*=czvq5}e!GDF@SQozKH?hiicoF)_FD!A-L@ zSfF|FH=c!drL-^@+nYs>S?cvZ{ul%&e=iE_Rb%|1vs=E=O^94Zt3(ztC;RHU-BOw7 zw~W=)=+E|!eZuA^BcI4smCeb0;hittrBwSDme9jnQQEb4NLpG>bLsbFgw4>WC-4m> zJ3;iw(eg~GFnVkaDn&YfS#qZDvzkuxej7dqoh{C~ibuZy^;`Us*S8Hs^yf4tR?Ny@z z7tC9mLw|3P(v%FS@}kEO0qhyP-8Cw`G>IB3VI5OA^jq|PUzjt%cXbaK^0+vEh?HD0 z--UL;vjhMyOm)$IUppDMY$-wze=uVM>S|QgOAN4W@9YhMeFB%&`m!k{t_pn$=Fc3m zJT(TD9hKE2IMK~VZ|0sy^Wl+Ws3nV*S#D>eNlAI=G`B`fIqwiS6tYH%XdDss^uBrg zX6J9_-KJjT?ty_76l8|GcIbU7qn5U0?55_Ik;w`3P=2jKt_BGU$J1yWe|AFN1ai!g zxRGdW8!PSYLoWd^+!>D}4AWR*IhmGuE@JukR~1W3V128Rs?Um%P(2LYe>*>JNUCV9 zlr3Xym8nvx;1Ef&vm0}uG*HynW!!RNyzDmJ{PM36`#$I%q*7_bfc3So`f+i7arxq) z0>0UC;<+)p{$%oxGGL4|e`pT&(4G;Y)N zWNR~3w6UI~{rW2|?u|>)lDXk^nWdF8%BA9)RJa2o3Qa~l&i{euG)*mMVY}X9>9AWn z$!;$xwP`0bQNl<$sfG^7=w9%$7P&|d5KJ961Cr3c{d%>Gb56XkN>rSL34Re5v*5V{ zbL+iC2~mns^tg0{f4J!lUwD7=5FXN~Bu#8nQ-Z}@@EG1BEt4~RmiLpn>iuZC9wIdS 
zS~`Q*-sr^nRmn=73NluxQnjz{8wP&3S|^Mt{hN1ly}Gfm<7CRx{ARpmY-ZPS29e3f*{|5s^C=--Z7fia8SuioxP4oNurnf1V@E8$`OWHQtSPaN0&w z#QoZ6LDPKUeaF4Hh)Jrx_Hc8cNvjJ`NP1y4FQv#HX7opB{GmTmL9Z1AxufIH`X#sP ziE`k@E4Sj;I-WYAyULiOYb)V$s3Ba)dnyba;Tx3kK(Pb)N)RjBY#W~U_QIVq2 z)z#XxtcsU6f0gYf27NwT{?9L8!8w-tS8&(GRVMc%-fkTpG@2vDy3Uj;)rsO_k2gH> z^dW2lGdp9FG1}8_0jy8{fGSBc3t^uIUW{lpXK;Z6UPALhFe`+ue5Q9lU$Wr#?YOG? zF&Yd@u6N1}zI#I>7IP!foZBT@h=G@F4^(ZxZ&;_vf3%0odGn@8cQEg1uOOOy-$Wf3 zf;#;HnzY-n`_n++%RoP~lEs{cmuvmSThwpCrI(j2Ijs<2G%^f2da`IFy>;}NS@+GygOcUAc ziaSlQ#pjGsVoJrl2pMe64)?v@{>ZPy2O_eMf8CjBN_(TrSTUZu34Q8D9htP9I?#Y< z$ZS@e8t;ICu4|)_t{u=ec&S9)Z?sMO?&LH6PY74>SJJq=i7Hs;;VWRJE7aG|%?(d3 z?tE^B&d3#)oblP%o#zeb+0O)yfU1|z%Dfxn94IJgVu(JiqZg)(1|NlSQe&IGd+G#` zf6NF_gGekn@66iC*+L{w%-k{k@&06h;Zb^@6Ro-+ljx~{1G_hpmof1cc>E`aR$-yz5PKXEuzc3IgK+JAZ$Js-dQwHqxl3Iyur)ek!dT>nvY0@Z*8>s!strvJm&$Q*e;9z_ z4g?Tj8|T@w_GB=sdl8D+7TTyF{rTR{t%3GNYZ050Dpe|NHpKw0tH6wREy|LvH$mCc zMltad;%9RSZE(JMvt_lLf5C3xM-JdNqFNbxhs)CS%lb4-T3SPMZjkBHF_w(T!I%k3 zgIxI-m-r*B>>!MR=9a00IRH~wf2gzEn8>k2HraIipG-&Jd^svvoa$d;hdJvVZ?r0t zDm13uGHIOQcoLgix|DFoYdNzl6=<1Ck>9qwhfbVf`hAoJ!%s;QFWey5@lHKVvmZif zVP4*p+%VYjhB1O-#*ezbbb9oV0#-XZ4w-6jF;=mIGB74OYHpjpx|ZX~ z4ChXscf0*W)3w}tbz0F1QRmNBHt)~w>uB0cw@%!wUHa|Sy8@}!DFA%FZ5K05VR91k+~d2>3*m2iaxiVELHPH(p*2Ena`guJ&+??>U2BsMMz4 z8&(p*#dX|=PEoS?7%#PXJ${k+jf_)G&;4~%r;!MS6|7GgfVOv#f6=5$bfD;C`xA`i z@tK(R)~lzx;#YIzk1_v?R!psaKm(>v#=|5T%7Bt}*QXIgl&Ba4bItdoxg#XRU>*lx*%ikD z{-A%Orxm`u)%Nd3e`hOO*>us?F45CKSqqHtsT4Z06TrE1@^s zQEWDrW-AV;3X3MpED!F~3JQ!oFDlP0QdNCxxk=dUXv#P^*~?EC=3(e?h6zA%RCix;z)zr9W^H zKK`ZCr0i7BlUY_A56H)pDVYcnRDcwAsSPi ztnH4#!RA@Tf9o^BF1p9q#?g#v}*4Ed6 zWA_aFS>L`Ww`xNv_PVjr8=rW1z?a|}6gCy5WHbdgf43Z%ok>o_%T*6U->mg4@1~2w zpHGu1=km0`v~?ECW}C-00s#Y{15ZO)w0yQ`Xo`FhWrd$HGa2z;a{1 z;{6AAhvJKppkq=B*e-Ch^ZcCd4@Uybaw3uuIdk~SIu9dtF;uBvC;|C8&%ESq?JOgX z4)?;Ze?X%KK)S9&w5H3;NjcoLtmOr`&9^cnFEfMv)*;AWB>{yew8n zpadxckPI|t3zTs? 
z18~L!snH>-iHAdAkhrgbdkz`{Z<~FUI_kZ4!UJGEQIQC4zP0`Wp zYq&eZM5Bd9<~9iWv!Nm{1CIYvm2fF>1$=jfSKhi zw`h!Lu>O$p>h7_Kbtkq*q?mcN*l64rL-G108u=nQ3fRrc6ae$*foAar5*TWkf3~Rh z39w^BI*V;sthLA`bBcz(avGGFWA}*UDV>2-;yW=^=J|3h*o%ihJ&k8wMPQB{=mIMX zYufdi=%$c#T}i%$e@nIXQu;c>Zzf;AhL_yzN_>DQR#vDvR#<;ZP#OIJ9{pqjHg2OV zt9D@GwH05SwE^`RXShQharv#ie?U^6#HKBP64h;I6$DvoKRMmsf{uH;bJ{chtS6n= ziP(_^pk@>L3%;3<=S#dkWkDH=Qm*>Wu`Ofzt}pNs1?}-Azlx0cTv}qqzYFVEU$jiO z01vsS>dpj?9i|(Z0;#X8?GJo=4g;m}11Ml0X4nL_Fp?w-Q(0trV`i=$e>0?RA&nL~ zV>!@&yy<|k(DVbgukmIZ-9T%<1TtWkp1f!#;YGDe0;!JLMx1C$E@``?-33nrg{=`Q zfP_Y&g?$@MKjnUx-r#$y9+v}%b6JTe;orf5mg$N9Jh}=PuIB}Eh}#B2$Bof-;d+Fc zN~j#mjJN1UUZ-KYRrXZWjk5Zk1eNQlx`^{3G*?l3FnUT$_ zDs1u2z*+HuyvCRc>~mV9##e9Q9@K#A3_1~^Uj>&If5_dFX-6#+cH4Mwm0x4+Ql%w{ zJXIT-AreXvyW`mbE&}AzKr1t~Xl---ODFkNam&S#V%l2-8X?s;fBurAYSiI6poL5afp?%?PCrPRw%5Rg$oK>wc$x@ouXAk2R|fqJ zK0ckz@sK=S7xN|kEAZ|yd(u#B+-Az{-zvaNG!iY(rhYtoHIS;}+XNf;j8y(YOB`F^ z8H4sjZ|m8&;w&TWs@Y8D{I%ZN>??LmWi5xP6|)IFfjjz$e;LlSlSx)u%Gs1Lz(Y4k z%NQ#Lm#7St?7K34p&_o}4K*1nsq(CJ`FPe7>OB}9F#sw&m9}abuR}xW zG3Z=cW;i|PAN?-E$PIae%!+ExF%vTYmdUX}!wpEB5p)Le%G<*2xNF);@#IR*@SJlS z`1u<)M&!X8e`n^XJbM`jzW~{L4}{A-OZPPgJqA~u(HBdExsL?vgZBtZ6QrH5^jb1t z&Bdn~cH3C|Jne-3t2CHcaY*$uMW@3}({Evs@LcGDT5cqQX`;R}U8S!)z>;?G-0#!H z@MzQI=^$cj?~no@xHkXBv5S_LK+wC=IiEqJ-!3cue@9m(5{~l+XcyTp$W-a#URlN2 z+J-i$!b_MEx1&>T6+o&B&^5@pH!n5n2@hP&bbW#@TImo~|&L1yBm4fQzZ z&}TB}vW8tG?91FisL@2!r5>X9B+M8$BK<43dtyS)<7}3`5|3Zebr(WdcIZs=dddYt!#UA zmWovDX>w`%C9Y1>TBOJfA8$;U3icj9;U;JpsDTCuR09ic-n|{QdP6y&uQ87IuO>^E0P@)sIqBn?)=yHyd!gT80^Yt$RXD7Z4Ywc# zegZs`!v+bq0pk+c!UvkA{0??3kI7bQza#$B@pBBYPI3+x7w$N0@?*$^at6n zu=-^nkwxkR{O7{-L$&e>iZO6_Jq1BZjnv_5mZx?96^8v%A_emEn1C zOvdg*Pi>Vj`n`_;sT&i~{P(CuBEm`F%^-wwTV(DQS@0|TROD${`hHBX<2_J2e6h}d z8(w?<53S=!#YG@25bYvosy<3M$F{%$u6AaWUxp5*$U%Y-<^*AsMry=;Vkk2IfADS) zbic*0Y+;qU;JF_0`RUHO-qT;)0M@=>r$7}&OmN)H+t zM<;_gdAumq!wdN_=r@XBSVnjLt)Z5WbQ!H50XkWEX1o?c17-G!ln?I+DfiVu#C4C z`|P|jx?{+x?S|i)A8&>0@$&E5zbpAnyTOKgu*CZnvBXi|>B~C6pnX#|f1{i5;V>Mz 
z!kJ*UP-p={o(L25LNNwTi!%vZOUshBUbG5hHzh7}#WkcT&p-m~?bHEm?nC=Q*R$^M zX1?r85yA62LB(ej!jfsO)eF6{w&(Oj!p?K712Au5VDeIN1F zQbwgb3_?1F5eM^B(AuKQlZd&_jL^v%lo7c0j7qH{Hmc>$}a&_}j>Ir_s0`Ou7!n`z0*BZN@>mc}FWQInw#?e7uHwC*+98h3fBqdgs`7 z*aluu@wI>H8fM~8;rC!dTAhhFBQzUD}zhJT?K|d=8+ne{3mYp|mGw zsam+gL)L@BQ-!^mtn?%nM7=RKpQA{W4e@@W5D zC#*4e;;pG?e-UV>mzsW^y;~PwF5z$UbrB-?3>no(YTMzlQqWVntZjfD@cKeB!(egjU6|Oa7H#HmuIv;v{6xA#_qpbc)T2?tsQo|v5IL7 z#Oo9A?U_*%6u+YqOy8iI62~A%>w{99Rx)gfRN}ZY(xYBhXu-%p>d2r=z>zs zj&)smVXXxPTTT1ow9k2#ACT$I+yWdO`ses!bK8R7G@Gk-~>URSIoue+5OZ5MIbTkqG+y7mjXNQfC|7V*q#+!>vZF_BQ%jgLa!Rhd?ViW3tMSDp4Y~ci1 z$PGykv)~r7jm0RVYzY!6e)thh+T!B2f3UYVcUjMTrqL<1!{)zI&|4r<)`O}&r_-B?bhZ}g zMX7@?yc2cOzP`M=)+@V^e1CM}Iy*ass59SneSNo$wcG7{oyUO)yiN;m9$yeYe<^P| zd*JCNHG8^4<%F9T80J-|h{nVXykeXj!<3lfFVZM!t;zZfarIuvjh^6tr-@Q$4m;x6 zZ2p1+l`2Ik6$-R*i&CnT;K86!A=VqA;b3L`bh=n!_%Ss%KRGP|BJg}nyy5@(gzb}I zV_jI-5^BF)kNLmYyQd&cg7!i5f9-zTdK=TWZQItgF>TwnZOpXo?rGb$ZQIuQ{=0i| z&R(22v2h}HFD9aXw-p&xnU&9z75U`&o5>?9ot2{afAuIejYGfr zwM2ij>GJhkCdY%xzdlHWrx4vJ!^WY1Wq8B*z4A&UL}M1L7$HU47@7zf5J=}2?zK-B zT!d=q2uy~W7ge_*1E~_C5<`X{MTJ?@VrghJJ<(d9tSh*_*{U-F-a(2>3^&OhJ1&E# zArT@_C0jWB+}OeBmMpG=e^GY%rQu|ojB1hoD}CjvsDfux8?5GSYPdJja{(%*Q3{7RFMR70(SxUF24urOM% zxTH1=S~8n6TB|u_Y{=#$Rjt8bVJeOu1wdq_MC*^t=&#C+04Pi@e@rZ!UUp*wscMW@ zMhzvB{t1Z#HBt@(382bvMm8T8rubylbGp5 zu~4Ndg#C(}tRTRO8dH@|20>Svn6%#qjvEk&O!+_|KwCVVe|1^58rN^x?I^M}h2TM{ zE5-DUZ9X&FsnK_(h#2Lp96<75iwx2ib7$Bv;fTudGt%IM3I8(;{dd*`avKbAgiOgC zsE~`m3Y9V01G3>rbVJHLwKa=>(8CH6HV~0vvoYXL$87?{&E-}av-i@vgzIb&0*;9` zz*d-oZPaRje@;az-mdsF{;=r6fIdc5mRIkVQc?_MYATX*YCeUJ*W*)4mms$DnIjxJ~1bXB%^OSoybV1rzg0X?tS zhp=H7cH7};=iGk#D`N)(f^$e8=ms?iU%@_yK=i7re-trL_iXiuRVHUFWUx|?2sdA7 zX82&JifhVI1^K+{Rq4aE%SCxw%z-y+Feg3KFaya_ptMyXFgqzc4Fg@c$wpmv9%P(c za7v+eq}-D}JY>ndUzEq9L4An-1$7ksTl8Z}LSlD_gA(_RXhvO^lOL0q~(gSz!q z43txSO5C6$9o=N>;7=kV$53PlwdlzBMy|s^@des+CuFcJ`xzm~_bN=OVEjt0Aa-m^ zB4<7B-8T9|#|%PvNm5j_CL0CXG4NAWOG;DCe|Sk{X4+)f0*7DO=A9stw3JkRbj0vU 
zW6QP$_T!zU8ek|8Y$UYiH-fg@670XlsKk6kJ|i?}m%1sD1`O-?#~FFI8!jqQC;(&W z)S{NUI0OSBPi{i~_AqvRVW`%ws4eLi)R}$T!ZrV;AQlf4Ki=5LP<7 z?EP_*dPGbSWO2O0?Ybr^fr8#vkx48l zw)fMD;>v}G(&fQlIb0bZ*R1!*gvj|}e}7>C*x#Rn3Pp6Pdgg3oWV9{Dp@Ye@8+X;Y zY-ef7jg>lnpd2Ghex_K2$_;<4K4UJ)0zbyFDgD*%CtPHP_JZ$z2x%hzXWQ$zXyT?M zhnbI0Ztz}XwrwvZi^ol#9vdXLwe_`V<~|(;XoeQkQXTd5sfAP;GpAKx1e2T;e@k1_ z*BYr>-^$r-oX3{j8J|QUKq$Zxq)cSh-j>}Cv`K^brMXlw&XQpSHu=tgeO$GFdxcw>x7FZb|O#)9142c{%Hb4nhF~SM_|I3xrX6K z&(#2TK#0F)`~9i&8RVtM0VjOHe>G$TR-UO(?EV%hc=pX;_#1D?ch z_WR=7H67A&k?iXbTz=>M}CVB@T$1U?T$B4*H1WJ!`#cUf5#*?(=S zx@qwC&zo-D$E2v!Ev*(CLm06?S&K5ffyki&td(owAi0K!T2vR!dIKI$$LU5QC&t`& z8ke_`JubS41l-{UlCXosG^sT&kC)BczQ&`VAh^RGyf+;`5a1Wh-2L2vB0;EN2AY8W z^k-)<4O z9&jYy8&~qA*G2#sDxUhS>6VeVAy_|S467 zUFR!QCm$ax2iyGS=I24@^On;=Bq#_g8yjEXQh|#LRrI5_dzBy7A>^WQ>VG0*`Bb5_ zgx7A1h{y6X)ka$yW2#^%sC3B3^7yxulM|ZNvTMRL=Fs~38vb{_XBT5nm(ROA8=aN6 zh6lcl)sp>~#lcArVX8$Qa>8H;$hL^8bZLqe)#X-LyA9?VYwt6oY|{R9=lE%yfMNgJ zqjCK_R;c$&)4DB>?;t3MmVXv=oJElymz!%mx_0&PqhZg@0kGI4zmM^+C#}$7MLVUh z7lWPzN50N~?u-qEhPIE({eofDzQoAPJofVPLNx5)>&m4?54Yp>>^-fB8;I;38+KXVGIYR2%o`Mm4$B<=;e9YviOiw~B= z>F_77m&LhSON>9KMEMqftKMw-nqGa_1YDKv)0jK$JuYL^VGl3MduE|GV_(8y%L^RE zowC2uMyM#+o11l@j(_Bu5b^-dV`xZxn-%_bf6!b5*yHwmaVxz2{ht@9(Otzh)Fy{Z zZ?OUX9esZUH(O6SE)Ec)=a@`rMp7wLwcljkH`-SjH9nhmz23_D^aBTZnbLK8ESf(h zC`^i_P#rOzCXdCo=u68Xe?4Qt3W&p@mdEu=i-=$^!uu^yX0@dl;{PEHYP4Kc_ zq@n#+boldm`+NTBKQ}`IMzsLmmaX@aH%BB0Iu!i$3A(OT9CfR^+MY2vd@hT-HOgJ& zQg2pT4Em8IB!9%yd|qb5o|7aU|6W>l{FV7NbW=>;x*K}_?K7?G_Gw{fSEE(VcBl+? 
zu~H}U`0&8HvbFT(#32x5Hh5!*IQRkoU!m{Y6wlSl`ue)I&KMh{`i}dttkJ0wGL#R!x|n01fJ@@CwQ0XUX><4|C8WZRef^5 zcU-kzdyGc2^z;BAW6~4XmIOqB!X#zVOkYZwl1bQ7>%jtRg+y#}>jGMpYp{h)ke~!q zbjeyWvVS<0`RriCk^S%)%bjj5HBM7})1NwCRbO?}j3Z8w>Ex52Waq{;NuAGczJ77n zZQS6 zAwO38Do<$rh^O}LHr-U7E^L1JUO~ujpQ?r!%OsWW<~w;<|mICRdQ9&egL0AT&*|F?wdddm0Wr7-n@| zcYvW^L8TDB(Q34hL!W0Qd=4OUnH`O@IuzFQiYKF(6QCA^`= z)dhmrd_b>ZvCp2(Fe{cXy~m@CK9)=~i*qWwk24s$qXmz9Pib)4>A%`o8k++g1j7@9 z*>XuLuMF<+HF*i+hMd*CoR?9jy7!3Bco;x;f7R2;w#hS}#FKF;b5Sl6l`bl70e}0l zz)k}u$9g1T$P2J+4nG?Y&A7*{kOuY*chnENr0HG`bM<&Ma>~Pet3u#S3C)#7`>RTu zoo)8jv)`tEfKBbY?Qt92lgM|@<{}fSaxs#j91=&z#p93Nj@XE@F3?`+7dF|01os{B zMH;8@p3Q5X=DGyzbNUZfJ(y3#+<#P%6bghS6YYS|hlBjAX`kO+FnLt_{Oi2Z598hO z#ygioHLu)4hKNu;XASu}QL==u*LnI(uEp)g8k+jj6^rwjsDBMPQ zm@t4%g$`%8sSZW0TO!GQVm(pIGjNNCfpj6xjVRvVB2e7^HvrSQtP$J2(SPDkE{uGq zEJK-~{I0xy@OpGDWv~kPZt|-Exr>SN3{|z&mdbsJ0OEPGN}I3_b%-WyBL19;iTo%? zCDiYrQ6c|$^x%r|6UnH__Y=eDSuXJs(S8(^kT?o41^3*ai(tlFyeA?8}H!)2Gv1mHf=vXCYw55}6t?q8m@4lFQ7YQ>R zr2{SxUO{y1pQHc^N`fY+0Ekn6RY|21b{3Ii=dee#3RW{==jzdXpbC)H?V{JN6J1o% zb7_q^<_~!kCfyv5PvF&Dee`?9Ldh^h41cx0P#e)40x~mY0o?)V8-LeOA==FkakB%l zQ07&l-4ONu7h8ve`(D2J`^Px0uCO~?9Ra~MeqjJISQ&5d9ZI>fr%X@FJ@5H&?0>p< zERFdDtHs4i*tRzu=kEC9Or|41gnc95XOB{whT?`MuofR#6}*G72Syl|fpSqsvT`w< zxc}gX_&M{MSO5)tzkk8oO6|-tr;-2zbqW*ps8V&`EgJ5y7&mGeaO~c`qc-~QO;4F} z7EjOCl5M1QFf!QTBFYu1-E@Ao=r7_?;4jz?t)#55Eg`32e0jb{($0N6>O2vjCgDbW zC5t7857;?}x}{&Ry0I|sR9SF*V^Rh11#pEB zRFKNB!9W1f7=JDwNwaX)CgSD5Jj2BRLl=-xSJ9U$1`rHuPD{_&ZiQH&vXu5tT7Xpq zFP(~5o3u250cB-#N&z~`Xu1%GuL`2?@dxet?C#JoR>fg}%v?*;vIm1RR**UX8H33~ z0}zEA8Nr*rFi9dzxc*sj&fu7-*Y7T+a zV_9G&p^8MMzrb`lgb~s)i}xq@C1FyQVn@lzrnd@6MtF?(5#$LpO)rh(v{aO;S}E1@ zfYDH|vwvS|UxN40izgs$2aX)md&QG z7&pJMt?%b;{&k3Q^)s;VSBs(RZvSEUk8+y9{C`F$GPwvRgg<)`F}5@$D^X|E;&%s^=0sJW)GKk zPJf)u7LfMSd2@gl%vy+N7ZxU%8uc?qzK&;=w0vGAF?>@h*DS^fJdm(gbLaGn^)AK& ze^S9quVSi7!AhGXq7p4e5fhEQEl`oK_7#d0`rk>&deUK*%oQ$WN>EMC?IzLUT@uO$ z&KCV?7e*U?vQt26c^vX1@gj>pd+c(@_kRuDSI654ulrCgp#Pm>xk?<)GRHk@LnYQK 
zAlYoq2XQh>{due~tI0iO@ASh$<>S}iRZYItEWAL^pe0t(PX1xUIKgAiYO)8&F(&=C zFP4wxid~iCyH$4XiCZKcGNi7{EGN61&36Ar#;ajU_0$krNMV>lWWZ@hy!==W@qeF9 zxhgg}ZtUBSgkvo0YBV)RofLz6&=Vr|YmyOwhzxhEhCUsDF9^NR!$838n8W>Qv6kyt zc&A{QsQ}k2L*Kz62-bk5q;syn#=JvN<=_W%OfQx-vnV(JnQRFaFjn8>wl9d))bh{A ztdj4`BtiXq5AD5IF@=qrd_vI)ZGQ=NAY|uV2Ol4g?gks*q+bU{lM^wcbi(=hk}lVh zfo&YQoH7(20;y;MQEB0V_K(xj_EYJd$(`v*MLn~2iFxiDUbcjn+d(jJ?uVO09m zsa&DI=Xi-S5|+!bbM|%MsDHgKa=GI>=!3Us1r9CVX5rF}y$&YBP3eNj z+-!KvA%DRZNQG0+FK6)vHsb}n1o99{+GagJ=$!AdXD;vRo$VW@9e+&Q{H_A(uJ#sl zB(`H}*~|1k2k_W$_n22cI#=8dtUNIJ&mEZjFaMCvdaSIZShT+riQ+%@^T_d!PBs)Z zu5gaL_kwbYpSuV5I#9sIha=o{Ed9(8!mKlyY@bhE+r~FzoPEEG+P#U!dpx@g-vV_P zFkKT$jOMCbEUUPKCV%PV`PaXmNyd*DbdXdkZeE}T#&=?Vd5A#RdI3KbpkFt?(wpK3 z(OuJVwKbU(rl+F>6eN5)zF>tV?~J4MfA?cxlhJ+J0%M?V(ZtUQT9`lU*O;2%N*&c14-bUHS1$U7}R5aYsfOWBJu}X7g=mYm#W`8$1qZB_i&(Z_D1; zy=i_m+wKRhQXNArqi>qWZ#OrHT3Ln)*s>0IC#;9je}jLS&c)gNxA;8eN#J%L%p0RF z{pSCX0KoVgLO9$(!Z>WvdJ8fBA1)T?XB8&Otzs=l{p?6q(x5 zy6PUoe%o4B*FINHQybn<6b`7vFib=d3!YqNld6pri?zu$e<@q2su$C6OsHlM0tQS-Y{Ouw9>4}UCr_Hp+F8+(#6?xMOheV2| z25||qDK4+iWv%V+@9!?VsPelC=O4a<^w7BiuWw7=KOqqkWbrF!HkL^anqRpfnM%X% zrZ4G1aesPFyx=IKr)<6>h_mYCwrMCKUj&p1YVAjH^Jg{hK=BjMX<6vzrLFM;eo4_Pi@c(KjV@?wkPFbN*O>i z`p_`+MWWZ!!)^a?idC{U@_AvYOU=Y@L1>K;^nW%clp+k}?$fVngX9Em7%s*L&3*ZP z>~IJ-`W|dvM)_mxoRJZl5|I7zfgCYF11bU~EBLK0PM97G$We=MK$WX&siT(@j^Ih@ z6ysQ}P8@&NI)mxhy#M5YN?|ev_ezr6}<8N#QO@Fve*LskdJENBaU8@y8=>_`#q`I#^Zt;lA+t3N}A+|0tx>UXYJON|ozXiUcV}Id^ z{heRlH?J5!mOQsLp9;L)xtJtj-hp38WG4sk^WkVG5A+fI-n>*RK zjVT0TfX*^Wd54UBWiVu(j!M))GJBss7rkFxoBLLOWIB+^*}2FHgG#}%Jc68U429cb zFGbL=QCl+7)@)u=FK!4)Y&xNpUurq=GffFE4(nTUwB;MGRHU*xO*zR{Z zFKPx6(_Bd$aghb$mTa|%%^}oF+-a04F+=~dN7DD0DF7FK2u%_m75fwX;A3S zfh+5>R9mo$Y%EmN0)jq(P#c`s0^xL`b0@1qwJ_?tcX0WubWY%jqFU~Egj77l9WHAT zASusY!B^Xq;JtXs%-57;et&)LC_TK^F_N(*5DTuW1+-k^=Dn0uZV!cgE@*Ea3+=B> z_>afzQZ=$_(VfpGh->!gj%j2i1yzCUnpTD|w%6yem2V&Gu;*R82X)FwFU>A)oe+bC~bDKFz zew)q?nqQ+ns3!>DW`B4zY0&BY9bJQ-9gh1QA<-4|6t4yT#+7Mi;{Us}aH=uN{M+Rq 
z<=XJhB|WRZ9rjoJBr@6zux!|?Yyg>7Im&Ni+D{Q1`g?gmDw4n--Mrz(2yPV{$bYB74g%_VMp0NI6op&#g-x!}D@x*@5XEgsSoAjNj!xVW3r38vy0PR#Pl zzKF@m4}y<`n4~?3Wjx47Bw9JGr>+LFv6*l5k>NLgq0-_9q#WnD!N@9KrDDzaOR3kc zQe#C8A+TEd&wqx64}e#8?RetNbfE#LOW{h5!Dvel~#+D?l7N zyOu6c78x$;H~X=?`lxFBci2XmERjg54TPwBH0u&;=6~dbg%EvUQIVD!5gpp_wydN2 z^Z_hI*K7utDN>U&(BrdJSwXfV!Exh^9wU-}P9R2?=Z%PE_JubFs}F=U@B-*8J1>9v zh9h;RqHIBu*}MCmN~MjD8mJq$`uohDfrTd1D2!2KFVy_Bt8DmO{NN!|X4CDt1-Nm3-<&A634kA|xnxsUg!y;u7)qjB)!G^*rZu zcu#kp_`Cx$`1%HVJyh&;Q{u}VP^y*1v{Z2VGK+XBD^FaGP=bQNSpGA;wgU=0M1SuE z7r4>)o~Xn>lCrs;88N3m>(+n!Ol^MdBrhB(`eA&Su%3%#+gvDTv_PUqgG(7=LEhc- z-9@cN@n5_T@8Eokv$qh&@L^q&C@CL_V+~;b4w=M3?O2CrQnV0OW{^?+1TDTvw+?SV z?{8@>iC+Vru|Rb1_EQfxmVW*8tbcxAKc$iIEtSD!oW)gc#gW1`u|J0jGnWc&LhU!q zdBBXqgtk-tV#d4tN*`j`jxY2Tk`-J@_rAe_t6X<)T|WgxX1}0#?ut1#XpyV)he+AA zR@Ar}`bi>k-W4s6+=J%BLJVLRww;F<)_zFw-OlM#_`WJkUh$P$1>)q2DSrhL4VLBJ z>x$n$y?b<>%V=G85WfQQ1Q)OVG}%!_W5;un>u`$S`hEL9R153S2)*GBb@sPCcac@e z>j2>$j`f7E!F+9AwfGTzgDtM~N^S$D%`js*P`!@onR!%FJ7hzHanT%|>!we}vcIR+ z6u$D{r<}>;w)allV9jlOt$#wJ&J=URoD^>7+yJjw-0ybfHyXQ2kAqCnomi3Zb%^S} z#U!FmUK}<1WVA*u_7%4!G_pUG9lkP3bjCMhDhOS}fg*(hq}};69+wFWl2%PLuVSo| z*Dc%~e1xD5Y{ran`rJ`|_W)|xuTb7COsvN+#cpXGkJHSKRZE-<-+y>-_iRNgqAV|I z=q_K_h&@PwKcB^&-n%r+9ARq!m>+GKiN{Xc#K7S~FEDFx?4QOqyRnvA>jYe@_TV(b zb*Id;8~X5BhV|BIEbJ4t7}Zo-Y`GxRMA|rlY!rdSE8cLdCyQlp;+9^E^4A#)A+#le^e1VVZIjWBQ6aNj$8*1iK4B0| zdL>es9^qrSOMg5yk&~Iysy!J8(k+=LRET10f39U^u-9haKyn(yAN4 z&8T+g!`63W_ba8caf9;lI15K_c`IFE6NVvJavuKSy$CZ>&uyh2e0NZ5Ts6HNm17Jh zO&+OBTYqes?;R7o5&B$kTe@?+mne5Ij`ig&6=dt7+O~?TZ16Msgjl`u1hhhzfOyGj z%XZ&gS&TNg0zy*fgRK+)-!5L3;hh8y^Cq`;wj8qb5q?Iy;De)AVsmGKqKzuW+wIm! 
zEC*K<;9{Yn@YE`$#_DW*-K6_p!NqNbCy9z1dVfyrUh|=~zWBnit3K{%3zx2x{;|j` z#MSiZ{aX=x7b$in<4>~(Hvu*3_?fSAXv@8-zi_c~Iv#=^oWIZ~_c8Med&p*8oUGst ze6njFV6m5#RJ4^-tF&#N-9GqfCwgSRoTLiJH5^J)DRgny$Hx1xzjP(uS_h|W( z(tqAd#B&l3=8$NRT=AFt;&Zz0=5J3Mr_KEq*rd=~mn@lF>+g}(d+VvpaYJT0@yW|2*<(#MQ*~<)Ro^x+e|904SBH9@Bo&9L&_D{zWrnHH%>c9^D zS(BMX!_wZv(>4$w3fz3f14;ZlrVRfM+(&V&iYeO;_&zQR%0ZV+2DS)kh|V}CxG zz?Ve-p@BuepQaFClgqw!ff9kF-Z#=wHs_8O;jxUQobkf$A1zc_prK2u-xb_=Y*|ibGr=jQ|2-P9GmN7u3{&RDd0TQfEG9yi5O|5sLev&y1!xORZ^Q0 z!-(Mkvzmc9dK8!1>g5ji#mjKgihqVZh$+G^ytSkR2fTzSWP&@BHu4k^l~&bPR`=4g z_K{WI-UU~9yUM}ipZzr7)?FLIaTA32 zi8p)o8TaTxfVo+Rw*F;NS)gDiHQ(kAyId-4=7^I5E1CR(dA2mLLib*PEr0lFUXSD8 zgiZzI=6Z%D$F^l(-+~kT+8fsv5ZXsgPv7iHsKAf06kwYV3lv@aUDnoc0v5$wgRkKEt`Hdvwz3G@68fW+avHZvXye5L?@jt3;WO~$Y{qz_XIR032 z^V{~?ZYSetUF86Pr6CJLM1PEsODb6^Y1b8~iZ!UwT{^ePa_}q_RZXoJYJ+kl3!|h( z66b?SX|RH#62eH?F0~(Qypth#F`ytc%-qI9o_RB^G-i|Walsr z-BhrWb{p<53wA@jvCY&3_%;R%{0Qm$NK>>5O_ZX)xTdg!?*v#=p?`p2tw|FFlNxBs zLW1T+QnleXF<}{nHxRRi{<7u65u$}~sfuXqbHVMX&>Np!wDQPb?d_eY`uq^F5~I%0 zOV_JAE4UQ?%2!3kmy?y(*^JE(8rKOUI|Y(wkW-Gq!J%7#azbHQ;u3cH_41c=A#xOz|Nn#PLO$ZEpAEV%4+2c;x({&M8 zL?f>VgHX_51k7SjKW{II>TQ&%)%nLVO_aKyG7pnqd(n&u`xgEXGwn&Yf+uJ0h9~q# z?hj|Iv{GFuf=@{16y^^zhED}j9g=O$B8nVrBp^1DuUlX)wSQ*7HJooa_sCV!t4ee5 zT&Bi`?4&^%gP6e6ILr27Wz%xN#Pl{^(kCmooKA?zO>h?hg{>NtgOx;K>E(Hz(Az&$ zGEBzE%kp#bFCgz8(ERSdqcUSu7H}rj5LwXRTRpbTIP_C(2#aLxf@XkPFzT zW0ed9OZR%sLVv`5ac+GtByq?ln^uf|f!=+>)$(4OfiwG~fYCun0}Mg601q2XuNir= zn3g7W;q&GOQ#{`v!28|n4A7dC> zI~0bbqD|{BV?uB&DiBbueap_3t;%c+dx_3))7%6?np%g!3A^VWYg7cOQvL5_QX#gxXqLbjKdWEwQ$2^ zWvn7STuGP!GO|&|l|AU)5N5T4>bY~es(U_RcWlCNVlpw7#HB95$W1ybd5JuuixQ*7E{ zYI23$V?5J(Wl7zt=|fg7+~zE#8G0N`2Q*6~$A6J`DNQ{zP0yAAi>qSTRm%7;EBUa; zQ2=ZHf%ZvK!I!@ZNiEq+Q%v#eeW;EZ%nmrMg*y%#ln<|kZNjbL{7ZF)G=FRT z#jaoY-u#Z;>@nf_u3^S(IimtNbVN9L#wYl;jnAqXSl)MnS{~mL(L?%)l5>!2&VR3=b4}o*ble{B#x~Mm3%vWAgkbY6QwGl1DqBu- zr0jU#hvUpjMBaOZ_M{;T2d=2%HM(#u)}3ru89j!xa6eWAW_%x*)^IrfK6YLk8TJxa zIzw@!4L^Bb7^aTYXYjF#;mDODOb)xvIXy5gXFI%!G^(9*bhmu&mj9Oj1O_ 
zslp2d8Y#L`i_JEC>C;9$b;RtM(KhdClIKPtxk(H%5h!4b)&`iK8V>Ume(>bhxrTy5sT#?}5w z(gEivSf!I{5pbH=TC*$7NPj9+2^Fd>mY11k>PUzkk6sFVe1{qsc}#fV6v49q5@cmp z-Sh^=xijgUQ00ckqiE5)w?*Nu{2~O6&GdqG8;q?N&^a8Aseb37p%u4JgygGtL{7vN zc5Z;8Lo`$7TNj_~wim;&z&#h#)&mqq=kca-8m+-6=2uo&*9|0h9)Ed~EHaD`mY8#X zxM_tLFJF)nwsbuyVvNIV=3?Vv$cOgxmDkg%cw;CTU36ql`|TbnkHyMobr6c9JL~V( z+4Er5=w3p_N%Dxr3apN+w%5_w2Uf4`FZr?O&A82Fi4F5#WJ$&xwgaaO`6=XoGV;DA z@Xy|#mA}us4W7J^YJWejzas*ip6Wbbv%K5cq-jDW-A)FWvRuPTl_=;Zg9f%Quc)!L_RB1#m38Me4{LV~+E&LGyWUQhFJbk8?Zh^+6^bTxGb#VMq>zp^85<6hY8j)&n)$< zrmoL6b-kNv&oXDLg2kgERhLV)wLeV6lUgY^$C!)`!4t;zVPVGez)P*=$T!6Z4VFVx)FDhYFQ0^-_s9W zzmQja?|%jvv&SlN@sDgV$nC8l+`U3vw^8mz8V9alb2Oa)l%F=1b-3us`K(pz+Pp_o z9W#-S#w*^mHPXfy!*V7Lc|y~0c*q~yni{Z7XhL6W>P<}ND#_i)LKCBakz5RkG5RA6 znjs6OxYZW3YPT7+J|A|&7mX3@BImTNzVbaNF@LJ-4AUAzK>m2S&iPXF{hyFU()lrr z>waT-mc<8i`s0=E(%jc&EYhZ9+sp&IgLUF41Cw$`; zxPNQ|PIr|&x7>e3B!ny=;T`Vi@|1#Qq}lZ`0815QYe8;9g#>2PT?8?;=xD)DTx6h#>BIm@>EJy7+D1p#}scesm|Y+fxjFR zTU5`!x}0I=o@bR-Vo3f@I;)D_`w!%GWp*D_5=2#-ebV0Yz()dz z<}m-n{iS6#XrU~5giN^1EtI3BGh{NGBX;(Y1+vt>ONF}f1uUNC>OA{_vlab8h$-H* zHZ)<`tOA0e(VX$PPAJI6Wk+;7Q(?(jUGr0~H@Q8opHODZvcS-g38Jt8?|Ml)=YJ7n zItH`LV2YmzdZdeP)v9uh+9zaXnd_`LiqwM4Ume%VfZTwQ2JK^+BTMUPqqV<+l5cSY zCaY*Xl4@h3*7G0?v^-`JUu!hmP4_*3a{@xLndw z0O~SQGV~-s0Rk13&w66Zw3+fAC;QyO;iKx}F-1pf4!@Z}M zpf;2~=lFukt6fi zu$H+kEz#ihv5(HK7q3&1+9T{4GcM(O15jMGB=uTt!}r{@Z%Lhgy?=Av>g)F>)9~Ie zHKX`lkkERMYfB>06TVKJp9-Of<26JwLIo#B`@K22Ll2)g;bNZxuXNT7*YIT9KS?mN zMv7UuvLp$UzshKoz(n+@(m7)v9mj?J)0jG{XRUXhpA#O@f?4VT%VI8IneA>W9ea)! z-uMRV#?xDZChM+iXn(^@m+bZ19cfc}{VRjS`8AjpUH}>YQ1d$oiSN@S-1p|+4my|> ztEAnpI*w^N5W;g@&f#-z-OGH^)fIC3-)4p9cnM&IK!I7>)fmmWzwxm_?#=rmh0tmjR$lcAQb}!)Opy7#i%o7x05LHO8CA0#R9`ATt~7aesW>-+HFkpX7$}!>fa! 
z%w{o(oHl&qyJ*SM$ppT^Lo8k>5fJ5@edpI^uO<9aUblv0DX{y^+oQMIwu86cvecS2 zwAx) znjE4!AM_t_0DllMn+hl{Yo%(xkG?)tn*@YC&e<&mR)kcm3HgwIR_qx&RlbgdVRhU& z(iVlE$sulg(_}?RSx=KaeV%5>9--M3of+VwQ9iQ#Zk%x{lk9Cs_Ihh<<+XU|nYf_M zC?YAe2`g0D;dJ`txR3=^CU(G2Z3K@AA zKt=VZJ%Wwk75P(LH^P?f_P#T@+W`qp|Am-&mn6WHyxoXA=}Wb;QEli(S+;{Y;Y1pX zNTXl@{dfOK1wuiH%7w{iD8h=@^L$2zSPLRIAmR)iO3x30M&QlhV=yX5W<20+pQnuX z4ctpbA@_S;pv_-&OB^J^AUHV$2v20Nx+Aa-2_(kj+psaXg1dLfiA4o-`MHY~b`dOAOCd=O__ zc-^RO@Xr5Mh!_XPnpy4Ie1xrwB5q~9UclrU&wuB418UmkL*S|aTEtc)n&0R%nwYkX z=T`EGdX412c-~gp3g#`*(_mwR7dQ!7B{9~B-N;BP@E1^mmLCXWur4Vebv%Mn-2KEx zjU*5#`QqN+uaWJhJT6Umq}2iz-zVJ6BkS`ze&MlApzQO4^2ZyW2%?Dz_v-xnEl=lb zUw`?$8qP0*oPOd^p?U%pPDr}q(9CU3pK9PNxo!3U8d`&kQ_{0I+SY9b3?2+EH24vl zIfa;qTl%Zxek9w8eQN~3oCHQSD@;8RKczIC!B0k5Ii6tl~Jv#)Wn`#=ZR!VHG|N~uEdp%vLegbmGZa96;ubV!tAE4v zHTAk6&lz?-&_etKZBHCU;$K>SH={GMKU#YA2yOJ9qWaLlfeb=wh{>qZ+Osv%w3z{_o&gpwL!k6UPX*wi_FL ze*+zD7XMOOOnzx3s6_sWlp?x;Xc2a0(I)~F7TRh~a6avlB>LgU6)0#r=zmPY+`_G< zRGRZmi!j><@$SY z3~qxcZDc>lfPF~T;m)*dA=WF3!;c4d%N4;X2Uy}1rgbd5Ko>4Fq>=-9Y*JPUP=;<~ z;)&U*B_o^qQ~uJ8{f%Im^MB=RifroqzqoRTy7WH|K*{uq*Hw2p^0T^jn(}$NbgNiQ z)S?q0)nGZ(vt9OyqPkAI6*sS6Uv>*v~5 zK2}*Y<B5^1f4y--l1M0W{IRwdXx{F!+oHj@lf6}tgz zg0hg#cp8vZA`1b&Fgl^YHbNLwOu~i$0G4=`&|;ouC^j?^D15m;Eb@zQwYxPY3Wzu; zadu<#iSl)jvlpQZGJj;8M%=OYpFnbC%v@D#7F-`0tD^QF@>hiiu10*EU;Qlm9nMS9Ta5ILflhyNBVShVias^<0pTL>m%=4?i850l_6wi5?5sa(x{!ES6+pa4u8%>l zdy00z2o+`VO}syzHJeX|Gt%EcLYBZAkSH$p`QBC*mRDIGVYo*j2d>ip=>MgmSeUFX zWPE6DiGgca7Ju?P(D;!5Mt}?qbo!M@7CP0M;*!=G8!<`HSB;xn$5`$)>?cLgxnqkM z=L-S@N&Jn9hAIH>O7RK*28xZ+fQp>`9?a%{9W>w~-sBhPqTUrNedyYUuW>nf#`0TW z^s4n+rsBaeKaE_=^yU=M6vcrI(b0ECNn7)udCnUtAAkQp*676e-{V7a1?$#a=#7jQ@iY&9*>X|t? 
z26*M}53P885!?@4eCKI?pctk}e1~D?nS*{XGLPWQb_ijIkce^a1z^4)mwPnqt*KPD zADKCOuYY^}M4C#%6USWoByDf_f;%( z6rP>oW%o>+;ei&`!;*&tZy3()L_DxAe3-!FhfoAlQ6Vt$x&3_cK|r99^xAARy`E4n z8-Gt3cmpRs;Qd9==hBSAYZhYqYUTyjMuOXnnkjs8teAF;&D~Q5Nzya$@@;DSQj#DN zRW5_AOHJieNhK7 z<&6u5LeIgMBsuijmxj30j zCYk@mPS)pYRjMkfN~NBa^;B_5WRA$Mdt$JH?l`t zk6QbZB(n!r9Qk3qBYJj#k>z8fe8NtE1qyJrEgb_gvO7c=+E2vcdc>Bw4@PI%K4|1C4=?^e$aljqTA}6S00DN zGj_>Zh8uSj9%^t!EW-V`*c}YvO@X@T3ddIi=+XrFr*Q5N4M@> zN`QJ`7iuHO5_IqpTY2?tRvkC#Z~ZSGTZHrPXM^3;IGsOd(ZK2Y zw-~&&w{wyF>`$jLg)Uw&Y%CTRx!E5nUhO?;I7x_HC}KbgeY~I1ij~!nWDt^EAt*e^ zi5tbCq1l0#gR6UfDSMw3Q^a@Ik3GR&X-hhM&;28w9`_-)WR*xhki8hL-dal)cE?3kN@Yl(EjvH~QzT~CV zbmf`XhuoaX9NC4>pMN&xn3J*N{A&n(f5kxS+wf=PYV$pS+V4&ksd;m4nA^qoxc!ic zkgR%g{CegSunn8-qKliyu1Lq!7QBGW_oj8Ux(pZgctzBr|H%K@S?!h1L@!z%rYSkF zGkx#;DJ^wttdt`|jl_}T7UJSR%aBZS)05v7S267kpktjkkAGPL0$>gSYci8~5%lVm z*0ukc@qv6_GrRvi>)PTdA4JmAh&z<+C4C`-PP$MX^qh7J|y{X z;Y`?2bvx8DqcG0f0x7lB$Vz;4CVI%=BG8wy`sm9N)`*}P-<{)63Tq4gM zk5ib!+^} zuXlaD!E{CKb3UxC>FF4uvRRS4yfyaFJ-C_C;T@3>0t^4U*5v>hbv}-5_ zR2kRj!Uwklz(*TmCtasz^s=4iFtZt8xHDpp`HZM~Ndzkn+3?ml&lZ2f>ro&XUmg<6 z8!7H1H-F|Kt0x1;jLJ9ESs`!ovDrtQNc`yLGp17e3cdW>bq|L^wI^G_WHDniqUNcv z;)P;sq0M)ItqqHZHwbSt7OM-f6DD+|_cFXL_+IVkIF%ofC=K<=vNd(l(M4p6cuW5g7cW`YE7$#2kQ`I%RrPD zbpYb{w7=T;&@-uMfk-BAh^i`<5)Ec;FeE1BFkbC@cwMT%ioE(DX^t7s zg(iRKfnB*MzBT`7@*Lqdm)$2ik0x6=TTYJn1Xj?`Lp=^+I0BUd<1eL;YwYO%Yp52I zr}qjcu6y#`biNW2PAs&|8@~oIrEZeUESitT8r2A>YE~|@9{Y?xveZj)7eo(o7gMqx zyce#3MG%9UrT#dk{&`wlqP2T@730PclHh+W&^5IV#Yi3yOs;?NxMqr1%vzoq0vu_M zC}VNsz|!m@!6=w0SoNi;Q=?O&-Catlz{z6_wE@tplJ`D9Z}cqm4Vm*qyylB}=f1eu zQ*&}GXb9Cu(dNr5vvjim&NA5KL9v^-mCWmWv?}QMfg>!Rl_LaWwITk^Lv%?<9PWQ9 zDiRw{lvrM2{lO9Q8*y0jjMVcf0cW|cwt=6B=a5^v5IY$3@+o5XO}XZ_?tBQMdDEXZ zs5ywLMn6gREp!=G2xcsw*&y-LSOlUrq#ag-APcH;TR41_oO1RA<&tTXp0GH5$mkeK zcXDh4kPTf(CS&r+pBL>Dd*c^SrUc0!|AL)uCKYNo=Ce^h^G8q-Ouzu8&qF zv|w8}Cx}!ek)PeFCK7Z9(rH)`sTq?neWbkW?0kcQOv^~laOrhNl9{e!!e4;Z0!!p_EKHJkc`p0dgUycwz7! 
zT*2}TWRGz=r8MI(m@@Wcj!>UR2z&1vomA_V=<{;kIN#>q?Am)LY6u!Wx(|PXo8J`|jwBU0vBPI-4fy4cNUe(55|eqg6B!RD4cJMmGLs1Jdmvi^Tc(Yu@y_ypLVS0rLp zA`uG(^<$7|!?0c=HS!mzjUc3izf-lY5?;{hSDhM0qU=A%QkNFzsNIN6j#MX z2xK?c3e-N89q)e`c9FFcoJ&MnFYIGbv3A}4_PY~)Ok}QvNr-hU7WA|W+=_)dxAOgr z5xzoh6Y0KSF0v#Wctd6Vl)FA|N__6fy}8r+wi$!kaFN1Ao(MxB$Z>tD#?i$|FAL;; zWk~e!u3}qFTOsjzxD-=5%G_#5GXrV`?!xdt<=2D!d_8~aA*g)i+VkVHu&kF%>kT*AW8R9g zs1cAlTEey^!ZD}UF10a>ynwpv_L@rpW|bE<=TyC2oNOl|5^7$e;r{0L6)x~O#^<~0 zM$Oy|KOuk6DPLhwcGFFQy$y)F4~Rf0hLqqx;`W^=!Sn8mxP7WWCT_j7zlKn}*N}06 z&{_{48f=pMe02BKCDkPIF%7Zq6Y(TDxL7oJMiM?`EVy*oMqPb0Pp0jG5(Z`>V?;6k zCWBzTG@y0@pw+&v*||Kc{YaY9i&AgmE3m-5c-nvGS&Aw@??zef*IGBcOr9qsxV`&o zx>``gDa;#Ah-otov*N4`=vr5VQpxHkDh`nv39~~0H{x$_3!3ATJF|H6kX#nG^DEHs zKO8_w{bE{X`|T$7;d3x<^_{-z*9xb&nmO(V4s&0Xk5PoRX1J!Yrtas*i@tr5O4Aol zAJ>07{OrAM!dJXGD5fLRZwRd;qcl47n9ezZ85MqF)II4~mI&d^fFnAJ)Er1R?A9we zp0huV3;ayw;INrjkiWmC26T1bS)@ydDO2L7R}jCAfuJUEd{F`@rV+7o?I}={W%2Hl zsHZYy(-RVfXkbq7gDCbE%%L!j-2;1OE7pJP)~h^M({qN;i79`d`D-q2<)&EXeLm#R zk*@UWs%`DZgAJeOe0@}-(enh~wSl-eHHqu)-e_Ff1<5jSI6vCm3&HLKOCM{Fqc7fYplGqY<^Zi8ppV!7=j?KQWDacNq7mpO^Eq9QUul1FLc zWcWhP`UgMS>R4}kd~xpw^j+c}sw$cF>Hp?uF@*o)hqjhXD{Jjhmr*|!fR9_+Bw0uC zYdoa}!f%JKcoVMHg@B8%QWF@3B-T3-{w~x2IX8mR| ze%|s}Xqs0(yYJ76;*i0Jjvs(3l-z5hk`qIQ#fsxmp}_|j31d-sRL>=w)_H%mb*k{A zliFz!kEKf%Foqjw(l@j8EcsYPRZyR)II z>r2w4S}FnVc2C)9s@qP0(WbVi4@eK^=-CGHpPIn(8p9zXX3GLCIOkeOmqCX_K1it) zVX|KixCINz`Dlm|NU=5u^1WPd^G;^7KF!+n0=vprZ;pBuPwhsVR^jv@jW$ssA;Vhq za@wCu0|NtgII&qcxOjiVO*R*GIYc$I%AHRTx0;>NihY5xSwrHYs7t?Wf>sKOf;o0V zw!ENLAMI^$Mq7mhamALb*a|6)OOyei2vSmrn;^5_z65ci)0JZiAjOxP~SO% z-Fg!|lZ^ff0d9YxCXPj%;THKdhh4%dU1DBy@00i4i@d7S7FRNCqU-gHTi5eKEmS3i z2$t>n{kAK)c+?R_;QMN~!<^45tJ9}$NNfA>eL)tR64fqZGIlHkYK{m(MWzHDY$4K% zK?|8d9gmz0#0nCCh*b)!`JziDmPje{yV1(rzrVl>Yqx(b?#JVT3LPCi{4jme$%#+r z7Z;IeHgtahepu1K$TM%f4Wlf3H3|q7sXBaNV>IAMO%l?rC=QnbpC|m~!N0Hv#D>uD z@MHZF_Viu#18`&TU7D4i?Ts?GmRi#Z9)7isyv>fk2x3vK2P8~bJO#@_*K9$=pfi-! 
z>5sqh+xdS=4QZ7?%*z*N=-r2dA`7^~P&n`^I)|N-2+=S&=FYt_QKL`5mAV=G3wY?S&;WIvkxXy6BE=Bb@W@yNlvOA|AThzH{&%b(0Ia zV}*ZoLg{XX8iOr1cTSD-j>A-(y^+oIg)J`}5y_rK+_+4>QI^#J}OBmj| zX{}X|{5J3Mapar#=jlH**MG(=sKBuk$oOj3k+1Q(G)aGT&i#JuMOfT&0&8IntFCcL z-;5Bp_M)l{Oe6m#mAD^XM0pK8sgatJS-PBX$}>18xOIK=p z20<&;+3TB;t~pJjmj2)yaQG_k3vTtm+m65C(0UiK10e^bh`VSBPFaJbWVs8t)JfzQ zUyM&&yEnH;2-!%rO<0q5-F%hi!Y}@{@`iCw-xuq1r5o)g0A%#R+)=ciEPH>sffPOv zPY5+9O;iH=@$s?JN$O*)QQ525@BiQ5~&0In0V3{s|FixBI}R} zm3hdB`6=mk#SXW#cA$BSWY`GQjXT49&F#P>KoB`-r}A*9HNMBuB!dtNNVYdVZ|d05 zN(cCebEbF>+r~>J3N+X%jWH3dgf(36vw2PmS}v@3UNAAghquP-W~!6F6=4~gSeAtluh7D2 z)tOOyQzR-Xs=B5YS%Rr&_+VI@-mO%#W&thrk&ofx_VTCi-7=&Sowi+UfKYJMf&7*S znm>WG+%V$q)ra(0QM4*Ra`_K}?a@Kiar{CirDFH}h?r#ywGe+IBQx`ZOhcQSiv*P^ z1qPXgG<}|5^II&_{W5k`KXA7tAyk^oJm8- z^2Tim>eTcMlfyT!MIv#DN>LHXu9Af`s@OJgaw%1+39lA)iY{@aI@ObdPzGrXH@7Sc z6-i9>B?(zN`pAEk{9WFw*{4~aXz{O&XN|gL*3B3BF7MTArH`j`0_7@`eKua+&w;bE zGe8pQ`T~~cZ-3|1JKkuu83D;j-**f=f;FAry|!@e&9V_&Nz znE%bvyjka+Q1RvK>!fQM5ops=vv&J$Ht$&ySdo_-JimV-z?XtmPZ3Ef91?aCwd}l7M6ZO3cTR z7O_GlghhWbz}&>9)WG?zn=$$^qjv9icYPH;?Aj)&xX8-oc7v;&WT!-Vq@k^CXzhFz zC0yU$s8sN$O0jCWQxw`k26t3cG5GdF&mPxHNsK63Zxli8LPc$rc@PfNVtt)|Bh@

B&Pga`aIHyWS@S# z_QqXyqC$t1?C2QCl>h7STa&n@SEcx_>WG z9dI6;WZ4Zc!ouTozP|PM_jAoY{x`#MnrQr6<92p-7}i>SbVG;^+jd4YOqh5$;w&;L ziMW3?hYX9XNHksSwFdy-=q+*HneOJWNFkV^#-^T`X?ovGZ3F_b@rjhs6cCV;c^JaY zzbu?w6ONz6{nlN!Xh6Td{0dSE_dd?P!rpd{Oaci-B_o85ty5jt5e=f8ods$Td;?du z2?T$m1foL;p@#B)Jpt~t!{+CaPEJo9bO3)&zB798gZ4qNozl>$CN>!h@vu*TA1nG( zKc9nTwQ+=G4b0710L9K&IZnVIi&A9VxVxbb3;x$%1NVa(hfER79i-r8ayiB(9uW!q z1~b`QBACIuQ>Tw{eW@mD;;hXHXYTW|cy*Tg~OlpzbyBGRP{ItRlTzC%2*xF>u4ppsN+g_NJwh{6(z zqS-@g)3!PMpUt|1%}N(6Qi;5sR{43yUOR&yNCTwU*j zQB|nHE9t3XxMF!4nwp&^|Lx#UcD4^?bhx4ceez8Fk0^y+_@q|^DrYCT|(MSCp?UK1zIj`Cu%t+9)@t zh|C#flC^B%=GgUK6xgyKC7;|M2#BmiybI`8i;>l_^e1I|7{fJ zu0bVQHLguVESDK?g`~CRerua)) zIjN6;I_x~a;eb~zEwp2iuXzz^lnE*Um$&{$ z*#L9m{QNwwBR5QrMa+L^6z}#EfZ%t=A#n4i+p=tVtzP7kg6cc}-wKEwcv&a#1o=PB zDPe+D*ZgV!xaq?GS-bt7xqa)^vuFuyJDj(pL-?A)roIFYrjW~i6`5>A$Bqg8*#kxJ zZdIpkJvlkC=awVr;KnH)c=3eJ0P9D>;KBZE&Lxj zav5ILC2ATYepWTNjkl+*vr`>UGG#2vrkdPq)M7^~6{VD9*uit=v}?gYNd3Q~Bj&@4 zf`{~JDMKWsg2)jI5aE;vEem46Y1<-O@wlCD+MKpOvU~czu5VlEzwfV|Ofk!CEc*Rs zlDgN&d#k_rFMWS@%yjtIM%Y^$Vr;C9a5mRQocv!0y3G0PAyazk?lm*uI!lte-3KS4d>1`X=eyPop!J}3~F>Jp?Pi}0>48|M{6BUYP;}N^B_q% zNB`6c`-6Wn>O-^}XOuU+8#6`!5Xmkw)bZixyxn35b|DWZR!U7(Q7&7BS%_Y*ti2HgCBet$fW z{+v(COkbbGo;cs7W@L=8`C2Vh)_*f>r#Rhex7Rp*LqgSTYvt74ZUG`Dwput01|0uz z7(6(dM)7lB7A&UoN`CBXrOso)F0OV$P6Wl8$s z1|xs4gI5RvxBeYsl3LoT@va4>Xth=L02|~H{%)hYdeus7E zx3IfTN_p+)h{&4HI`@PZt>b^rw70%eH3wG;P#{w^6N1>QgsKzFL_NnS=QYB#(7eW8 z^a2nCN!ep-RU0LeJ+#Z8dZ+O&OH$o~h*f`W7jIbZtD+Qhgx*o>`}~DGFUU+{&Ex@u zJm32m94^@%IT(IfA_m8pwlFcleRGaMhXVye3L`HaUnU)p=>^$&+egjKdznwU?_3Lq zfSd}BCC>eUyDm3`CPGv4Kn)?2hwXyDA;Hap%rc6Cv+fKX4|`gD=R8ecZvrI((@=k! 
z(~F8&{a`J9SIIIew}-`IMJj}lOC=KiJON2ED6)9am14~XsbfN3q|64+A@ad_HTc4# z$;EoIwN-Ckjnm_0k}&>_xiB zw8qjQFPM+j=hYupdQ{f13%NvJtdW0CS5QR<^lPT^#`@##GZ)wUi%8b7qLP+VFW3?H zpqDH13nxc9aIJxr8Br1!63l&9RG{^gb6VX6*P|*OdM59Z>)&9*uHAAi*cik_B4eyr4p|~0i!&^ zEVR z2QM9K5Q}Bu;?(jCQGnVHcuMe1^7P6bf72j0#VD88i%`oEa$jb?pRW8%22+F8qc$P= zMs%n~yBDJRZso(C`cZ#;A0EyIb&gnwJE+k_W@81rf|>2Yd0IbH5WH64@^X6t$KY5s zF4%`&lqIIAU<@v%ncoyhJjpbYyqrLoA}q;NnXHB#UKvywP??+>*B<2*22oJZfs!hacDiSwwmgNp}y(6FX|Js~5SKTUrrzv-a;{dF+gZyLCl z7qi`-vw3xG%U!H=d=W$k(>}jg&=Z7+OeysNX-qM0Ofh`2vO!AH`-htzQ&xOEEnnJ~ zx|oS=_(%pvdt%}(oK-FXlUJTlR0GalUq=H^`gVW|qK7Dm@+%qS^@)V`>%#Wt^3>ZZ zy`gVxSi7L#G6;Va{a7j(h)PJC*~+kqA(-;ol?5=pj`2`<)%sq3HQ1P$CNpFXi?)K5 zm222tbLM8m{>P{uy8>4|iVeN$_nVX%Whhq8@VBKI)A!Ux=yw(YC%^XzEF6^=5zdQaC0wpZ4exlYtTL=g9Ca zkJd&rR^UxMLMj)a2ZlCJvyAOVkpOVdi$Fvstq$>ZFy%?{wNih2!~MjMPbtv{oA(7D zq^GiVsyQ?gDMFY$6pb#Zc(Jm+AT2@EDydE0S2Hp8`@3HOw#!)#k4Tpvn0y4mCm5Om zJ7Rh=1Vw+ghJGxJ3S-6?_9N3cz9=bsM)NxY#w=>L^So#48K8HJs5s!ta5hu=fbY z2$ehr#x4sx^Y*85B+dL5ADN&&TbjS@x^?loY4d-33&QQ-b$eDZi~Hz}@p;zm4LmX< zJ)k}n{1t>6oqc1-L?7y=acey>rF&I_C5&!+R9xca8sj9d=yvT?e{+nn&3C49;6j$8cY)jyOrlG2x%jCF^AilJkGW>=>Rdrvw+nx1V z@t`_;l>I$SVvV~oy^7-kuj8TWGI}I~Vnr*Sl~5A5LPgXH?;M`rYziqXX{-QFixKM^ z*V2Sh{zu7Mk2n}OD~4D?-@F;OAg{^UcT9X$p^ht1&@e`wtHV~{={$@9|IC+E7|4IF zJK*jw@Xt#Rq}z@4RriAqLti2j(vU*pMpT(H?~W~ z3T>{8LnkjKA|3;}&y+xy|H;S=E)Yu|)7y zCpvdoSHCVGjc%)ACAWn2`ZZa`qYtA3q?F2IcS$Q$ZXWZd&-^DfGG^F>KeP0+Y-Oth z{qY{B_i>5*u34>LKFrJZw6$`!X2xM5X{*~QGs{(P(Rft#QDp?(-gSR$zK+0PE+&1O zFElw&x+x$&F=iNzJ4)o!Qqrqe%%@ujQ@VN{wiBtR1P-0ytbOrV)vi5D!Z$ff`F8z# zkB;C}cW-Af?9TT+qT~JDL$j$1N!b3~58tRBiDjpjsgHqF z9>%kZsb?ALSjxI|{2+gt)2oZy*y?T|~5+6^99tP-mlVJu07Kp?ZYCbg1d zV^KK7i106h$jJ+PdZ!_VrrD6y@2RV*0;lPPq^NBEyxzXW<7R@nU%_2J&jh89C=|}G zLQiHdj~HLU+_^o=8_~k2dFM4*Jv^nuznjXV;)|*%a^|u(0z`ibc@c{6<{vzXKr>#^ zZoKbBYWrrQ90Nn%8;t_`zTJ5G3e*KW!{aUkATh-fW>6iAlfA?mU0;%PG#D>gVx=^nRuAQiCOdWQrB` z1vJ?4`RV4>YrI?5VY*jE-6sW6{`9EzeH;{AjlP53zWo*P8xr-(w8&ZDd8MHB`>*34 
zTHWnjGcpI950L`LXD>;c{}V?EAbm2#8iFkdu+07c9-Dt62qgLc{#C^P{PWNM`k0CT z`RAX1{`u#h|CLCp{`1d2|NQgMKmYvm&p-eC^Upv3XPOlM{PWL0|NQgMKmYu%!nXRK z|0e~K{Qt>G68sXTxVKEXD@q<`ki*SbzDu&TwQstrm>B% zL&!w`O2dDwLUu3*0gebwZkAS}B-C6=E)}iFTwu2;UJy^IBqWm-Eo_a)A*G`F1*T-X zzW^c0l%~%(vS!uY>ed6u$=UQ0kQaz9h%A(T@icR@aO9t=0t{wz&E9_H{XAx$;=Mk< zK6BO*&HCwNtl}^I>>hC6_#d4uYN`UNQ}=%yeaU}-(`CqM_P1O=$F~gWX1eXohIt*T z+Gv3g#l>Xo;iaZ^n@3q1yuzf6j~{>-oywz!w1&=SXtDjvU6D$CFCOmvDXS?dwy8x^ zhQs+hKG9(!R?Hvc&je5Qi|FR#5at;Xi;v|~EJplO4^DM{*J z`wgG}-2&5D-J5&O2)*CGm!y5nkFmfC>%f0C zD>j#D5!<&cN`0aji7+mHzVVK~y3t?dfh77@^rYl)j^vq9VXT(qdMB zD3yE_sC2wZM;A=Sut{<@rnOnTYP>Htc_R>~oloq4O!bXlhB zV8fB2KreCbj98d>X`Mt@%ysKzuL&Tl0$oUb)aSMu`6X;_e%A5Iv)-E`Rc2iA5CYuj zQIB#5U0%B&UGmkG3M+V=)Fpo*n^a~I5`Iy4egh}(O8xqpV5JjzER-JV4j}`#tU9;Q z^fE=p2SXb0n9V9u{}E>23GaLbtADy%g3E=X3@$~87g8NYU$@mfM8^alWrs)=&11ET z;w14kI zNnc@0KF~JM{i%p7O6aZp)M5MqKVW2AUadaAnX+Pj{eu zYU-j73%O^+43Oc-s7P(vq(O^RRW(=tY{+#~ZD2lN#UR(EKRotGM|9xf%~)&f>0q5~ z#$5$SfC^79LJRa(5?l*fypr_FPv= zx*VsYV4!lCIvllQw)0JZg>iBFIEPub5=U5WE;EZzF$DXn%5w{H0}GYXjwE-F&gy9E zSz^@8lrX*j1hu_t*Ia~_J{}7Xs^Qe85YC%k26VoX#jbyO(1f|Y4kacO@*QXvqjm^G zcwGWHChpWL6vQoBN^;)mokEflSy^>1R~0(BO0~-TzT(f#vvh@d)WywuWqQZJ#I)}i z_L-}T16;h&6clk$d@CWFe{)cZ>ztiWD}&=zp@Q^WD##>+K(M%fMG5I*1*t5+q5%S* z5x9=>{-uAy`&QxJ$SpnT#UC8landv&T($90q8c`kctjmfPYAlj?kelYrDv*lD50U$_M?FW!XM(+%I_0Rd{W&t*CZ)-yhG^FCf@-MEuKze7X}eEI93 zAYDKz49;|}9y3?{BmG7K*zR~W3pm7d((8X6U{3I37k1D^NoMyOmpM}_j}3$<(GgUz z0E-E4pLZEkPhp^)HaK&ikoN2RP4WDgY%x{km$v(zHqA=OPbER2m`J=KMinFtSBjog zdhA7`=4)rX4x->NS7TvKj;Y7-!_ok#pAr{d6V;IG0)a-60ae=lbUw!>9tHiI2m*iJ zM>;>&y{V?8`%@j^D6(`(2pw$+3YCLoq~_aN4J=8nys*urz~|HJbhHZ?#zJ;um z-vnLNi?W&Cgp5|*ipy(0wqrfOfiizF)BuJ0`3kM^4C&2w#7}{n)Rf(2_G1{oPF({r zB54>24unn`%mK1r_<3hV_)!(h*i*WX_5SnFIqGaO7b`By=h)+MlA8Xx;y14)(E1n* zBs%SLu&!#dg={EP?!)9cn+VJ}$YX>3!D|XIBW-dJ%zDq5y5h&PksHj|BHw?R!ZN%a zcGd`24Hb@*i_I7Bbs#bNT1_wezOzu>=*Ae23BTbNWo}Ya21-^ktY38gl%!>uWfRRn&7$j!o2*W;RXo-Vry`aE&t~R? 
zZ!ClwrmjRX$?$#tI?pQVGthrr&x<(wNO1+6y;~PEOkuYiq@jqv0Prn?0ot782G`bw zh)J4W(s{nUoz6fjxp^Q|h(K)mn6Ld;Hd$&KP90f|W_F*++imQSwp~G0Ec(xZwMjDa zGr?h_eYmq^Kbqe|0EDlDDTMD`s^zss>TpJbKvK_i_?V;z|A#U?9Poc{&-bcvqc^8j zpTX@p9!g4uKC4S-feBq#QzIiGjm_%@Dzo$nEO*__~ zwdfd9>Dl}RhXqi`-RP0)xETs}AO}y~6hR~JAjO@mNo*iNZnPiHEtU@DVJ2oRGJeb6 z?XQoxX&#pf>s#A_K%{?NxuNmC-YeiIp9`rZnei_rE8=C&zq&5FQnKlnc!{h61|V)z z?I8sw2ta>d$6-uoAa|$K&yuLM8!`NPsT=rAk=^`e;o_FkN`8Gi0uQ#cf_Z0?1iZ9TwmfB@Bg z6gx7vfLG9U50oGMp?%w|Lb-(5@{uL?eW}{vG3kM+1v=lvTxfa6d;I$iDLop?le(@= z^MCO)ga32nfcAgzzef(h*O}nSI%2K$eU&6?opxOkohVR9r2X4uu*gCzW9?mCIE?)5 zn=IDanBHT9+eChk~sc9&)W}L{|^!W z%cuZed|d@st?-zczu&Rl->6^Hi_^dcF+&X@# zekB;zmAiiv)f-FztqbRJQ1_|-9B8+htWZoULXD_1X%+!1(!NT#JMKoSIW0pO?!RlZ z!Eo4d`c7bD24YM-h?k1renC8b>w6kTv`mx;dJ5v3`My4^@o~h$h#VBe$)F`79^N7xP~&I* z4HO=<6rxglUo-rAa%6J*jIPrH;X*eomQLIxk4#_>+-k$8S&>YOA?=he}o%P&G0 zQZWhV6KMd0`ERr6gLi2ft@#{t-lx<27^z2A`FFkYNaeV=<~V+RNhqDGf=*<=HZp%N z59Y?@NsCH%=K4m?V65(!u@v*hRo~9n+cVL9Tn)YsA}p>ihgh2zd%oaeS5>JWwa2x_ z!ukknI{<>Z@j(fE>Y7NQ_&1oi9a`tP(K_KFs*1&67^Me|@8=spm^?^%;1`_wpAl<8 znUMl?9(#YX@YTM7+Qa$+0X236>a~Akc#B%_vsYC$d1$22u~n8}g=2=Q_FeA?|`d$Qa#dm>rM&E+Ki;yiq=`OX*% zZ`Jw^j&FVA4oatAE=vyc5iJ)7zzOhte<0PKBmrmlGTJ>F$t?lo5#nPs|L?<;`grZqAICCz#-IS4FqDQ|6qs(0ZzAj7=o250ZDrF#0 z#`J-T%36KfD}GH`w#?T<(VF8tnf2#-hp0nvoc#bFp!cq1!qR`4JE;1-=|Lk~1tM4k zmAX*C%f~(Y;Ea(vsYNzU3R+9d4dbJ=Ez9U-=dfq=F9R_Dhux|wW=$hGV=tpPVQ?y< zm^7NdAy~A55bndV9aD;znzo9cIQ7@q)_p{5;GvACE>NJmVfIT?@9)h~ff1UBMBLr? 
z#XKhORh{ztUs`{~f`rUi#wMv#FEHj@HgIsG_*PMWU?ugR?ngb|b8Zb;D^S|Je@0Q; zwEXolnnHi`hNi+Nu!~Y*Y^zVdrusrlOMgweon8g zNyl>cc%Wp=Be~j|@12=+X~TNQ`yo1vlE{PDPcup2b(a==zq}ct*cPaOoadJ`PX{NB z*DqeTCF*~+gDPVVPUd~n=3r=tE31aw`9x1Y&Y|<0u%Z(QaeW`V|qO(d*6)X>7OTzHJha%v=D_G1rid{qD4?Vtch^ujoxle6go#P!t8w5-gfOwfDnrtHQD_nMic`W$&B4hWCZgOgu?V0sFk@`4wx`@qtt{C$G|x zd;ANI@hyzZkIa5lZl-pf#-S`pt{%1kjf);INNLRb$9IfS#_V%d6Y}HR-r{Nz4EuG{`<5&6 z0nfAoG=u_nkDYDtHy&=O(GhgU|3C?5*M*$ve1B40P*`}+lR0K*MA8G+w8e0-2Y6Zu4uR0k|it6`0L4LDvvAn9(vW8c(b#^e5IW)_zJJxjfg(Cc> zGuDkvgx7(Y=MTBcqBqyat2HHXC0Bp4je!UFmbrXFy!;6Yx7Up0+ z(7mD996oD;5P-PEQ^6`{3c3^-9+u(ps;T|V0C1{0>8|Swd&T!Fse&U9D9UzHMKB@vZMXcSpchUm&^2t8z+hd*+0Ve;~gDJ1nwX;*-`gr%HeQx_6h= zeCAj0ymX3Sa0*i(%d}M8oP>n55WqTIvd9vpP_I)w&%>QAj;l@*1w;iwFmv;+miI&S4PFjuLr2W*z z@tH=4_jwZAJe?;zg#Tw?*L#1!UkfB;hS8=p0u6#d4ci;eMo{j#gx0QZ2w>AW?gDk; zX=8jJ-&Q0=dgW+5z3ft9ytWF{_sfLW^~KcmD=)CtSB18}H$ zesS{&so3hS)YSyJQB`#%&#{46o_=_?6eBx~yhl~bOLo7R8A4^V8><2io+u|04*K*l zFxVRD`85$1h5@#oFYkXvoM-DJG-afi7{u4-hS-Q4w~?-0vu+B|1vtzt-*At*%E5My z&rd(WPTwzRp;~*U%Oc<{h+xDK2h6S$wU#xuXe{}^R@sb!1j_U(>ZyS$kqpU1L===D zrNadYYZJC$0P00Y2rqBUu0OysgrLnK_+Sblh(M`cyLlROOU&^Wj4T=zy;L@i}a zs@q%$W=cD~h}oQk5Wz^L4wcKm@Y81XL)~eY0-5;)YZYI2yx=6<_hJkqQvS&q4cnDN!uKeU0+-)M!_B_@kv1) z8~a`=%g0Hb17%>4Zt4Rf#qotd6moIaSd)s$Ig3k*z{$e4qfc?~aqO(VzJ|BDXpOH5_#PI_H;fuSEmoY;vWM zaqjv|Rt_H(XAIYWcd8Ikq*1`YDkrjt-dM{S*-1k?pS2*rJS)+FhX@q}P{7-fsVqB)- zo1XcT=Mu?uzh7_dg+c-WbeEaA$;EZ){@&KPM5iEdez zFQiBm@(7}TfU;>F8X*pj`^o-RnnSGZ3OkbxC9H~Eu=p7RTf`!)8&UN~jVc7;Fh^(u zMG0+dTE{qsp!iDiwb8BP_4USCn4l2CYvBtx77fGIaF0&^j=yydEW|ZJ2Be;z&7XlW!-$` z^B&o{aW&6GGrpRf|Gd;WUR^$%&5EwBAjZUhZ`d(P!~~ojl`3NZKfHr9B}b>kA*(a4 zbN6eo5sjmd^Z&%ivco_STy-K(6U;I`ogPU__Lh6=#xhceGRhXJft&=KmU1K8T{KTh z01zU?nY^!VRqAx6*d$Sn{*sT=YKiPDOl3;o38%8z0-r&94ofUrd?A$5XHtLKb>Why{#T(=;CebHub{>F(;%1$XBRh` zZlHHo(O}B0>#(OfdQy5 z7rag}-zDMV`Q8Z@_7hn1?tQ6Cl2)95ZS8hr!^uWuVPaD-8XX;of+h14%Inoy~bOoWG9 z;y5F&;(^l>$V?>8DsfGSa>ac1fvYDz&N7QQnelE-}_%|>c-E< 
z7}o@VuqbVIWhu=)ubF|X<1&+fVv=qLLfKSR8DkWo1N$ddn4e3!Jie#g5k(Q>u8;xa z7;H0nPt5!OdH%54`(}z7)Az7p?glCBECnQFRAD8Dh1L5mR4xVq9g2X&p#HC!3=6yX zGXOZ_ZH0cp;W&{(0F!k^2J!*`$-pXXo~yORaH|Oz2b^7b;(oI$`Kl3r9tj3!Q(0L1 z#E>ZUFwhe>9MGLHIN}Z84W#b)7W@{KT;#HoQ_)4ra}k2IriNzI)ZBzAzq*?h2q>71 zAf%qqu0&>YPuaqED{~po3jzRYo@~@h1G}1MR7NZkU`>>Z?_n4tH(h^X6#s8|7K2G~ zSK}&hH4I%#+YcOP4u*JtcGnhOFPeL;u5P1ErMreN?iC`WzSv1snK&AaesHgTgzpbo zNv0GS;_87SzirosWR~NdK`*{=3_r6)9AifXaqMeY0A;Ly#EBD)OkC|}oT5_$ zh%f<+%-0$;7&oPPF+wIwMjRZK`&PnCFWi^t-(Wy}TmD0NURk|eu5GL-73c-aYJm^{ z0&wAxgSzq1TA)4S69JzA@&3!j*1OZsHN&@W&7!xs5CXNWu&<#p@Qu_#7$QU@V82Xg zjV_ry81hJ~hTIQ-YI(mOY_C1}+yv}&v$ka@0YVV^<)xpqc&!s_L8d1H_>DV&e`Dj-R>V1MxuYcYA@H#m&h*X$&#HP?U1h zmfza>&KWF!AZ$bqDMiE)77JNo1xgS~LfgGNiAY3$w8RKPAq60^l9Dm!jq{Fl7x4%7 z0KMOA1aQ{2`*^0YbF!M3D3tC!W;1vg3%~~6OW4|l5*Ck%%Z)dJwx5O2BLuD_r^YzW z$J1d5exTd^K1|2y>y7WGaVG-Yb$;RzzgojkdJQmtqUxIRf!auek=qM}Z1YK}Kc0;~ z-1hiC!ePf6+i4QAI4owR+Y_jUdBh@0O=pM$OhOkB(V7_rzBqPKtsQ8aL;s9F@Rtc% zP#nZ%?*B{47%}nxNX9Fxpf0%Vo1+hXC7B%-VZ)9%vten_Q`6JkUyhcAH%!dT%)a&0 z(=eugqYPt=qYjq0(-L>(*<9qjx)%MGOJ_NI)Rr+`F;RWUTRvWWG4i}gz@_2|XljWg zrYva!@<^jy6lCm4@pQ!++Y%|V0BPJ7Dr)|V2TSLx5oUYUnU_9@sV;ysWa}6R@B#wi z2*(8I5FZX1-8RL!{1tj{xvrnu0t{QNyps&S0B_R050@iZOMi=6~$xSU*?{eq--&8JEBTw$gLPA@K$} zLgFtwE-1h0N?UdWN2iMAAYiY*q$!q4%( zI2H~_qQ&BcX?&nJGz*7!o;b|vJ8}zu-8q3@xnkOX>J8kXF6=77ppCX0-X~$I{)S~| z3wY$vrr~!%I>@$U^R8TIxj-7H)SYJsGWvgZSjpu`Z~>RXp_!%r$A1``UswhyTbx;s zBm}k;J+T{zSB(1KC;q~7J@|rSmu&`P)+X8r*05jUzp=5_zyGQ}%)PerT&xR!N&ppS zjCW=A7-f}wseRvkDLNkWE$o0W!=?7DTL1k~G5h|NcLtT>wSn(oQLqBB-8l|U+jWP4 z!u#Ryx_f4ud?7d15E%CV7M?l(Eb=qdu@hS6k3q6NW>c;7*qiMK#t*Gu zMY0Fi60Ovl+H@?fRh;t4=r^5zJ8Cb~1V+&bx$uvK>f({>_+GOw`hO0Q=6_|evp&2qFZetA_r5aFV}+Nm#6ivxyf0@BMahhBB1&S ztL+HXN_%gwmUOyB(_Hw2*>1#F-U?X+P!^1$_etH+_Tozl#ebB+;|D=ENOD_t?;gDp z3yRAV)cS( z2MUpURo%g~X#2t}=8XBwv-2ak=mfOG1wB&YwUN7zNF0)F&6-<(Ong<^pC~|@RXxeH z(7ypDj(gzH`#uG0yNia_|B$!4)QnB^$D-(w2ABJbTzVr|5_K?nHU5lPk$9AkJuJ5Q z2q=6qN5Q$+6^=~^sPX-w+8qS*UsYuzmJ%>zOe|J2p1R_V3GY1yKI?FcDtw}DMvO@w 
zB>lBE9)3UR+y(c4o>F-`P>GO;#<%q-%N&T}jY*Mi-45ZB0q5U2t>wPxs%Cmd%(fNk z#vFHAy9xeS_S18QSwQZ?8Gpx6uJ%6e=@#But^GS{7AtKcbuM^@EE?2`FD}(SiW6Dk zYmH`&|B$bShv}N$R|$){s-AW^16Mupv&9n^ZX2~F9M>yY4mS(ujACyQBM;zj~%S?+Dnz}-k*IcJ)i8( z4=KYvK#4J==5lgr>B_1xUwgxn*mD%~t^uj~lv4PAjMeyX-CW}0qq)a6mM=-2iMU%2 zRo88Ke!A}7AAif;8hXZNtvUvK7gI#cYkR5BK{zSf-zRew%RPr{Te^TDDG`(nGUExy z*0<{!qyAzw{Z}UayR#{30#T&=CZ}0diHGes@bY9u57aK|%U_g_Embj(rTZX;YM8}9=>?>$|y&^){CB?N19zvK4C z%bk9)Nn7vVmA_`Co4k)y>g|iptU&_! z&mm2eb>8Je{P}7-YuVd#-IdSU-OaSvKMEU5MkWV-lNV*f72}l4YyEPw+Ruzl?-|z4 ztAROn?Ad@rgngbX$S9bbtn>QiXAZ>W(@A#~`8D=Nm2svzKY^~qy7mr-H?ehuSY5a1RQ^H~Dz%>~Acj);%?iHWCRO478^P}9d-A~+S znpLc@1F^{Qjk}NK43|2e1d!*k+20>*@K6cL>U}Y_Jd3Y8a&Bs0-deL~+zZ3Ddpt2l47%+PP?<_>0p$)s= zGV5_zzz1WjJ~$lPZf4tu(%DCX8;lP+npr*^EI-cP=J&XE*$?^_FB2hog^ z!7#1UeCbOeE=;aovoO&bI;l$7i?^sayn?6=x~7z^Y|r_vyFirZq@V?VBaH`Yod*Yf z$7hhevR2+j-$%sD@aUDD!p#$>-hTAfVA*$dr;8A@iXbXsY*mvTlYi(K@t;#rUw+Pn zHa;)pnOkfV07y3Ct94J-eBWiOLG00NP8={_9WZwfniS`IVbb`pT23+BYg?k14y<<* zZ+?H^Q?1>+ap-k*oM;t)>5Bv}08C+2>O51%2s>HhMC^X9Qnwy%;U{|g^2iMF(pMU0 zu-NbjtMy`SZA2R}>s^lxFi|ZW+h~d%nPum}R@g%H9MBoixZOT{ZOqoZaOiPS=@&;v zymQnFSh->fIIJnpY z;5rpL1I~HQAuNits-Xm9$P-KxmKA3-U5sN28s`-4))xP+_o{xsbjEl{DjLYCX*-p$ zu){@|QXmT!#=ADK@c_O@i0{GeXa}Om`wBb{w)Ci#85h+=tXxTE2p$JPP*3OmU>7^t z;S4cRn+shqWdP@YBa{;$sUit$iWe~)C~5?$Wz4_WdJap?J1u7N0mgVlmw%3 z6t#+D!rGmnoFTsh{n1<}&E;%$JVw2DY9WOSBbJp(oF$G=XhJRqrm`y_74b~7EB-;5 zA-M#cBOu0FkyH!^@mK2ra)lSv0|aevyNh&dO^ho8>3&{+`JfdrBqOy3RtG3i*d}NGPy93I!2} z#FPW;ED08aDFVuXYiT!2&5sty0EPmiEN*+&vb@!*+m`3S+GOVgjbd%42x6zS7ND{u z3r03|;nDPe+rNuTn*I6sIlZvok}IQ00$4z{m@CwrafV7;nOH_qkyX$Lhyrec zaG2J!hLgDJ7y&YbA|x-*KX8;aqv&GrXn=UevY6x1HKqTFq24;u-5bX-bJ3W}fm@UU zC_p&`RiC2*1n%9(A7*{2OL`-EfeT`)6b4YGDxA!JR?)fMD2E2(cD7%7>BhL3dHa3W zx!sMFYh{y5YlC9A7y!i9K+7>TTEPI3%^;h=CClz7QI&Bvfeo?r-01R^p1arOqdc0fj~kkQv01W!n;I*rXjSK9>W2juwMbu}LK4<%qVtSC11mm?+yUxI}xbf>6p*W&7!l$%A$Ron-Vn3Ar z4u0Ow*t)NCJKcL`%&bjZd7_;QzVe>In_fZ9(N^#L<<@Q4sJCKSp4P2`Z9WwcSr<3f 
zs}pe6C7IkHFOEsuJy4u*6X@Y1t9#^s4iozpaejc@t{TlX>f8;9YnI*rX>rppw`e9FeyyilEt351dx> ztVV_0xo6O&kbh7WWi;EI0$ z0-=acxnuQr?e%X1_xxj*|DkmhirW!MM zzt;anwqvnL+EEdq7da!2E-8l9-Ux(u^#D<=MaFF^l7#~a2rxyICv0MWBn{{7ABOx( zOc=#vbBur_A8*lxDG>92QIJz9`tmzz&~)A_9(36F14@6AN4o7+-%xd$lPBMtlQS>s z+MoeN4StOg1&2ZnJ*T)AQmI$~OIghmq7;k_a5gw0n4qc6G|H}t-Ww)GtZI6)?tkK0 z{*1U3z`Qxxd17poeY3VF;^I!MlA>!B8_-yoNj7g zp29hdDv-_yJ+172<6SHyixwEQ3M>q@jpj zM6+-t7(B2D0U!awEs}1jJI}j}4$wR;8ufEGzonprLjl4Le``^7)?Z8L+S*vH3MW}A z$cT2JhyVhmvaeez{Tx%N{p}N2I&uK)>tuFcJ_d6Z?hWuZQy#EVSI$kUaBH9{U*6{b z;LLnwGrFb^l>C){`^AT3C)Pg*)HwOSRCG{8{*R(Fk`0=&E!O(xubPzQJvwCUJ!&21 zVGL)Bk=C@w#LUcVW@e^gHtK*A*W{LQFw!u_Y&hJCCb{&+$o)g%%^Bivu6&4m2 zSC1YkYis8~@+8A{iOELk>ypouczq`wLe~7PJ6gzVO_DyEO@oBvB$k z=CZMF`tL6v7dQ9C`8k9dOsz**4ej6fq-u4oev{Wri$J8Zz-uVF-uT2kEc}k zU9Fn#~3n)R1VH*M4R(zN_X0}A4d}p68H3IXl|Qr!}EUqKGWeD*4KjBUwJ+TaVk3~m)8v!Pe60L zK2BKk{q!nm*+Ne#S-u#PPjj)GYs+{+8U{^1OcqrDwiMQ5t)S zqvd+uZ(X8h2Vtx$mX201ET$6MNLL1do|ckPpq^5wP^nIcT8t8<{-Ds>^rcKk#8Q%f zhaoRXaAyKkqEh_SxN_qX2(u-zPd)O+j)ysvm6Jbt1WT3RVoMoIHs>2FGD#4UwOegI zWcRI`C0@6$acbtjzqiA}@w|?iADW9t`m4SXlfUM^QOq(29q z3UtH(AtDjb7T*viSYCA1l{Q`I?8;z&qoT*HATuS1N(3SxPUDvY1Sk=Le6+E9dU`+t z=ib*jz~QdV-{%papP*e_-4kWXIJ}-ecCBkWPNCKHe=a@u&1o44d<>{v&gTnt!{c#! 
zyScv^d+t>#$3Gu}uN6r3y1VnQ`vFXv5I_v{uu2jnq>1klZgN%V{fSFs&-UkkWC+kt zV`_;(3K&Qt<{&`B2$3oSp*}8+xzVWgSOVN)V`??0g5h}#G7eUFiAkB8nr3@_-|TEt z0hQp(P)U#|D_?Zm^W?)Uk2YE}j%WC>U;!6{%` zZ7P9KKAud8#s*?>d$K^yilcyJ0LmFw3<4su^NIaV4){IM*K&OV@1r&eoL%^d7_rS5 zWtLmMjE;vr;xMCI(OM*8o3s$v&8O$*=V72@4!NoNAz^3~$r6TDb;{6xI>6IBO@r<) zDOOq`quDeO)29k4(60vl1kYDRjXvDBYiI@FYCzM|EKxQ_M!skbhy(=O{x{!Zxj-cf z-`SF-Ahe0qV9UViAy9~!00|;g#!TI`29B{UvJ$bQAckqC!#g!e9vAS}LURP|*>GI+ zp+!^*mG5h_699#W+>Aqie_=`$2rY8rwHg8lpURXdQ`Hd|h9X2jn2|2DCz96N2mUR% zFvRF4lG{(tjr{Mn26!PT6(-mUj|L*MYiJritV%32eMBr*h01{fMEbIe7i3DH83;~3q0g~D*Fmw0R>8vj$qO3U>HR_YK*Lu z1qhWlh^76T9P{lzI zDWWAX?J39%37m{;=9HrU>K0|)x-X9>ih_#;h`8Cdx{oceL^5!&4V$ zs-3aC@uqDauy(kvHS>ZLcT}pxM|LbSOz-{AbRjuR3_%KS>k_6 zM~ZZRDV0JKEp|x(9%dhh+gb!Vy&i#5`eH>OMAL-+w!p2qKEcPfvB6e_N}1qyY^d$} z0s5*h>K|Viplcmow1m03xoEH&fi^A%NL3NKehW<`Qc`C<1c1sAG#kC1h((GyxnRO9 zVOd;K?{Bt=-4EgGGX_75HM*L}q>Y>IK2PDwq4=NRXYWp#5{}7}n5jrIB2QWm zq|Bo-+u@xj9+wJy8of4K@&B=fo6V_PoevsA{VyaiMk$ekjV>R(5f~Oy@%mO~+PbM#>oo1tdiCSA(Tv;+D&Q{TJ2?e!rJ<+_3tQ2DT>`W5k z2`H(`AHfOru7vH0*!^XMM=z6wNg&ax|B_a6n(1sOpQpqdZ5gWtXyZ!Y94?2(yx~)V z8n8=tR2JF}Q0qo!KM64*7TV4*3M#H>vJ?Ute_(ju2Tqjz7tV>azI1{ubS6@N7k??1 z#`>`KjxC#@LP|-ZTsfT8Jk|C6Je4IhcN75xk9<@sDM~I%;2mKwfDp@FJmCrn;OTM# z?5k4G{z?6hP*Nt2Tt|dNxne6SzgP%Im<=r*jZz&pu*L1Eb4M`__=XqISR&_)g}MgB z<%`>Mw-Z(L*~}40;DX>n`?HyUZumM&gw@J1*sf+*h0Z;c$(@suV8LBFfez2G=sd@? 
zg@<18s;r!x1trBnr>pY*e3jRIJ4ONdJQXD6sB-0T`+YOm!ZNZ2>dp?YbEl}QzY3mtgLwcZjY?1Lf#GwD;x@MEuB+^+`r6F=AvV9< z%Qf`0hhjAjwF!qA4&p;*!vmw3xL6Eqm1^~-_RC(A{a!C2tX3#4UR?NUjjneST=d>e z8q{*9SEk*SVZ9cMV9XHC+*dJ&6!2iC{wpu@NHq zVL<{`##ciLE*>su$^=j-is9F{T3m8T-tdFK+vOXFoRQ7pIYwSH3^0@``lYKef-26C z6oRZ8cW$r=)Y0K#%jj9yVuz5(+Qac^yP%=Z-vBUp%)7Y(*KN-mA8Z^r6Aupk`>*e6+b^~rl-*gU^~OG~E@oK3t$sq#^mtzS3T zIP^ymGE1x=1Z}hX-g2MWgD`I45VDQn#%7HkJoLez;NZxY$!viCDMn;Ap4Zg1KkeRP9Kn1~) z62TXj@aQid4CO$S>hWs0gu{wjq(GVjQUQRKjZ1cat-GJc>wSK8!^#?^Y5SA@`sVBZ z_xmGnk1!*I%Q`dDrb+>5bQ^$lc*|GU>m(FG479*7zIsWQZ%} zuPF_G)S{HQ+k)+}9=gx~Nn?p9!iCS&k1=fg%stJeY&m=lGBNgMn2J^`RD~IfGAQEn zw&#z}?RHC$%$mOxc6a}Ck(v3o@Sn}0p$P$WnLq8TgTi^%6DG^e*4!yr`I~aB+JJ;n z45C=r2pjJvlzsm|5jj~Ai}1EnIC?eQLAY>we;@|aC^mnT(HB!3oJ~KDgqm?%Y9EqT|9i^)vovBl;I7Y z#`4FxLxtyV&VSq6*Lx076J9sg`qr10vg@C}hem>Tzu#w} z2<}!6RR%p5vi656dKm~323Hhe*Hq{X9W!Pd2vtf$x@<@Vi6T{c z6NAp;aTq;&ZKYj`LT}c8O_(=6QmOTy^-6=xSvXb_dhZ;$3WipKg(BittZx6Q*9q4| z^%>F1zx+G>FakXr*pl4)1V)BVm_(48OqS;hyjqQ+tx&1rXdRkVrRHa5=l_F-l>`5$ z-u;+rk;_Y~AO*W(wbD3?psz}YDhqT(cft18MXXMnkjWWUeDnu@NNl+=QF$^L;$ zYWc&>x;9e1TCt*g8Avq0HewWMyVZK@x23)JNoUJ}&ccCz{9jJV7=FFtj3?mmed8(Yrwjby+h!0olNYMK z{a=cZ|NqZ_RTubw%~k&&bO2(CLiGRuNV)&d=So`O8Yk8yOZTG#ZRunoXLr^lr$-qZAE@)L5v9h$yUL4MB^DsZ~e~g#{XdhGw&QZ$8XJ z!{_-k`5CTH4*6%^U}mSgAG4fyo^#%_JyNPTsNy4kijFG(-({@MZ(igiflvISL{6QS z>9j(O^C@NRZ+KLm*4h8_`jU!pEO?tJ$($pOgH4h1FXt)Vx3D`NTs6<#E!O=vv|j5p zD56uPej=PMY=vqDzB19@t>$f(xtgau7sAPx%e}+Nw}8maP&jsj-HnwM_0Cdot>W4Wq{yTL| zy8APMv!<=vT;@`+4c@gZyGvVfA=gx+YX)6#;RwbpS zKX%Si{M7g-lIc5w>F3!xtZwJ zP%^Wk>i1tV)Swk;3hDQN4c{X=Q5@DoHRlZ|6X~0QIM1k9OBwcp>;)G0k(3H=CyHi& zIGtUEAdW3p?|N!d8TR=+33z8?O`wPl%pA_NH3T+y!uff1_vfQm<~hb6Ecn3>jNS!s zGw~r8Bb63`?&G7H2i-)*8H$5Ts}+A@;s-t^pBCqhuY9a@=7Gn7M}jwI`DSfXHIMf1 zapcE9ZLB@6g@0~buhB+|?y^VmDNC4ty_7uDsC(H~!%c+;efd~im<#7(_-swJSPa+B-*^G)NgXPeVFoncK7u6*XeTkM9Ok* z-&&>=Tyv>4cUA_y%|GYEFVov2W^;-;Z&99C;uUVcD&fnAx}6G!BDnt|KoUoPfhzLG zr;=d%tl@77P*fWaiq+W;C@p0Kd+w2Z@^!859skbxc)D`V!OI1ee0zH*rq-@n{-bc^ 
zVm&>T!cL0RMlB|xqUqs%D`k7<)%RbLZ_W8?;HnFESxrd>WhFNE;fIq5gc3$zI~*}6 z9MFHylOS1FXeYGx>A38YPsh%Go~*~rq4#NlGgtC751ncF8cFq5_T#HIw4nj^&4ykV z?t{wi>aCvcJFD+Z)!Q4+Pq9E=qIA}nSbV`0W~n|i_@xD&SFCbTdnV`O$wPc(I{>KJ z+Da)#WEdB|W;9IYyoLchY<4ow6|6sT z?ahD>*W)dzPJ^L~KspzHPibDxGpeWI(0Z=Jm~K_o5cYkAn~1zY@W%Y-wMj|lC7~P# zn?qzH7fOWTise0Ob`1my3Y?OKDLKYcKKst@QolF+)E?ztw!PaMb$WiZx!({KjYlFZbu(GneSi>{l+$bQvcP6M)Z!z#8RKQ z#D(C!A*yKaoJaO$W273HkE?2FDby#~I zgLfQ3?(ZB2jfdS;aDRac&FEZhRM8b?VsTkX|K?*@?5xObe7)IyH4P$mPJBU?(GWTv zJZ#0o@;H~&SQ_4c<;2{@L@D^za%~;P5DIs;G>9{2uY%5 z>1u^rCHGDQt>E03gb_rX1C(gukOFXxM=yboMk0}%qw+WVHu(hF*Qg%Gw+#9awL5EP zfmt8(z9L#z$fELR1MZI^&KJ#xECGbNcgF(YdJ{MTG#00ShZORSdA1a;sZ2S#IwAJr z><8B@=!G4Fp*kDih}pWQGG^J4c_mA|vhVw}6vT6xay5^7fNI6K zh~Su>?-9WK__Nr(AOM zu(i|8T>-v-6rPYZ%)xO(0o-wfGBXIk-9wmf4(_p(#b`Ql)`E=4REjfedc|0R^6is6 zm!PSIj)Y4a9&S`IqSOE?hZx(~`MY3~5z1Z_oG%jRDDu)d=OSfl-qJ~(Q-Z+j{TOW< z7^Ptf?_;G&?|^~RWxYo6Yjx~h!QSX9=VajEg8o^5YDL!>^0bZ@H1Lu+*DQa&kd2gF z2hS$>GZ)_*2uf93l6ZS&CK7jKE=%!ycuA0u$}dLdzH$EfIHxp~@@D!S4L0V6n%6RG z?rdh}r_qu|n6(hEHB;A+K2rZ54Z>cr4@6r^&oE@%T;<62x`L(ZZW7*lc?8NF$Lyt< z@nK7UnQNjGOm7i@z*B7;89|b*U@Oq0Iwq=Uwl&hUUB$zIV|iR4c{*2QR&YV!byTrs zvQyB`)}Fv`1fFo{REL|^62`UX#9T`*zZtV%@4Lh?WBwN_A?qKQJxwgD!4WxVCehQJ_SEl89TfZEC zr7{i#y?2#UzAN2C^y15}j>1RNoI0A^BpPRga-E1fA}BZn;oHcI4QZ5!3dH_4 z&9s}pXvN`shTjSJY%&gFX(!%;@LAY@307z9ti`~{IihamGfoLVy4xC4qlOV&G2jQN z%$P%*?=W!nv~D1a1oZ2^IkM3Ia0|igVH7mEM=81P6N^G9r3k|DC%z{md-ZP#BLdLv zZ|&$3m>H>~kxSIx+LRODqB{F>k*{$XnswSEK=3UW2mw#HNR-RL7prIrxpDP>)so$M zt^^#DVN-hgL;H;(=38)KNhBObW&K27*gjq{B6@LiRv!>S$(0WU{i z{jn%Mb%QyGJv))e145uUNK065EXhQG4+S|1pNqH!vdYqIu;5n`sZP>~M@+flvrL#m*6GRrczhPkF79L^gxw10g7+d%8<=@bXT7OrmlNT5&zZ zU4JHCJ$(635P392!(!RyTiIbzo*HI3pMMTR>az|_1z<~vNts5f4)hxwg8Fu!dnWgC zPP*J&?0VqUac&_MOhe$mX(bvZ zHWN8hqm20#^=&Y#H@~}oE1}AVqXey{!W6L}eGs_@(dQc6frMd7Az)(o_NE(C|23|N z#Sr@qLaxR+Ltrj_E`Hpr$=Kz=TvWO6ej+vB=gXWKao$W!_f@TJ_u)ag)I{VTc~ zWwhE;Z`9W-J9l4+{<~XPwBkQ$ajNI)Wb7B%`<3>_D5g)DUaIbY5n-2bMIb+KMZ-p} 
zi5DC0<=R$;h+VvID+!o}?`Y^mT??MIub6Y)i;ik#HJ9y2QpY0HU-(oAe07fOHlW{} z%W3f|2^{q7%l@0KGgcX=&qJnrRGm?WR}DQdhG`D&iSF22pr2e4%rOuRdVbBN;VO); zg?lxXO~cKxbq-&DaFeSaH-=GJ-(MWgb1~Dkoy@osLm$L;+WHBRaCoMjCw3^M(&4fuoa26 z&h4LBZU8(Pb|Zf>*DRMy;~Q9|j%lP0{#|@UySwUV+6sPumEUi~xu&IUAJ?ICMX)B| zk|`tltF^cgP(@);czIVPAC3O1Cg;SJV(YOh*wGK2Ng6uUM0faF6c|?Mj zj4Y!)pZ&3=5Ons*G`1+!iC>VItg;vISPC^O^S!YP{m++MAl!eio0{Q|l&H%kgdLLK zz0DjqEA7wQTC2?L4vH11z$9+zU3dcv^>(@$%b5#1rLlgb&;F&9P2w4khN%vZ>R_Om z<>eQDd!Rr6OJh@^9#R3Z(K{sLoI9hCo;iQaRY&DylHfvPe5-oj#i>8eztD04QDp)z zApdofj-?&U1gZlL5u9P6LF&kCUd&2FTnCK(9Q#Rq=Tg!`$gW^B?3|xC-btB${u(bJ z*Va`YjGQ$=Zlv+%mAqT>xq~0I@7h%f%$!Gmy(7S&I#g=h>)PU7duf9WtYvp$n-!ks zr7b`uwPTUTrPq|ocQV(`pXK>Oc#)ej@#b-MS{c>(Zt3F})da8Z=l60t^{~kulQd3+ zbnlHFU81$F?Nl^FBA~lLO6hGJZ(Gs_MpV|LZgX>04wn30D&x7u&hV~wcsDQb!@mlD zeh;irVH&e+z7=`t@W0jjLO8Tn98hi_v5>gh8UK@txv$~)A$5|?;(RcqDSrb)rS*bK zeuC7r4g1Qlz1ok&62$T=@z=)5&9P=KBdp0sv1_c)ZS4&PMvDvfL9O5cd6cXP0pV{F zF*69Wd$eC05IES8IuRA(!SRojyUc2T2)C^^eMDsMM6(RTJBe9>*r|j*$rF;BM#K^J zT6f1)4Sx>Z?vCjkWPl0{7pZUpdE(vH4A(EA9#8D;{#@4>i>9sBz3?)AU76g}OOO$& zh)nV$&@O$UkYWH~*Xw?VYNWjrF9`&5?JCE$gV3eigS4uOHFzhdY&@N zO?46Hl+580xS8RT^N+XmhUcH+Cf;C-P~jWFgs)FIkj$1J?%k&Z2jNnj;E!qw0=-A< z4I+rfkBxKF`Z2hXBc2QcL`(BpCN?Mj0XJMu4ry44+N@mdvay|TI3TN$JSRJ(P%c5- z820AV={ap5xk5x1p-0CbA0@b9QVkYcO zL^$NvQm)|N-FDZa|U0yO)>2q-AT5ZD$+; zhpr86R&VK9Km8pAMxYm=lt`^mJ@DDt7&v-)`0d4vBXU$FqlT4!ESZ8Z1$MOd-sWa# zAK47M?A$F&4BgwQe}$|$y+n>Re;k8B6{Mj|9d5AO_$pOyTz&(16%BcRgiudc8ZMgS zNKCMdB;gdsKlZKOEHD1ahriWgynH}_=W*(dif!-zZ#T2d&USMlXrNY#>4psU>Ki<` za7)}AkQ%Ez-zdD0e=c4QiS!Vtk^!o0Sv7ontCWP}!DF9Jl9K;qu%K=%e~sjn0FyxY z*E#vlm1jg}07cW(k*T>QT&G270doaQ>?QSH)nkYtt#s|vmKU=)$^Vx@`^mEk60mM2 zzKp&BI%Mj zqNUp}gwj0Z*VR`n!GQv1f7(9tMI<(fd3~0Fj~bFx)parcvrn!;56sCbBm0QXp36^{ zIuEDwj58-lOLrFe)V=JNwmMFmJlwl>PfeCRg2T~{8+))!X?D{rjvMS8J%)H;Z_->%APu`1EE_ruVZZ$1?B^*>S5&3&Bf7Y?SU`7O!q+*}q zGXJ-0H+_J_|Nmdl|MSm3|NG;X{pX*5{`u#hfBtu3#P-iW|NQgMKmYvm&p-eC^Upv3 z{I7z<|M};ifByOBpMU=O-$j}KKmT8vQu}`>uhjpa{*Us?y;N5Y004vR|EgllbdS?9 
zb>8tkZh!82e_HLTklhxar=#(*h^!=M1x336$QhA1FKz>n+n4K(J#*| zt&ms~!~h>Cj2YT!5C&%$3XVUF$QDz3v64(?tEu&?dExByQS0h0>vg}$*s@4ba@FF| zQt~)fw|vX>`qkUC^SIqkcWT_7lYU(7qKZ3;_ojxkIy{A6KJ}1I`P%rMmCZ{xQiL^=*3z=> znl60>JUtjWG}p9mKW)=>zky#DPBqeL2Af$?MSiC@wyUvI1&M2E#ht)y|L&rj4w~c2 z8ZE!rm_1vCf9RP;>e+ye7MI$r^yQYo)rrZt=C=fZ>vjDw_a+_wtt!pr|is(RWK;R0X|G0?}H%>wo-Pb~n0*(3uy zvR4}JO8~nMw`g=&cWJ=w$y*iO>v2qlND|97yVDc+f3BA?Z}5*)WR}&H%CXq%?@Zy= zI@H9yf~@*s2n8Cef#+T)fxYO_FCte{Dy~N>BvNWSh}HdQd-QPxjwjJvxJU0H3_nz#|V7 z2DZv}e<-3ey(d6NnP=fcWd|S)j6HaYPfoLad+rZ_JX)`vG&UfK?K;#hr2};!JJfe<{C-0KB|ci)FxzsVUg$CDv=|i>8G) zk>O^yJKEwOWL|Jta0o1XEGs0ZBVU!w-u65lqVE7=S5K|3745rrr#mcS5)PRggrO8? z9Vp^W`|?CS7gH2^Jv8>)!Cux;X^k=X)y(?EgGWtBw$2z8HEwhd;x!n22(&;(xTYj1 ze}yoyrncH=?+*l|_2aMOPt1C_ef(tE^B^4l4f5l`B|Q|yRu(%tPuGSLm>Io}c4AqU zsAA&LKIb)79Vme^Jcqhdgz^Ua7p6<;PDJZ2UCYMGIW3kV)nEsh2-ca-nM`nAv88~x zU?hMSLOZz^?Tf5{i6eGbKi=8t?i#s772lMmm@S*YQ_IPPT34>o zc;DT!(1~gPhF*HXD)_F%U=$GLUeI(u^#NJ|5~xdra^0e?Fy{ zoE#QhvB)wnzoQXkFmEsp>l<|KNPDNk@O3bO@^b@$tH+#%9+n&En~{y!t#_3yE{D!# zbFUSm5j|rbFohuG07}0VcmY_Eh!ENye~YYwQPt4 zJfnedRY>Ii2JV1mErcj1Z{#YSfA3Aqo6S@Ht+zy{u1D@ypZlCF*s3|P0l@(%aS~K$ zZXEb#X^vf#K{fBqhwiJ$Lu*^k1|!OnAUxoU{!uSwfPyq%3Ulv*=FVT^M2JQHn*;cp zw4r9U>*xN1a~mC43BVB8us={_9$5x{)@s{p?LIqJrTOcfU*`>YY~Ax*e`hfJ$fTA= zFWBhXUsnNEcI|uBq_4{NXKDyJ7#Rw=&Ia{$&TnR42Rbox75S9g7)7b1=-f$|jeHno-SGwP7Ihyjf z12w{@>HQN|k;d9;`^m#re*+ZzWrhxq{d4`o3$!+vR5ZhS0dbP~X06&)1_FPwB3|1D zqm?R^rv=mp!hw}EM~hn0(@p~vFf+iD+JsG^9OiEV6_LVcafX+3mQEPAZ&jd7wJ#D! 
z2$e^pWYzF@3RUDh*_Dr_|YU$h77B@)zW{hSFU3Sf~!k4e+YYmE`sJ4kOd}G zpnl(=-e$B4THN~2+?T~nWn0DLs``eq$p z1(nF(xX0CmHdKbIO;#nkNQXQT&LKeW?uZrw$xEvcKWx$0ijx&K2Q({pwRt~#j{O%A z#6|@X6!O5Oy%V%Ae=nXB(>En&Q#*EVqg-I>iUBdADc*Q4O|nUCsgExe6vA5>LE@5HRswGBL+u*IHcwD+g^bURi2L-B9 zX1^D^s^q`&^{6w@>gpuf<0{*}HG0b)TvE3$WHvSb5Qk+AORn(Q~%EFO5;9w+A+= zGK>a8_}D%yfkVhVEyz|`!ElRM-EbN-42%=}puy%Ve>FIXS1VI5+Z*QBZkNpqZre^( z->*|uQ2KjD%AISnG6`wSg-)KsT59?ii>TAxI)Q0=iP>kw!WsH@2qPpX_esA-x$iqZ13XsOhfj>(yVs9^bEjND|uHyqwooffMWg)}|Kb zmbTw%f8gJ$-)sqF}7qi@b;~62Z8uE5d>VT5F7jjT#`c?UO1A# z+?0dYPvWb9A*_a~wjKWmbo!)EIt|hZ&a^@fNyt2Te<@h&VB$@n|E2_Kw=4@3#*ehk ze{Etc8N*ET&!8Z>fhRHL8`SgS0!i!?7u9eu@p>TOMk?P9=}!)$i||+LHC_HJRlUb3)vi@L|=HlU_pFsuRY>Mrzhg zexSGuQsjPRKY71f_Rm4o5KI=6-pIP6f7K7|Cr4wj2kR;HBxxZg2sY6CRi`1i-leq^ zKpX^OF_j*c1=EZYZC|35)>{Ja0422u-2h|&avyX8hxp7z@x=og-bq7ldf-ZMJTLNq zy8W!-J_8?X{#Mmbsx6Q)?{}6e-fQVee(NO!R{=0iTAYy!m|G7>Jr^vNms1MQe+Ew0 zg78lD{vpe#6m8=>lX6P4oKo1Lk{vb$wO)EtAMUK_wecw-)t?R)HKKZV`*3Ic=FRjn zQgWm65E?Huf;4eJQW)G}WM^Vo5=;sBfdH|HCZ)M1{dJ%DwJ}}R!D3c5Z(RqI9Q3o$ za8tRH9%E)%yA}<%clQ2o)6uKMf1}Ip(;W=E|WJ{36jt`q%;3) zfy))4Q06I@t~D(DaX3%D_SqD5|4h`_M|9EvKiD6z>LTDW^H$Fk0W0`xwJWcw1?H0L zQy`X}KIUlmxdnG6%Bq*8n=hNMzAo^_I1>Aw*Dn~eOOLWupVyffjgeV@f1Oj;#FB7V z&8D%-hI^;-%%Q9tP>0~!_1tO^XHwJy&frvk0u#JhDTkbU?-jJoh8Wfd*nZ5dh836( z%@O-nt9(}vlz-%X`Fo1#xuEtlB|J6j@h27a+Dg!Uqb>J7kMgcqemAwx zPuD9>S{<$AzZ1UKZHu@vz9XfD%?C?J&z$9S!ZN;W@2ArFOb9F_=6Ql}bJ&fiqZaD! 
zjLeGQgS{M-F;H$0HHb`D`orYmLr2hBgTs~FZspV)v+w?)MRJzwf0@QKj=o<{)Y2{A z-R}L3%^n#(5Tn&2N#_mnX&_m``H(>bEHbht;%}&jU}3c!oD>omWD*2Va^>@rQCapjbni+h6TZbd7!i~S8RwiOGFQO zNh2`5m7ozDhA|t?e+GpMbEpU=Ru-)(($&&@-}KG4!&D&@J{exX>$sTUIDx%v^#RgT zb7M}gl|kGZ-~h?Tz-^aFsXCeO0Do z=+;@9i1+f^8fg1|8S-THDdcg(F)t0*2Th)b8bW-mA)gb+t3e|@hpCnoSin7^ufUp$sBoI*W9 zG;rGXHmiZTV00jl2bx~@b+qXy)Te^tD0g*iBd$fiqhdJU+$E1fqQ<&n62aSaRQpzu zCLJB>+>Hq+X{yrp4i%XYZeHb5ZEZfG4QG|PS$6Bs;>bTLgvET96W{VW!wJ#TK%pkp zTS^iQe`0-cs+Gj*^-b9r%*(YL#4+Z=tGkq2&vyTv6{I|lwB&o=_38)<2>BgB!lC4i zyFO?yZ2L7IaZt__vPao$rpD(g@LTRRvMk$x9bSaFat1e$5qsZ4wDBge?%GH($# zcrkW9oH(pbhu(w)4m6wwH?4_laq}09zB$$Jf7ocnBr6kUbGkJRT0=KYL&H-Mv6>yX zRj?Oy7U^2^E1;~+!A3oRy#vV20#%@tfmJz6<{?nYUYa!l5x!5%L7@{jgr8Ky>MRR# z;^ltscW^m~&I4?bV5(*H`G|FhtW)>VhtJ>^L8hC4OtYdPQ(%#(QQT7dp7HjO1wo!( ze`!oz3O+ zB5iGx_ious%yY?*qC}&>Fp#nc1Y^ARe=;@gJG`${eusL!>N>7^%E{STO%wGsV_HV- z__>u}SQX@#IXG#6&h&HhM5E=rxal5cQz3JbU9K__FFha=K62hkfK)RHOm)DTf9v-8 zb!cYUvv?EUF{W4lRTR}mQbHdn5&$GV>->EG+Qqe~-um-(H!mmU-mhz)seq8E0c*+4 zahVKGk^X{rbmM6PM1s^T5igRL{Zbe&Mc)V|>)@0IFov9~lJQFxPw1Jc17h#n^f zWzF9YtqDYR{~>2WErmTXO)-0W_0lqFlse{ca058_>7 z4JvA9v6FUjNmc~4L3Lo7OJmjzr4W(TRItCbp4n#cl>>7 zNxNv$s6((w?Dl6CJsOQgTA{fulrCZgh3-vn>UIt4bMl-o2~Ssr z5!FNqMixUGZb%U5uZm1bf1Dj?%_@P2@5#h9_hEvHd42JD(>biGK=o#8Tc5@6J_(W+ zJUhrCYN)(;!A}j+q3prmwc^s z4)Bh?UJW0$g2(rNf8|UFoaeT>T#S6mCSi0);&7D?#K+ZKD0Lc4J0dd|6{(*hc)BqX zG5`_gViQDN@KN_3wvK!PRa4UC41NnW=QcYDFuvx56DX`<&Vogp2`84FDRrSKg!5@l zwLIE>8C=-AppdRrn9}S%?%{;f-*F{Omppzvy^vH1W4la+e_1yrCZoArR0Y&d{63U# z2B;@c7&IB?%)7)zLu!4;*&7+mVm(s5Lvx$$#gyyux=d*T{D!9Z5MSqja%dT-B+G0)_daG!_JEjE)aQB84O_5MO$z<2Eo?B)+W<9;Q1N=yub7VifzCx}C zOf6(6A1=d}x)0UaLDcJ*4fe(eAK>rh#E>#R^z8@%fBJ^rTcgFMPE?#Y9``|JWyc_Z zhT_;0(PinrOH-@SyX59Vr!<-@2&?wrA!lpmQ{(O$Qm@^=its2{MmMtlUp`{@zuvL} zi~rA`G%k8U{^u)lAjDreY$6!{vjaak0?#7=xnx4utsb$$MYhZH3fH#Bbd&Y^f30i1 z-g=#Ve*-UHVY9GeY+@v=e5&{;t?_yKYw=#S?WWV#OJT>Mc0j57<&lBvkFBNDVLj9N zMBK4A<_ohL@@<2j&XQ$HNml#2$8HBNzC9FC>+^npG$mH;0 
zQjD*O$NVT1f%ZRfDaGq|GO;TV?Tp+d0eu_Q&8Y!w)~s+`z4pt}2FTP;p<>CA+~lk?rHw@i zLdO^L6*o8Vwq{2)D(9Ku?|9@3CjuW8|-7kZQP#k+JaU33S?ay=<(27+(1+U-$;s1sO3)z{^?`ka7QQith@?>$KUJ z)eT1`@<15H(+N_w1#e#zT+=0q7K8a6aAr1H4nJsqX*H!zROUYjE4XVv}N1FWHVp(jfK@!i{+( zdMW4I_TH;{f5qPR^;jIcgz3W-H>5`Z562IV=!kN%;81AJy9bgXo_L^3&3&8o$B{?t2)a=(P}7#g8v42ctE+sho_VL|Mzr*#pxHLv#Gsmxdq8 zL1Y`a%1`bQQRq~gf83p=bcR;zcnlbB#Vzf)rOjBM+F}&iI*gsKFTReGGNsUsZCoK8 z53Msr^==iW!~f7u7?u;Cs-|K|GX_;&8c zZJx{>iv#NLGY&O>@z$M;zfG>i73{qw0^9SjJmuGPx3J7v`?PAxrd`o0{QBBY>OYeG z0D2l!H(WCCzp59Xo3^j>a=t<<+*OT3YOrKvgbXa4g9nqgf(M>P{NJ38kz($Aij*_B z8bH*j7{$vpe@0wzE1O1_q!@zMX1C|%?&-tdJDhT@m&qRYmxC^*-s03zNyG!e3E9=h zMKf}29L>N;=QvwHm7WnL=__X@lzpFkeD|#Kx#s-x_2%`4R`p33w%crEUyWj?xzhV{ z#;KF7=f=venFWN6K}1EZA3SJ>)E|RVksZ&2MBB`pf9vhP~C?PPyV9tr^u zz9w7Te@~QMKvLemCsZu(m1ze|TQ&RnqfxNYq1&C}ik+FGuF`}+p%`osb?fc%1?{I% zBi9&ghoWxB$0M2j#9!T8*RPY%pf0)EVul;aBpg zup~yhUYIG00q8r#Fv7CWou3}peV{49>wM4Vf3~`;N}f_@#Phgt(5_pmdpkzdqmdSg z;JywCV=}fvhYj;Un{BZc2Qm*k&aiL9RyXi=l7U&Gs}@0Ygfj9dwFO|r0nIoTlVg4=lx@xPud<4m#n+K)f{{5{qMGfQ)dRCK#Djm+`4)sh ztTiHTl9I(3O7GO4=M(i^tje@1{w?7x>&?^f(G8^w@JOC*q?2o%h56N_F|HL^+coxDI0Npj{#>n)E-{|;{` z?`P1#*E141o3+p9>;m7L`jIj^fB1?43M|<`9!MeVZiosH!lVsEC>|sacSlI3J!^)8 zMF!thLPo6Ndb`2r)iD{0Zy6tNzvqLey|Vu(JlU*rghG?#Js132rOf4Gb|+6A1} z;Z2SAA@-fEOn`o(S#(!oLBhwtvglCc9(oe$h*-=5LG`1c|4-esorA#dN*k( zcI~)`3grNy+G3$>2b7@K4OCf#pI|a;QQ_+_LmH@-zpZMT(OI4e3LFm@s#b&EeA_tT zPB>z$KEG|8ktBYai|PBPe;G*caZIfCgq(KTC8u%LOhjpzn0Pcy^bXb-a=UBP6V%(W z%*a1)7t7}G84?`ZS8RS4J1(xt{Ag(|qw~^x1X}mMr=f^ux(=Z1w?AEbPsI{xW=tQ$ zMS?A^*2W8B%ttd45Oa#~1g*IFkcyRATmpZ$Y!2Lpu*E}(T=Mq#e<;3q4h?f)Q|Y}d z^e}&ZV5Mc=7;I1D^F4M9Q{4`5)Z4bjx7zwre_N|V{45aA%w}VS7&%UTd?;qzpZm4) z@E586u|9{wf=xR{9^%ZIe2VkABf608jA~{U_MvHIF~Nkd(HXE4Bub57m+rq zltFERFwp!P@uQ(nEhupJ!D~0)aj3kW0wkmIqk?M!IVOhbxRIUl+4{TnzH!y)W7_xk z1Cp;a>Xk=&y9=5c#U|S$7xxGPf}&y-__e?UM%s7INK#(jAGwEaeC zsq>9BioYS*%akIz7i0Jeou-i~=uJF`MHU1=B$~1|BVh zWH%cosu>UErXQIfx`cEZBQP-2Wnk{syvWul+d3anjkK 
zO9`9cC-SI;e|MzcPOd@Nkl1Pa9&EDLKELe2?ErXt0<+gKdzU0FC!ZDe&XnXfl;NC4 zdl6Xm2qz7@W7yTatl`a?<%w(=5G)8_Q47p=6Op`G-gIO3?8@}Io|iplr}t%TB^A&W zrzl!EN+9-`QXx=<$Al&b#6UarE~pO8#bItWX726se>UrkiHY8Y02u`4p|WYLsT`#< z%f4dXpl1U{`qy1&Bk&OQ*!I(LKA#Ens;A3jmcn?E1j9r(&?L{_b0vNB{eE6|5Oi0k z4ELW@`%H#p=!rXDa&;(jqel`8wc*T9Yw(SBsfs)NH?NWsQC+rv=?UpMd;pQngEZ}U z!ERR8f48RizmE{F+#btMuZVh*q*8@u$xI)4HVh;e;r`0dL_i0SCvn9$vGv{Br)RJm zJYR{Hp`n+lv?*XdPVQ_N#(Y_)%tmIHj8^Iq;rzuI=WgF}PaUBV=ZQ4>364|)WDE#v z@;x@?8`iV!!Wn4sotD7_(LMxcegb60cYz;g{~@5hX%c;8A`SuB>L`27tXeLGYsAhtU$`V3>~oq3A$)rkr5 ze}*|l>KD@>2A0m8vg~*|)pSPpe=lQojpSE*u6VXZqwkay02Oed%oyH3&av_0MmI5f1u{1=looltfwYO0eg1?-JQe~!Sf zAMoJkX{qXhT6A(&e^#(vFE|{39?n$yShH=q%T+=qs#rM(1gNw*@YPF^WjCCScw4iz zS$*G}h-K@)j3R;T)SG74S~&l;Q}ETot7=#nkmso*CJu8H@L8gv!qRzZ`7;%=+=W>2 zgZ9vp%FJO>^O>@k;h1&Q{j-2se>;r>0~Wl$KWf9Cg1h?*;pywCxg_8e*m|vTa`pS2 zy<5`TH>-ck0ksJ5XVC5|d;9iOrkH8L1o|H0N6Qr@3*CT#VBnRl7=gCwMfx!{SP;!dTu{?;Ccm?Gi0GlJPx{4Xge+kzY$Hx{S zVu<(lHwfB)NzzB|8U*2wiZiwKRy2Ff&ZMmacOYduwdao)US4=@7s}6Ygl!i*U+>}e zm7JvNSBKTh32j^Ycb_*$@*$sTPJVQ0;nt6u_m|%8jlq*B$D6jb%@$(Wa3}T$OSZqO zBbl}drX=vdpYTbj$3(tff1TZ(ZjW1=o>x1_y>dR-&6)NVr=wlI8iiZLdX)1UjZnzX ze`%S$e#(YP9ZOQ!%d3x1(fbZMXQqYBq0synX&Jv8+3R;`Z7mQc(2swq;%TICV1Tzx za|Gtg@ukTSy%6K^y^yFC?H!LkHZnSW7=phi896$KoSmx3G<0G!(HX^R$E>L;nPPFlJm@)9GJJ?bRuW^~Hoh$uEul3y$b~YA`!MBece!CUj%c0*4RC$ zrX#7jx$U8U)P){`YjUcE}#?PW+~(W-?OlXOKMi00BMS5uIUi7TC9IM`L| z8k6C=SlAW6ZgG*TxjNt<@SMW zXYUQ*J4pk}aP5hyOva-9RBL);)Z4C$;{EF^t;!ccf2m)Z#HH>%u6 zBHPycv%^zA$~;Iv&x62h*AoS>mgv!q0a;?Z zIu>T7wgHT32YA! 
zxFtGotM@BlZ3f9k8BmCiA|T9W93;pPeK4%)2!7(j&|71McQ~wpF|4l<7CS=qiJ@Wa zivdL0>3Rg?qP!iz-$ecd_QWrgJ^2~_f0}oPr)BK|JR;x$YRrBafh*MbVUVRV9GC*B zx7%Vr?L!{fI<6Tp1-e+R@Qfd&om0PEg>fc+uFCWed*j|;hwu(seJtf6sUB78y+pO+LJZ{c2plRe0FMPFD;WvUFOsDIi6GixzNe zxY7H5>&0Y%{4zstoJ*+1&Gu8>t_9}~TyTjdK8(l~Mtj!yC^PIfqpTlVi`J2tN_yQ} zD7%>WWa?V=$)nEaCmbJkG`YaS*ZWh>cbX@!?@(EaAgU{2jPRcof3ytct3wqA6Fxzm zJz-Q3+xfQNHRHmvMW};S>$J2Q6IPAO;1#1zgAXq54Wa4ffXiEy0?w z@DKk(_-md zC`GiXgduN+8hTNueUs?V^(n~)e}c1eE(dmCRt3|CEq{^Ie*lW~V3cLG$@wsUHi0lE z*|LJcF?;u5rdr1aW(TY`fQUt{>hk&; z;>j`WcUcW5f3nNkLTj{G)-5S?aBIEbfW{I6poFW>bGrK@k~^fR2VvY}iZ{i~03U)B zqX07Y(nx9U6?bGT8+yD0Pq)7#r90qgg<*hE1FcwJIZGo+qDP~1YL1%Dco2m;mDd!r zgsH*Fb}u$ZaV4J$4@#8?Kanzi>* zOYhY?8_|VLtRiKoaW|`_o{4;}utG{mB(BT|1O@>?khREGnsc)2r}T&Tbt9HC9-g0> z**WJue?8h^o-fESNg(%<2T|8oYJV@aMPf>bhZ>II7WASLM~GDN(l8=6rd?8GYgbeG zP8A$Lzq8r=K>y3&h%8TfXon#Ic4#$R%r~hFZ1HISUbECW^aqrc9D|qfe0*TZueS}3 zN&zwr!!$DSMTfu6QXaBB5k*K`jDqyNqD)ZZe}-Nt;yg~nftPu5YQsnm5 z(Rs;ej+=gkijwRQnpH^0QQpKK59@`1G6Q9`JZ-q`7~ohqqR3exKC04<>D->JypcU~&?*CBnlxY%acy1KK7jNv&O ze__iTSIp~e?9H1;c(`^JVMJlViC@g?;B;EcJN?DyW@VK1h6JZha#u$IX&9hdP0*}JB+4#Kz# z7P@A?>pd~Z14CDieKxxjTL1$&F4;4W}?WU=w=^ZBccf0PHQ z2d)3)i6H7~?Naka|GH~wai6$Mwvxf?{8~Gl^P!Lt-(qpiamS?Q`{sO`;);|IV0Ae*K<6Gtg9sNTsda80ARt(MW*7Qf9Xom59Xz{! zU7$``41=ePp+%#}WmSvj79Pf2e+)-J1csLnn!s3^+|^_dZ|*1Q!w}ABaPY1%f0&R~ z$ND$z?*2I8yvnFtRNhK#seE4F#$!H1=MS@rL3z*Kb4SJ(brmT#o`3SrAr}`+lhyuL zB;9U&h7I{zeQ2hezC&q!)H*`;@jk^7PN1YT&pxPL$wSR!vRv(HClTrLe`Si)Nsgewmr;gpA2&({I{M1B+~Vk z@cY=I^7WfxkqyJDpX^4--&XHXwBMD|F-c3jfIT|uijOh1u(HI9@4@L2*GIq^pR{K? 
zcX_kV&gaEzFH&lnS@c^Xe_vjU8RHPWenc_{B5EW+kaxd9xUmtb{C$jtZr^3+R9-cFq*K`)rd~N=_+*b}~XEmyh zprD0hpJDoztgxfnX1DC!V=pV%iD)XQ+igtA944O#>?oJpvA2$9XI_3s9j|(TY}XT* zG)xcsZbm%~#2bz4f0J<%S=Bq)L#e~5kZ#8l zdU0f>a~G|PcEe3L1Z5QBaVX^b+=PZFL57TiC%D(Mg>K8F(*g19RWg5K;q1aK!CJ=z}&X zw4U4TI4x&He|Be44{y{FO2+)jKzj0o!Gptl4KfP9TQ3*YF}*DGE4e6P^DUJ7W1<`1 zlPVp{ar*}=IMIwR`$uBpvZd5C2D#oQ>lia}a1hfozTHd=IoY~v3jV+E7xk2g@VOjD zhvlNcYQBgs--w_X!mZH*tr`rxzDjcDxP!@57IP+f;duf}7C&UNHt(Q)m3mrQ0eD{R*C z@*7>IksxCG*A8e{dv`w2=}H^UW6m@PPua955|GV~Kzk-;$#Xs`M=)>7rc#MgY|oGD zI}}%(e+QwNU2R>84cJ#A4jEywBO`os+b&}Yervz3$}u`^vQE7cpVt0> z-lMa&k_qbA_xJK1t8ClA>Y+{<3Kwmqxh6a`ZVG_*hPuu~ig$_G)qhecq^Yr-BM-jm4VCIBs9rSLmS-KDQM5_V5MHX$0R?tZl@z2yNl zd(|0s%Ov(oQv?xq1?Vu`7gb(uuo}vDkl+ZO+hlxfr00KIrw>r?SsPIi*kj30fMjwh z}f4R=X zHx^^%xw!*5UVd2pnzH<^azW%i1wWHS#>cUl0Lk=+AT=Jn`(yLubqyuU-F9MaYj|=h z2f-*(V5kvnbvuNzy5Jmm!VnU{CRRqbN$VUf15tJ zg#6>aEu|wWp8q?o!;vqJY)d&HU#hV)ntwvi4bqN#9^Czl^Y}hT@T%?ee=Q$3+Llcz zMO|^1c5FjMJV9b3JfQ$THp+kg-A^I1iS+29URXT~u$_Orai5y8@mQAWU6k=UAW(s# zzJ9hs6UQ)k8_s#);O66RW|T^x_eGV#=K#0+05m zIs>|*k0X1Gk;u#go}Gs;vM`hii&Ph8~Fre{H0tvJP6!yiEHJDiu{d>#m$za(jiVb^i0xDq4ChVA zQ8k@tT3I=h?XaS~-~R`v?igGN6LkxpSSPkmGO=yjn%K5Iv2A-|n-e>k*tR{f^*!Hv z->Uon?w?&utZZ_6)DrY+KE&AP+AV;ZbSAF7iV4$~=xgP)!`(I=T3$)~9_mAd zx3p$TO=!BoNJzJ;oeW=x6JI1s{N{7H(S7`rl>{0)Kr=)#e>}|oJ@zCQ0aA|jP?|Dt z(jVJpx%PFpkjhWK7%Msfs-NbU30Jj0G^>limnUWe$B`Lk3Q%UbxkRtlR80;mqoOW~ zGpSorVK~54&oheJ!Koh?tAHe*^zxL(;Z3i^2f9A?490nGtn5xuctaFCPms51?EFTfci^IQ`KtQsJdW{9dHrT#S%P%0eJ ze-_e;|Lbzj`3QVh_)7xB$mPub{WHdH!$7s(Gaf8}!&K zj$p&&ugpzym$W1(KnRc{`7do$Gn=WEZX!e+*YkUXe>d72k$>!gJa5sB^T|gq$=_&b z=+_-XntXeG*?IAZ#~}`(E*4?F^$y9bPpDp<8+wQq|#j zJx~VbfAB*=GcIy(!lV}tNuSj8GwprBfzw?Hg2B;ASSeMzk1|djT{tTTeHIQ z`nrR67(xg5_MSxJZqYd8&ub|&$D9WXZ1ij>K^uM;P1CnIdjS69&pEp_iZ^NH@?6QwP+dkDpK~= zloiiYqf1n)PctS^j!TVdaivWO2txgMn2PbNKqF;F0M&4w^;e>SBZgG=MVvJ7o^ZOu z5);N1J_AfXVnr2X!EeM2)0UixeATDdu|3CGw2ygc3>WHhiDqbCxk}4r#%Qi8F>-_% ze^MpzgcgSIa79H!`dH<>1k`p+<`wg}zaO5aKiEi@m_xS`TjJTOyW4O_{l!e_QEoht1S3pJ 
z;drwvc@gs3cfv!7CN-AUpb`c02Lp%;3X~}*4?H<5b!r+5%nuOza4J8(1HNIjNvz0% zSbtH&{~vXZ#Iygab6kURB+zIbx&3b63dWQE0nI9!k%_*+I620``kytWXL?*Qu_hl> zXG*7$Yt1vc?%dm_)>Ja#kW(5gUE`MuZ1I!vNem-o3br!YcD=C z6_As1ldT7Hu8PNMNj7%`c{+1vJPIXROn=EH=6~DfaZqS5u07DUNE(PsAQu=HuF|Zc z48bgdsR{uOzycgdDt(7sp<0ent&fNbk2)hLH2`Vpcg0|)jJwol`d(ZRSAxZYZWskn z6+!gen6RRK$`(`-LVQec2^r{ad_sLcBSLj5*q>!h9(mA$S1h}{ne&ml*aG1ANPqAP zB^xS^9aPsxjcHJhg(h}4z)?GsJgo>5&jEgoQCT=@KV`=pC=%Sq*EmGv4fzweUf6fJ zhX{q}hJu9WgE<#Azd1=WZ34cqp8>6|EEu>r3#Ist8cNmz1aKsiLS0=6Z>zt{>w3X{ z>cN}-QX%s*lpAzqeHAZ)eJe}8=6{dzdBhoa_pkH=PY$sp^h9E=2iM0;$|P^L`v_*+ z5CpgJM3ormhXaaZv-^Y!d{XTPj~!V)^oTTmK!7F?8JV_G2FHpCFe_K0O%#p z9UUkXWW2&G>He6iaR5JVEJMq6h0eqzb{XV?dIT-QB|_^81765!ueSV4T3&t`!o6z( zQHg^b@r+tf1(QURJa{jemufGpc-=X!%DNK87G%b43jV+k0AENfH-AsVoES~UGIYj9 z1jP6Z4(jWe{)j}|8TP-}^i_MT7O@~yt@aqqyN}AyE_w!$P-VTrfI&rQWTYDzcrikK`?ls6)7KmcXFbk&Vxq ze;v;6q(Dpa#;}cG8glUF3r`wpo_XKcuPfxqk4hUzG#9rID}S9yKt<4mG*H=$`UQyR zt|TMGd*Da|YZLxLi-DJOpvAu<4a)4)aF6T#HhjpruQ=0`b3)`=h4%#S5iyw9qs#MR z&wPQAGUpFZ{O|p@E2ND?w1gR+<3^g}wSUs4e=3$3trM3&6@+4<0E1xh5DT(o8obsy zwb+jtSJ&r|27hy%pu{*xzKm5R16Ter+1mNqHgQb*v~bLLVxyT;RR_s%g39Z5!Sv7> z%;#P%s8{kXnR0M7Ji~3N_r@~x0s!0;wjSS*MrH}2t(ZPMQH!! zP_S?s^=CimT{8N^A}Yz$&R(wog#w@P9#jL_}3BGDvFe69K7IQ<2PG z~dwg->b_<3jb zoS9`gXG^;BN?a*olEH&gW-D#><{}dlRt_&P!}3bn@$dnG<^4<@Q#xA@l8b^nMh?u1 z+zX16l2vd)1Gh*&GWC{cXNsNb%qL%Vl(xZ#TgtVCK*uDdtaj=vA9x=Kgip}-hpeuf z^M9jEbK&LY)2=vYmxD*7=*YgbCZ*pw#+(={1t9>GXyYy<&XAWn?hW7J*D-CAQKSYu#`!z7Wyk@%CyUB^5lX(iu4Qws}Q?? 
z>3@~E-9$D^4bvO##Og26fFOep5SKUINPh!8KAQVlGvo*9jHd6B+HX zh5LqX_vBY5BYU^S@f+4+Ejthg33qehVa6=C=}Y8^WC=$UfQc~qRL5ue~MC+iy$YRstjD_qO?V&7METFsqo(ku5nm+6v%?ln&t z6gcW(JkGf5;9OhA)!4XHX-*QBr;mbJ53gV&ro#W}lB$|L_|G`+YQS+|U$`slJ0cQ; z;EDZskNBDO3c;a1c68GSCfz6)?SGuzkX@_==BGXHoU5c?#Pu+tI}G(R(O9&=9HZP^ zDDhXBDKv7z8#T_j(pHQZUY`v~_0}EX?&oC~&KfwQ7rsDC4zA0#A$o-yM-v{c)1qRL zFryS!vcoT8)a||cDrvJp2r)ZXcfx=)5NbPAqJn&U`|t|2mt&n#(4 z|Lrsm6WUihQCLy7Q;}xB(v{~~4LRfJ>w4Kg#$5NUF;SWS=9t6z#VPdP$a#=oL^Cc1 zw4>i*E!|KM-xG|lN=p5&-hT<);WU7fDxQj}>O>zLTdpg1tqyy!t=bt!^^_T**`k?m zk;wB*|A?~n;>p9Zc$&N5U<(BAoK);6c`oVT-We_ah*w?5U)=eBE3N2#LJ#LdS{^(| z%8%ZbfpuRNs7i)PHtk}6v!|)om0~t@M_~64Q3`eWl*RS#>KfAWqJI`eF5oKE6HC=! z#`h+~YV-s?ea^K)C?A->s881~4X}mt59|Ewk)$AemG;P~Pa5??wA365^HrKZFr7G$ z=eYzPMC}F1`RaZ@FZ>=))6u79ofa+CgEUd>D;+f&v=$p0B77pNb2w(i@$K7W+KENP zpmnpXfKfw$K-dvKpMPZ~bWd5!#K$%s%?_HpC2&t0d5&&66cl9dM()uz*M~ot4jlQM z3p-oQQm>8-H!3bUuP@7tEc27N&>@Dq*Qpw06Xn7G* z*jZ%QNU_RZz%)7YAx}~BcEZRCg`L_~$=zA$Kh8maFK}4@V=`{Dd^7nBcDeTBSN&xg z>s1G>qQxNlj?q@jb5`JRFH5Uy2R1u%_{opC7sHfcjZY=7?`Lh+dlnpi{G?ovlaiAN z8(k5FHrblG<9{l?2>%BSuFJ!7@H{}+upwz6?Y*96_PdW)n()&fi7{m@5kFCnc>y}~ zv$+(r#zEp@9%o_4x^?@V$KexmB?GZ-lQh_7n>M2XX=doz|ETGumUy=F4$*vMn8_N6 zj0YtWVnlfs5QOI0&=on+;@)Q( z${>ED(|PiJgWj<<*KPEja)5{g1Ggr01BaE3h&<#!I3jq$qIR>rrRX}kx87gMSM8HY zs@EQXCVyw^J)^q%I`AgchdqeGrb#;2-yQzBQm)#Nh-WJgW8?+B?nUQ5iOclGR%`;T zPiJ}mPVCjxcK&-M6Z5-E7#gd4&C;PLSZnD~j}^y1Tdo&YA{Qxgl-`F9HiD{`}{;o#ec_}hg z7i-N#h%b*KVV&Sfue}hrs2Jo-jM0&B`3AdYMmc^yOo?XWL!27RuVT#=mf2KrpeofI zgsc5>4ip!Lkkb_Z%u14BGsGFf-};JmwArf8*US;)Lpmo}5+0&hn0Gs%G#$rg0Lbvg z<9|r!PR2#Zcb$6m-uC_C=tRplXuJ9uh*%%+XkuB2E3X*3Tx2+T#^l5i!vnj|l54sE zC)YbvcF!i&92f}>7)L>ciJk+POk`L(2ZZiWb6>h4uPtq#hB@dU!a+onhi#D78wDWo z@r}CA_;(N9?{ykW9<=4MwXxi+{@Pm(MSo4U73CL>EkqOVhBfAkSQ24_igYi}1{xv7 zfc4Hv2cAFxmYMkkg2bDnoXFHU|2oQ4ed{bFchOzeMxd#??2;!;-EZ5J`tQ*g zPPsUMo&8u<)_?1}_&6S{UA}Z?YlFwfYG@5vXQ_-+4@8L(B7<*WBO4Xkj4bZE2Y&$G 
z$)ap6n)e}b4$KI`qE82k?!Qj6uB+hSM8ttrm9@qbwa+b$ZFXLX@6#9*V^Kxg|#k{jo7W`csaj46$ML}RtWQ9Ow_a|9%0^A}q!VyVZ)|&iRdP46S>Z*8LUJ zyx+Mlx^(|8X}K+!j>O`cK!_cUVO4${bl+@2QNcZ%P7q)YPtFd1{`qg{ynp**D9aPu zj?o_`w@6xf-#2|k2vuNIf2DXVOgE=41$|MRlPARpChtTYuc2S@2d7RFFtl8zFN~pwX~X{Xud#VSebH-QizK~W?n^uN*LxSg ziRxZ0Wf2Tah&V6hWj)Fu#6<(EYF&dTmH7 z7_obMjjo6Xi6Sj}fXZaVF~1mRO3V|h_MJQP)>%i^;$Z=s>S+XPc&} zdBx%5Nhr7#Xin$PI%$Cr89CqsELZfuZ8ymg2Oq})_RRCKoqrnl4;yQi*9v@@HAt|> zjk$dmSQiuowpJy(z*g+5h{_O~%JI`kxUcgW^k0I&+OQG}(P!^6(P{DUv<{}GBX_?+ z#@-_jz1KOMa|?<~#Zv#_c)6Vdos~khN|0ZnP}REY~yQHPqX9f^{cll9$Q|FdQCWS`Grt4vKoN_$B`I zIN`v-amblnUC@`rz*mCZ#~d`T=L_SKsUa}pO8yMh^?aY#v2X6zfN|~n&2#B8 zr=vfE&vEkE;VeoDng|$GWp;uls6`Z&7xrA3FAG#FS$_!^si~wR-^9f1|G}QT7~aPY zG6-5kg1l$lPcu03I#P@0tTKbSVR`y_Nn-KYXCc@nA9HU=Ka`-_HuFk5ZT6*c0={FW zEa#QjD{1#Id;f5k-lJNkU6@G{B`JzepFNZpq@*iush@JJZfsf908s$t6gXmz%wIDc z{!~bKCx40LKpz-@6I_9446SYitwB1Szo3C)&XJd1-zz0>M?048#SV=L#ta*ziO`dJ zvG_rchQ+!S-mBL0mPmW>rqNAeztv1;4CiS~1)J+WFVg|lT!rS2s-2Fuk&Zs0ce)RS zR714&Gfds=lAkrn`XA=HlqKmAP~}qWnA5QIJAWXz8w1@uq2I}L?5tOQ)j+PMX=HJP zgA_MH$PFukjN`|&z8?hPHL*hZZU?fz?46ooMY#Nozn<=U^3UeKa}sdmAXJfk(sS_D zT3`b=+7G;1F&=bFKfp*Vi8~6jTLMrn2+FM-5~Lg*0hJ0Ffhqzs{_Av-^op9QG{G2c zA%7F%XJ7wGn)^owG1Cvq?x;^32g=sWbub%IR$jRlNOlx-(tUzvO2ye%`ZDmHf+$(W zo3+!n&?Ls>Frj?oUfDYOB))vJgL$xESm+eYtGxWvahKMSYw}11~o4Z`|!v~A3p8H-RGh5c(wfM0=abfG^ zHbob`%srtXL#&7f6q&Mn7i`_40Pe5#j33wTJJWRrZ>n4CkY(kZ086PHiY116FkKI? 
zn0w2t5TF~bDQ+QHBt27dxl;gCLphgH{=;mfR2d{O0L3P;3pora;0<;WS~Xe34iDn!bjTDKyKi`ZLBQDqS0td@XHGd8=6KdjNL$fFl z2mn@x`49||1&JY>!MFvZMawI)qJQ(wm>$N!fZ?ysBrXJ)6OOP6z!^H$DE%Gk-(Uir!j&!0+`q`D1k@j7M_02A2uyFQkw+0f;_` zSi&@`qw&VA(u1BYVSh7*!hln(a&)uxshZPmEjVEC+gm=3x~U(+ix9r8DHTScQv?Iq!Big zvfFK<&TCe0>UIO6Q!4m7WFz`Un>X&0!W*;3@UVbVzs=OE3V-Y_8OpYzIW$pCOpl0yWcxJ}}$ib#{tz<{8GkC5+BNW>H&8X?7Geov@71-89x3wcD1Nse%EzORYW zWKQ9N;9C?Ml0SEFcn_N8onpi!5E+j*Izm_ppc+_PUM^8sM=XYk?K|B=;gIvDn;hFU zi0pumW>Q#FV}FUq$^Ji*&;@gH*f%Jx9YBBe~;EqaVCzO zmx1k^5EE3;Q7fL&fgi1?>RqUU_zGqEyGJi(&Q5caFG}ciU{Op_?%|mbf z{)(wThLKf<$qZN$LxG4A_T5Kwvuui!KsHM>4?s~epyt=U$N6p4LCzHo*JQaigwe_D zJHh-&hCdV`ENLDZZbUu528}*t6?!KwM4hZvG`{ff?u$BWMjbX`AQwE5@f<}f_V)an zIm?PnR>-URb<6BiNFf=U$3fXLS?}PCosg)v5oZR0^`PaqrRc$v_Pxb1y3sn!Lfh?L#t@PX z(6co1heGQZbRp)meqI|?3?HXo9EJ*(wm(U~Oy7m{hPJM3Nq!D7&u2>bAOK<$aY*sl zB7cWqD?>sU%EtX>K28*9@!DR_n+972bBMlfN=nK}V5LG)NGAFTyO~etG%`&be6Qbj zCj_qBV)$nik%*&_f)omJDXjVM2+ko?yqXV~O9l@yS1{+CY3K9$7TXNpk%g^gQFUMr zlr$iAcvN`r8OxfUl#?EP!#4f!vOkS~BY)cnx++E)@X6Z3XloSoi?*Den>dW*mt&tM z<5qH2JK7aLE@gjKy7%koFR%9;tJ$^_H1eQ|qlqy?z!+Lf>WN??`9D+I6d}LGZE$?P zOdmH$hAh9-OX2SWwOir(e{2_+fzt9S17yOSu;A>B+ap)k11sW<-qcImV;%s%kbl+y z=Q>q4F7c(f#1wzUH9reXi+V>K^kn^J70x{}!p6|Zi(ZalOZdgqu0=LIIV&EQGPGoE z74iembF@)9g9@!tiZqmmjZ$oA?O(b$&C2*bI&#M>cB0yUIdWH`j$uM0|Hf+bp^q+v z@v}SY2otr#3DYf)BhwhSGA{nL{iHop-b;rro z63x*KP9u5@T)mYBr9{Sy)lmgW^^^m~+FR#8WI8an4&P^xDD`|gzQ=gerGK>oXkvgy z5{jDvTCI74XnNhp9_`%6Pxj?5ozBLGnSRcvDLC&aR)t%nQQ(h6(u>^2qsA=Z?g&yH zGeu1}257uG%u|Hjq3QOxkK|GUK~BC>Z4q(+an$e;r~ddK>_UA&2r!<`$mDeNvK3J+ z6koAtIzNvzqrDbGB_LOO3V#g}rEa#B@5T3LtNRya9_~qNw=Cl)&%X7kQ5i_I?7NHg zy9xJ8b!59L=Ju>-FWmQs96PbDJ-%P{?+XLm7UW4jykv#82ykk7wGdI3=&{ZwuZ=7* z9nxXQ`OQ#M{G`B8w7dV1Xj~16F_^e!L?w(M@)exx7fmz2@8f3=Pk*#XqAD*4S{~BL zUP1Rjr1Fnze@pIN*|VR=)dMGiUJUU^F^)xp$*7yI-$2<~xNF+CW7<(y<+8_Ce7jy# zJiQ09AgCEyqVOU3-)ncfj#y+0Q4R?fQnFTIlXr9iQ7|>akn0sG9t=lak0v^Z6h1#6 z%m+p!G7MI^LLi&0pnv1hQ{=Hs5Ly`B%G4i(|0jZ`@nD1V$bjUrsW;(Q@0n50s+Wm{ 
zh7X=6r@yFmw!LG}?{fa-YrLLlhn06sC7$wDA%q097*ch#pY&Z-=IO%KH9dH!D5psI zG{VBtl`KK(#3%<#&tJ99M$mQL6>|maeR0qH*q%gcrW+PBhkt_1!4qW)2+6+j3>J7E zH&FtvWAY&5VmA~7<;KE?;3_P z$wr@W9sN!)$Gs)qUHp@~-#K4%?LP-%e3iH}cc|Ge4oI|xg!wGtFf2*M5{9UZ11bZy zF#^RxXgZX{#D7iP5GtcUbbRrseI(`>h*+#d`p|r-G)qWax!++x1{)$GohN(0Qg>`Y z4MBz!TxBRRRKAB4gvH>YC{nlp08KQqF%*=tIGC{*rjqbd02>M`Yy*;Slb{C-W*fMB zJyqjvA}j_-2nB;LEHULttWMZNsHppTZ>g33d0>Lb$ki2P&r71Zg?ti%qF)ncU4WVwhwXT^Qq5S_S2q1s` zKZ1bCG$`92@@SzuG2xS3VF}5fnVFf=jxmKMCvzmBT!>E+?a$26$k=0rb9JIXDR%tbfiGT|xfYYQ5~V-RKEz{Al#+)qo}wQ--wX zFo6>+WtW)5(VQImH8`tGoo>|0;NIFM%nL{&6HOv7j}jS0!w^5>4M~w;NOyx9+Ouw9 zv0J|ISHwT-Riin=1@!-dxY_ys!am_Kx3Cy>_wJTtSM!!pPT`Gho;zIIymFsAbAN5< z>4|ft_`$MKb}x(~)CUr07LG7}35@sJ5*v{U&hK~9wX7s{4;l~$sLBT`?9WI*R`ah( zB>lJZK$#}>(nmm0k0ITkf6e~e*45TlFyJ%?d>7Y^VSiNctzr4h_21R?W zN$VxNsA19ou?)|LzsfA!L*wJ{1b-{R!YCggF%)im-+uLiD1Y$5vUdsPM(!Un-`{SYP zH8(fcf{@$i(9DToXUUes%sOfJ*d~ic23j#_%MY1SeWiZwEPv(t#|q+)DNkc+jJPAD3Jz<8jgV^X-8u+xC3&j2PkkPacPqDDY)HsAN`bCJg|I1 z(7%E&QnBZ`_JJrZ&fe@Pb3x?YJ}s#F#*U~aRR7Njg9%E79M4;IiwjIuqS_;h7t>ww zcZPu_4FmGVPTh(qHwz1FgnuShE*?P`8(%~ff0*%nn60J)Nh0ciq#-GiK)EETkL1ML z#%mFV?1;K9+`W<&5)Kx_!o!;{)V_OPmSu4qNaHrgrlxiR?J0`VC;7m}+;qwY)^Zpl z4-afryngg#nWcPHhR=HK3LmUKgK7AHU>M|mxsic^AYr$UVV8Rs2Y);uw8m+NFm9@7 zTH{PBJ%%hf-`(oLED$2t9_At%;gn&TbWKxqc}87HXcizuHP2?|bCE2#XN%$@@|jR~v|56HB-0TElCkBWsm*eB)yu@3GeFFId}iZht*TsSAz`N%EbSzgGS|%XUEwaZ|^-8S)*@iuwJ%<4>F+;JEt< zxl5FH`)izynRJ|mrZjRann5J9)n|4~#NUcY#seKO(umC>zqlCmiv!#nu;YUALKMU# z)R*l*kco$;mNvtxCmgZI0;u*+ZqZI2n$P6MPbNH}oC8VB6B;xK@(xgVHyS3&zCB8NLv^ z^Zn1`JTi63gPxwTH}$-0`l7Pdrm&~1Nh_vx!%~&TQ0L+%{aERqPIs-`s8TE3$P&Pa zz6@~hg;`k-zJKPq2?zM-6XAf}@EQ4OS`D=C32>!vMM%wopcqsF{(6PtGnNt?wKAzl zEkOdjD4q1xUf9+6M1NuhY7XArB>@KN;+wU*I3woGHm1j%R-FjAY9~f_203;8KQF*O zUYfx^UT6GA&LGXQ9+v8SKharPo9d1bqN`H22%Ji58PpyeO9TCbq1F^~I z8Mg7F^RA#cX;c(Q*fCv;dP;z29AZ2m0*>kib-q1e(!I6|a~biuk2cHDRT0gzuYS%s z7*iY*@b`fvgd|{3VOBAZSVnpHZ=O`8*~@eCQVWtCyW6$c?^W%%?#ekGJDpHXPg5eT3 zMh4Kp!v2)XP$u;OlVOe7{y??ezV`wO^^M?dI>I6c2pN*f1q#cQi<#%a2K7I^hnzVx 
zoaPvUJ0Csw-9+DZaCU*UZFq9e&?3I!Sw+dMx%i8xbpTQzbEC4%u~vM^#qk`;C=1$#ZYoiW8~@kc7NBN zFamE4@u|vl=VuF+hw`?Nl2_g7-;yMgm_x6H$)bc9Q;FH!Z9NpN%{i4Gj8l85*z|@Z zvWaLUvS?g$eyBo8r@-<_Xsu-BJSubCKU4|6&-a=yojb!HoC0yKS8Pme2%CQqi#HuNzpaN)qWy2=MtW`pS%74c}$H%4F4*mGsHxij(`>Ygm7_1A1T&`eFJsIN# zNGQS%Y*T%cQjI`alBF35-{L!_p*yC^*R;T_`P{}Ca`Hjc9LKLuMt~$TsQnSl z)u99X^HqE*Zd4mLw?6S#;gs$xI57N_#-5U_y^)skLyqzj>0#g_47|ND%>!QlQh%}k;mM7iYPLuAnNg+PuxjsE}y-da}Zv)o+7A!b#~bxrgOyZFTTUb#54y{(%$g@ zfuC@D@~LEMw*X34sspZuf}L?jXm|j51l}EzC`i~j+lKueJG7Lfy zctay}FZ)nO9Ik?~<@p0$PiXgueGp#eY5R1`1pyZ@5iP{5cF6rvLm9--XG~x-lO#IF z+TgH|N-$j(FtbCi%zy8ZpJvdj0@=>YPy0_@XhDeVkDVnZ<&H@K8wh=t5tpL|!cUt& zcEfj|{by|YqrLlv|3m$z7N%xz)s-1@H0z55Y3=A2EkDc7>G6x*;4rq)o5C#wW9Kqs zvDAPq@YuoKQpf1tSU;%L-BBQ%cdM1d%D6hCj!(X!$4tTlHGg^`{=8xe&DT4(`p=MA zK|(A@MP13Qn9ZASL!MC0a0}6cEq-(_Zf?!*8)ued;1z;x2TwJz7={Qb<5;t~6y|a? zMd4Cfo@oSUk=wWKzVjcBKK;o3J$jR+L#7h7gbTm?Pe=PaAoJ>XMp#9o_2{HyiepxB zIfRuwc;jDX3V+gV@5>}Qu9!)_DV0nUZyPl^X7gsuJc1Z>Q*>NZ%Y9 zR)a^R8@)Oatc)F;crrs6p^=%@7a9IglWf>>02RDovZ}^RmqkSeJbx`N6smQ=;$|*g zmc}qWljszLc}Px1!JNcK4SH#SE&2~C(mWVQ8oteXn1A&a`~#TnkUdccC_Q&_XU{(D zEXW|hPjqmdR0|0&(dBM~)d*_Z78-NchO}b5ZME z+H`1o34cQfq_hIpX+@)$Jj+;tMm;E0GzVRMJczYmj&T*Uhl98m2JwQBKG*w=NM+xR z9GQ`QMWmbc*DLt7e{oTh=8e<*n?4SzkBIKHJ6BpotA;v&OqlRSoEbF$Hktn-CM+M0 z{E#iKle+QqmK~t2 z7Sz^ebjOaqmf01+CHvDr77!@J55|<7P3Nbin*BRO$gs6Gr8Eoo#DwFS9A_laT_{Ep zN6&-TtIm`E?Y)*BD#@Kw*sP_+C@AMFCPyiv-3FHiPdKTTv%p{XjrsN_#Z9RtTSy8yxUTnpW20zf_RhgKoHELBt$H7kzW zBxV8hUm8ITZ73G~OGR=%0k^oiM^NHTYXmE3LQPsf0fseWonj*uQ;>sGjpeokpqt-f zM8NfbX)7ajp7cD=JwzL_*n=Kz1CK}%7JrM6sm;{y=v2$Dc?<(X=ZdBo4%Nl5edZI9 zvSin{@%&$!{9usa=z=uyrz$Uk~U6nPk2@hcZ?7 z#p>?6=aA^exqIRsYg=miZ{1fBe zRSkn8xg3rMZO-k3;7B`D__%f=w6aq%{SxH6>!-+D^vKzI zgPqNGU^*tjTyOZp6pSJbg0JZfP}ZB;CqDl2%k$~T_x-IU#}O33<9K;G7ufu2RTrD} zoB2$BV^vXdnCt1MSPFNt-hV@Q`f&Ol;B4;GviAmZldw*Vk!}`yk7q>e>`XbPNQX8!EpTISx6-f%*=Wl;9m=D&GPl zR-EumlPM9@<)q;m_0YEDO?O^_>%U(S4rd6U@Z`s0>`HAi@Y$^zFn@V6tDlbOi?UWp z01Tf2t)itWm|LD7x0F2pJMt%|TD<>Nr;${uBb^yiaj6XaHI)47&uMJgsD}d9l@^(D 
zB?12(E?D}0LB2e}F!g-PcVL{2a*UQcpOz09P9PBMWVM7kDqQuh`Y^FULaFX$aB661 zv?)XU51=Ypvy+w~IDZqB5#dk(_Yz#9PZk~!qh=%yNNyh*;lr1tHSs1QiaES#V}8CB z6yuG7OgV->6tWhncD%YP)Q`p-#uGcXo`W0gSL&&P12;XMIF5U)Z>z@spKN4T;uyD6 zz|$j4RgP+ojqegD9$rIWByb|pM`V2@pQw)#Mys8s2V7Vr_iu>(T8#2IB z&Im76M-IrblpyU2mWQweRp6$G7&_uwL!2WpbY3>HmZ__I{cm_e*bM2Vlk7;X)iCFb zl8pL(QoyaBO*=_gLy0Ocq!?+bwNX0MYOHA6T}^slqO6k`4-7hp-;TEpGG0Oln(U=L z)Ue0&%gEq4FMq)#xQivEd2Hif?|-B3L}8FK%PGIA*;m}qU9DLL zR6?%z1Z%ls5ZLaFU>8*n=EDv;&##gea>uj(YtFG&@PBXZhZE9)1Ooqr7(Su)i8YvQ zyu0x#Eo&}?1`?iXYk?4!HA6f2pw2&0lA367)DlS3Mpp+kESv4pKTB&N<^!Ss*e_g7 zOP|xzo^oI#c>V`0*5^@-X*vI+S%a1_d@)BLB)iG7QeVe1Gb1n)WW^FQL_W?jpCSr) z<0nH44S$8Mna1Bn?l_lV`@pFnv#~+~i^Db&TArd6&t7FVF|pIL^=lW-i7>s-quQ1o z_Eg=0gd!~uEwe?7Y&Rc73lfcfG(XdB_r>$fYtyp)o~82Y*G_DV_POEU)Zl$_k=P*0=^uP~4zT zlQTk#5y~AYVF$tSV7m8BU=i8L+Cqb|W#POcu>dX++6Aa1xF_Uoe)nWf@&Js`^xG&wV?efex_(>uBNIeUk#n0sv$&qV+s)?5=EMhDC4YMUT$fjTPjtzRSUE&*ER7ktrtJJ0e$Z4=tu!h$PaE6T5 zL6>K~m5H3oK6~jI{8ae{9e@j3t@gxzE1b$wDG7Jbgfe<2sMb8|Zzwo~E)zWgX@Abw zr$he}ds|SqBwlmg0)`_ZQ+4b8VuF>IXZ5Jl*+`Z!&EX0^i!o&%=^%!f8I0tck(Clu%*<%r7vXEcf9IL(t zleze)xcPB#)^EF;*U~TInM6Syl7Aspb3#GI?&RD$Wim5vBht}sZq}OtYU9k^@nguz zw4*>LLPh}OpvB$#r}=^I{J+i>(&71u$mu!=ef2|4cshQx1maTHk(wjiq~78Y3=dP@ z<_cBws>b>`!B27S=E!Ewd#A48eFV86W1|Gz2stj-V)1&ALOHr{y?h%(Lw~WtC$?YK zVp{6cfWumGTzc=(O7QKUNACofGInm>;%KN|=TkU}{LV%KMF|SDH{-BY`XIImW}l{n zgUkS{Zhg%h5T-<)xa&=C^<~zfi1;nAehcF9y>x-0k`=dcogvtu_8tGkzA18C<5h%p zyX?20Hk*Ng1-eB0vhA@zmw(&EJ6Hqye+A27sr9yde7wB2P^NgDYHF!GIXpl*J0 z|3U0ZTjMxzqtVytgK#)NwJD75@QgVFp2Z5DJ3YQTAA9kB%~&YBf%avvg#7N+mNPBA zE%o&}^R8R+EI9f2B7Z~4ImQ?vAoED5s)3v<)#w_PA26-UvtJJeA+-^`qODu>6VtY? 
zUB#%`j0-1Lw$rX7nkm7E9+(rC8v2U61gY7Lp|2|6x0j!fD|*jI0jt!mywY^?Np%cY zTI)auGCC-xQ}_N(_XAHGRGV%)N9MdYh~vl~<^!ha#N33DxPLk95XXO!eDUv%`pE^q zivOi3s-QHrPzRachn>Zscx7xAA59975hrxV^}misL&=IKJsU`!aYN$%c`1xWrlX_t zVFbCuZy}*2zq~k7Eqw`L(Yq45DTPH`^y3O{oQc*p$(db02Clh^=#V6=;Z$Dq( zd`1dNT+FtOJAW>o(AyZJSa{mDwI;#+iap*aB=Tk#6xdDtAvt^f81%XpJQXX?<$Tmy zTvgk1?h-HXq{;c8zgz-$6mrvdPnrdHJ{n?kj7BMsF%}|P6;xmK2jHA;f3 z4$e%Pz>Usvj+En-FDMs-Ps+0hhPr-Kw)Uop2_BZwZhr~_Nu6Dqv%4q@kz?=ET`;Yy zO8rGLsx8N|LAq}_p$~s{;UHtyUJUd80~4n|lc#oe1%w|1vPM{ZPVNvldvBVA4Qk@c zZN)>fW`-9z+bEQlk>GnrU7n~$%(=odqGyKD6dF5|T1iy#JKR!Mvb6oC}-tuPl09RFY zql&H2nmgwJo~0$=m?UB&d|*zFqFiFl+92yL8iw%w*0~d#;PZ;*^>-zWUsJ4%D%Ps$ zpH|2?>lInE@1>`Mcfl8d+MYLS0NGeNUvNskB!B0&>%k7pkitQ)h6mzqVzJdrP37ts z6>n?em~pd2J;(IZR8f=NMyoGpD{35SY>>h_q1%e@b(>n}k9v^ua70Q5_TA50S>&6P$1l8&*g&pZo@7H$ISSn+lTx`g;tZu!>43U|-F5QmspQf@j-!J`zlRevrB&5KMQgg*IYKiqfo=!~OkbvGml3FNT(6jhY$4lWKm7ibq(bQcpUS7Da{aF1Tt7BbbV6jDx_{s7 za6@Qpl&jWmN2{r+@%op{Ro_+Z6JB2QjRwMG`uRwVp!T86eBgg?t3L&5YaS0;L4iAT8H1d4pc|8N(S3l z^MxBFBH`XaO*ClRanELHY-?$$dVieg%@*)o_Xw5qQ3_D)I~0xT!`r_eM_ts>)(#q{ zPCeJE#(130Yl79$RvZff3SNJ?$-V{5%tOK82AxS!tbqmGefB{t~%`Qd+@wD)K8 z#YgMCdTE5l)W|5nY-u8ED?OK=L8Qv$hBU1L#CE{HTV#WetGfFo3>*v=g}tn9H7GDw zAhI_=c-lo8hp)9iA&)7s4)Nnt+cnFcp$4sEXU!Vb@8$x>^4vt@mVd8Mm}qBfb17mn zhr7756OUetL7IZV>JOnVnyky`viI%byJC>W4~0K4Nri7k6paDja4|USo`!7?wd)Nd zdvD~v*xI=64%21iqEqv<1&=+#cdHy`XZ-4Cw8SJO_I;a#-MYuwO)uRiG^$pLX99zH z70&sn#qxHT6vXka(|?x+;i>2(Ofa{%d#-?&pi>d6AK6?CnY8@loL;?wa2^Ut$Buc18ukOYp4lgu(=kz<-Ny{k8n)T^!oX^-dFX z414e}7wxZ?Bed>d#05M+h9*90(vu~-j?Cib27_s%<3FzjmC0SN##zPl-0BK8U= zNOzUYBkSsv4Bt6A9J}tZA`5&a=9Rz$0$=xKzxC3I@R^jIg_bGxT-K{8Kd7HB$2H3B zBkqU*xR)1-s(%X*a}v1CfFc6C?;x2X<;Lgq7O19Q80^?258L)>s)7SGaB2joJG5r@R8r9b{Wa!C^!yKhkp-n(gFSw(1AMO4|4kb-6T%; zu}8WtB!(>WPo_2WLgaq-`N&YA z=O#mNs*iUuY1J${5v@=9x3mCxK!(3xUm+S!oYhUwVNJO`3aSHO|4?$1k>R4(6j<}7 z&f#`64HnyV3JZT+()d+6Uao(Ka4TupFT;1zxEL4AetGdzUCqh|J_9jG3>%2N_t7>* z)<`hZ6oXBipNql~G8pvR3|bRGH4LfIg-aA3q7~JIw{+C@fc(ns;-X=SeW?o>*gr++ 
zLnX&#O*=+$Q3tU$I96nZ5VAVDbjItCAxE+%;sby-hmC)jpg@tYAAGui0~(sqpK`37 zCf83V&oS#;kB~Cdsg4Zs+cEfQ>14#;Fg-O-kPvP5Yg~6BHI^6lo=gZLOPy!97)q&4 zS_O95aI2R{Z-(?yG24JuwN$nUDHW$1Y4s}7Id}$U8zXneg`3Luo<$ZRkm90P8O=+v zSTC%Ns=a?CB5Qx1_7dG%OE5GfyUasFNm*xzOlsU0#!^k7L$fg7aavEE=i{?m*1;Wh@HosG?e1O7eE}SVYZ^S*@IgZ8 zIO&ou!-qVksZOxHn9Y2bP0= zfa~KRE9QoF*or~5$g?`>y_`#ZX95W)Hj7DVY8b%& z$`WCb*h{Qxc)%Q=9QVz5dliObNpB$XzhjNNSCk1q2so+hlvyKS((iC4r50TrQ9XY> zF2$JHSPf1Yg1u>OHZiaX=fxQwy?ds^w((QV^m!dLI6eI#%X*htoPD)`68lo!7HM&n zY<1X$PVv$JFh^1kLmj~r?C~cr`rh3FzGBc?^>se+6@{-*qd*;Z zclXf(e6!=5FxVr~#?-4u)3a?`X$R4zp|L13gxT>6n(@hqtQ?fk9_s0~Z9gaBH{I_b zhL42bywj%q9Ow6E%YBn=e|&$z*x;5RPz0h7yJ#=N-q~!z=SrPIVQr9hB?i%K3h(yi zBpfgr>2ql&Rn<@pj!{etDhOsl;JFoJ->B?NB}q^himeY~Fo{zc+UwzyY)8$A9~HVC zd+yU8WsRoxrxK>L)#!I8n}`_E5eu9j%$^hwX6L2T=;9SRcW!8-#MtV{xw+@AMlv zusns6HnN;iWtwc-U&MdScd5?sc3he$;Mmgaf~Oc7HlD{(l@f27t5{RTKpP8WR!TOricW zX`Y*!v>2dnis0n$28p?hnj!$;Lj?c?g#iFB|Cvee7yxi*0RT?_=ERo?0AM?VI#mV! zMo3ud$XY2X0_gs(!veq}Z2^$~$?h+_|C|p1q5up4@n3&?{_ZOP|9@gl3LyU9_5UQS zWH(j<0Lb3{Ye?JL1;{(~LRWK3_z1N1HL~+@>H2ih_uYpGGDjC9P72LATvkODkFrSD zPG=T1nP%4HtDrg7aSRg@9<>CN7{WWy0SXcwq7zb2<tS zrAj^V42OTBO~Zqp=LX)TbAT`3*89fm+qMYu{{KPvI2rrnMA>=bT~U8%4>TT)b$X{{ znom#|0{NVe;|MRrK1c4=lsXU*Q{Ui`7RwVB5&oR>y>h$SNa)Y6gl|8H6Xcu)`RtCb zeo&hRbs23uMexKLqf^gfRLzy_iMl&-RD8I4?i+u-JJfHlV|PX6_h8g%&)-Md%70Xu><)G@%-blH&tr2G|g@f*1{6i zK7)Un*~rM@bLe!oRfj`Mq!rDM^wvvTn87R5nQ~K8$1o+))mZUH&`^t*%kn;Aqjrp+ z==~e$Erz?lt2ga<4f{TzyS?`&#>$$uRcRYE$R9e7r=EW7C-(U^lmLBjHoo4wiM8I# zZ(`pw9CDj6z^AdT ze-+(HF0KxBDu~nT?0RYkG^m@{ifn&Q*vWA`r-GS1n@39Q-FZf2G#O7a1eJmMapeHknaJXMY&K!RLMt8mr#x2l1G{bJ3K!|za1Tzu?~j~^ z()I8KZoMyQtvvVB0|h3tqsKyVwj!r2+aIO9^?HNXqSGZ9%OL)Mc9+#c`ow>?tI}d^ z0~kS6x{@$Sr#t8?IcKQNeIEUt+!5W-W@snIDLoLYjst6286AhFDl4uA;tO)(_OCPF z8bc3S)knRsUm?4awKhlnw%2$DxQNF1A|z+-!zARWvp?|QuGqyvxVsK;TqWavET%j( z&^oth#Tzj0m0Rjqi(3K@mS2AY3V#Kj#d?4Bjn21iJ$S4oQj%=`O<`|o5g#@-w4R(8 zA?$wl+!go%g}c49N0A|5y0_e!VSH~cflo45fZBMR7SK!EsmJRG-qx=h->m7iglEm6 
zh5xmZHa5o9fbAUnOTOn9gSG9sdfjsOCwtVB9UsoBk1r5hKfBvI4Mn`XX$3 za-H1HiDLYXtZSgTq(*<^64q z5SL-_UV)jONJM^V1jA-1QvrRBQ~&7f6Sg?e0FbNn5nQlOfvr@bgOgHGOqxW+jPmi9 zWT{eV);R5v^n&Syk>o zf-6|HsFdF1A(vV@q#GfZEpdWQc0y*s?-vMZ^|+ta<173|Xul=+)PKa;U(22R#R5OD z&2+K#d z;#-VslQ)UGLHvJ)KgTKeY+OXOru(7^Ii#uEg8VETgK3ME%z=@FCN~U=Noo-jw`O`l z21VPy7L*-Up}uHldJzh-iq^JXw!%BUbh6>vtsNj`&mWy?Bh}blG8^X;Um8}Dg~rKV z)DaxVZm}RDg_LCL@a9!VPfFSZEO!b#JDz{^+bSg9#4~@I0LJFFgfe1zIH|@B~w8|56wUm!!2>sA*FGfyi6$Dkk{?=ODpljB{M|I->Kxj zMl*VBT*R@K1I=oD3%Px~U?bA9K#~Q@#<^t9VQ`yiajkK5IqWisOO@4OpeHeg$%#@7 z^!G|BjD~;HpckMg(bKFJHE!;vOlj%GLoc7n(%4I&7pH{LjH%cn;Cd#jR&zB~so#o` zWtDw78lY)fG@ou-ydG}2|21s~2qf_CP3|f9zA&1p#@s7KBxVQz+Yo?aC`!R)Lvzn- zgDn)n0lA%DqBoPB@d+A%lv6f~Q7K}WaO#H!Ac;~kn8;V^waah;<1)~tND307H|7OlSvpv zktlyIMM|*}az4gkzr6sersMb)b*@Z>u zxTUaZiv9Wu4M4n5MHLEAM{ zcAQ)(2Wc6-XHSQH{x>cY?}#&(9QBD9-leTXqT#8Mgu3W&K;0*wTC)TVt2>eR12HlS zNE}F_da@c+pn1Gn$YvoIox>E#IWUb!eSx<82Y;w1axV6HF7d3c!`y}r`7b?KRX2Ze z1k)|SZV6xS@Q+o-m-1;{giwghBU(A2HbwI#7X`d=t&&{p}sU0#oaKx#owtWHV#A%cwcZi-5qEIsB(Iv)r(x3Cw5@qBs#Hv-X!g=C;tM^dzR!X`P5{ayno>*`XdyZGZaqD zD~uah%=+%ed*LXSLfgN1FBaZj7&NF4a?h+HBUTud=B1a;uw|?-p2G~YBelq#*Sxl{ zh(o0vg(Ouks+pWy!<*uZ;P-#cEc&r`db%Fc%)@Qy#sXa@rhpj@VSQ%*9p}gdo1`PF zQ1}b0bVwbddu(k%FPA-C4eRzfC;i#(M&Aqdupm3c+g-Dc?Apn+MXkkREME_7h-)K> zflk5leTJsUdsj8H7=xo@Rl>|R>p%qM`y|?)pki0CO7c)w0npk(r(}Og!&{UU#Da@? 
zx8(bkD*=r&QEZ_yEicSE4RVxv7mrT5xk^WhaV;`o?v#=J@)7qE(RpOMW)L^F9r}u6 z^VFNvR=%4awGP})52`uIYI6N|E|~FxEmaB5Rmg5GLX4-@@`=XsM9>IstlH3GiQebY zz7wB|z6|au<@8RbLeYOGh)XTJ!u^DSDDoP4;0JgaR=$*|0|o13uo+$*ofmc0$O*FS z!1q@w1*vQ)a<6ZdbXBsVtW24Rt~aKlnz(%S@^=Xa5q8Al5*v8l5}X1?L8I*15>uou zJ<>yy$Q$uU*XcczLI)-Av}3&8jXLJeQhNqoko%g@C?1+D5%qt?Aov{zhg4UrXH#bS zP*KAuD`R>;c#3XL_kCTEOOSAX&w)-vyHv~p8>tKu39#~P~(I@%%% zCcOqhFR zPuSqZn)~V{K84CZO#A-0v?b{kqa^WglW3&LYLndtuxNkIy&5w0s6R-%hsS}Wvm2L2Je)&#w&jdjV)E8>8hu20A28)%!kla@#4^iS?QuxRl}mtOJYJH zlvFdR&l!KGpb~!ZgIWgwbzDJ@V1TD>;Cvq`q%5A`^TTJd5zN6way1K;tA?fIzJM5f zi3=>09KA(}xCBi&COEBSEe4nzX~~ajoJukZq5S}1MFbZlCXI@u9Hi_My7)A(DabIA zFcAq{F;29qIu_lUAz@~G;mF`v8mou)BsI%*v-^J-=@~&kxJUoDLD2U-pri8q7rZ7b zhbRUzIiPA$sOnrQP2G(J{A89TfBYBz;sP|i5OCo_) z{m542D?hI;PtIGp3i}8D&yXg2LWTJ_&kyj;iK}YEZ!#$v9(SOhKqqx(KkziJj}zN{ z6#O&i5!3iVJ8kmFHB_oM$J$}LQ1$_cvf+Q2Ol(w61FdFfNd?;YwdV3XiaouZSwHnO zSJK*sK4yWQ>nwDvPvgPClg~5f{8-Qxp;$6Qt;_KWyu9Ii7CYnsYzWlPntO|uF%3|kY|n$gf}@v!zT{4RrSsn&WKUQJPvn0T zKX?qlu5DbYBn8!qk1xNPJ)zT5jLvYNi5Uc`4T1NypyF~`^i=FxLro8_rJP$^G2gw| zE*e~EvXd(#crF%v@BXzT&(EQkmwU@w96W3r2!^SC<;|eRiU4imnB+fwZtVIL*YMCZ z!}f6L4!@0bb=2n6m-l*OV4#JyTfu)$Wd3HbYQ@Tixa8HHn|`t*UJ8VzJ)P8DP$fAW zm3?}*d^^&3DZ06^)_!}RLq|ZPJ~U)UTdWy}nxG6WN_WYb(o1ZO$GPJ0eUBOIVEP5h zq2;rX6ux~>@nRCVCVfAT4SIExUc$LC?X4B);~(^-78;fAyT{wC5hTV<>{x%oaaqw1 zUNYfur|8hli~wnud*~j$;nJYFC1x2m>ChaNuWq~H^er@bw1gL-EOQ2Qe$PXn+RY`^(~!q$F4HUPoW^D%LoHw^oOgfV#M1YhIHvVL z?+NAWKe1DWz0c;rzz4LC%PiyZ-|~}a*2ClwVem*)M-nP*0eO1E0ft@#W{oE32uSym z7Aj6LkFjB~QV|_NA6SpXnpr-*Nw=K^QViHu`B|dYQh7|;g}bR*+@|-=Vf~G6y+xTk zkm^2U`~KR+KL8qE-kX1i`64;fn`pAm<8hr5R|V8T#19uv2~=+j-0x&pmAs+TOx%HS zx6=UxR|}MS_<1<=Xx~x2x1r>`ORh;7N;4%}b!G~&7d~&@2q&vf5V}2PWAa<0>6na? 
z7A3LO_YXKby35dYGPzx`ONhNcwdk=xQrpofQ6AdKoF*&vI4FN-A@j=8PGzqa0NUpK z)6j(NGn7>{+N+>Ts z^F_W$r;sc-l@8d*CgCZ>h3GLU9Rcx~QF<^E_~h;=9FROLv^{&3BRujm|Gm{uML(}7 z?Dz>vN-8F(XE?4XQ=Q6;)bdGJmA&2|ysx2@7CWWJ3m|{axl5+mFq}p{<@Z9NPd%-6 zp8GRLXinL#U!@)MaS<4M#SLo=Rpus@!LI??$QB0^F_71fmsm^C`BkG-6dPJyH^R4U z{p@#!ws^o>W#5SLh8KeVRj={R4HO<=`!Lb%$gS>3Y0t}wLXuCj1xRvWCfIe6b0KV}G4; zZZBdRE6_le2&*!;?hc}FSGp`wkZkFIMuEG@8I1U+zVe4x2y5e;;Xj4+v85z9I!1dMz0@R`pF!`D+W;6hH_Z04_~E`(BOdy!?|Ta$^N zJwqeE`AFiG)o>X1m;K70Pc&%bVlmWH4+1|>)F$|C%?=F%tV-TdH>U-(7#tgR!B=>S zQHOs5VoZrQNSJU+{{#b5ZEeg)7@IDkdy0QXHAk88SR5@-6;HCuJgCf(X~xGG=$7X< zIW}bCj-S*>YD1SJJrA+x!7A9ascLk{UIrfc8jt4-s~8H=K)fwlH%td2wgc0D4kve5 zNkyX~l9c9@lCfSSEZRyr4@g9zx*sW$Vo84^JoeZGuNqM7Ilm?+8y}-6^cheV7LtF< zPLYvK*V56uL@&?#vzf&1>A-g4M2d@ENmsLF52Cl;6s9DOM;0GxW78-t7e;Ls>!^6^ zYK9i}-X*5(D}l~jg5UBM4AjOYsx9fTO&i^=6J&q* zbUPf=fJ=Ds@L2YU>5o9z;`3EXf^qJF?!ri_cxOe3|35EmEGwtv8?dzHhRpivQ$+TN znnq^s$Bw*vE1Pi8G_8U8E^nhK-3jx?QAB+e5YF6zppA0l!Qb(F_-Vk5$)rT!X!eg> z%dpC#o~>zYR_2h9fj5HrX&@bQk*9w!v8l0uC^?qW!uc7}sP_OjyJMW3uZ8?L196N> z)9!@7xzKGi>9a``3kHat$9z^(GEMvV*8+iVRDV@J3iZdK#-C8(yPxZ~;iuQJLcvc< z%4`86_uX51c?7a9Hga!rs#h?a$`qxU0}C^PKz8#B`kAmQ>Qk)BFW*^wDGq<-+$oi= zcE$<2iGp&@rRz71L44?D#QgLcp)_Z6mr{2=SlXI?hx3^4w|Gu+-E}N!BRKk=y?&Fk zyug4N$D6^eXMVqzf#WMuUhtPC{Z+p;_wFseJfLjj=~--=8DI8pvK-`Qg;q=I?Hy2O zp)*iLyUI+DB|02Qr$Sfq?2dnEc&dDb-@jn%B<~bu(f!GM~J@f%f$mg5%p@-@!u0Bmm&_3R#}-!g#fm zN4+!bLms*~$eNo5?SdDc7xHIRhHO}jLqYu;2+PHFb3t^HI$Kguga3aZ5d^KK(H5tW z5tO;n{2SEmADi3kPxcVC#}Y(oWoyf)W{i@iIE}c_=Sv5Pg(;u1FmH={(|Gun-mkA* zLNM>qUh}(Ysbg;7%=osBxf$&AYs+(uw9D%Y-In_^r>)QaFHFA3PL2$`3LVurdIig9 z3{GiTwXov~e4e7qsBnMUI6lpZikR<@aEl_GK0}j4lWF2#mr$C!H%ZYkJh)*%-R3f! 
zc<9nlsutk3I&K2Zv*`K@=3JJU_l@SDtRQw+G`dp>+X(TWCNxoGQ5ca`zQpJ=Be_es zeRtWO0uAa##%d6Nw_|m$S;~xx@*3kcw2MW{qL$zo@n-#PoJ)V83}qC0IH;^h?+^PZ z-CLY2<~(hrH@>~8R_msXp$rn8p_#2i)`5783~=4^G3xK9eB)vm2d!$Akm*Skixf}J z4gHzwST_;!QXzzntwL#Zm?=+KcMwOg%%bf$U))3A$F$MNZ{nI>A~lG&`as3<)m`l6 zrG?oCyPTE{sVaY)HW&DvOhmevMk;%$Lig`MQA6PvKj+1Ln!n3q7^#a7`af@Uu>cv& zQlug;^>Q|wex_1BZnB!5-NByxq==Rryem&e z5cQJId)=BQ57p*(I76+6sw^Iz*O8qMZwtv_T$NM?p1BxO(ohIP@%rVf+oqdc&EZm8hJDxL$)Ee$o7b3 zG-J|~V(@cP3MrNn@lg9QcGVxSTe33C?5WY}p&lpx)wX|3{6x_G!0O zPV)qVO>zg$vq?dM8t0L)%SD=Ug0$l~GejCVc}9On)d=g}OM-)F43tR+w7=J=- z4d?NdL^B+jC0(xdPDzo5@D z>Sy(gD0TTl6tt66LsF2h=&0VoRPleREJ`7D=`Ae{G;2tTY5Gd}80W}dpA^({X*TvqmIuP6oXq-qM}fT$8`u|t zLfweh+W98eHD|nVe){QAhpC;ccvKjp2w#BYe2fz9)TvdW`Cs?>k_WygV87^3;nfYIQe#@UWWtMuI?bE|64@USd0iQ-o>MTh zdoYVyVpWjPGsu(t4#lZ)kqF|`EuK~6fWtVoC$&^H1dc4YcF_Eoo|~QG%{@QWn5Bm4 z))jH$1e16D6EGZ4jK~LB9~Vrhy5(mqQ62gBe+wbs0cR02~YU9m5?uk>$&_vkioz3TRFqraL5-J! z?)SKdpW&_N33sdCJTvuoqsq&pM?r2phCw(H5N>2(V48nI6l0JVi;gmiWN?vRz~h4dFy!!hwQ;RU2&V3R z>EyOcD17LIE0<@aB_KH*pa>uMdov3Eif6K%EB$Gn0SXfig#7Dnvv3zkirMySRgha` z7`#v-)1-iuQO0BT15zTjK^LuoGtK%m-^ew2T}a z*HnL(Dzt6}P)j5zX@4Np)K*A|dwAX?H@w{k`fX(}_XSGgY>SP-w}o$C{J5P|W8-(( zmEf^3l~)v^`=DWd)8buX-r^S?;w8TPK?cRgTseY46?$4+H4fvIIX-wfwNLFhDE!#y zCq>s=xQd!#St%ijP6;)q|Du2JGwCpLD%XEho+K}y?%TFu$Lh}#5oI}#b_DMA=6bKW zz=JCk#A}lMvOtTkgL~fPk4ikscDbHyO}~pxpL+-D0{&nGWb=RA4TAhO_hdx;49OHu zTLjq~dhLIPKma5*p^`F|SKQvi?w z04R_D`RAX1{`b^&{`u#hfByOBpZ}ZG7yt9mKmYvm&p-eC^Upv3{PWL0|EpWqfByOB zpMU=O=bwN6F9D|D{m=iOvd#ZDe{X*(_>cekdsFPRH6sAP(DC2%{+&-lB{FbhN&j(k z64mzX+sQg5ieO=nC5t1ulhjV#P?Xq_Kt0@N-DAO8Z9E=;$%J2#e$hSeV=7dk0m6ri z{r4h){Prx6-0h}#XaEkpWs>##A4{^uNm<-{j7X9huAe;5vKji+`2{-D1R;MSzX_U) z_hIzfC931DWTu2wh+=8&2k;zI+HBY-2dM#qFA8H+h9M zO%R53)lUEUnK6zmG^_PfqO3C3_F;O0XB};HslIyTVTr&Gy(_W}{hVUKKRe#`lJV>9 zTz0M=?zV1Yu3kZF%-Q!Lq>_JU#7C^tCHmQq@2}Sk?$YyE6-Q{HjAu}hU68IVW{=!j ziWFI)%q_K~U2b}uR?rM;^+yWK(V-aUMlzOHBLZAT70}EIJWpc3@tRzO$xo0~p*He{ zjm8LI8@?qb22aH-&FeWa@OTy17W#DIHs@|8L89VjUe1#vQvuqn(b#{*38L%rL7vxA 
z`r<%+h&h{xQ8B}Pjm4Q@fZKgJFMV4q=RzC%*X!^$PT>6+uWX8tooIs6@oc>tGG6~n ze4z>Xm(YuNM&2heeSCSTBbhl!#1*!1pD8f#EWMm!A@FF~m`Pq3V;g;ODm+8dGXQk9 zc$7~yGGx@6ZY}a z(%%x@3H=Hj;y4Z#6}oaIi;%hKNE}hSdStwlc)c#>vJ_%AKpKT~4YI<_8F{(R*THS9 zYA1KV+z&EzaWWJ7FXrhv=Z&;5`K!`fc!kT;(TFQ}n7CJl&pv#(z^skPtfx_)#6PGeb%@=e%2wsA=+h$$ z?VY0VqTjP%uHZG%G`h3y{a1cpsV_4)Fuz2n;nC#EA?Csm`cW-2BPUmcHWok+sZ0v< zp8Y2o#c3Hza=d?-8wTsEjy@RYuaUu$z1{8FN|;D=FdS5A^cGrd!fxkvh{dI6G{PLe zxG%tSXd8&OAS;CAT^F3crPLDEE&B!-u_ZZ0zuNkBu-krm?c{kbJ@Q4c;+q=^I(0u) z1&LWCP$jdtQg}hm(0tWq0GkzVt(|8KEa8928r~6wxchIp5Q2+s#f? z=bk?>TU@lN_QXpee5g?FnJrxSNZOCkas0Ppo89S5p9QP_KgA`GOppwq7y6+a!M>3W z&U25gb$zJyyO$TL+)Zi<=hUB4&TNoIr5j^VzF(V(G&Az*BDr7rY+d zW?1ol1u-qCzpcl^K#UjynfypoMx@E=FCEAZ9-EK0Ga5b}RD@wWkr}`sScoJBAiyg~ z=A>F9I~^&;eh72?xy<0tf5+d3i1VeYBS`5$2AUvHfVYI`#o9Q3ki-QRHK=?vu(k$c zvJ22?8Amer&b9q^%6N`6w=#wsbNpMqoHKuxBoLJVCm)=&lFkk!`$cZ^KP;}Cx) zsG|_hBybf}02q`R*w+>erGiDewNFI~{@I4fuD*zuo%)fsMEZ{?&yNa;ht$pwZ;s9{ zo%k8LeDi5qz5pH%ohl6FQnfm!nTlY2sNx_fVMvHIkUox*;5xV#?w-V;xRVu-iuid% zV|iNXHD@9OQQNUdr%O9>RZvYcQZaw+BCXORX9-lOs!UlcZVBb-)djyH z*b(7TVM`9z_m&Og> zwI*^G5kbBSqTHsqcHGX_d?o%a`)w&_V*80WWA{AALDbAJW`u|~k_1_np#+)}iNa9~ z5k~}U|Fm8+c=i~=oo4v>k*Hw?6zqX$8Fn>%+&S>uP*Aw%absfrW8Q!9%8(eG35+Lb z9~yvD)3osoJRp!sG}Cj(=nOA2$#HhjsO034!0fF^=$dse$KIJ z(Ut3z_;yXdyMYm%l^tG$um}(^k#pvi!LuPT5%dBB)PrpU>G}`0=_?==vuE-oPv@VM zl-zB@tAzbksfeF=vwDAS%NIHm!Rg+|&sPwuA=u2xrCy+(QN_M|W&DEax>ll(SkipMSjHv;351P!mDk}tv` zTri$-3bscFkNtm9VJimq)$YVzk|*A7=Pqo;w0vw zeINhQe2soqVRkNby6`4g1h)hHe)xDf;^xir=A+j7LOk#+r?u@Y1AHXS(EmJX0grvi zp+V%!fSiAc2)JsLT8WnZN~?L~-&k)_$F&Z>1A~JUVj7=COtwg@IFC*t4b5$-aE&3{ zua;?5edXAKZ`ysv6+%Fksbdcmx$Kf5IGu1QLNf@hP>LuKNLl+`E2kb(6N4=5(-BVH zr?1J2AGyS-N&GDu3)nP*8l4sNg^(B4srE6vclUpvs!aa7+7KhTBj32yl}Wfv&}}Y* zr4ZsmIl?QzaBMDr$OXv%Eh+Qt9)52LrP+fN2}k-cy!%;VwNJ-ljQQC# zI$eLHavK)-i==*Lovb&(qq)u#do5+O{Y;UxC2BYARJ$rwA_ZSu#2zMQW3}50s&+f5IC=Ff*W3~5< zTh`7>mO6-Z2BIBN2*zCWwg3R!1*%ssR>tUqk0g5+Qls8y_?rsLmu`y!uSHH~2)^zn zOdlV8SW|Cz7R3LiaF8f*C_w21^DZE&fyxNJ3RXo8PMW$@tN(w4 
z#z=4~-~~Q22LadeD1hs{L8E&Lx@I!KaHCT3|TR_PsLKdMv#a( ze5LYwsp_jqe+TQb8TyePwV!OpuTXy&P?2P`oPFrYD;7i@!hH|7JaA)VNQeZ25Sp^U z1o8_&r8aawID8GUO|X^vBG-q-WIfZKH9BL7m+^a$`rRi?D)K4Dk$;a1>9$Wn?k$Qu zV{S<+_Hi(ANOeDe%?wckzkra1O2*W6z~sIaY;VLMR<_91)=*xs7IRw)e&&DcPTbe2 zuYc>i%<~3GfheQ}X$^*(F91vetY61@&;%2a1Ujo1%zQnkdSu5CY;Wj4hj$sNzs%*8 z-ULBG#lY)v=bHW5)9mk~P){)|QAyRuSq^_k)Vze#n_;^%$-S2&4nbiHnh(;o08+DVBowwlg$az1 z1#Jb->e9+w@nO=+N(deYf7y=AWtEmuqSTpXV&Kv>^Tn*c#eN0)CQtnF zSe8s+@i^X(lSFz9{e*}Z41oVd1-Lt9;^BdBg4H6i`_aqK+Vo(Z3HmGy%AGQ(j;12& zb=wvPYn7R8lE{X^_P!)%GGQ=$YW4PgUjB@vP3(ydUl#W#mLY$t3DNI-S!AXGz-p7D z16&YS=@5mX4h2?ls8C3JABL??{sbxkKUMmbtm+Od#G3l_VtP`N($mtU|sO5>@ zW#>QdGRA)lvb?f2K$jp~0)gY&v1O3(o1sE|VLHu~p2!izzZpygpl%<<`Q;|RC%)NF z#`K+)5!Jz*OB{ddDM@J(eya(w$awL((dgG1%de5Y5*Mb~nD4WxcB}su-evl!Pnrvf z8$=ZOhWG>?_sq!rF4B%n0-g*-KEn_7juHwAj?PIML=i!-&k%lZ-&OB>YcBJAums~* z1Tq;^%jh!rpi$OZTU~BSj!?`d6USio%Ea!VWH#M$sRe)gqd++Ytr3j!9g_j4gU&)S z0V+^%K!8fXSz-eQNt+7%-Bi$cfeZe5FBon5>@9WjR3&r5RE=OseC20Z>%%8F)+#9+ zl)y-<=AJIyz+8Jw{k!xQx+xp`X_0z@vjzwZ#t-L^vn~6Z`1ZYV=3x}0U{03VkX5M6 zF2d}268Qq{{#x-?Xd#H?Aja9FPjuXPc)Ett^3i{aFm!QEkw=D1s+KaeMpRD|9kRi5 zuA7^yy-80SwtFRqiT4>^`EICqBxfFrkJQA zP~CrGn5je{DD+VQ#Kjce6PJi%y(0EO6_y%rC`ZNf zUj{oxtuM_hD4*kD9CpWo_1UHeKZIzT1}6wR(#8ZmWfZScIE3P;++8T2V9_Bk95Oed zCb^;@0^zHWJ4kKdb_Z-sf@`I#K9+6-1Dk&#+TH|{gA@VKQHYuS-W6;WxGbzgSTX2x z0u4zyI&z-?$s;hdXQ58)rG`$Kbs;6b#?gcEa*jH~nefnz%$2S$LeD#zRi@C0^^<7b zYR-(sZ67UV?!79F&EL11WL~P#a_>!sU&i>+SN$6(4^F5C(^7X>N*PYS@+opbD-wSq zpPlzM4&?c}rPN>i$iNV|OV|Cz!)5s)n82hH^N}uS<8>fHcq+_FF+c&(iZKs1y$2L2*ZQDu5wr$&XGX0zR zzsx*MJ=Zz4&-vNJMc)FS)?LSdazxwnC8)a_hV>y>?-t6Gpqql!ZD3qAB zI7-x3pBE@VK3Y=(ykjo|k`y*6CXa&~CTAWce}A+6+Iv&Z8$eIAQLreZs0=OiQ~Ac> zPc_?ZAcL>b@9&TU9p`R9Tu1Et*z;SE8?Y9vg*Z#`15>G<8Z#%*67~R~!$Ctp3@!!^ zW%?biuFx|}(ednONDh_y7!`ly?K>xc7~J*&2=`1+14S8V2TA3mTnMKArNiHnqht%1 zIVL0=g$wKM--gQRl}1wZi)UD|jikDqD{-;lSSoFP?T>FAbl_*m)Jh%`chIVg8_~*D zR)$F3=qO;&%G=Z`>X@F|pY%@Pzc*_w(tJY>xBJC$=px+&so;A?!!dui2+_LfDn8KL 
zntNCdaZ!y#ouuycA|<6a?|7B@+xQ|Q<)LY{2fZ(m+s?ar*%e7AOdV3llD&p_x92GR zp!9V+*nlO3MgpOL(5Sw3Om+i`RVcx?La-516FZzl0?M@p7Zw2P=Wk6XW6YeJUY?HiLW(Qa&At&+#`U&;*{8)}ljQy; z$76PRD|x4;9A0dd|EF{Tg;vP#bz#E3m4OsPLnty>j6e%sLJ30K?+gYCn>emqymCW} zX=AmB-c&^eu<;iHZg>HF)Lc}siEx5Sy}YD*K|mctw5fB6F30K?*S681Od-PA`f6HA ztSysNZiWRrQB4QZl2a9bRcrp*6dR_4NsUXHQ9+YAF8h%cMoxLF!(1(k```#E<0*K{ zekP08egrA|@-^8@8u|KKo2;++3wcTUp3}#wcVg4)JFPq)mEF6&@o2IIe@CC342APg ztfmbyG-PYU1=^kkUAg`MW?u+IQ2-QJgdi~~mG+cMhZYSwdK1TgMf0UQTm&3c2$eP` zui>u|aYr`N5MOtpAAtM2wjA*?IGw1!;2H3O;zLa2`SxkVtAQZt;3*o<>h1%7qE$pV zugR%(Ptm)b_@73HC6}F7E!;O%V?N7>pub-?kvi42Z`K{K*ONKP57c3W}R0l(ov z^QC`xQk)H%ssb1@N{IWlA3RmJ$2>O`9~V@kAcBT1<_v5l`UH(mb+;3X&QE*&Va3El zO1lNhWv8>^i_ww2ZfN}+iKDG{d{^qjCUtogyktG{jsZr0_KL2J1`6w_r}M$fR1vLs za}z(F8u)^5MR1Ek>eGOWoP6Z2?G1`|8~(87ORX}#E*ZGtyDf$s?P$9ecVt0SNGM1) z6cil=zgS{8x>LwH8mb<6(r+bx!ZUhu3nzO#hUaaaONYe^E6Ahw1YH)E%Guy*ZhoX^ z0uTJ{2}eVJf8m2wUP#@;^)2#x@PxnpZm>mRt;gGiHYa8kB9MWeg0TT zujU8s%?4h_O#|nfBbalm9agG6#*2#qF4PnOAjL59G$4`crl3Mq>8|k=Qc&^B>w{}% zhILmlysmn{Svby|&G3qzU&{u6)d?70jeeIS`r}4_ElFQlYxnwl^Py9Bq?0QvJtAKu zn7Q7ABn>JPat>C83GxJ-fGy$}@Q%^|OEb?4dktq5RrYBKhJpTXAH6T+v4ow50O0(6 zA;AX|Lcpy2MX4nscyer+=%z1G+__h$N;p_XY4x-)^3~oZ@h${rQFU( ze5E()RF7>lKeG0a*v9NCcY|4C z_*X%qM6h9$TtP1|hJZRzaUvfP{xUeeFV-K#|0oC$=1b)mt|5l;pkg2v>hQG(6_ z(_y934z}>>Tc^G2`DRf$@nM@$AOJ;Ihe&$%^@RA|bE|ZIKcQUZbJ8|37)#W^+6Xa! 
zLhoX9gsHFwStMv_0M7)W@{hg!NXl@`kyQUAK|RA|Ky;a6AvAUF<&Ktk`uZCQl2*_& zH{o8mGnWYF{%i3AFmP5i*Yjl%MSVfevhKR85%|^DjnkyKKN|gRr#DK(@Zf0Mb;a;S zf0O*_>ON+ehj$t12+jsJhakfQz{!b!1A~}GSVy5}wZ%Bae-?E54)(sd!KeU*8eJ=7 z(mhCw>gw0-Z~dq0H(@h~%7B|Kh{NVH8*ShViEET2qT+oy-HqDkxu9~RR$t(;*sJzk zeeNYRb-&3Y<`F^4KLwWEDTeeomj&==+rbt`3%#Vgif{l7E~f+m*PTO#_}4-hWw$sy|gkId+_z9H3(cHzNrdoK5j++STp%+XJ$VI zk&^0`BTPPue$jbIRoIDi=y(iOfBZV~1=lEjV5P)|;W`8&)f^!3&X z3A4r&m$|;=`Tn@}qiomhRUsJTpTUl=^OOCmW^b>7+@U&oA8=iMWY}ka;IWv2KYECu z3~{;`DkNlr;IpUbSPnQ;MsP*oB|xpMMWNf%OM;te$nkMO|0piti{GG_X{VQKyVQk~ z>us6V@#Oo2FP3y3XNJV(=dO*jjAxPOZvC_-7(yP-Gg28iZoqOkz<-niolVfd5e#1# ziY5ex2DuN7N5r!^2LXzI&fzYjQr6R}?s54I!Wr#5Lj=U{X1u@lkBPG>B?#F89dQXjLFovm_ zpyndh#XbF&gvZq$7;hwr&Y+Of6c*?8O~CnIGoI}MwO^m1L)`Cw13rx{E&J~$AX}&s z0C}G8paEToPy`qmu|F&<0Nhn{tX$kiQAjN}Agqk{2hsFGfc}e@_3n;5^tmX74mQk; zZ4JB==iB?NRkyRj6W=Y_`1;e_2#Teu+c zU<@$RrJdrd*JAH~J=QCPxHk4bx;s)Emo!kFm&~Sob!t`uqG~f+owBp;NwDB%pJMPtjxrNgSVlp^O zo(T|w04O4V=YxkSQ(WDPPLxAZGQjPOfEWq!{R?< zPnK!ISH0~fePZ3t!p4temTQ#9_qm*bqXEeCezoa;KZFAn!N8EYrln?jcUYk2o)eFE zVg7>cbw~7()2{u7en_M3K%XWO-s$EJL~9o!7n4Px zdWL)OU6M`AOYLhj5#+nM^tCe_G(>Ic0y4&4#hpN}q3ZU-KqtjcL$v+=9$ZX&9xC*Y6N8V@zsOi!PGD70 ztDFz4CRn2X6D*jb0RSv$`}D|L`Wh4@e*~-oo#4P71i8~|||9uwP5MsX+$XN7tXe&45mfi)d zEM>f3|7X>a(bzt;Hr*eXq{N{;NNXm4#n%5iG=4ytgc9|uR6(r-Mu7>588rk3>ODkx z{u}U20E@|bTu$GY;t?~{t1Q-US(umwz3#lq_hPUK8Dlj~AQ!~VgM7>rywfzL_Wq-BS&ixJJAAs%>vN%d*`wrtIo#ZQqKJS$YV4OQcR3kIpu z10xrk*Z=Zt-+7f-{9oJ%ayHi*llLA8E^5E_r+y=w-|oA}^Ult{H4!GWRU`>O-{io+ zD#1D2K$*hZ1m-95a~(D4|Kap@{XM}?<XXWhbdY+YCQv#trV% zFl`B+0RY*u2Zqo#y#btdBvtEwNCWdQ!9B>zvGddX)>*DRTiPT@F%( zAg)k;6p~!PeXN6cm$q7eR!69a{tdDpat(ozc6b5?7Zkfh&MF{OkvRm*mQa5LV{6ml z53}t4smJo9#nuy&^wX_wo%=3rf2B7+%vD$BA)xCKyPtgi%$$yxuqObOtx19&8Vp(* z#m(eoX={8mzS`g_-CM30`3oE9fpC0F3QCe=utX2I%LywJqUuh6AqLCN$1p#^a(a1Y zF;1hRi@Jy@t$z3DZIz$r^2sgY?65=5%;&7Ldm&`-epQutkXmuEjatE>lFR!=6wu@dQfEoh z2}e~DSLX`(A|l;?ci>_!^cnGy@i-vOK6qiIztE(9P{>*;603R*G0a7wRvXtra(B^R 
z^2TvLA&%03Q>VKyh&Nai*gBRcqX!&K?DQk&-nZw(DHN)NH|Imj(8=1zwqgukCRsln z(q4j6MsUm*gcx&wU5NyT$55mngbR{|uYO~ao$oiGHxK)N@r#OI6hJx;hlXR$g`Fk6 zqTxfnGNYd6h*YaA66YLErJEOr@t$aERZgau5xc6oJ_;>^6#7Jm%E=HjpHw32WPUZq zZf$a^i>&YoxC!2JRRelhN77{v(Z&Q|`XNJR2%lURIb|@*p<@oiqd?hrLx|*r$2OKF z7kjsIQV*nmYAPcrl(MszJ>pzlu`0I9TQZL~Ce0**h8c3ia#sY6;Zae< z8kG&S{XjC1gZ};ICNaZ#P0<=KY6@f_P)5@g@O@6pTwtmlQP~y7YYq|w8GtY>_(?{k za)E=3`}&^;!hfiZGdJ|pJUa8pl!K+ZLu*Hgm=526XK1zl-u1MrAgM+JT!W^%V60Tz z#`bi(YXm!|Pu`C?sT88#vm~)DSLi4COIqFXBg>o9-1ofWOAL~3OcbPJ9)*59mben5 zfMgMVqFEsf`bUb9qsQQiQum*kSBlag2Z>7~2MI&9%)ozHVxc2?GMM|=jvst7+kJ@Z zu7A>h0fB_|wSL-V?Yb~6NUQt0pzW8R2qcDZ4l18;ir!T+GQ)d(XJv-k?Quc=^q2U%oOW?o;Rf;1JqoR(OS(=EVD#<1sXw6aZ>Y zs6T&IY(4E(6v%lp(GE{n(N5SJbh2yikp-b>nIZx16 zy@W4Ky~TAG+nDe>=7=21K;%H)n9C#Z?p}ZT8D?1kX#(%vIyl;c~CU z5-aA@vspvb?yJ%ReGCXerSFlhX}wdO0*ct(uW!{~H`D7jNypt0={=E#?|;gv2tl_0 z`v)!CSiH<*nWMjjn-q@}6TWDFuiO;6oF0U0`$RzHxiLMzugzmU55%KWR=-ou?%Tjv zQG&ii4~mf(PHkaT5jF)2%@ucP9GJ8D961a!QZt{YYlmw_wSDUOoDDPeVCJEK)3SPs z_;mV3IyxS+Q+&705KUx>BgSdT3Pm~4eCCBA)tK?&**fspW=4Y z@H6sEhJPdhR^n3A?yo%}AKTzVZP3ONtlwFpx^8;h&6x{6Aw6pH*y?L%=6u_yzWUmt zO`EmQh{OeB3E{^wq^mW5xYCL|(`HLzuoi|jX>l_3iyNRG%m;1b*>3l$(%r(!`?7QP z`JP4_FMn)_6$ZzYBj^)S|5ephg) z!*BP$J2vpGJ_r52;KixAV0M#v6S0LWBu(Va%=mZ~`wx#7nCkDHK*pAFCJ5DeZZ>7A zQ4sg%&Vuj7n>VK=`dC< z?o92J=Xr=_Xt!a1G-*H_TYjd0d1w0j72x?RTOrs(lpRxc@^yzGoWUcJGL{Uz{rLLc zhSL)r!oik;>@)(?QkBt>7NE62CDDmvL;uVdr2S3@!A-y%0?V|yL6LJdd<(-r&E9~W zrG!P&36%(vcs=~{h6&*ucxZB9X3dx1!H{Lr*uL|C_1_VHI6h{N!3Dl*f(Tw2y8fu? 
z7LP53K`}}yFxM?~YBW4UpcCyMKI7V$!B2;@fwNt|-aO6567j7r+!|$xr)3;P^4W8# z-!BdUzGr?o!60+?_I*~LsL>wc;1P&$7Uo!(bM*7Bi*`(YosN#)DhS}VyTLy?o?yG9l%JFoW+e(6Mr0(|4Zvx z`Q|p1_P-en@H2+a5fzY$x%i!C%m$Qpc8m^{p~`m7-t^nP!(`2LOaty`-5 zp*Lbo*vg7=R-0Ge;nlz6<&)2#n0e(T9=PJbz3px|_>)9(GygcVyPz1tnN)v_QS>D~ z)AuY0Cpie6v#XbP&09%rD)ZZNsN+}p)X4eo4&O5nIzxoC<4r5sIk2S{Nw)4e6r-v> z<$1h+7zc|*hrh_NU*~;hc6~g5 zz*o~+6m&6C{<1KeRCIJiq`2a;j`y&yOi?G*dpnEPQHtY_Ga?vpIt@!#z5bIeDjVsY zoHK9#ME{wy>ApNa99j1xBv)DfO`-GZ*~HK~>TsMbsfepP-4K?8W!#`~G&MY2R)EVE z+BUy1!5Id6Wmi3*Ts_H#+k4i}#j!bml2ROru)ysAqlK<7)L?QaV;9n-aX_kV!YT>r zu)G}kye~2+X@GeWvy0EwP8SctMjHngCzYE|a?zSZQKp+oP?#7!XqygYHsOUXf9+zU z6`2!L3sW0^4*`MgQ}U0v4SRFDzYPZ%3YbV(FMu6_*fn~eyByZfR!gh875MIdvhJP3 z@2 z?Ya?pT9wnTz1F1imheLt>qo}z`o-zyKLlQDr|c>80|^sjj?wTQqyhs+{5r5_?H7 z*0#b@+5;N5%Vt5#m|a#8LG;^C4U3j_1Ug@bXSyLKgnx|QBbV6pG#?zCA1}}E#H9Wa zV~7kH;(M?_ffW^@n4Z#2lxkvYF+wqWnLXvas&0!|SnuF~?_qU+6Gil4kfT-luBMD{ zk!F|&dcy#q_EW*2kxjAKf`j{`dJ00`(veHEiZ$B^^kuwJks|jCf{vTixCKgchX~{~ zw7MrIZPm{8=pqNj-)v<=7mET^$l`;qr_K)Ah{cJz!=vyI9MD~42{Sq2&Apm?pbcv0ur3uc6xrmj1J zvwZL!(|ZY|xEcH_AZY+|2!*KQbW}GoHukL|&9LyKAK?~%|68s^PmS9xqM90X>ySUj z(Ot~bHxoA`!MxGjM2g_2EI}vRn8zJS=gFz&U-9Z+gR{4{CPY>AGpmNuLrHPgM_`JT z%(HTHkv<34UI+>pDA%wZVg+J|^((pvA1i3K~t%1CZ{4ej(BTLEF z4cCVC>woWmx9Nz3z7JiP{mV6ob$GcI`MpjJeta555F;Ap_Tdrt75zdDK$OE)66E{D zt+mL1RSV2q!I>Nbzsd4435Q=!RJUK6nZYum0-cLk2JC{D9$;5)SnObe|TTn9j{K?4IHd#2TbAx#r&PkRuR2^j59is*3YIVg5^X+nssq9w5g!K$ioJiJ(fa^jjAk zIHXd4+u*w4PUoU!47lMT>?ek}9X`AAJ+A^YYl{pd3)qB#hJv+yi#DjQ;*fyZOxu z-QRIw400udsAI5Oyq=;}I-1-bSGgi)Tkex3 z)t>R`+814)@tN#SE@ekA1aWK~4-%JuQ{BPLtkRGo0Xr%u8pKNEXe7h{6B6PwSHar4 zq?pnM#SY7blb4WBE6Eo{-3Re*grSm6$DuYp%cl>*``YrnKt9=T*hs?xLa=}{YV`(9 z`K(7+;83zi@S_DiQh5u?j{8nX^4P;I)$9=rJc(?ui5o93@{gF7B|I|0Q*=iEBuNV2S_bL~atdr{QiZf}sdzUqW~Od`QI0CCK;pA0 zZUC!jL3hSa`i1PqK!;-0$%&Y3@WC&|%)2O;jMpBYx5KfcD1qzl70(S{&1{rW4m^ZA zZs6HoICXQ1uSn92mS)k?nNVDgVqn>>PGA^u*}z^&^!I7KQH!T`EZ!?8^(2OLlc%7g zn=!iLMq|o7FA=qJ)jT?XGRsD6%VwOZJNUu4V=~oyi0&7B@|Wu;pAp&IT7i#Dm?f>s 
zE^9XRD@F^7R9H+n29J|Iwfud51{lV+3TM!7-xUJ` zFbIFxpMqJH&0B1Rb5>~KLRoW{OPdi?k_t}4gIh4Pf1740?KQH#2X|XMwQ{yy+EHR+ z^4@ztI-J5V9>P>&R_T#hBcMe`Fg5(*TpM(W;SyiJh@oSw?U=K;PB=DKr;E4*?`Ks-64)LM6NzoEBJ&|Ub>sQj35bQwXj@b+{XIYb}QwlqF# zZ|pW9p(tZJnupWj{o5{r0z+qoN7wC&&P}dTX{X)hgIE23{Wf_XIyh|pyQ)sA^8q`E zUf6K8XAscrx(!j1?S)$}j+xqZE$pms&#dnFw_zR?grp2ZYF-Ez$@6ewjBv3&qOg@9 znWdbWZRTl8&Tr~^oVam#JLebGeulSoG^p^2<j$ z+9H9krqv#rD9W7j+7#T+dy{H>T`uHXw`o#ZtJSHTgLbDKQ>YZG{8L#e8Bu}57!|53 zS`Y?@f^Zx0Lo%gxpIHB^f9N`|-!6i7oI-Rr#4-PWVt3q@gvtBe`*HmaSg&B6_O zx*c0~SvbI#R6azdu>&7)s*1*BhlSQClw$cOqEo{poBJM9Es!9;f)>Ze_x2NsJ`gw? zaOJ<2-ZnbdJWPLEyfjojJ#$f%#6i%2`w|4zFo-Z!u&LV7sWsMtjB7?5|7?Q|9qvF@ zh>kgbGnj^*DrV22P-QnU?Ncc#;_`Nj9rNJFL@BdFnDnEn zGZY3>MJbH&E$u@@WN!E;w;JKzii%nq`BdUxk^+Q>ASK_C;; zH4$`uip1|dHd3867LAb!0SajlqfOdHiAfJdt&nSul_`l!)*9&s#H4uid@zoE$K8s5 zC+6V8uRoT<^HV{^)xebEYOti1SjX)k#?6hh2g2C%6xUu)mDaOy%Q1R}%IR=2;X;3Q z=n#3XQVG8K*eSVuJO6SOxZNKlQTY!^<}s$+n#B+ngYSvSHyxcXA*6Ax8#zd$j|gEg zKbQkV`VcNwL(`W*W1w+4`FMu$dd4Y#Y;%rQ+d|1|z2e0o9Y}DcCVdqJEDYK_iB|TX zd3c!=C&fE0%7ih+0!htY7i;S%JXcd+25S(aTdue7yT-{L{B&W*iBAMo@}03<QY3DewQ93C$#2oOzpwk^dd-AiE{@!7jmMpmnFYv>FF~6RVbm?sv`*z} z4ei2OAjD)i0%UiA!W`_;BGW&5Dp5ft-x02XG0uONwcK^>xVxUmTSKG_JF!tv^MDJ( zishUo$VhP!5i#T#t8SD3yrgD-wqkaDv65la%Kq(w+KpSBA6SE$NFKH#7_}lu6pTZr zga(^94^P`jtMb<1eQF<+{f@t+J~Nly@-@D&7>rEEY15o$3L3lkmO^z?6vzzVNSA@F zGn4@*PDuEM(>}!W+p{=a4Q+SkcmG>-ev=E)!*}@t7GP8a0?7i+WUJACY`vaT=U*xJ z2a;8FeURBH?8`o?dp)a0d8A8K%4(ivWX?fC2EWLfdoiM1dYjD0?m8OwKsWcXZ%710S zk?x3vWw(K3Y&R}y?7M){V&#^q#$=fYi5g^ypU{67PfnU7$VwOJ*z0rO4T*!-upk|k#Hz4=@+*|eUn#K#>WRP2?cA4CR`IK3Z1*YjcKo1mKa!` z;jyH0{RM}Aztc4%$>=@uS)%n3U%38tDO241t7)w;GLG`wBIOSZf92f|%55<*lG5#T z+Q7tvJW~tw{4$U`Wxi^JlT$j5CTE|q2}#V+nMg1m(ZROIrtDK*xP=uSwN@Qe^i^RX zUp^Xm-z2kM^<2q9DI!uGT12T@da4|rB8gHkkGNZZv^qHMkVK)Dr!E=@?>C|%73GQj z9?^IDj+0?9-IOCb_$ipIA3is$&9=G?*I`)JUtUi7To&Ft924Wlv1BD^0QxnXR4a{@ zF$4|@R%F3cmNJcEWh-igjn~EVm+&rww|t@?B(@e36Y&d}(U~KMsoCbPzMW{#lo$C- zX&&}}AS5KTh0_$FZrFnXp+)$O=T9n`E`g~p*k?>}i~CY}f&{8pcmmv)v%bp7 
zs#ZNIbqpfP=UG>^cVXHbsRlEmnP=yIFbgSQeF()jC2S$)1QQQL2Vp=R-*lEmoaXTo zH*jj$otg(YIlt63cyQ%<_G+tGTGKIO@U~5V+%`B^I`FO1GBJqFj6_<~y!ji+wu&ST z21<`ViJZ_V2mFAgmrf&`U4-bbgdXE2W{d!gB39IbXnBw0cRt>`^TKWc@ZmlEcbck7 z2Dh2|d`fGgul>yAB*OMb225Q}eFlFk4h_wQgZ2~H%^k}2YGpso9S_r9QFSCK6GsN5g@EgGZQwkz!i45=eYG5zETBQpT z_0;dmYc2eWwd*;$ypp3xYBQLfN_*iDqsBh$K_gZ0z$C+e%mf}j zLf{=W<2nyZiQATxGw8g8Uj&S+Y%*OIsq4v0=Vf(iw)2^IRtY;Z;i z3U$ts**p;2LlQq_@nO#_1AumaCfoyiix}}BEb1?PqRtJmSp@Ag@#G})eeQhN{Id0r zjQU;|hL-K{t=895J6W1H87Hm~6)MV4AYNZksCAZOZiadI;@SG^QltT7zlTIU(4xo< znCxuVbHV^D7f-x z>P`~f=AA{CS(PU) z6qU^m0BO^3r1tH=Hz8PmFw4~SD8%_UnRFx_>+K>?RtSNRahmf^@n@h4Bm?%kV))*<84uHcEbl9;`Cp>TAFiS% z#;wO3hyWGx(|`np!`vCT;5SAhci@ zced-Zbk`}xB71;{p0Ee$6CBXre>V^Rlal(thf@4BebPwjJ8}$(IB-p}Fp{tsG$+(u z9U>@5P7O-js$ETgb+cNL)zjT?u3g^M$q(+s)5I)%!mCzB7IK|+qN~}Ll^bGzH@>zu zH3di9bY4EyAG{D*`72UU1L&FR>DLT@^4@R4o28~Q%Y={oSsX+EdDy?w4OGSt_)$-< z$>+awav5EwcaU|;<8;`MXS(HIxPSWI)^(YBdBXiVRaxeL@%5w^_edIPJy_Eiv52OI zh{&(VO`vtE`qZ9UT{))T?9y!2@*J z0%M@iucUN;U2h`tlig~I1PB?g_wVa39$izjmmBjG>T%C=i4U6{H<`F$0-jJVzgK0{ zaLYP@*PHT7r}p%s(!V}C#oJXjMJOey3M*t0MuqahM`ve&wi$UeB~oNhyHIVqIygo6 zN@Xu@!>nZ|dkJq(v2^rGjBtM{Fs?XfEN?P~1Rk${%XSv`eB0cxHD0UqU)<#SKhOEU zp39R>U%ua3NA)UE6WkUFky}E1yTP3OMFpjqJgPP6HRbWCJe+*BHFb{2S?9Qhl;nJw z!|#-G`dmTBKd53}Xp-6&n^S9_F^~H5x!vJ}_l)ij!{3lYi*(pr5?2)-BMb|Va@?KGsskFBt%|sahFVV?@4YwdXyn&`q zg5DIQPZZs;ziU;J)!DEya(QTIbh_CA^Q`xOm?W<2c3J=DPxYGf`Ll1rn@~uAg?xLR z%U@NFef>xuxv`PTxUg5&uNxv9pOf$xJ&VNAwS6;_iDB)+tpxnJk3I3DkNnm7BDtez{TR8Y{6aA#kr!V*+5O5%xe;2za~ zFTb2Sb`$NHBw=7meJE8QT83^OusYC5w0s0e(Mvd|^-L=)d+z+=S}cgOQINw=Oj#nx zHTh|R6YC(2>7_l8W2PpFFPvCQjpAicHzEpFf~%zF-}xpgq{lS={3Z{MqSa;kI%kg; zfO;oDCR2X8;2rKVt8RVv-_HvTW#q4a-Ei})q@qe*t&?PEc!Y`4aH?QZOj?+R7p18q zQ?`1|+%o*Y)1UI`{()z^pZyu+<>Q1H?y^oTjqyQ`mhI1i>F602S(S)9pbfjLIvQ8E zHgF+MKF;EPGEm{(1E1n~=}=OwAp`{pr?vku2w2{J!J{#K)aG&sN{`Gct@k#_IlHy>74y8x zyLEG?&XLwtQgt~l21d|J{b6hoymv|rLn2@Fr<7Jr z{E?qu@uELfyxNv+BR9e1u@l5bE>E+`*#GpyfauXo zOm#|j-?Q4UqK+RcsD<4!@&RNSqby3nKQ*Vp|1=u?kmZ_-y<*1FzaFviyucH9?1u1& 
z_^)0p8=rj<@7k8fF`o`e%4BOLsnsj2%qj_g1<2mg&Dpd3txF&*ylAT3Id-|v=TYi& zEz!;!bGqsJTYXo5_a$dn>gqL-( zv~*riJx?g@G(Kz~M39n@9C@M7L#$%c=bS(s0+M}i64v_1o zRGGV?@5gLVd3-Cn3}?uVfvkbUZHxUYX3C0w_r+5mc#E*+L?zwO#_;ng43=0ZpZB+= zn;dN=GZR;Th*||+_O?Z|Tfg^)u50JqCBe3Tp>^q73|({L!{8lsyorZ?7HhjvVSk!R zfgVZV4}7Cfv8+-W_}Lj|wa?|~+mB_x=8nD~&Z^ivvkfxLz_}9Csr=pVjCF`iVn|e` z%>k!Z#Wo9;xMzzJM*8gO*ziSjMSPxtlk*87%DNKbw{?=`SX!!*UwUBo38*t!*P0D~wX1t`8Dohprj{H&Dcg3hh-+*4?k9RcsUwls z>BLHDDP1*r@2B1o_Ij)VE3H8cC8KhdtI8#6;Adr>)UWZUNq_WNRh{)cgw`U{BZ^8> zA#Aw6JV9ETA;AGAym@o-rrI<0m449e zmzZ~>z=edwF-1JKuZ4sU66&BcHKU#Tu<&24l+=(x*H4(r^x+aeY znSIk$wes#e_stWDmND>@B=$ zrfU@DB0F|#?bb6zUh%`*cC3VC+yul4jJ2#C2+En%t1ZwgLw7VZrY5cUz$cx>&V6zvThiAay+4fH!)3Hm z8pov!1{=Gmoi)Z}nLUbe^GmTR7^>BV3grDFaxJY}TNt~4H}6^1w86-UPC>pFno2I_ z788PbIKMeGxp_v7t6DNQ>i^)$M#KZG^a^`X^NC=4f#u-O} z!J@`8RB*)&1{Dzf(o9earZ^(%dLyB$dEe2;z3;Y6Ec&$&z$+pfseP45H{|h2Cf|m| zvAxfYcMF|=TTx}Z04fUX+U=SUG^0;x{=JwfpFobU%PZRf_0IiaC+bWxWfhWvx$=A*p#^M$nz?#34Vn|jV-HRPFo z{ZO_WsVkZ)Ups%*0-Y{P+~n z?*&bNOc!!Tdrt|y!Z;)$0pPIzlzQyU=?Ci=1*3I=nbznFBwPl?Hh6xinFGeVxuJ z#JevV^8{6qAm}YC!7$f!{U@iiglXZk9~4G^pN=7Fk{T+0{4Y+M(uZ}me_X%uo&3USC!KACiC+uCkjbw;aL@yRHFv9v+ss8GWTGW6B@mRAe003O@pW7du5qei(V7lroC$74`a}w%?J*z8l@{^V>aI zPi1RVNHpY#|W#8@~CEL5KJqd8rgj{IbN-k=M!5@6a(YBCIt zGn%a1g+wyS(x9o617+g~cJ-r|t}2sb`adEspB!BN7iE=zCSMvgFmg%}Sz%{?AiF;% zpTlehCLn5@EJ_=_?ujYm{3Bg&@XSi5tYQF3K(@bN1^b{i2*4i*TB=R>qG_dxbQM%s z6kn}HDWtIe62h@r_Ke-V!fA{MItuU8Sj~#5V<8-~+0;Mb?yEIn*hpUyf zI?{W-uBgb0-hS@KNO_KrHGO9H<5`z&Xa%g0j5UMHN{Op%^W4G9>N+Q-nm^&cFxV3n zjN})ncOb&_86<1{Z=ssXj+=7-s3v7|KXv}9erZ>nJ}2NEKjNBQfZ^bU^#HTke?my( z*rB5wcVE_gN1 z*X;pAIY#MJrJ7*oPikfhU+Z-wk=|&RX_*Ns;X`hbDpQcgsiLFUUdAL|PQqq2uly3r zs5XMYt&Y;HemZVlt!;Yn+O%{vqsUN1hdXP%X~uP{;yyE2vXbS20tnvH7cD?_JOMm1Wl9PIlpwjL{1h^ zFQC;xg_FYwJbPy>e1Wzw7A3HW&durns6K{rvi<(vCA-qT{M?nbK6~>V1fA}GGTChX zZrk|1GQY3wfA^wDW$d+wWw)Qs@Mbi*`0L8m?e;RijyP-S5#!d0JLC ztexqr*XpF{Y0C+6ywCp;NjFGT)I_c>P)I{yZoV&|Kx$DF_MjqK+Bzj|{~pAi2d3Em 
zdBcC~u=<=@J6POl>iqWq#jK|Dz~)-DegB(pST04V&QzW8qZKU=8;ejmVe}qw+SR7U z`&fl9e^eILWK~cr)GU}W?8{^PJt1!#@9kS$M`?A}!+bT@mC;K6WvLAe{3Hbi?suy~ zQs~x?OAT`!!>D;+au#^Q;$gcap@4$b75K`ddnaX9BgLj6fgC*oBDWHH>=18xGA802X!*k0`=isK@C!5lo@=e?y}mff#hkK9RPDcSUfF?crAe=V#k!Q0CiUk?h-A8e6=DP*6XeTJ1*#V$kHGN0b-uuoZr>DNwNlh(q%%#EvR0Ig?*hBqSNjA4 zf0tvma1XAnS{y+V!_!EJVoW7dlCW(df|X7a(ri#%2e=U?t3+LsLY_coENx|(N2I@0 z0FtC&ZG_mQO`1_p^k7+_f85CB^|rX%rq>gpkwR6=cz3}al1PR1x9MGBzPSL|ZqfN| zE7tjLGxqsn==rnFvQ_^^o_~++C&D~0fA4mP5wUJ^%P#649<0R$@9lkk_)Cle;aR^271+%i;h#XK8C}QTCfYNv3zGuHS~`+J71hI z7J{+uj3~hx)fAIzK;-AoCCdm7zcefN6hua@I)zkH#j&hdxe5_gQjn1!KW#@~aKc7} z%hdM04g_?sd#Ren8Nv=-BTIUs_2_6_-g%T&yyIsussHadrPM#~;97eOf71fiX`a=- z)cbCi<+D^3TeDqSRico)HIZHvH#`eM!{?$H@}4_}VtMU@pFbG=k7zV(zV@hPvrk4}(l*WFoV>#ca6TEOE8;7& z6wx@9iBhcyX2%z1iSY1Hg*+a@nPiw2k(*^dhx(Sb)bILu4vK;)e=ZrGD@=q+|_4`e!gzCXMG_8bIX>@NIgf|x5 zKO4Qn!{wX~RRZ1EQZ@Chx1qEsn%%ZEd#0Cb%piz+h?!H@L%0GL}w{ z#LzyU!S`?WwoV7o)RppYimcKtY%4LMfUY!_`pKZ{Cq9Pjxf1`%s=g0|?_yc$ydNcO z*rC1KM+j4`e+gtcKTGKisi-TaqM9fem3;IRR+Yx`9!bvK6sCh@3K^=FFX~;p=N%4I z0^QhKQl^|u8w-zQCy|!(G7Hz%>8CQNWGP}{IrX9+4WSn{8v2z0UckofJAOyf49LMO zY<4J8-Jh|v#mA#js6U5BbGa{YI0Sa4srVp)V|X7Te~CUp2Lzee>K8qvNB$t1gR zy403EWs_;$V;M6nZmm?VN1PBQ&OJ>_FiqSoBQ?>l)mpI}CzF6APhoXk0!!8y$Z%Gq zz(I?i^SV#n5Zke8ea?cBEX%XgO>RWP+U(OEwM#3Dg43W#BdTaJCc5NZ{}+VD2z?O$ zuefWwf3#R!@ulk;xK(N8=FyNzK!a?kiTGw}tNkWH_T!BKk_o=k&D_+R5+ojQDq*WR zVT~dV&v@@80c7U~V!$@LG8#r& z;B$e91dNN3u{xvW{=4Sf9^ zeK5`^)bMX z0&^H364?Qsv5+-)L(}wYg?Wdj<*(`?z{D+Vb1F4SlqvvnlK+&k=Guj0=608CBc2ZN zaOuwMzFpGPNKp#d$~0e?baDd4Q~)3)e-sSRZX6?9dX@s01GR7$23-yfwbESjJ3M!+ za?@selk7La5c*qIdQnnWpo9py$4qHjHoV3D8;jgHN5&KX>v~DiL@$l-P9}=4O>J3O zr1#A%?Y*kkJ25cu>6_K)k56KfK*oZe^iayIL}MaJLEHSQe{GDa z9q=Lmh0~tKtMzoSjI0z6*U-@@sDkl3MX*-`amW{CR~>sQWPJME0=Cx zb~|6VahvGU73|(E{D;DzNeD7#KuP12`_mUz*^75v_qhYJoaW7uk+>(gp-wJYYFM zRsc!EigoQceeaj<%IV_~)!E5OD3@n?blBC3g$ph1cX0z`TVS0y^oxlee`QHew{+`r z(ZU_nCO)3X#cNyK(ofCf=x31iSWwTy5y+UF`5kOU*ILHN3 zBOu3B)HDW$MPp0HNMrFhhtoTRcs$PO`@`|f+}vU<3~p^mKy+Y;nnVUUB=qvQ<^1ed 
z>3U&y^xKu0uIFA%jFkRG9b=x0!sgyt;11Rflh19BlJWl~g1fG6e`!)fRB~K1cDoD4 zpt;y$P3lgy@GBC|h7p>fWdbd!bg@hK-4@pN*sYKjkL|!(cQV9_h6;xe(p#6(V&HZP z+4cV}pZ3?I5Mx@4XuH!{hiP_Q(_ETZZ2!1db@T&K=Mkd0W~HS%M{?a5Pvv9h>+-|Z z&VK}9tI-4hr+yP=Vl(?dB8N=w&jxgC|ynxH=%1OYhFqG$;0 zNO0w9f^Pu^c9~AXIW+(URnQ=0YNHsl^7IP>Cg*PHHUztEZa_$#GV(Puo-<`>+Vspy zMrL3@5Q5hV%{$pJZKBB6NPpj3l=sKlzR6S2^N777rYP2ke2x3GDyj zPFyT6w1~8rw_q$QX|$tF?}&Siwbj0m!-oab|w)CHYNI3abbo6}f0JvddHb zJL_c1buQR6IIT*p(d*7?L@j2*!3vE)=GuR-(-H@V*oQ|*xR=h8%;S9np(#}sl04=1 zTC{-zY@SMZf0?K!Xzm4$MNo(@zsUT*o@BKubMDmBpcv9+k*a3h4grnd^~(PqFGr$o zkU)Cz8VheMl*h|RG)?>J^W&oTbKf#Sz?(AqlkGsdErgnA=s@^tGLXf*vyTxyu~a2- z`=6?p#K-9I$xQ)4Q8L`QpLAb8Xl%-w)=nKJ|L}SMV#|dU#6d$I7ED8 z+nw~O_hG?SXowVE^3|83bPxO1O8HJ?uz`2IM~h7ow(YlJnxrzsl6 zq8jJCq}wAJZ4^Zy2eD+qhJ}Y?WapH$bx64Se}uKDLE~uyiW~u_eu|w^+*7i}kcLDh zsbB$}-+#T^20)b?a*ZvR&yf9xalSWNq{M!)AjwCg7-Zc2*1+K!GWc?3Xn|0pE^Zxq z(JF-;k7yzfVmdv5u)O|D-Te3IYUEv!vi06vZ#2rBFUiUcEF}(Tu3vP4&mJze`E}+$ ze--H78x4Z$XI4-sps32})-bAR|I1wW{+N#GIi*a|Z5x$otfGnjAh2fS70d8bqf5>} z!0(kZ_%wIA6z>}Vcm`Pg5!wK+mUvdXsuV^(ewg=lKHs(SJvjb6q;ybUi_&_<0$V(t zVqUI8IS?IC4B#DMiiQRSn%cTjO9q4)mI`!=*e zF}qqVsRSt%91G4V9%$GfLfIqGI>Y(i)}A}iL8A#*dW>3&W`gP31F(kGDQ~jLA{0`x zIA5p#EgI*4W4+>nL=}Q)bbc@@tD2CVlQ_EN$0Hbm&`A^9|nt}O2|6Cf17E^ z1^4ldg92LXJZH9@-xnFp_oaxn%{HCWvG5t|L4~q3(o=@SrT@qVE@xJ93l>mM<<hvx_N<9%XIMp{qTe+SIgMC|PEia}P&Sf2k>AW16{U zuS-u*wsv61nR60NkxRbSaLPzF#b`W$Jmxo&3EcsN9E2#QV6ZSAhtIM?ICnzQ_FDLB zvd({Ok^B9a_v2-BbLO#7`0Cw_;f9^>DD`kz21|NC&A8}nu(7;ireLE|Z_=9&7b3-~ zF`d8Cn+J{m5!!jWBYX?ZfA+g#=e!*wo$>p*S`iy3`Nfdmt&ssmi)2q5k8^%nRd)`w zUT+&(eL};iVMO3jxR2qRQBpE$2$ub9*B4rj1fY_l(l(R|w@a^DPAoep5U4)Lb@ka0 z6s%g5jP14eCg0M+(h3*gf`X!_2QHyEa2_VRTeS74-)K+^fhDZKf7c=n%XE3E$RigQBBcIyY>ohj_A7=9!w0OEu-FEhGCyTFsOeWAMGB{Yooz*LnwD6OQxerz}* zHS7CCE#rGXJjngkMOw4Ud0DR;adJZ1z6c4+Si)SDuN)~Hl5*_i z^IN-Xcge^yp%P9_e_yI=&&Vdr&nt7#>&?kTd(<>u;DY?(^{hoF88PY`8zE7?`vn;9 z_o)?ux)eYb#a9qdH0b4#34w2s+b5BoNR8r8|4X+ZfoW|)X<`}i5ij&r1Ce(6om!>+ zul>&72Vq{diZj|OPIUz(6>)ms{LJ2XB&?s@NAi&=NbaIle+e?d5T1DmwF-;h3hML0 
z^&X%$<$db~tI#ZR8)KPuhaY+FCj7O4Ncx=W#UV|r5F0d#> z2@-(>CeX2`kOi$3y3TsjZu;&;p1s+PE92vvUAEDLcblI=Qy+yrl*|gK*?%lPC^`I0h-LWR zHf@~u@1jO1g#)&dbpn?pH?m1vxg`Gp;5&q8X5~Gje^9E{RfUR_MtsUdE+)Pv&enzK zXhq*6BT!EA-6T+?yph89MI|Mwy1nzQfgg)AMoCM$(;r@#^e9(cECPawF5F7Ls)(sn zRDEg)GUQI5Y0Ga&AOYX0>;_R{p_JDw{>On+`5CGg#218{`4lCby~be7Gte)#e6pV$ zT0bGBf1;s(IRz&P-Zm9{!CMnhuf0DvGhGWa!kJK+y7dl@ox8F9kJcwo%#G?`*r)`W z)DkK}#s}QfKdn0_Wa@igm+I3kWm!@SaB5J&MLbPX0HR4ALzMoxB!2T}`RGYhl@}Iu zdecQ$9j?=>^QFD+^)$Uj+$x0wc_`Rv$6bvqf2l^R$oS|Ipllwh_xZhfN_hEbwj_}? zG>$w7h3*jy0jrUY2PR>9zas3ieF_w>U{GOnpiSr{xu!VZYqS-EgjlyN87$d_=F&m% zlpgR1)g}NjjRd=|$hRM*`YELvzUu}u95e6K9LKZ%b6xm9j!m7G^5+W7h&}2V`ZcUx ze-0pAm~Z|6Satp)#2*i|_oNtEMTOTTlDW40;ZQ6{PoPAG^~sySSB?Oa#gw)DNF;N6 z{lCw49&65$@$-u8ZC3C-2Hxd!I7$L`s4F;yWIL9EQ|Z=qnZvw#C{L?q5o|_y>1HLD2LqF|f}K8S#`)eC?>tfW&kery>i4B_U>E}@M*KoRmS!?B zhtB%nbGPVx2(DgCouNGUt><4a|N52OTfW&nv}}19jcuZw_8K&2`gc?8akX}E91l!F zkssk||B`$Z27|L{eI=(7!RDUue`3N$)m0WOo1TiI7gYKMIDb<6$1nA=UB>~cJVWIQs5;&A-j!ds zNG`EJU4PX`K4+O27&tmzwXxS@;_5mbn235!x?i38Y+1U?`QHEJD4l(2`y?KyF#-lUrVaW+2R^QMR9c6YeP z|I;+lV{+^7*Y9bjg?^L+ztG4(ZNgvq@m(1s_fpDCpq2x;Ld>!Pe-exI$`tDz%(VBy zjn#@B3N&M8@o3`geM>syuG1q)Rz#Ie;i;D>%Ai@?*i<4H*G|7c;0>dr?cNEf`&Tvn-f>_P8NL@tf{FAST^bxf+L;$(Q%Pf79pjf|bUVdus4H$P52E zO#D@|i^D~v(Gb;uLPJZp05C?^6JTW+Xk`bNjcSF+!cH9G>~HUO#Em z$s(hsgf|%EKKw?D-O;v-~8%IU!0E}wvC((B0TTDyMPp6cF+13 zYxzc?77drr=atwEM5~#B=h5F)(Ptwt8B4Nz)_wTEG<`dh8mka1qfBXIzUHUSu`57LOG!oZSxQ2G9 z=|JKiQn?f7jk}*Zm+a}&6Jf{_am|Y(PqC_s+d;46L_C72`pM6!qtUQ|_4z2?husu5 z_Exe?HRUt$m?}mO$i;l(cfOmR7M|^|WNip@RL&1wMAM^Y+csp)_l@qIajm zIRXagfBS(yx9qL!fl2u9Tc5{u6m@@3N}0YyqhPwY3P=elN3L_fwD74G^SML@46IR4 z+(HFM0QCs8Z&oOfALker7WG(e)oJz4?@_u0Z=+eKXsc7Ab4>#g*2b&2{ZSIVZ{pxM zPneHeR@E}vP^lC|7_2LCf`<}TZLiRlw^aW6e_A3FshQUMu;yB!!WVEjCA5=xA1l2# z3A z!CUE=;BAA0HtYciqIJllRh3|4;GUlXbR{L+)egT{P#{PP{ z+p?pIjw=1mmq6qndOVJ)flNI)FxVLZ*)AmF`P9rs)`}E;72G1RjBELD^d+xkUF%>Wt(!=8rOw5~t z$eyf(*!c`XA00B?*;%QD|G-b5w*H4y+s_C7a=U4>#IGv-K2D3l;>BM1e_P4%+6f8x 
z-zPrjyZm2>#N3erbOHhssUds@8KihvIDe|iZakO6((z()^8QnH7Pa~;E{{{kvBdkc zT}oc`FewEhwE`~xTom+NvS(M)`gMc|F80Mam8gY~=Ers5Sin-Cx}>#$m??ciy*qW&QB3~r|FL-3%mUwRfX9DHYX1 z-zy!eiy{J(>PMVyOKF15wA2t&HJ*z}8^6)J<`>ucRLC3EOk!N6U~<)iNo=E{Ugwrn zJ3W`%u5excC*~yAL3D4#@;!pbIfDc_!x>qbZ_t-dTezHSh5m<C^Pa4@vowjxRhkyFS$JhLhmouoWZ;l6`!xjbrX~q*P(6 zsZ!9y2K%35e#71QADN6`OqE4NLKHVoy37T{KoF#*f`tn_o{4YU_Gy^TS2*x?l{4pN zmFA~Dg^37tna}K%f3fk(dcDa3%`=7*mG$6s9hqU?HjAk_qBa`dr{glKXfA4gksnD)W6lnK>1+e=pVN zyxsEj*BRSPgDy$o(6ybcM~`z)$KY$6t&T^p>(lpH%yUvYG0N!!!|ye`N#d%%}V$hk!oO5 z?a4leX#lE2e^O4S*e$kPw#Ea`oT47*7M$glPsxbZk?xfQP_%+Q_f2)an3*D^(T0h}6&_*6NLTaa?){GXf^6PR3Q zHx!{pMn^(CgW3pKYh!yuRHI^aU$xT*#kI2b^qX9dq3!x ze@MtA5%GLjmji2#A0voOlB$-uJ=N;{KB3BaZigy@POqo5yD!n?E4G<^Cx<^dcPhWc zRuZyTKgT|dQKiJ@7I%E&?Y1Qz-(-uYsZr5-HAO;koX1wRTSh}FG zc4ho;fG2jp=zm?VEo^ooLO6WgNXZxwe@Pz$DGGcD>5gN2=AI%Mr-lUmJz(%{J_zv@ z&E0QzhimCQIm7tB2(6Q;1+EIMDui%U1UI*EuXSQdH54RC4vAe-}&) zeET^eJT?SK6r`YslD(5lbGq05^WK^poyuf-ZEQC8A}c~81@&qm(DCUvys9!JsC_g8 zRR@U2>&-l3l`O4k5SpCt^LIvP@qZxD?t6llZ$6L)z#la-<~k+a6o~@1wCV*md)8fA~7QT50KMc{YZf&mc-rNGQ>}bWd~$JVz6Y?UukT z+e&^GF{NcJs0w1UM6e-036ZNby;j`VEq+B6lFL}m`+cFncfRyb`(2q^2LwOaY?DtF zBY_0D@oSp`44vuYk9mU%RB@i;*uWZX-$@o$Kfy_7xqoASr*rzBsBV9wf7%QJ#ugYg zFC=l&Nkj9)VQ=k*AGCqT)a3DZ*VexiIZ97(IZ_g9_Irz9sNgn=KrGV7^{ByO!{`~FrIzN|wD`&VIs|9|) zlJduY>ux(9yYI?R-_FFEZ%cLhY`*w?34g2_{IHV~9G|oC9YKezf7S|bJHeYj1gUZY zl$U$RQjFa10X3r9p|ASUsqWI5Sx*-hJtb05QW9z4G-<;+JX547d~h2IxL z!T|$@wYT_J*l3{;azzc{bI#&q7Kp3rCIrS>YMHZzasII#Saw5MQ7#smeRCUB^S~Y< zoGA)$Jww9B?@lFxf48jOY_`4c)|Ow2f@w;NOUihEraw$GyFSBVRL_r($Qp08GT(u; z1$MCLa7Lja73;PRkl**RSPFDznztKqR8-blO5Z5D)`WMuCq6Qar~}$*If&oF*vU#3 zG)yRB%OqhXNKgb!ln z)*?=WWkr`JNk}wiWfypSL791JLxjqhNoK&S*TF1+Dmwi^P#_>GID3kYO$^t6U4pl! 
zmhdnQ*2|Xl0DAM3Hka%9pD@47ws*4ZHoG}z`C5slf95V;f@aVNO}HBPEHktUsx`QH zMn7$|ELW~A+HuRJY-{YEGQRmHo+zW(gLLVm;RAV(glP!oD{yRFlGLz)En!O-iQ(A% zV}lsTAeE_U_wbQwN4T{UuhtX~E04D!zy8xhjD-4lohrf#8qi1u09{a*aG+Uow5PYM z*8I-mf3>eR1xF}suB*p+6aHPhUQhp)t1gSFqaVNPAYF%!0%d5jhp5Y?hc%KMwmw^7 z;M*k3&J6qlM*^gh5vXOnr?~`3tOokgN1Qz2LAi)5@6-0Et%79u6GS!tTe>oXAJ;&FrFOCC=bB@w%behwes(zgr z89ANz4~6rws@Cf*20w);p*SI}Yl5V#sY)4pv0_+E0W&f)2`@}o!oWq{;6%Ht_~@0tVKMF0CPzQ`L#F#>iO?*UA&kWPa#3{yzo_1 zDmw0y6a3p4fs1jPVz+@P)2cg!ljVAAe`2$A$s#L;zJgyUcsXO`EY_`XlyEo;lFi9) zUTNXJ1q{6PullcN$-%<9UIGW<=g)S3{Z@!;TT_&*4+9b)s(l<2`srMTYSd=jxhjef zKpR+5r?4LCs8*bwn;@f1JTGbNd<%yNNny6zr0_@V71O(a+SaBS}Ow73G|;K8WuSWhGC2qIM|u{`1FBOD^qHwqxH z5!c1m7nSO0Q`6VS9|r5xe`r>)aa5zUxupLbq}#umNUhv*7Z(`*klSkW`aDMN z%eq*xov`UO3?Rv6@sS$!n&7uoTBxQ=Dlx!6tE0-um9(lS8?al%yj1`aPo|NXX}-3o zWKghP)dL$Em%5_7(Z==`kz0UW8d<*60vQ@{oj`Y4L|?sCJ+FW5f6h_|Sib|bA0rYG z#jG#o_vN(`GOfwxLrcnP zgbI_C^t{i!Ikpw1m#)k)O62<#a2xOY4!;t=sD8W96+M)|i`2oSqF2=1mqIqRVvQsJ zz4m;K{%F1yLGfJ;f53CCG?S5k(}9O)~+^iZ98Wm-(XAQ5(-Ap&L1KAUAz{~O)pHkt$sZmc4e*)I}_nFSrlZ@4%{h@k` z@F1#Ch!LZ4V1VnYvQQ0YD8U$7YFcQ48!re%m6nx~Lt17L&&UW1+J74#2~qc# zkKlVK-R)BGn>H);r6?`AVP&3{p+ZBUwWK9130m{FC}i@3+y-v=SzI~_4TQ(bs;rLj z)%q3IGx0|Uja>9A(}5= z3wu*eRN7%+0wIwf7MNbx?ZczAZ_(t3m$(8a<-ouRODq+ zu(l?E;nTOKNBxkt2r}g(^kg!O*HFsIrD~fQDpIPnqpPe5^=af{#keKPNa_s~HDA%jcG%TZ zyFXo=o)&AopZx2}zX~hGW8<8vwiL9q7p0O@I~%K&YuPZY$-$SlO!QQNBSp+qbOV_Z z(#PCBmr8iGQIxeRD?@qw9YlK_3zXBse*-Z6EdE&CR!2_$F#IC$kNyCSjblfLFF-~A zKQEXI$Nj$x=3AgHIA@)I4|h*&;G(+H4c;&cSe#Tuiz1Uf3le9B zP>e^JU3c_`G(6}=pB+QWf*DYZ0ssI|5-<=36yzcL0ErJoO?fa$E4e38*b^q}2qkI? 
zEq!Pe{g4$(5-s%xeB}@Q$a&LJe~qq~UEJS=#BPc!GEmk6G{6M}0YCsm;ODAvB9!8z ziowkz2`QuSVV6;Ai0$oDV`!FD?%jJHU)aJ5+4nRUmx~BCt#ZT-?o98%9M`NbC0_f3&=VQb5sn9i}JztDAWdqY;PH{gV~#j?;v|H5J(2ra)3uz8lW^FO2jv{M8mZb z9Z~;0sAZ48B1*O*uJ4>Wpu>duFJ+$9jj0rN>aO{VP-o8z{&gq z+}e^Rgmi$k!#F^UBt*3*9*vzk^OG&~z+xT*j_DsLZ0C~ga)k}2k*G$KDYuy^(d0|S zuoXct?t$)^4@|FQe~=go&_o1bHQ*Z%QmR{Sf0=u``$#oEI2pzev&Bg0T%Y7Ic?wm- z-M%9r|9Fya45*V&qX{d$Cqnf}ko`b0P87&@bBEN>mAjiGQlT;HbHzDxBvFDH4@w8- zWz!llM}TPx2!PhYH3sOZYl*2#s=YurAKL?YS)4%79BdH(f7rVizr@_+e4zSa8xYh7 z!V+Tr(mp0ri@b!TF(+3i2>S1bic4Jyc29 zyQ1Qj)?0Mz_?n_NB1DeM9nxXQ%otc+rKuv3ACeK!2gMeYzQ}Yh+5w7rkRGytswTyi z8RkuV-X&AWe?J=tG4>||_AgrkKmZ6B0V=>70hNQ(hH#gVedFTpxeQXod157WIB_{J zbXUKrUa$X%NCc=6R0;8Y9tTM%40Rkr}IhK|<}xe`iqefeArKCoFDX#O49n97#y~ zT*6SjYNCYeNm<@Kh@c|Cf0Ny7{Z6o5xp;E6!_%}RZ}+n?ig;=BXLOJF5>tRs;H z+ud;_JAP?fo#M0sX`H|2!|Ti@=B#MurjX}NkEE$9ZTZqTqK<&qV}7%d>+6rNMFL)l zKwTIqe|m-08?TLw2e9(&SoVOvF=M)%?zAi9ymL5$9x<`7|EO_u=R>Gu03qsb2z$;; zW9~m%sgLm4z~~K42JP$7mp~M_5+QsU(3*&cm6)%WpVRt$82a+r@BqI86ik14L%tZR zRLu)k7ZAF*^ntw-DbrpYqbcC|;u>4)i+Ji&e^X?TcyK~+4Oj!AGyo8Q5#g6axmi1+yIo*k%gPAhCe8B9OQrp4Gc&I1LTPzK6Cic>kKZ&Hn3(h7D!S7{SsmEi$HAqXC9_#_4Pp*QwDxukba1+t=GU$1#)FoELQ_XE$I3c|jX)Ra|WV#(-k9 z6f94uk~co~KffT?T>es%IY*8bteHU6e^efeB|rfl`{dykMyIEUly?7s*V6Pwwt21PQn_&9;z1d>05SmJ07e1V@?jU^ z(Ejh>f1dND$BOiB(=QR=j*wQu3b+8;AjiQN3ZO*yxmwn7fuw)&t2155f8~g51 z)oA|ljVEUk>1JFkJRbiBppbyj0Aye?tA<kT9f;3J%vere|SD>7U`_(v}syzDeD?ui$41_r*@C$j4FB{lKxe;oc;GbRBO z2t~#e0TCcZ4=b4ae#2dIzcjk$f4KzN#?}@bFluc8+eJW7@evfD%fR{q7ct43tmfP) zO6bUSt(`mUVgC(8k-QR_;R95_m&Ob$THfKeEjOK@&UBranw-HJ6i>3B3Rc_##14=E z0D?v=5&&%{nH}%^8R)Lhe^-P`<6_M6!5XxxN=y*{tPwwkG7xlo#~%* zoSqp0MOY);hEJifU;!Tk;{jBN%0l=73_GVanGMQgKIKxok_^>@Q zF|7Rgtj+yk8a|1_!MdB?HVK*pD+7aCf8xMdE&N>hmE(@{=xKk|4MA*R6#Xdd!6^bV z6o9D$fL00;FmTG~%PR_+`dFN)s(Rb?6b*ikpCiMGP6B5r6;+4@3!Mf15jB(5q(t1;#s&@Z#Lg z=_yBGLNbj=jD-$pZQbhyhqspNA4%@B{}{UZsv$dn?GnUqb>r^q1A^JSXe{M&hKGHXt$yqm zm)}%w;na{U1^*vTzNx(uu8A_XPTa9=+v+z?$F^}|f7>=Y={OzRwr$(?i8DRlGrwT= 
z^{%a2d#zPBH4{3gFQRlO3I(RxCqd##zTx#P&9&xWIMP87M|EuP{<0auil&Am#Z zWi>nBZ}xbYTWQxuY>c&!)~`2hFQSa(9*HBCAo}bgEX_q#U7(VA0&HY)?;s!j#9~&a zs%`?Lf5*GGe&e}#T)VaN8uYXQ)-pc3)1FMj z3j^CaVqdP@(J0a}!qLVK@~?*%T*|0G^$6RaUSyP>W} zix+jh-{9~F_~%0N{3tn=)(}ngj;)c8>QELk+x`;1d_8%9OAP&*Yu(A;>zkYVqR&`) z&}A#DTEyJnH-(p_A%ZO=AuUqD<#$e7L(>f$o^i>jo)o;sY(NtB#aCujROK1^7ue9j z=O_N%X@4=7q%S!ordz%fcLfy%YupP~AcAy;B=yEO6}Ff@w=PRiLhh`{-{?LB=l2G( zgfe=y{l~n+_RBpBJ8yF9$rJDg!IUx7N_V~Bu;6Mk7OZUS4$Bt8(emBvM(wO1uYmL~ z&V5lHKccZ|xBmrR8B7KJm?^0Ej1MvD3fV)cJ%3211fuWqaHdL-x0TdGlw=sF-TlISQP#GA4^FLIhbRcFam< zQml`z+RiD9UWT_|p1WCK-kJ0&TtVbNqC!FT)-n3~ocqjX&iE>%3}^JCTpTU7&{$+p zxqlZxvJmz{7`R4}9u4K-mVq+qODHn?=uA+qa1jix@|{4rvXD>Mer&dBmb$;eXIPm>%3cXGF&iy67eGrz}U47x^{~dX)2PJO2e?h;-_n~HiC8QK;DgRByM|BXsS2nPUbPNjJ zE8^a&^IcYq_&~lz`W3FxUg74|OY4o5W{@R>bq5I( zI1JPxB9P=yKuEkt!@YU5ANF1(%73rOwjNmPoqLF}xEVAY#S!&sHy#1JM~C0xTEK0a z49{gn4u-wlvSK-xL4%su&ham&NmIeCCx#WWW1;j*hh(}0ZbOtw4&bz!~%BY!#;)I&r5 z@8NQabzym3?Wo+`3m`7%$0s7c=F73h`k!d~+9p;GmvV1qwceTswt&EM6p9?t%WS_j z^PaGDzTrI?3!>Yfe$Ve7W3hw9)cgeI$o}vC8>V-^_5WM(oM>VT3@fy>L#6wZEb_0U z5M@+tCO;!6VY`7^L-+r2dVc^HzXwjGUrR=2`Hmj)cc1%+5TzkZN2n`Na>l!^c}Cxk zp4@V(>{|3r1f7P+dfrkubexM~Ba(K8M;hk8Ts13uXC9SISZ8#qPw$VCo#06)o)5f3 z;vK!Aq`#dRX`a8E*uvDq=hppHlN1mrz>>uZA+!laoVqglC#kdg?tjCFpt=VVM+GvH zFZW|vlUj%WeLNx}W2~&nX_de5&yO5YW(2%lQLXi>!=D_#Yw9Gi3?l zC`yFnIFUpA-;iPzLb_m%{UNFMzRcm6(m%hIVf(j*&24$KhVy1mw>tY&N0auD z6#0`*%}&^V5NhBI+M7`D#K{>;HZb2FZAGx{ghaIeZVZboYW$FUn6a>3jy|kiW@s}| z+O*qLh)WPbue1u6i9&J7uj3})xwHqsu;ckHMfJ-&?SHRF_j)M|%`Xn+w>Qv+EB#k4 z1$u@lJJnpMC1Br30*Gmqg*v9i;udZ8=i7MqKcYaH&cDhcmq)x?z6T3UwO0lrd>nIb zh-J^r;1?M|btAf2bv8o4(5>f@znL!B@J<=3{1O8Zj3h7;0u=rbgE0L%+_fHe$$?9# z{@~GyCx4c43Lh)9?XT~|EBV3(z}u0ng#-hSjz%UwzS=*uxEnk3#5-?wH|%UqNv8D~ z5{SbNVg-V5oYH-E$(xZZb;w!6exn5we-KqYa7|C;s90ZvwuJWTjVG0I%_2)i33Yddef5uj}J_@ zoj1Pl@Z>U7Y2q!<=b_E#G8@xdkaMvspm9<(S)L|n7?1)oMC50pSV(ryi0aHAyfOW% zL`BD`Yl>*)irWuPmjp%%{Tiqx^Dzg! 
zi+@_=M6A(QXnix(i#9^Mk#CvaxE|$~`psBw-c6BBqq@`q0vCa}JjQ=>j&Q{ZVdxqx&bHR7Vh+5D2H50ev`kDSHYzG{tJVfhhA{WLS!Z`9rwaa1z zUJ*Dva%$!Wm7|XaA1GE=tpjNjtEC~xvVW!N;_$V&99bf=I{9tHR4`0NAnxtMTOiBR z-f3P(bx%a9L7SMA258P-= zdB_@D8&a+;k*b3E80}g*OEZnnh2-O*(>R09(91C#J_7rH81*l2U~)NnPm8I{q<=)5 zmiC$s*sPlsz-QA-xDGJoFS<_{HC!_6I-Z4L4(2OjDM&I zYGT!1Rbj3gYd-Jg-pN1vQV$;(b2s4Gq`e{J35~pCzsquq-1*QjnhEK#1PRH|9?dApj~_^g%GvDd?jREp!kZhFtC@FwK%(WJsJ8F zA1UjhkAWvG5+Wr@V<(yQZshU`efc(x9{>X49(3%445UW+XA<9ZKYy=XZF~{d^WL%7 zrJt=bOm+1Xc`kz}Q@`<*j##LEAZ3r-S5yO!E|R~T6R+P=6&Jb^M)Rql>3>QGNrnL; z%J||5RSDNInN*QfeBoVRi~I$3?w?BZ7**;}iC}kGy>TYcMfI@Bmf~4B1JedwiIfiS zN!(U^w$hKwrBs;UVn)2eniw9hQfKb7@QalOi6MnNU9Y2slkN8NiF=0?0NdD zA=CeQWNCPng(@zp^oe>WYAABHUwzR5VqtN*H}!_Bi{k{ylko;T4Slza1ns<$U6|!t zp{%kgNqo=^^E7qQHdqXW)*qeZrUwddj=t>vmAq;rD-IbDPnBcR8GpaGsYywkaxVI2 zD0-J0CG^kp**6OSV+ja=ps|GFf&$zwK%0DI4K%VHJSzSK)%C1c|A-~G)fJ_T)t+Is zR27Tfn4N@hF@BJ|e0{qhM1lQ9j1mm24E88M-4U0MFn3x#R&OjD8Y7X1rxxjit7-L( zYdMH_VA1ir!(a0qVSlEP??mtM4jTwgl==8lZr@+gU}NLBF?@{+mSd)<|EJO@X*EQl zV->l>q*0 zo=28Gx8%_;LVsqd*yPE=Os2**oT=%7V?~h-1@R5V&}K@@ZX9iGR`)bkct4{D0soOF zhi9&@7k4(7hxlT)H?p5kF1(uH`PZEs6r1*;HMN|h=Vc& zWN`bG!-OUDH6Z!iwCQB~`cw#O#^talojSZquhr6kG=G@$mi@Vnuh6bs|F(gA)AND+ zlapqxWXrcWQYZlIqHKXE2G?lb7(v#xScCIinhABZ2YTals4n&VdYF~Mo}|eS|CK0z zPJF)?`G&7{{K@WAVh&)`2~Nxah3QL=LZX^ZO@df$#Oq&?m9hl>J^OKsQfY9s)(J*U z4j3IsW`B4;^GT!jC|Grrcjsvj=CR}Y*nThYssf7ID3jWwEwpqJPS~yIEq`+Fb9j{S zs4~>>@bt{rU(5@RUF*0u6KL{zaCAb}=$gW@RP~nSSZrOE)x>s!fOUniG|6cs%L^wy zmcSvTo$gWFm$~mz;qFK2=FeCDm>b^`iT5nC!+-OIsd76F@1sXn^RGymld%MTz(4x4 zgb{YV9S`50N>v0@Vi0@4v+Cn@s!kd4E!-{mUrlM>bK);Yn8?~z&;%ZssGVyJ@4IoV z1%++vZhcLBFE6g`18H8a5yR=X38M7vzBP(#j|A}3YL{{{EU~j>OyJSrl@9WgM0}x* z2YxuvRU(@_HM^nW2 z4z;cMT8rUl9>?CXFy4vD$~^Br0;1v#`UZXf&%1a9oL_-zbGqa2rOtD z-JE7#RWtYd@7$lT8T zqoK)EhvIAFoWfc^sEd>Q{zfTzR=^}y?Oee~?J{P}bpN1d?Gal>8xt$x!y2ODb$|b; z0xmNGHi~X=Q=t!Pz!fNzF|n@O=b$Wh+-@vgoQE<$L&{o$9*7SyEzNVG^;PowGxX$} z37v&=(RvaLVUnWhVA@C|KO61&;}wugEQZcB!j% 
zuFd6sl~)c53++xbB;tk4yPzxx3x8wwPuLuxQk4WY8MUZ}kv&dC@G}Z-2SZ$gQVi9r z?k^$oP>%b2AF3#{ImR4~`nng+^~vFdCobsgZUdw0|5YdO$#? zkyv?@B>p(7r94&JuzUZa8Ov6U8~5-XOop%0xOB6{tM;w{?PX(;62*ye7)xb9HNnN} z@~TRRH-zL%@Y8-!9Vk&9)Rf zEt+GZ=at6cT75om^Bhk&y?5&kaHU$f0Ows<8tJ5%BZ`>GV+&*Ez&XS` z&Q#MUvou8+qn1cGUJWP}#FBoiSb8bClZhob5T+EB{yp&!F9vfVO6gDQp?#J&&k}E> z58UOPos##jFK@CBV)*C>t82X|y= zQe=_A1He~CsKM`kFMq$du>Va}fvRQ5Uy67lBiVM>{CN2<#2dZvNwKqvNBV0k$cSgr z#U#cQUQnAkn8xzf>-K2JDEXhb+=lL|IIZmdt@d}Uds+tp+GjJvo4S;00-t-!eGqsf zEdUILfKkod61gH}Hl$VEJC}vg5_Q|P3ovwGfQ_PyBo1f4V1Mj>cJ%jpY!fyo6s8hn zp$Gu!r%6*lV@d4+w_sqh!)HC50UQ&hJMz%IcYLaI{1>cMtmu;D%CqTX@G5k zr-6)R9{dCcWz&r6s~8?W92bFP!W|;E*#+_z9Ef9>`s*-fXcM}IOf>@ob6}8@FQ(|- ztqg#?8i?c1u75+sg1`4LRD4)nn9;en6TN05Y`h-2%~p98Sc|YM&XHu64A+KN!pe$S zNo&OJUWcU~R+_%ub)g`E%Gi<8o zrq6BPzni1~BwBV%FZ}oSP4G~hCiF(3%vSKzz-*)?Du07mC`*b0u8aowQ|)+h9j);} z6N)9Ace`8meTZzs=`#379d-ozcKR0D(Ie+uT2>gPzttlCAc zrT|&&t$%0aUnj$41&>^`EjC@r+2-Bu@BBT%Z{DB2_fEaP!^ z|EFw>7v}FWBn=f3VW>^|N!RLe0eN}ot&g_%(D&~NhX~(8eTOy=zVxM}=I8!S*NqZM zbR;62g8!yy{NRgheAVuEiGa!LzVaS6=fwO8fq(srW!>Pg8+W<`bIng^o|k2Mfrlc>R9-ddhuaJx|MpjS>Vq9V{wSMW7=_G4a>r`u&p7jiqdjYtj=coJB|(-ayKIBH=+2* zX6SGKbnCq+I173|^NotF(@nPg;e<^;Lfe>Fu~X^i@cY79Pn7mZG^Y~0i@d{dLpM_*5fnX=;#qAFU;s-iVoU&j|lEq|#A z&U`OXOvqY?Y1l}Jq%Cr`{e9*C4qDxA=^9c8eT9wnn*pY|Ob_cpuV=0CHQ=r&&Jawp z5djHOm{zH)`{yQFu8L8&ZU0CBgckR)`uFpBah34LPV*E`RM&ln05|%*1 znH}uwD_?H|b~D7E+!NANMPO1=35oEgmrgXtU`}JqWJHl766mz3*+i08V$dv_ZXNgKmPc(i-PFMqs}$)qcE69O{K z$O!ss8|kTT_;UkTw_dr!q}55+Wb6M(2es3_Y=3lPHiN9=-EvVTAf8xK*riA}U{SP@i~l~Nb(|-~pkT-%GusAVc3QfbBq_TP9^gMBlqqS|oGCk@D^NJ;w3C3X`kK@~lC$A$ zJ~q!uwYlk!N}^V6)Q3s4>LNHK9;+yfz0rr?$n@J;l@rfwjLvm&@rTB-mvtx!Ja;|^ z?odC2U!G$D(|;Vg@9U_er)@1F;7m7Rw!3T)V8#*539|#o!f!q$)X{J)q5l*^`hA{w zZ8~50OE8YOHDnui)hkVr6%ij@s}#?q_nfn_;rHxD_?cJHbUpHEi6gL{FD{Y6Xt-QX zTtsk+ta<5o#Ysu|8YTdULYowUF4EY}(18Lh3d7l@ANAkb@kS=ss z^!hpt3lamga*7bULQsYSLQ$5t#Iqxfo{W>0${+t67r2tQ2q8&a2AR1u`1~F$vTr;Y zy!9%?JAc+9!6k#(KJY=Ll}dQVWUD8{DGtG_ST=mZZT8tUsz2IagR>yGT8od!?JpW# 
zLWK%HM4-VBFqBaivG5#DYZl_|x7+(?J!Z4~WAt@i}u&rsDp=evILfDbH>#w?|6{{mKGahi!!6n$Ha;6ru?DilP zjoStB_FORW$Mn#y*6?%9TfQD=R>xR1Z$y+K87Cp2M0%BKg`lYr6abLyknj=M5%ePI zwSTp7gUaK)+4#~*lLTbogOILx@4EpmTXG@<8RDn$LOmz}6L1-jW4ytM9NN>!O5&3& zY7#c9t11F072ZIbTq*Ad9s=#S{!#4B&Digm_6Nl5z`gaW-&>=r06qN7Ue`WM-9b}B z!aOh%7)eB(HE;iR{%Y9mDXW6o>uY|4Zhyq?yY*U-w_kc;w2tO;MkSYQv$4Fi!ZOGRHn5OX#&|Y1*t43~dQOI8KYcOA$)Wd1Dg&zu_cHDyxu^7`BvY zUGY}==*2cH*{y-|C!T5%cOo6(Fc*d1H}vWTg#Fn&D_!2Ojn`K9_BPlrfQ2O#wtu3` zMQ8ljdcJIej}%(qAdp4YWUuaNJEbby=gY6($u z!bCfm4~}FpLn}#;WXrZ4j>e9##*ms%?~*`EYAPfxrnZ z`f<+zj?2yn(MTfeX^vcST;717!~$H=HGhHi9OPT$?h9lGZ{2mA_Gy9MI$yalcjh&g*ZSOC--E zmW&P59W-`g8KOh|{ib_H?q?Cc|LDf#)E6-t&%l7I#&er7!UCrVTB3P+z%M;bG^D$r zDpl0H3nrm0&^49|p}Ts>(tmG1mdp3q7DpgrvlC_%gFYG(cm=B~{;qGeUzXEfO{GD6 z9@zOAa)mz6Y)?6HImGMpn9t$+9CcO&ge&5kIl zqN8xeL2X}fVa0lh`b`7NiUKc<5W~bki;#{Nn?G7t?>V{kp9?JhNX}TL6IjE6ftf*& z{7Y_xB|^N1NRftsy9kUyF>FTolg<%2)^sSY6Yz9O=Zq5T$Y(r|Ti$*^OkfCDQxJq} z?9;>g{0?eFazwaWVt>m%75YEVf%9w!h6z1kV@MC%o%lw0P=8gngGzAB0B_~$9t}b) zA?F{&G9)GYDHOQ3SDEdR{^wKwZ2QJ#=cn%Jf9d>uA5X&WzBi*YU(MGwRJ`?Y?I?du z!S4|0;8BmEenr!G)W>8=D2r!Hy|qbq?(2$}*dO;n@Ps!&Jbw_W_VNy5O{u*N8txl` z!C)tSxCWFfqNB=Ob#IAjQ>OSz)i-5M|_ zu3dB!z8Fjgp;h5XXiw}*xo0>St%F1LA;&k&jo?Z}Rxu0B-rQsKBE-^cZX&pww&v7q zsW5&`J1k09$bYW&AZa`{e zS)Vs>`rH@p$c+n8M)M>trrc{r5tIfJjua%ceFc{P*C)b8b%+&@S)vuOnWZPQk{xaO zb1=|7!aAuuSLZ97XNb2?PFeQm;dnt+cC&2WCT6RIp&>*%=Bc@Md_ilZO`r)^+AN!VY#1~h&%|M@Vw_uJV9nnex? ziX~dcX>JYJhAg_+OMM3XF^9Z2nu_|P(INazz<=m2C}V$8R6;M-`DZJ|9`J6B!dR+44|T1(B!UIJ=+RX!;1&FD z|C^eh4%aHjf4&BKAuJ3dd~3s3lXm-%p`fgY?(L-hG-|cnH~a1X#}yil(?$+V@AfFM zAbRwj)p!?RO~@uv#Qr zJWFyGH|)o3_@U^E+gP(*G63dZLE~pFcx(2w{!T>Ku>(sV&SGF_4j@B!G=v~EE}E4? 
ztAzPAVOJ+g34j%heQ~{M?RUNGAh<8ZC+j!4Ti#F;j^u$>Wi$M8AIzubymWQ!gyph!= z$JYb()YeMB^wDFC1pckv=2L-UVR6ntC?^siNy2G@29SZHRmMwl zs95z|PvS4LH*U=Q#Q1|oi+>=O9HW4RAj$EQX1jQS3160_Lem>xWqSVuYF}q}6X*oN z)64Pc#U=!MWEnm5{Rw{Yr!{v{_g&m#P({A2BkwdOvKb(NlK*4C44F*|A^vH(WU^hy zQo@L9<_h_BQGvkNT7SGF#>3Qri%xi@!9+?DM$7;1ki-(@FF}dV7 z1vS5sDLAN#JDODww}0vVM8=yxEaD8Dybdbvia#2y`!;?y5*|1-^M`(XF^+u#H4ko5 zY?B|0*1NzMPX$4m!{CD%YXUJ!3i+)h0L(yX30AiOhqR}U)cp4`Sm)PQU>}0ZN+OyH zKciG|V-yzcy6n6)vy9EVpT_i-bu$poAcsa9)FXM@ArA6=sDEy(#SUkR8Qm2Ux{2vl1Y)U*{?gguneV;pkyixY z{{9RX@5D2gPk+MjKH1Fu2~Ag&Pm%EdiHb>#u8ItjA~_APOqy<5^P*-!EcX)vp5Als z6Q4izzg?=TJ#0^}y5i+`0j!SIZf0#{*9GF!Ezedrb~#gOADVrL4~M?4WVme zs7-D@Lp840LZ2hgXP-(-rB*7k(7(`XGWb-Hc-6ppmw$rb**-EdS%)e^t6>|}dqo}~ zT50hFFn2jxc6hXJW%e9H#XF^^(9*FB)Y9ksBD|n4S`L?yFfk&I6mLLg<#~&3l(4uP zVU5Fe#qol|Zt<{#X&@*D3Bi1ZyogN34y$A~{0-tqT>`7Ni?h zYhi)kNPmMO7&AZwH_WW@Ro~vAk?1Z;*g@~*a>Nzq>kHbgNa~y^Bhl>y=_!Cc8v|%4 zRtSB9poK`IxgknZXWy$ziko4?1W&z<_Ny_sLKIF3lZR16P=y9PDdwDjCc-Td$hL(a zMycC|McYC$+6#k5~JJt zt0=qAyObU(b{36s{&sqj1zK)Cv{uONEhmAU!OOwViH7*Irbp@KZ5480{w?Nm~`It4R z6MuqzJ?jiHhD8|G&pV9(}>=Bz-m?5wqJ1AGQwy(47JZ>*WY`&kw zERJ`?5F>3Jh1>qIJimuMXHkp;G5IRl%>PP78;XO=^HatG?;tGby10c-#IKxt9#@V! z^>%eM87=JqlJwC5C6X60My}spGSSB%J;amp|&>>$e{q8q7$bLrd<;%d;P5}l~`Es}7xB%&>4 z(5-XzlUN_c&}$Z=O_+aX;?y#ZWk92_-W23utgh?#k28iW`C1wb&JF>~@ZPZZ1&pJt zOil4hW=5ib!HnAecFMnzAz!j${?8n}N+p9)Gt*WF+HWiB3Bp||HF49pR z5zBoMp5(L6uf6DN1sv7tfmB2670^dz$BR+KHi36(J*CM1ljVxzEB zJ060(CnkLJ%vIZ&ic?OsZXvEOj}BU{Z7M9oWAbOPyIP_*46q@U#CL=y1LHiF&Bz!S zw%AZ-6b#28_Q85+=}u~DIsv%FNVYEADz=TL?R*3)1*j`ciE9c|;(u##ZHU0We`$%4 z1^qab)c9v=vR_}8k^Q&Z4?~%au65yv*z;fEyR=d362SV#A?`5387G+~4DaKG*4kCN z?GM+Fm>FBH-V+ud(4-QK`KKKuO)=TWsI9U<;c66!D^<=q3ZRHGK~Sh{|1gM>R$K-h zM8Cfd=)b3)9YP)PA%9k{3FN)KlJWW@3C2DY#U*zwGGei=e(}rB3e?kZ+qW{u|01~B zCrh@OA7h)_fKl`2dw#H@!y1MtD+A&w<&wDvU?TErOcC;dLxeI(<0{!wSctl8Ki0N? 
zo^pSPwSN3Ut;G4V!=Zm5L6Kv1k`W`%5Ir+ISbRt&uF`PO-+xv0v6({Apa1jr&=O)Y z((`w{XP66K4(#F3VyV8_9=H?0BE1&nT3?={MQurgXMl3?AwJ zWbx0Ru5f2lg;iCp&jj<^VgDGJSd>Ay#uz~&ic394q*S0QHGKS@nkdTV6Yl8r!YaN~ zz353@ssyzCCx1M@60dD8uKUiti0>8mHxCXqrwfYl)=<97s~;!4#gC~f?gDghg!UAt z1S!mv{x$S1cY!bQD%j5hhY(r&FY)(V9g;_|K0NQFeLO|$G-x)i*hZ!osK1fzPl(cM z{faU6-*Xy|_IjUZ=d+UwmE0GW^5piI@HQF6Bdm!5=YO}0&5|5&DjpPvQCf5=9i1Lk zXBOvNN+-SW=+f{PGQMC}L<6b6WH9Gqv!hdzBW%c_CLz%K7;-j=a{6rX&Yzr%wZH#6 zWAHDIy-C5+Ru>*G0;+r;!9CGDTecGQ&^|Hrbneh^a>NRWq-7Dt5`zcLclaBoy=*TD^5H0+%)Ot z=rr<=;*3e^dMqx9MWbToky;^b@_fE@>#Mbrp@uUbHlB+~6DJ5g{t)jOz+n*dhZwA0 zKy6pZD{b-+(ac%1_QQ-)7h-NfX@+@~ZFLMP&t6}~kgw8=Mw5h9%obcz<@&<&DjLGxs1=9i%9vCow*v$skqtR@*vW zY&Pw^p+gs>vD=Z`7w!7&JGeVow~j$raZ(i4WyuUkatP%Q7KJ>yUt)CGS~ zK?+J4GIBZOiKUr^pKd2yaO@3CoaAla8AU73ZbIG(gayDBR)Q{qf_;H12440^F1TP(|(_0 zut$yk2Rp9nQ-)Lc=~n4vOl+>7i_ZA3G5}dugwx+N6agyS*{T4&K1O9R0)K=r_V3eC z0Deivc%;X3F^aoG--Ra_GYHpl6Q0Gt?uZM(X;r5f2Ngr{Q|^fHMdG57REqjnv5i}N z)}06bFkPFaFc5>OvaE{Mi_;cxgnsKWxt$((@ z3d*=+L1kq3F?C}PGxs;SZGZi%-M7{>V9h;Uz0Wfp9w(@~tA!R{ue}`i=h379E7O1V zrMTz{v~a{wDGY-bHWYqIBB0!%!^?CFt1`FpX$&r=VTksP(UQv3m*ON|qwk{x5wD@m zrY6g9{oO1 zK=6M^MMWIZ*?%#}&GwKwjcH1SaE3U1Vz20J*MvIXgU#y*lwg`j9f{!CL0ce2wn`3x zCxf7+M$8mXI97hcTo;fj&r7QX{wI_aFmTn(7g-7k-$a^=pk$~`);*ZZen+-DWQ)Li zZxFxx2c10rrN&N*8sQX2QcBoUL%>`4vSWM$FTP4K&VNrMgV;anSxOr`J`!x8T;?En zDLIDJI0AA|)UPpsfspA0{l{oV1&MD61^Ikw1PQJ@{!o@$O&>74kaSl>Xk(P~1aD9E z=t8^*%*rUgS3d3L!TBD_@QZ`-1$-0V-A~S5hsJeEn3bWqyHFp`GssPckux`4vKOhA zB_(l~dw;Mnf}No)14@AMw@8DX#ZN$=nP>6qofU@!hstJ2KxE_gO2V8Izk1k`672qu z4A>u#^klm;KRyAwE4=cfi7Nsr-Um#WjwfP2f$_-EM=i^UN591Wa+asj6(q`V!PL$s zu$o#+*wUoPl6yuDbE&xhr^0m~=#nJ8x*0TN`+w5znU}(#M=_BuNfBv*wEf#TR{^Ds zwefs+IQl#K3RKoB6tJM$I|Ae$>>{(z1*u<%50Duc?73iFO1i00t1$J-kYkXbyjn;$c zV8&V^=mDcv#1uOexm@LB19a)36DAPpD>yn4F+s$T2$dZ%zpuI%9Y6*!s+&@ybuF9e z-Urd-R3#cS$P@RAwVQs4)o}v#*NwxCZhsk%;Bs7%g|P@f<|9gU`%6H$K|u)v^ zf2v~z$km4?nsj*7ezNgdVl(&2TGzfSnBW0|)z1bViyEA~tVqP(TLR0XQ_8E0CmrKgZ@o>*W= 
zlS1nk1QG?&X0ukDHK&VTXOfMgkc!38_aA67;7du!D{`NI(Hp>ahR%>4p8D%fbUB(2 zu7+Oe7dSz|&m~*Bq?BI}*T)QB5%q3xQ zx4mg|w>|sze&6wi5q##dEB1Oz5&+oSlsc{xm4~InkMlamF(c#qh9u#k$3Y4J$z>H@;(oHLkldJxU6heK9|3TzkeQupRbRK z_d9uZxvd%z8fadH+4wQZQY^caYq1Isgh=$V;qr)U@3sCe)t6d(7vCN5=}t^Dx}NEB zd>&jaRJ&o0#aP0Hgymj}iaJ!GfxAA4^C#I5-YP(|u8 z*uLn?fui&6*)J-c;Pa-zlYj7b8@KB#1XY^)FYa0$IRkOqe>jg2F1FZr8%^cz^N1U) z!se}9N?@agj>?VXcJ4t*f(1sXtYQ`g(+8vi66${Dmn~AIY znq+mop5d_R-X@Phk+Y@VB6D}UL5OM+gI3QpCB2WyN6*1zCcCw>4Si2rTc$f9_sZPd zHXu?Wil4v3uEV=yO{xpvs>h8UmY?+LCjbDgudk2i_P7^oz{wDn(&{U`64;(F=lVQz+jd(0)R*BBt4irbUck z$Q1vQq5gFw$JLekUrAbccsMhCG4}fU{Ygnl$#hj!RgKSjo~okIE9UiMU3d33vtf3^ zYsw8Qdxy98LVxu|j4EBKOO^UUD+v@_cD;-Sh7jIL$W*$k%&kCRwJIH;F)mKQqkHDF znQ~T(tA-rdH&3YS!S#m%KK5;(X%W?ma!)4T5=l7p`2k!2wIT zwgk)deAe2uIv5}S5AQ$1WZX+p@XLIaG6DKqq$TSoO@B0~gio|oaR$6#th&7pqQPQx zvYljCG&HV2wVS_yL6KdIgF2_p!9m>Rm1W1BPtabn;`HwLQpKn_+w4F+16|%^hze%9 z4f3BCZ-_EAcDyAU9fSChA!AmGswgR-eLq{KjVj9=2SSA^RGsQi^0Bf2DhtQIU0mFl{3uK0= z>VDWxzfZ2>OXZiAhqJa3VrRWRNCtdVlr&9EZb)m{S9$*-8#3;C)SCzvB2d zfQpr;P+eW+tJ9?NO6BMcI(B|l&DRVu&DGY!&HafUm~&+paZ$X%;DQY__xqCoXRGaa zs-m}IBYN6GVoRHs;n?Jr%b4)u^oIgB0V*VFoj_GY_6KyMSsG_wG;VS9y~XXeTl1a# z&VQr)=!76M!f2bW0%UlKkHDI38t8Ant01b;E_tGi?#=nNpRbSY<(#p`N;FvYJ{Mu) z*x1ia`-8}lTpFet27gxv_nUX<80N?*thcv!B?e4Va3M0ckW8^WQYXFgDD^xv5vJeb z$|=KEi+iKrSce4Q{~={RDd7>7;|7RWWq+BXfCVb80frs1MOw&oS`=Z1N|PKT`(p;yL=(&YqOliPf34plEz4pscuVco$2W4z4vY9Y!P2x zTnGyD@Vqpp_!Ff%>$h!<3r6)kZkP&u+>%S<;4u5$e0}wSc;nbBQVXQ@^)2#6G=K1- zN6=WA6d?CriHdB4g6F~Ys?|l5|4Wf@J;akKO2U&%76J1s4Sd>t6Y6^sWX*(JDWnVQ zmM&Gp&8XyI$%9kMQa2-StV)AeH^%eBc()ol{G2NdgKAb~ie9HITaOG0pgN~HA~2s? 
zlS_-Dv7xK%wuhr*5l4u$D67mNynl)f3=B;7_VW5t!@M8cki<%Lqi=chI82njG0@ll zVlC71b02VgD~^-8W!wKlA@a8($s>|UBZjHwjLPOI3rr(v)d3ZjO~Z8K0g5PqxkkOJ zA7GwmDvdoel}7q=@fOoz@!>9u`fXA``G15Rm*;R;5&z8hMQjhAKDJaD<9_E0D&@&H((&^P9)#5mAvaXE~;K*}(T zhXj6zi%cm@;0Z%#M3pnQ8)*eL1)H`D>xL^1Lxqn+-g)E%t>S^wb{tlQ%X&0~sGd$_UR@?y z5)-Ws3||CJ%-nn)CV!rlBpBO6myRw$hT24dGU{Z?2jy#*2a9#z8%iT`4h!GVC}qve zLrGdKMdLgXLMd}EP$48G$Pc^y&|dlN>&p)_TCzNMIx8m(QtQ603BCJm-A0BAorl+1 ztXjjwBX92+&LhHco&%^JzYuu%vG^Ha5NhGeFG0Tfz=+J%JPGKnYMy7((zZ?AWu1!%IIpeHJr< z__Kh}E<&&*%9)%`0cn^#&n_y2UF2tpg?5%#Ma#|Lki8-G7p#b*rA3sUdJPpLm&4qN zw*2rgQwNIufrSUTj}h+Nx{bp}e-1MbeER99uyt!Ub7D#dClhb7f{TA9B;Wq7uC9-W zAF}Wgu!^SOtZTryI=j%h?*PtDWO1sukD-;Y1*~F#as=&;T)PO{ z##o(Tvo~SJX@|t3jP?&S1&GY>5-0s;CXi|A#Qfj>JEoraS>(l0yd{ZQf9@Y!jO=q` zIiXr*B@H=t{FqR#hWWX?D7l~f8{UfSJ45MPL<%}PJKu5Z_U*f3NVEC0A22&NJ0U&H zx5jeS6hmqBEw z-+u+CPM+jCtrsQCZjdl55klc3fDaTmP)R@i%+@ViJ|l*-n_mrw6bJm0z%nmOc+t?t zL3ZyMngiG>EDvnmdjPjG<#_qzDRxKz*ov61MMLR&6J`Lri^B>+a%;+Iz z7aGn@!w08&={GR_2mg$D;WPT_aX1cD*%cA6+V+JWcrwco;l3aMhYubk&@6?9fX;i# z4wBGp&%|6&(vnz!={JvzT%Qy}+Rd+qL&}rBAWrx{$mi!mKA2%8E5-#KU@J;s8^SUz zdmhKZ;hQ*rwI6w3e}Xv41+~HDc>9SJV|qqpNOMj7y>%?8d+KsSkHU&o$91ikor2rc zhQ;6i2TcF`A7NSg#-2(G&P={#9v+rY|8Zz9*-aj*(oh=Y@bn@ObHX8-WtFt=?V z5dLgwK|?gFUpUKbPDF(wRko7bkKuRV+BKX!d4dnyQ%{A$V=MDPzE^QV2qwZ zKnIhISfv8aw9&Cqk`2bk#<6Yd7CiIJ(;+nTX5{-4e+n9$6QKTHP`di0;G>7duv*No zro&2gcdeCK_PNp;O65A@z8x4R1u@S}h zbgxF@Ook<+#RqqG*v?2|07EX^+%yY~x!?as%tE)HEXRkxP_TP41_;p4Aqzn4r`5C_P^n z6VefaXk{S8KJjzN{@Pz-vGI{a(v8bz>>P+Ke`b<=Pn}Pt)pEfSmh-6-C;4}Kgz2(- z&mQgYaBwSF!i>Hp0DN6-?OUWfa!m}Y$^2?NynIDa98B~l?ryGysqWWR{p|Ao2pu<`X05zKf>Ht{fQKxE ze@NB0x6Yin-WaWe>GdrLcIW4RZ=Tc%4Pvevd=gQi7ohssW<$1UxhtPa8U9r(|IYG3 z*~e?r8$fU6N8I^?J=yy-n(=D;;h$$mIa6z~x zI*^#MajCnP8w?BkO&RL9SyfUN9Np4jf6%M1Dq7Vv=jT)Wr!3Q~4JY;%QC>#I=2sju zl@MiFU82y~azT&%5%i|<^cKH&FV>zJ*26i5uJ%vRB~t{;U^NiMC=fb?_M}RqrW~o+gO|O>_byHqDTKH1Z3o@XX83q4)C*a_Mf} z){-&8@mMG5G`r1^&C;rC;jF#Fc@CaBc@CaUa{C`BfYMSPARODK`dKAw5V3Npm5wVB 
z4&OWJ-ew$ksaUWjAu;yw3+ixgL;ZGPG`71rTU`NSlR@^iCc>{*VL@)gi)cpI+W6aIK z`rJpehK}ttH5$(uJXE8aWCHyXwyOsP-}y#umCE`(ENpD#6Tdn1rn7U16pk2t zu%w?}0#))|g)z}EZXTEyCg2Zc?w`+>mzO^kO$wv(u zGiA_gpKDLVw?4iUVrP(bL(T8D3ss47<7niXn%UWu&D_$27<2U%fBj1$3fFp#pd6ik z2S%jb-F=#!>Jq34{p{c3z08MPn(&Z%B+Ns42RALITnfxflvmIq#r(Y*=g zx;`hZK#lkL%SPKZrye)D3`pbU^5SA{S4|^E>pT9lG?sUW08Q`LhLM)*`ht&*&vKTTmF1z3vSCd*&kFu z1AwsGporp%5!BG=t{iktA|C56VTX|E=8uhA$C0(0M0@+;WiQR;|7z~y*Ew_OWgxHi zH{+HV=t${d@h_@+jPx9a*0-f^zw6Z1L{&_vXm0voA5ZCZf6G;mlZ3%CPH0~C zTd+`h!F0aTf1H)cUx?)5U7AdZK%)Xi>oR~vH&`5T%7nC}>l{6Ip1sRnvg_1rmXkXx zdpatH=BhOQG7W_o1%Y6%=F4z2y$@0zlKy;%EZys?*Y#dB+t(xpJ9}7Ou0x;^wx-E} zo6BjMhI$Ylf9DGB>F=Tovmqg=|7>ait+J94aF&FOT1BxPUL?jAz237M9~`BBpN#cq z2Lhwg{?SX7n}1na$rRvO6jT_-W#chcan>9>@OBvBclMTh@!d8j-GzjL2L9FD{(q`^y5>oFH>s9|O-`hlIcHW^kO)iQbppDrL6)!SvQCtv_PQ zo>p$GxMAdWj}Y(h^pdQWW8`9)Z@BM*_9|!*Lb@pIeUBVs%Hn zyv?3HpI@I^(Xhwhr0mvA+$uaixOMcH+dejdCPV5Vh0#%lDZKa`k!Wy! z)9yf)4Kuj(y9&+1YA8`dy>hyYMUr71*)U_#Bn19JyVVYBt)rxo0(dQty`>l#gyqWd zf8X8vSW0yWQ=0oVH8u5PMm=ShojApe%}#!>u))?d-#=O32N03XQG5h?GT~kV{1;bO zUxSclptlskO0;|A&;NCdBRUYeSpE5I{g^M=`i{zm^nR=hxh!8lE`F=A{|zQeOdI?q z+&Pwxs7*DmyJsl|k?!?r^0L!8O`Y{he^aZ@P#hhRu?S0HO1e4LatV~_du5=*YwS*% z&Hv#FQHSG1PJDlTJXM+`WXF=5V-S+xxhlJ9SfExdaq)5Z%RdRtS&irZX?stK%l;SPf%{+CbAI&NQFg~8_K<8d_p2pI=`j*f7Dp? 
zSw~u4+$!DBsX&5P8?mfYAyBBUiJFc)a^M&~zqk;X3fp`aipLqFm9lR?)FR_V;qLYI zk=%!Jt01O6R9$W2zr(<#)6j7JQni-=o~qZSoF=EQ0)lHxd^{%B;K89oLH0wYCe^_qHYh%l~xw)`IR!yZON?l;N2eUIGK+S2yHs_XU zL|UtjZ~cKEfpC%Z{%QXnu>uu?>+IGuBdk2{XsQeS9zGw7rH9H2en|`5$o!yJ*Ve^u z-eB<>FCE+Q;K{Qjd*G&7ObLn~pf16|9eag-{=^W0KfpH`iEl%fi{r;}f4-x*UHum> zO zWoM^;ti;z;_O5aKOmG|YDM6_Y7Jk~595YEA?<5xKX}nvYYX(wklDjZR?NO7fWH=pp z=ENV;irYMIYu5`2L`|m~MC1hA8IZB-?TB`JqSF8M<2i=}xGi2ge?5sWG0zK_ZY?^u znxGW_oT-c+wzq^Qy%Eh+{$&OwKX@dg*;ED7{npp`dzRF=#dgge%5cmi(srlC5QN1^ zK+SKe;glrrCJYJp<#DqKkfYZ*R|!fQ=S*Cm*N|)|cDup*N4H=3Z|;@bj?o6)+%4N5vA|a8 z5|gN5CEEJ|6{sj<4w*3om~-C!BQJ8$9N_CxBiTz0M-}AsNTU`X1R)=AvVRqb*UyY2 zljg8g)zdj*B55?0JinB=FyUK(7{YC^R3iykX`u^b#tzG~f0z-t+})4#0h*bAOGmJK zVWg8YL1jj$d=S`NQMq)kwr|5+^|)uctJU3YYL z=G-viJOm>ke~;dh{#jGt(9GK3?)p4TI{u{pgH#~2E0T~buvkc9nT7IT`;EX`MO@rZ zIKL{adPDUyvcHB7{+cKyrYI}6dNI-9jyGRS`Um+X#yFk(dRGkmCp^Ks&_#+Vqsjy@BZ|0Iod^yvU^($y}40AC(ilF9fSRZckz z=U=&N6&R>kG3v+}n3(uOp%w2IlmZjQfRptX_1TPcl^gup!GiBW+I=#?{5Z!#Nc%?z ztNnT;f7?ap+nlPp=(Hh&vs0%h&A)mf0++ltUH$JUB6$(Te#68$qc;9gfk3pu=wY~yoRNPEHE5d>f4 zkR_?94iVl-OI|t?B3z5^@7h?nH2w@;)@9FXTH%4i&`^8uj1AdvDLg)3{o+y+!;!!7 zN?r6`ijWbIq-6LdI*)iNN;nic{PVgzz@4Dg6Pf*;^i^{(e!0#7MD`7$j8RlwJ|^9; zf3ke$Nti2FT+^L zZXTs%=R=MfmVnMlhc8XOkKZKSz?Gnf1trGrR9SB1Fn%`Q3<5?QAxk3?Z)u3zi&)V3rKJ7 zcJFCVPKX2va-DQ)9?-V$eQ(J}WRI%u6P!5*(CdO-*8B0^&z~1su74T$ty%1Jp10^f!0^K-e^+~ak0TfV zs_XfX3E}N}n_xD!6kM%re=`Ia1nd{&m`0&6U7Pq7Rh_?pN!i89H!KIk(BrqAXSW`q z2?LU=pQ1h_%zvG`u9E9EJ01F#T9i%`gp>@{PYBF&tm4H%3iM@g9^k^7I!zt_w*6Wh zY~67?HIf;Dg_AG`_x>CHe^KVj?)M{a6Sv8|AxEwBu?>sYvB8*sGz_x52tdZYF;ICy}$4WstDAx zj|@L!BQs}V_+>Zb!WYWxx+4kS*l+TbCi;A%rMv6Ldsrtkb5+*Te*)d`LXEbrZojbI zYDE^#AI*Lsk6)iUzx*M{*7l9n)BPb~xHrA;lm@k~qMbJ(Qep z4+8bd)EEMrJg95Qe?@6xq=;8^yUjLQ;&(hDWWDGnXu6&X*G;LM%nmAXqyh~gk_Hq4 zZf{So&8<46yqSju^_sDKbCsF(j(0Y^|!uIx}yCniu( zfD9<8FOE4Ke`-Si38XA^)=+T&bKHNM_scBm}6;@IoyeSUS<^e+Rl#=X#c^{!8>#9j`fBO)M2`om){FJ0E8Uo2AZv$-= zovXX6rgpA1=1ziWj{_z|qghB!re)SEZ{_%cNi)Ckv2(R`rE|NPz}D7`WaIA$Z%ciw 
z-Tz;B)@`>3`wsIYm{8Y^+^k8t1LN}#e(H8RqIVx-6M>urN@VYWZx-?(-5;*tk(6{3 z97-U#e;rw$Qew25V*W2?68=nq?y`BtxRS|RNr4rUe)Bb??$8Q0dlU=mootLRS3v}* zaJ^Q$XIA4Z*W9#P^Vmy=>i2rQGp--f_i5QR==+YEXSWdmMG$cE}@i& z$z!vHkLrg%l7oaTj@ehzW}aw%p1+SgDac;|e=g7Szy8?C!8ho*&(^gsxO0c<>-AK7 z0SlLYDDjSou$`VJCgJ=fl~$|INh765)xHh=4dUd~s-g)xfv@Cs@`S|+Y zaK7c2_23;LMUyJ=H8cSQE5d`WtL_taTmOpul9WHZ6#>N3(sVG1qP`AV$>7sSeZ_T{ zf4(Gex*Jt_eJLJ9uO_#E^1^ce^!QZp-oFz)%OMjb&)pUFK-|`?uFt3(_@;29JM*!8 zOPv1|`0Tgm^uV#&z`hdkG9RWwx=gu1PNbVTW zCm7rZ&K>lQZl&)6L^l%H!mp_Q!*A5hf6OlGm?XSq0-Z5aJk6cV=HGvtLNJYIhwA>` zbDD0Vb@6mo60TUh>PQ*X`e$UXW;g}Ethzjn?)xeM=|%P`)w{dzru)srrBH#izqD4AtmTgk1u@?+t5Y~ZFsCSPdNeRHa8x& z=u+Rz;swLfJPzKBv<~f9bkUZ%AfH$(*tmPP&pFctKB&Y(x7YyFrdQd}(%< zb2Bu#ux2JdRemj=Qn^?4J#`Jg>U$;Rrue>RUg-DVu# zH~KGX@c^*2zzGjy16U~t8h>;n!cX)SjkXD3Djhu=<4?NpqlZ%|7x%k6<@o;Rxq>+I zM3=V4NS-a6LUJhn%JKr&5nRCM?3AQpZfuE(0N7KZm9~U$2N-5>n#kROyJZ^;F>a$J z#mM7V-#7&kIqkajV7USPe-za@v(o~cI6%Ewpp(&19{+UnXc#=@>wzs zjW>-C|9wecc{@lJZF*s@7s%V;f!cmmFNWCW?|k%JZ?_Mfibz<&!GuKhHJ(pf?a_w2 zBm7jT=6RYhwF|95t8HkTFo_u^4sR4O?V~9blO14&q2t?@R#+R9Z;hxwXUEoy}&^b?cmAkJUfz-5)yRwUydueg+oeQ5IoVhoJ z^V8WjRZ>%xC5@DfC4R#5`BhV`%K-2SQt`pirkI3!Uc4l~R8yv?!!hU>+{k;lC+%5< z_PC4=&j$!}x+jo(>YyOPv)}RM=JM%-X zSjoXN`#x{o|M>!Vb{m^h^>kp~p0un2FZ(H)7O@dPw zHeU$dgH=eVsLV`CQ3jAc@p{!xV)C+liOc!HMQ(sEyoaZ)fAp)&)>LL9sdzBdQ2CZG zUHTbhY4q-;cEd;UtjH>?CA6O}oc^2i%S}G$_(MD^D!RMJ5dcnFc#EH?O!pmX)u{8X zgSyH~e;^knj4^Wkw-QYu(-N&pgC&RB_wVcNIY$_*c1ZRva}KI_XZ&b`d&})4pBQS& z_bdCStCN~hf0;hL(?6P58D-ol?S%$TFUR1Osu;j6pETHYEWaxL_ooe?))uP25^3Np`}bf4=`Lok`ti>)g2B z<4O&B_nnQ*Z~5SDTP0?8yHd7A9DV<@)Si@~#-o3%!V}egZSD(rd9ORVj+cCrK43PS zW)HbuKOqm6IR`)v^@0BF2wUxGke(9@!&KALL=6i<_b&#zkP3F{IK)oB5rf1{JjFQdch#%~jYujFb_+wTqAcKaU(d^4L~ zx43eTVoGB4EKeaivC!EJBHQc^#Slk3pF=yLtYB?g@=0{0Y?}2D{#m`QdL5DgLLp;m zO}z<=VJ9t4;V=sfzO5w3k6-%pues9b_OX$p>#&QP9;iKV>EAC&zv4p_-#X&se`ZJJ zBdtVRv5=@>;ptqUiqM95Ibo?szaB9DvcwLw5^M7qw{0_L4TG5%42s6Ij3Ap_%Ncxs zcKX%ymTIco^JLQRAaWY#&SvSeb-QL{w2xbzZd{P^xj8SJcN))qOi6W2*MDlk$h2He 
zD_Kee{3$CeHRdOTES%wsapAq;e?Va@_|h=yfB#na9WG#-hTLQA2yu4X3oPK}1D$9a zOgtQ22^dvFjQIVQ;E3kFSBVfI&F^PN-cQ&MsZ7Yo;Cx#Pxk-0+J^T4JI*N(!+fTaa z{ix#8ISLee-@T0Mi;QjaY;zw}Y60AS>W!)9TBkOqF2roa^`S!#eT4O~e>M7kEQ!5t z%-v`g#dmKi*uD@|aC!=5qocDF4u#g1(eD@1M+_fXRjt~6`35s+l6_N>=aK&+txt1F z4te9}?){-@)+c5syXZX0)$4zOb`;aSc<00E1#@e7-*#9%bT7FiB|pdfMsma~m(Dj< znFpXSXB{Mu$0Vm}9R)<@e>|qThblzCM$Db1`}XY;vfl09eRpGQ@cdJyeIM+(<+}Kr zo?Mj2=f&LN?$C=JAvI9eOicMyZAfs;_@sEkJ#veQTwU0afQpPf{tEmP`O?&&ZeMgC zI`oP%BwAEUN{sksU?1zuaO^1F>&TB{YeVKpjv>3|vKO=u&<-*Te?wbd(Tq(Xg}dPN z5G%zENy`1J%uUpO#p)#!oq%$T&yU!ECU)1Y`s=#kcK2nbD!Q&AnY4ypwkWbxSR!3k z8+?zJdDqTP`Y>6%&(`)ZcMVal`{MtRdmTO1 z*cMp_enA{OI&Q$rf3!-`k}XZFmmwE%kJ4b2D8u%ERkiN&FY+|@fFImg>#2C#>Hg)9 z_A2^84n5~R^e$T5`>TO#?cOOM6lmIs`O2qlx+*BaHx5y5#M5ehRQs?%a&4^}YCm{9~a=#TVn7#ux$ z`@pFFyzlHaK%Q*JA7+3`1G6+7X-o1X<~F!zJ$G94#mfI2b9y221K(MS?E< z@jbqE+2F4%f147m`9-=hS`Ps#9tk=mqCSR`N0tU_00~rjHP{#;+C?RR8m(!&g1&UOODM+#xJFuBa|UhjM!b- zE7OBCf6X%V$np$hNTyU=At$cn1kJ>~Q953go^z~p(syB6Ty2i8)AactudQ!a;KuwX z(kOSL0P>+y4c}pN_qJ$xNkR?yW9A+0Bt66ViJDWP_cAI_ zUBPZKerM93s^DDx3ylG}_*Ph>C`jaw(?N998aQci()p!0Bo`GnEScQ8?hDMlRCqCJ z_!S0N(v7VId3AAvAjFkl{WjrLGlN9>+$e~cS}F)gBtb@#442C)_X0md*mX}SUQytQ zeKCo)OWPhLI8*h!F8;HJnY0;J1DsMk>XBwZ(Bf0|76 zu*d1os^wcBdJpsR^FB`m2Qh1**yRxO*C{OdyfdXlc!2QKmX;EA z*t29PmZh|#<52;RAdXCaT^w8(?0AcCNt96BdM2RmL@D^FgJzue3xy~($%EXB{@9r! z|6UhpC$E)i;|{?#&|M*C{91{we~9H)*Q`7$=^gPOE#ui1w|N^6E!kayeAwivM8%x< z_t`{DI*~GENn#2!b>Z5c$SS$mBAlbl`Pw+RCO z#uhES`0|@Mrh={f0~yhNf7+9Ct`jAqhnI63UaRmBo@8hsja#{gk8it@cf6t?Vono% z3G3IQFto-on~xMf!AUKXEyY%*v4~1QFvk}dB0PG{h(TH%nd*`Bb>)c~Tb5EOrEDFX zucTfjWagBx2=r9#_UQIys7(hIru1@D&71dFHj7-?Ra2+#!c&taf1$VbJQU_L8~K41 zFh5^%VRrTX8r)yYe~U2D!hz*R1w;72G842tqTMLfxIAEc&ITizhu_|FvL{~VNL(qS z*tEBW)X-(zQ*kWsf#wifirljDk}q8m)2mHhyi86}s?N=>!s{bA1*6~3i>jE?ZiDuN z;gh+9Wi})m=CN|`e-nXVZP_E`Z~CI-Og~;t^T9K`tJ@z~tE^4Tywu?Jhj64FA-J(;I-O zXpc?qhSUAr?r<{RX9VF2Qug*66&We|A-UpGTU}U8e;y*(jr`i7W)f~f1z$iEEq+0! 
zg~n|Ua=M9Z%N+412BcXj#zUxw)q|emDzDf z!TDt(O>2Jd$&hWUfD263j-8aXEJDRfjJJ+DUI1eFUE26sG+K*WyW7tD+0G-Dru|VY zsa)xwe+qF1;tC!;eFGb{k&thiTJy{!=F$XI<5$-n1agxMd#iCu_;q!cyXB`9TQ-V4 z)P3gxjkQ{?`)((0Aq}Nz9hzic;2E0fm%_F8NsE8+ev3nx^_-5j^uLO!EA60ydY@E# zR6P8S#7MK~aIL(TFjm&E7p#rLL^{rs$h5GGfBEl(fyOd32qMqGPN0V(eh5kn=U9ov3V1-Cagv8hs<^=#uqD(|`A1V)WL7{#=Z|a4 ztO-bVfug{bq5|fQ|X5!k87+`80m8&Gj?3x;SQ)zOTWj(Y_i zCzfWU({{sD;L#qV2+tj%bYp(^IwcOSp43y+HNA;C?hoq!`hzq%u0&MkNL)d!`YP`v zh2{^gJ8#f(M+bB*K5HHP-*!}MA2JClX|Q9Lkd$}|S$UfZr-rVuB(g}{MFf0?O- zr=MoR1Fe%Y?JLNL*M>9HNbRiY&Gs>~x=!3DzqWQFm@)`E*5l`%6U?e_PfiRu<6pg~haqPGVYm|ZatQk6E`l1cS$`E=#30&40qXk< zJIG^$J9Nd-25(n}Ban;3K)&wK<7W6ZcB<3UxmjTPW6bdB5i~B@o>um*f2*QK2pXn& zjWFIglQuJe=X4!ZcU8MrLeOyiPulx!0ouUFPUB+RWq_pDZJ_OI9mx1P4~W!Y^iqA& za)w*@6zdAb6yWfras6NPc`vG_0QiOAE!CS$!nb4an1O;y2KU@)wb4b*NrE`xYMgk) zE6ig5MV?=I)LXGh*%G@WfBa-lYm6EMrGb%>G)C%P%QOzmXgm{MHdKw6(Oas^js%jKMVq-PA9nHuO1@Ale&X3@zse_c~#w_Z*4?%gGG zHS7#KSlfsFX?(V%)avcJ4Xdfa=&T>P@x=0%2E~sqGs&eumUPRTUnqLfGp3x4X;TjOJ{bppZ$BbeeUeQlub^P zFv=nlYQ)=2##_In*wDNMw~rxd8BmUeS*)kd!1YYe-l4P+3NU7 zvORV8hmu66p+`nPd^v9DNNndwRU(;*mJ!HYf>w!N9pfd1e+~LqNrS3hUtIx+3Dqf` z*7DMOt2C%d3u9j!5}Xcuo$S{H&pnw#6i7HbCS|&zvn+owC}+(GrQZOIuB>r9(Jg|UCpLLkR|HmF zKA)8ZB|QF^3I_AUB03prwu3Iiqupvzx*P`v9Id@)k0GIxkG z*=ad&BY*oR6?G<=Htv$B|2fs+E`XDJD{IpdB}QoGew8!0E+xL@9W5e<*7vc^auPs!dw)`+x71+8qx=-ru51=E6nT5G*O?DnC;ca$9VkYJly))g=Gb z@GKj_qnV%szZ1!?Sm3o`o7>yxST%ZMHF4cc?ST%yUfEaNYS^u>_ndSsQ-+%^(ih0` zCCdh{?9C$y`4I9*kO~CZ8D*QTcaEGk>pJ-6e{^WEY+rkUt;){r&q5^owLOJk)U!q} zw=}x;;6HQU{~`5!j+^h<^EIr1l_Jnf;=9`=cBRM8a?o9nDiN}zY(kx>gT88^0cs*8 z#`;J-hzrKo2R$%>`-a$YsJh-cS<&(UzYfgpf)FWXpbOj4XUoh~o14W9csZ!JIN>WJ zf4m&^*h|v5n>?@L5hiZKk9?OohGP}gW4I){xndrbdv=sdca6#u`MwpBrVc!fTpv%d z*v!or4Vj+~mY)}$ZI=!ghgVM04pztabd?FREc1Fm>tG;GA)Fiz-d|wVl{;q+mIQMN zw@0a17>)i{{5ViuwqwQp$i4nG839!ge}DizD}?$Vv=UtfEGOjTBj0{_nJ)mZK23zM zbYs%A4Y{qSg0s;*i7P4FMh{=4d}AGRcS`t$1E^%=ebsF9>2vQD{)r6fAj(!DC7jrn!{gU{l8(r^>fIPD$&I$ZHt?+c}jYS|d-?$1>2nbV{e9TXwSF 
z|A7+Vcf+9mah%)pN3$Bhe;>Yr!e(|z^YDW*^12WSKk9GBzMU$o0#ZTLHfK(Q$c;8< z0C$fU5o?@7o5=iEjSyM03KjR0ArKQuVesk#vP3MF91;IShuF5=gEg%R2;SpY@k5Y1 zvTMzdpY-#-vMh~$oZb27Xie;kERnGfr^?pakI(H^r%Mv@gIyc(e=Ls|SXrF!N&G2k zM;fL$p(~sok~OMYp$BxxHLxg{@_cvg?l6qPV;(S(aU%g3PsA#tHfbcn#w4e?Z#qgA zl{9SCpRxVyO8-X3A>2?0;~LU*Wtu+{ku-1+>X8_;`4c?wysvk`_kr-zg%9jdIC%+wZue`p0mB!$2R5n@{_%ASN8EmKyYSnGMqDKg zE-cu&{c1O#D*YRyiVm<`KD3`Q=(%OS;L41#{oO`=FGsupf8d#B;EB2QWdH8VR}6d` z?{(;;(&o0!H6ss6D%bGeJI$MY`2N9MTLlru?78vZm-EOgZT@{I?#l)Db!s!R#Cj^K z2E>fH(l*xKURM0AWQV)NvA9tGi#qRenwHtBpP{?Fv0v)_m*0uKhB!J+Mzs$by&BQOki>Q!@1QxehXCc+LCy(jkE=7j}D*=6aZraTZ=!^lyp$eNQYlnjN)%lDQEYG zMKVjY47tW*xC=>vmOjnkdm_Uta0uYQp#6EV1u*huXxT4`dL4RphK+ZZjO;!V(lJ#k z{$6~ofCxLY$!-2vU8I9|X3Uc!&GMH{OW7b9>RGws4?-SR5q?B9a z5A~lS*5!v)$)r^8*=frSBe8OLA8{e@6N)fIYl1Xo-75MTDGA&%PDjsW|ys z3cYIdOO69nD(0HTd0vFu++_Ne#9HpDd{*KIKb8d0)(=G{?Yg-Mx+ECSiE&6Zb{S{g zYrnQu@5Y#I?^WiToPe1I-X75as%|E`^^z8LLJNysD3R~Iq9}zpVSwK3OVye;WGhOF ze}%k_0}dBy4MtTKUo_^FbkwxjyIO6bO;h@zBi*|-3ALB$LaHa& z;W~r5mwkV)CbN_ctYB5l?Z~)Z=w;BFS7$gbgc=a^R@0aIqQyXYQ-obeU}}}G`{=4w zS*^9yokU`H7!Vd=w1+d|ry+E|G=B!MGnFtm2^mIB?e2y$OM&_9dir;qAzg0Ye~^#L}g;)AZKPQX|Yxp zO|-gRLt2`HEw}Xn;&+rj4bpyL&VRJo%&j|Q>$xL(8)!H%X~Bwb#P^n>)MNfbZv|I_ zJFRwZyVXD_T%F|U4eCNI)1VLUK=>qNkWU{ zhpzZrE}NeUT3A(T zKO8ZvC}NvEOl=9QY6z@JMw=fPU69`a*;C4c-1V@s2%pkA>7)+h^>dTbm z-y##lq?YCRPGf&5K=>~{j3wX#ZL(pdW~o4P70I#6mv`5v+GgFUnS8&S7`BoA?xncN zPyq@~y)@R;pk1ZWL2J!zf8v>S$Iy|!+zsKXV4aJ2;KR(%nD6YutnGeB5GC_%5e2ccxXi6W~`P0vE`oEgGI}OC8_-p-do4Sw@SG8>Q0`F<0B8}?RC0y))-0)OX^InuDab@;& zU3)4C3h9uZdrsVWhHy$6S}V@OjCGR}sWIQTw7>wFrTZ;jFxEUQ3LC)nWEq(CSgrVH z*lqks9Rn5SY5;G8fBnRqW10~{_dL~6iSv1Vykqe3;#IeUetuZDfAr-u>GOJ+VmO}_ zRb))yS9Z*VJ;k;PDYa2N;Fz3_U8O1@QBd`M!r{Y2c!W?9bQt3jUtkjq?qlh_YQmrf3OkAL#?%7a+z8laois9 zYAZ9y1tugYuYNRW7h55V)?p8fplErFXfw8i?Hg7FxFqBM+(NlZYYg(y`;$XlrtaTb z|F%POX?$0#mQlH#%Kr}e3~KeA=JFiohO|w2h9wngul@yHo*C%7U5jAcRd9V1&v?d( z-)B{m^Cji(e|5-C^U1|`XZXsUlcU!eSwt77LP7%>mh*fUY`|@(Ts+VPjh=bEo=nif 
z?A*oXeB7=GU05%$_A|;Upjk+5)F#}~Zgets=b7>HkW#E}&(xT=Diwj>CN}MN=4@Mj z2t|xr(KJVSifLwHL8Rl;RZt1O73%cylCfK;z*$)B)hZH*ga31J{3}-yASMuBFPz*k z)n$p7QNr>UQi<*9h)a<7+Bmy*8CQn8$~%t53A6kWiPt2_P}203v%AyX$i(BTJGHeQ z&7`kzf2OE|Op=G-XqvNaeaS?+;$wXKV~sk-Bg?x;j7(k=!orBzs3^imU323-if1&=;am2DN8f|Z@F?!6Czv6-3EVCEI zQ%AKee7qcKHY@tZ(Y@V=$H)95y&iF}#wpzIoWDnn*MD#45`!zsB!-##ig(H>Zi0Sm zQIS51mV=aBCyE+}skX+kxh`>yomUkkGUQq~%+O^FA}eFZ^OO@|)*(+Nz0k+Ncz=IR zfBGz<-MOj0yk_$|zI1+;Nx$DhpzQPaMPKcGi}uv?j4i1i`VI=US$fUQ`$?qlFYQyR z{(hB9(>&gDa*QP3qAo$Ac6y?u2hKeq=BW1oTR^10U3zV9z^xLlzvM+aWHg%?I^O!p z(>9f|_ma&Re>sT1S^&Ukgw@8$#n@>D#_7kqAAgRoJF8FD><#MIzz93l041_eaO?zn z>=~nGXjk+C$rzjelu|-XFN?+4*n$l1Jm{i}lirzENXcJBCsB=#xOX0ic4qzGD@1)p zqZxN-pkOwiZ6=K0sQpgw!$}WQpGu?>T~KxGfW*X;rAHA2g2BAf)%dlFfuObivfXZZ zgMaV3pYn>UKaM-i4(z2;yM9vlxmTyuGJqkNPWLPhdx5P+{RGao9D~|ij$Ck>Lm4*G zl}b)vl@ruXVa6Wy4ZAQ?gmK?R0<~6^CAmT@S+Febq|d*xTdca(y~K_oo60t(Vib}J zo82`pxw(eD-O9}A`1j~s@F%g-)ArBzpMQR*7XEiL4(bc>AyE9Rz-Mr}Po`)F%Wyn~ zcZ_x(SmuR{)qDY)Gm0Sb>%bH0&vTpQWr30v52^wB(wBnN09+5X8s(}4YTng9Exd*L zSN?ErR_+^$U~@Fc1~6Sw2F>lU?N#YJseyoHLowmy6rtVhQ7q5_j>T^|7c3*nv}-2lcd}YQ>L~oFgmmSwk=*V35b6 zMU_(h&ScgMt>KosFf=uTgJjO zC&{&lbJ02?9v4H({Wh7)j!*x02}f>G>q+3bFN?b+>hBwp*v-=rd3%$KZ~huEwl7L%b=v|QqYN|^LGKXxrO zK;Ua7+?;#aY<`QxUVq|E(C^eOW!#~a(Gr-<5P&@=Jz4G#rw%~5I z&@GDU^N_~`yUAO ze}zua&4`AZ{z0@0LlAO^K@5wm}qvB-aBd;ffj%YQCjguDw|LLoUqK+e#*AB0 zS>+GR3)L?p!?2*Cmb9|-P{KyVwJ32uYK^8a>gtG2fGWosYyfnV3{>blk4)zxzEIS+ke z%R{b!w116s4rpT%rcTTtvkMvVF(U@HSWr0%hS%n<_bvq}_Z#0DKgrT6=6qQK|MzxQ((l#f)dgTD-*EF%W1fWtgOa~@; za}`UX589a?ZD6^VjGAH8jLE15X8iLL_*l@*E`O)Zdra(fidk^5Qyt~&t>A~4>@~?GH_);8#Y#c|I+jr~f$8`zJV)X7PXl-i?-uw0|pFMQ_ za-e0h6~9q4A=bLrFKR0d?H0~n4pbp4lEZo~H58{bbo--FmS|S~Cd}~78pt(N$;yIa ziGM5{@DFg#adSUW^NChl4E({28EtstVdb%rf^!5U94ogdX@Z2Q{u(T zF@QT5-^+`DpktCpEoato1?(Kgo8acrw*=PxTAj7F`RPL;=WF2txyR$abNQk!_TUwJ z6u;%%OP&YbfD51Azaqz+IREP7s%sgA{eSTqR2yRs|2r@#XMZRuI#5pK|Szey6<=(2t3JD!(!}N@oblp_WT?`rDert_^Uon>4(ciZpzD^ 
zkAYIj1_CNSR2H@stw*xl+6}r~h88zz&^|eg)Eo{}fL8j&hYMRQLM2^fPxqr4aD8lIaE6rYA8lZYAs+WyGe)QD=?jF9GNxz=l>x;vgtc>aY=E_uTJ$6nyN3Ive^nDJw1s<(+OtaL6$ZBtHv+(k>rdJTu zpMB=+F#^u>GNdUUr6%t$T{){p`d{2ns@;yom1rL(qhMU73TXOGM}Ho*Vjb5@pLE*$ z;)HsrRQX7<6;-K6r>@uAU4SZqnxQHUC-=hq4rz>4f8%F#ij zC8aZwFo&2*+VtwK+JCahyuRkK+-Cp%1V^ffYx};Jr%=87zlW|DH*}BF_c_Z}zE{$U zs-~Fovt#LPqfv9NG>y?aN1hDrpN0Z&xT9ouc?@CScHXsmw%L>tHfI_v(T@A5h)|10mr*O5RToMqXt!HS${k08x3YHLP-)O$AGY`uFKw5l9{>Eeh0$ z4h6Q*_n;7gVNhs~ixb8buy01%2+{8s%vW{QhB| ziuA?2X77B_$TEOOfw}$Zpw2a5#MVaM@h=liu_X+H@mXfx1uK_c%f97g_ZRBdEbH@8 zvIbge?v+aJ?vSZH%qV{VMFNxyd%kAa+%HJWBHd{6|X5pCa8B;^>rplI}R6axX zYYKokZMQ$99G5|A7p03RcVH^{RlwhJ7k)0U)tG?n$mMF(XGiv)d=KJc5smqCTMSDs zF+2M?G&SxXl8%e72Z3&Bg_9WH01D(SFzXpSP=6k?4zU{$YvPBPygXfCBSJBEkdb2Ht+jf z#RhQ&E-o7D_vv8^@MvQ|~o;ljwUalNo3`yRuT!`>r4b3nnBe(u<;U5fr zVux!VN8X3`5zh|`s( ztAAei>IS2`^%Y7}n!14;IMfucu)-`_bO#ib5kIB_FSH(T0OD@Zg4f0CVf&Wv-?ewy-XvDu zl!DIQ_lkPz83*hLH!e27Z@HMT7MLdPNm7wX!Z=!**tPKB@o6&^S8^VqVK8Hk9rM}}@ zhxSc*vvd6t{yBZX2Rqr8zuKV}-cNe|<}TP$(202hnC-(YZkuVslh`gDwSR*{Q&5$# z+tQLPuCh4yC#3o+t4`3Sd#7y(91Sh!Em)>eumNDkU}{^HYVj$q6`v|^-)5>@mIfYX zGc3&>@@=A~lQrUVgfI`xs?&OsuR@Flcivde1VI(exwK0FbS!FX8wQrWO$oPa6EJ4PecMB7M(fXQ))I zjoYNnPUq1p85{Bs=gk)bs|O%T=-(jv%))_0xm%*e`(A?nGwn!hdqc4*0b z#xU}p(pSP)ZA7y24=cZKTh(qG6SyxfGSJO+__%Rdaaq$M*sFPA?0@(b%JanZ%Pmdt z>I1Exl;~De^^o8}PgX{WJ_CY(iLkDr%C6$;wkPKlsW1vPxy@< zw$pfP;u-HgxzcK?83X0ZEby_}*(J?AAHa${i|mi3H+DIX-p=#oyt&c(1gnJa^#Y3a z`C3-wO6+#Xi-)6pbbmd5{`tgHo7$P8^Y;p~z>bI}ta0Kn_#EUVxceH*#-<|i^{R1}&my9CXX2Z$8ld}iQnklKxmJ}G6|I-8lMQf)zJd`M zz)C}djD_74G+m{O$`Bay(C0Vkh`eCUcORl(A>>5S%_ziITs)(M)=L7E+Cu9o8f32F z%e*?G-roxKg@1<>&DrHrBlp)-hgBIM@UGZ%L2N?~XdcTxCYXjzlGBQgg7U@evV_=JQ-QSNd3Lf3Z!8Y}sWpYV2Z77r= zo+|ch>}w@%@eOY{>UifW)QCaPAA~4M~i=`I=uV9gs0ln$l9%5^bL)T?lWLouvQ{mu}k$0g-t5ya1Bn~ z2wflUkY?g|sGPPit-sEu!H8&`T?WVOp;RF24&fW&$zlV^K;RdFy;WGRHkp-aZO)c) ztETha?|*d=!~w@+oJH9KSZk3`PZ3l$iI!4q_oZmOK;O6m4ieL^NpN8f- z+(ytHd1gEQHF;EmYjvEFew|3u1V~B}k{!xfp_(cz1AtMg;kMOSlvZmewIkliw|X?Z 
zJ%6?($4x!kaj0!H!=PR4*7kN${X(yW?{{WWaM=s}oD%+U?O}o2nwNsuNoc^%(69n%7<(97}h) zlTY0K;=W^I!i^ExAJR+WBRabr0;`MO=YRK)SU|^G7n}PcH+`7&oZqJ#--cH1rau7&-KleSN!(hwR^97@GC1J^`5%RcnyxU;d25fUf5;2T|8Cxw-adZ&@^#{2%_x~x8O!s7FXX|t>Tupl%w8LpHId$Yc=nB@XEKsbCoY5cB{ zeMcu*I7#Gd-5s_n&qpnoQa@r{6NoZt_xmOjSc9^v)1yKC%1exfUirm}DVgr4+tTp> zRGuw9Z~_j>{n~D3;8t?8iPEw2zk1@~AM9}BGArFwbNWub6m{*@3&epYXVN{awqq^`3ib`xc^-3`HhWR;{qI7zb359K7Rd3ejlk(^JhhQ5^EV; z7}zrw?I>E|NmN!;lF*_0-G8^roIH8}9jiM+|FOx-vF*E|xOWJ@AT%O)%me+!7G{3B z-49GgMmxoZLOS$4i1`V{@Nah859@AeSV!l;3`pm)3R7R}huS}h#p04SFSJ|2mHRAi zS`wdWoZ67v5DX(iD)Oa?m0Lwro<)%oe=dssquXUmn}#(%6j>0fr+;+f9o+NM3L~e{ z0_oCdy0^ft2s8Asg6WDfwbpqXe{Lew$8$q7*czzsrWVZVT;;?2aChg;*x>xR{Nn@F zhnGBe{$B0pr(>tEH8b&+wU7$~l>3R@X5^Kc)oRYATcy-Y&c6|die45ODw5Q~XKF3( ziSoC$K?A^@3($g%W`D!DYRH#@8MzX4Z6F2=vvwwp&5umcXxe-jis+;*0kQC~Q7DyL z1+%F@7Ia3fPneYkBB(+&hHMLW&ZWGZ+-!vwc?L@{LvTxsjiWXf?o+VzEAPjv$Ky=r z&w5vc}zBJ58;q~F1^{G?iowkhueR_7KgOkgA1%Fw;;q8ZK9p`O&;yy6C z!$|`t2Zj?;)7z~MqySl}*TMTjW7^mkS%n4Bb&ApKK0Q0XuF<8lf^;pqy#h17XvJwe zx7R0f)LC^Em*X>%bGV*c%SEhrOGQhXn|xkPi7+(-&JWFs34oiX#E)>ziVj=9jfs(k6fYB8=l*vFsJw-hwWXnEhJg4vo(| z##tkb*ydhJl~Pwq8Ptq$RGDR-A_Yv>oYlN(?|=4}oS^nGLztLxYj=)>+v(6UpI;)}4q4p!aGsfLcWI?u`C!AW&F+-&4zI-mGh_QJG) zJ+P>P4gcR~AI{_a5lY_tM-Glx7WuF%mAwoM@wwIcx`SURw6y-m5-noG*^U~(KD^r}cbF7oUCRc8)0vDg3)y)>9qezRXe|OR zXL9~X9=a2_(c$)5q?a31eN!o||1lt;wV~CNm{wCaoL~@&UIgYFKE8vyH(Gs0cw@!` zvqCekX^6hB-ei?$=M(mSwIRHMF=y+;To3spY@Sy9#!@`Fg_dOG$SdSb0 z1}w{NG2vZC9cqk1NqDXdixQIChra0sm$2PhnyWYwK!0); z$3*F1;E@)ANV4#OcrRpAfZXf`GhNx%b5knv-whr7P9;ZvA{{Pd%v3>ub(m77a!Tm0 z@3$9x+pN)lU07dpH_Na^X#U*Kj;pIHptVX7nps$?yl`qx?{s_KL@=2MUk zM|T#>IcD%Jp2S{oZWe+)j2}ZJrw17396F%Lq1U|lH+#yAq{660tbIAvIrVf?GBqlv zOPb;3!_l#)Qw=KImnziVv3n@s*MN(5iJg^?WDw8MSdkmC=M43b%-F&~)_*FDux+_X z?j7xy8!2sJiDHqLHx67g7wXIF6PPfKzCX%v{Kb0wROD||ygZa&Pr%ryt2OuJSg#EpN>vBf{`g7`KBUn(*7AAf)lz3*UDx%2eD zzHqfMl2}1Gf?3CiIi})0%qT|N{G~5kHiA^j^^EST3PYy;Z7Y*Hk3Dz2M~(svCDXj? 
z<8S^!>z8V!6vbSC*;j0t=oBsW- zV~cJP5(`a)j_;J7)1Z0UfzU~oOZ^o36uI4}P`hheof{MX%v-XD?1QFDZIBX@@Qx*Ws8 z#tF6}zPv|pIw$v4U?R&JUh2V{a__FiR-b`b!lL0L`edUjQ@-k~(aVQO(PPQVg#x*0 zeMlzLIY|L0@P9cs$vKuP@eYj<`9@60y&!mkU?ZnABcE9nS4MagfUkT8S!GD%aF$<2bPV>?W`7UP`W~Wz` zrNN!PaSFX>m3VFU;xt?V`#cBHP_HY%F0?ziEi?rz9iqs1h0hBd23R*2XK)G4jt7pX zY<(BsioZ}b+UP@Xk3>?54}?}XcLyTSZ~Q8Aqqk!t7rBAKC-eosT54W8xA&-(-lsON zn;oq5Pk*@$P?Rdb3ElVF@D|!RipQkt8WLP?GJ?E}EKYc@$_KaG$)9Aq@^yz*Kp{Lfs4*3f&-Zn`7A`sg@=wjT~U zkkuKZSGR3M`BgtJofv16piF?Pn8FhtfjklMn}3_khm#ICB7iaP6&bSvtqb!rAhI^v zp23gvJD@+O$hs1Buut(6nH03AfrgK77l)ng+pbbsd5 zam<5CuZxeZ8y9)^#>;)!8HkHg!c&En{)He0zizsFIXprBw-7+qw}7Bwh6l6xU$8fe zX@9m6iW0L(`pY<k*fcDDmbH)JJJ8Cv>AnAhMuN9#O%WB!3(k#(GmI6e23YWgiXLtTh*^CJIRpoW^U) zkApp+9no2jiy~FRUjRqgK7Ku%JSgF38PO+p4puk{iU=LXlNAS!;PN2?f}rw-P#`2sg<+3sk^!-o1{P`qGOxNs@B`m!{p?XMeTE zth2k?T|VxdK9U&rmT@#~;45mBFX@i(Jaqb6Y`l>YlLIz2j|&{#&e=StnT6&5<~^+9 zVt_-5+R9shF8QuNw!?h%1>!K4^~h$b$ZT~A~>!Mcp8fz6*?YJcPWQP7X(Ji-253ACe4VWlQoo1n4!T2C93TdPbu zGESTO{yYzCM`+fH!Pey_$yPt+u-%x;<5|*3PXUi{ykE=9{LPKikdFp@*)6`%GtmX^ zc2xLHlK=9lw^ZhhBdBSs`jY+N51^{n(-|$@RnWeS9MC^As^mE5A&yeRzkfOtJ=D>_ zjxkKd`4tdjN53_s?0RBw>EF)EN&%?bC6hq}?tN&#`@t7yj{d9vv;RTDarDIL+>PGS zID-klmC*MtYq1y9aV8pkq65Q4p(mJ=L;jPME3~Z1<6dIe3PS>;TgL2o1KaVN^_I># zb%J9CV<91PoQmX=(J}{t8-LSb49mavNoB#VIB){bM0x`;RW>oD5|9qt+Z43W3rF+| zS2bTyHTk&MoZ8LG17<$PZNI6S-=5co3^Zs0jS82RQ|Z8c`QdlZX_q{K0|fn-C**#! 
zI7pPhZ5Ju$S<{+x%7!K;WAB_j!K`@2ffk^~7O+k=?(cR1fj~ZRIe#6#?^xX|K94Q@ zzbpzyh;d_wf+>ixpW+ zmqDNf6PGg21r?XTnhGTWGneL?3NnA-1F($&ahSuzfD;H{+6*P`z;Grc4U`U(89E*8 z4h`u*r_c;DO$y0OD4oJzX(w$;l5+T|-+MkVP^L_#yYt?DyZh~azwg+$-$NK7BtpZ8 zM1|p!P$~W={u@G)J1|;R+nD26d+TryLbwe29n1TpYsgmO7D8kK^tbgyV{3nq2JwOa z2K1#peVe<7?n}!N;`k6s*tnuA+WGalzBZV<4v%LA42VAGyaoGlpl@2yzkaafEBqz& ze+ExS-@x*ysWoFh>^}nYANEHF*Px}S0p>EHuU{SQ@5%}-3IpKtFh;K#h^>z+&H}IG zJTs3NUAPGSeXrZlo>=g0LMnd)6QOs~C*Eb+JGiSazCV7KdyU%*`tn(Rra(&>=n!h* z`C|6{b6Mi&rrVq4^=SNByPXS)X%s82b zjAU9o4)aztWmd*BN_bzMKQU!SYX|4Y=od{KYBRBd7F*blB>;rUQ^<10)hQivsN9VJP!iF}i-mpB4WKH$Oe zd-1#2j|{#YA&D z_`-^qS*KQO=8b-=y#VdidcT&f`aq!|7U?dhuGkePYk=-6n{w%=lgM zGx9h{_ksmTR((d4SIwI;oYxCBVJW%S<8-LhUdm;)7&9myF~jO1KfA0XW&9*_qTd)k z+2$|MYu5J$OIsU$zCYIGE0LNSljZ5@hDAq=Nj7`2dl`TE)?`n8A|;aR#kKOCoAPbe zRrOaJbI008asNPTQDp1+dQ-A4NWZ?Bl9WMa#b7Y2 zh4Pv)11Cs;kr;RsvHM)@s^H+J-~QtE>0e=oN~!e=aSc6@V{x1OLrsNQr7q(1nVz+h zXUa;RDHwpt^^~>boHlwqUCNiK%x=@@2L(kbx}nz6;>w#J}KaNEFcuY-TJ2=1zB*iW1*H!)^}Faz|~AUSP0T*z-JFCA^u#l9GM zfA|tEf3GQ$m3O|seBI(`TS>Ymw_r8iRFqY@q@+1@-^LRw-^FED>H_7VWig#HOKe*{ zR;+*3FWSP~M#=H}93EKB&De}NLQ~?%S@X?_GsU#UcqD0@bt!R@e!M%dbXS41T=SYt z!WW73Cce@dIUEX>YPN}FLes*XJ74)XE>kK~JQS|x_CDTvwX`c-UFfN&BR&N`QIPDF zXjKPR73OBw<1^Vxp{!sl{eZs7Syh&vEKz@eucy3Av;$5dYo$3~4aF0vYT{{};MwcJ z=18C*(!BK#GPyt|vn|XRJ5y9G+o!Y`5|hNZk=q*y1R~5syC(esg;0@bjwE_kjGgXJTtQro2z=v4_5pCKJe|x%t-i)`j8X8V#1Hl94$gZx2*LS<)zwo`WF}3obIr0AX%IsOfY5g@V!5_cdw~BvhJDl&sn@ zxX3oY1;0_Rq{KWy)8|{FpAe&NB_)c!{YUyQ#IE*CsaiaxYYIdj1-iPP*7a%qo)5ey z8X30@1(%NGxhgcn(!>OT&L;^YBWL^e@iEvXhF$eGn4vQ#A zsONkz*snfuvUXqi0)6Z4J%Wos;3Mt6fG=8{RUzhy+P)b3lo+efr0OJT#c=z^A(9`0EGHw& z$OvF>P)0i@oS2(~2lAXc0=`4+b!KJGnbX1EaKyuGbHc7ldqIlrSV@1~@km~?Pn)cj znHT7df$kh{Sx)l?bIQ>J!6mj-s|KuoZE&y0qW9&V87{9KZB|PYHMn|nPau?=v$*ZG zkfX5g$10%#EMNsx+c^i>^F=x-Bl-L_r!X=%JW*M<%edoxsazes8fZ7M zTmvu`g`J9l>H|JKDu*4SK(9p0pTm*&KX%c-V4pk`VX?%faqHpEu(Mk0;hS8|@~nfI z7(CsuZr%@+^CBsBh}gk7drs~6WWPGp2$}rMo=;n#X(~gHe|_eoxB-3qf=ro22@nA? 
z=8Q~d@}ij^0VFA4TMXmOjEv!ejMT(LE>sLzdbyKf zrYJh&PCH54aklC7*ppuaKuLDo>7Mz$_s{$u!GHVVpl;PU zr@m8ls_H@%!!itW9l;`I>wvpk{;`t}zsfM|Q&j4UWJANh|DVg~=Hh1hd@>#j6`947 zHip^UL!URr^SS7|cb|rSv zE!(sC(elo3e<8~-H;DRP&4seD&{%eeVb+}`0*(&n3&r_I89DjCNPlc!9*xE6y^&#V zl#2afM6T$ne=&aJO{)+5j@?-I_f*1M(0uP!@%?9PEHnS!yrpiL$~XX{Sj?yHO#p|&gUa^_z}#+KDH;+yC> zd*qsiR+D{z*~)DF1Akld;Z^3%Yt;T$>~Bq8^e0yT><`<|w6Xp3^B*W1W;T(YeG8u@ zOBfklO|@&yg8Uz~A>roVlN@5?lJ!h0y>sL*$G*XE5{~**^?66N9OeBdS)&AovV5&2LG`>{+3xq{_Y~1&GgC6h6Phu`3)bwGZpdv{G0P{ zfAnkdUw_1m|9@ZmS(ZaT7A;$(TQs$3cF}JZw=PaEo?d*0UY8dC&Em`T?e*8! zf2#g_^}lY|-C%9VH{90n1iil7@QeQwUQ5l+lky~>`Q_e~#E zsSZw43&|@(KP{JkJe2s+v130G$KNLA?^q9;6|;U*OS^Itt75aKPm6cN{Q6&*Z-{sL zS`^BCPk(*m8{ZJiWdtLVml(=!3Poebs=AIA#q+bbFWtl)>bhO>(w46E%Zp+?%D?Pe zRFBr_CWW%+kVGMeOAXYCc4bFzZ(pD2+&)RiwpEh#=a#O%ZrR!mts9RW+{^yw-!9&~ zBG)NxQVi-%LHTv;58B#xG&gM9+bP?4%i&L0?tfX+wj$jbHg@WjZQBk9U~T?)u+}SD z>*Z-;Y>Nh^sx^Z3WvgWpyjolsXi3Pt&$O*>ncwJ3aiwy%$USG8x$-YvV#RF6~v-K+0oe1)X}F{^T%_C4!yE%yeMJU+;if@ z?d-FcJ@3@|@g}tqS*yFIBinW=l^qm(h5V3&-0#>{-_X~t+`ZwQl3N z?4%IrZ4LT6`3+0hm68jWOItT}A5a|Fx_@-Ld|m6&~z`$p5rroVnhqPqNjw)?^NdvCrOb;)nZ zHjtpzs;`icPZAb-YV~C+p}@y zP0`!WaeMZ5vD|}P+vYBI-TS>;RDbq;oBH-`siVktDK|Mo;z(8;#lhlShYdXKaZ}6}&d|-WxDr(>B6*|u^Ua_{ZW&O%Ui&n00*|?7Vr_Q}S z?)_VL4c-@z-W%B2)OXaPRA@RI*Q#zZ8gEjqqf~d@AvMH9M|iQNA|xz$9{FSbkJxVl&%(gLOR(W zlg{PPsO%6&CtV!-rT2dFlV6K1Pv7_8#OYeizonW>#hQx7TD{p?Ep}(M)>dL>1u?S^ z%zS``*0n8)4>8rJXjL?_n}39*Yg$)V$=KN1zHHg~Z3o;kS8{*L@;xrYy6)ZWOC^hL zVNcw$Z-4uW1wyuLQ1jRKZ;;)1S1@qbjotOD+gcB^C&&YFFZNT64fwfZR&O>pJRooRdUuk4Vm4e$OJYz=`Jf-@*0D1%v&XSO&lrdmal?VI!4N*MVd0&PiOei8B)*GdX=mWxD829A z9k^GtDSEt&JZ@myNB(e5U8M5)=edgU&|{;tjP^&1-+mu=py+S$>+r)B&0#x~Kr zUF6+wseeth2mF&ULvd7+2MeJ@zWx!dW~`Q-?RU@A(z6+DumemN@A|#IK#j;tJk@&|jVC zYgyTurtX0Qia~zowu8Q&f?~tkGaJ{hZdtK-@rst!o0r$uvnGXme|z(x;?cqM{_A=- z2aj|pc7Lwk$;;*ZuGOt8cT-PAn{WJEJ#A{~*txUn#ai&*qGSI3`(jaOaLg zUF(cNX@8^S+(-2ES9{tw9ZA|AzOQqynQTd}^mK{1{<1CA^=@rL`vsw1l%c1io(}u=nD;rw&Hg_M{)p&?Kd-(?TY^A;VI#x1L 
zSGT4^wR=}Z92x6r@An2`bO4?&@KYO@3wwx;wmPJhM)pILvTDg{Vdt(xU9FqmQiviTFpz^bOcsDH}-1uJh;5 zOJ2GC_UGPzQ?gm)cneXp;)7YgkG2(!Th6y#|Kb-f5ySr!ELqEO5CaJ|$z3^|> z`70}ksOukzVX+4v;kigSx!3A-w-4O0Tfg#k71BOIJw}b(2=f=32`u`WymN8_ho>Eg!kU?D)`f{5OkMG_78k zKdYNoEMnKK+;_u(^?K>*g?Dz9LfE-#)lNa7;Kg@du|HwoD|5t^`xCaV?twTml$$ct zzhU!AhN|shKQcj`7w7HHgW|lsVSkH+UCpV**{bGhvzP6EzxP*r+SVUV+D|<=Xs-6t zjj@kh!G2)hLAEJ3bX4a<{lau$_x?L+k*wBoMa}xspzSN zrMjllvE?s67a37F``UK(Yx;L>Xl`Bq*>CkZl^gJJ3vC7(>YoY(*GYxWp2ItuuWM~> zUc0{I=?(i1ZdV-Mxwd)3y0zt>WlFFlxierAijLqsQAm-nW#%fM%+%AsF z(6-uGmu$YA>{7Qcw+SSDMjWGTj5wHopWyFE3(elT+6rBF>km%M$|VYsFseMsexjNs zC89N2(HpN>&Ba&l)jmnztGRP~CCV3-lD*)0>hkewQ#2Rb)7bbSwSV70r|6dI(Ce#e zXj;2)vXZyTHg8vTZCUZa`5sB%#rx_O*RN`AT-Mxmi$t<&`@wDrcljYUds(u)xqi{A zmCGceUOLgS=Sc5Ta9JmwXq%L+7r(|n_T5(`oA0^&hkNc}m*37*pS4OBBhPPU+W$MK#}J_EO0Mgrc^6xT_#N7YGjlk5Wl5%KHeX2*sNHDA`YQ^A*CD z`Ll$bWPLwlq*nVGE6_pu_cK1o0pJb58-YjX#V748Bp;%-Z+{aS$*V)uRzx46iSEA3 zn1NPUv=P3{*dYhV`n!ap^#JoR<~qWogdanl8&T&dWCV$6Bc9kr6wyWEcMDaSO6ik^ z$uq-Dg&Ad1OoB-=MLdOz5NBH%8KY)YgxnQbevPcSMply)VscEhR-&%mwUX8xStyc) zVJ22936r%DU4Iq&Y+$nV*+^2JO19G7B2$32IK5I7?G(vHY8(9seS4{8jOt|QUWCfC zR4V$BqS6AzbSuNx?q2O*0iVSCF?t_|=6%dEdU@!+=wlRhMNdL#E&3TFt)hi!&66zM zi%@;hpIA*}i0*`_HyM-^sCJxml+ahPBwp*|FrlbvjDIwU?Gsd1tl9R|dKQQN7`zoZ zh`@tF&3cjAZ>5@f(jjst))ILsU8AXLWf;9Iq7R}!TftZg{m787;@t#rE2AY8eM+Nu zg=_kk1CxepqP!5NYkd=YS3uksYKeL(HQ&UM6~4mNwH%0j5?hVc`p4B`P@q~FXjmxC zqD=}T<9~xSBUCegg`Xjl&sMa^Q3fe_uT z`aO=3@&5*+)sgq_&qCFe*`{Ws*T@R!iBkJ2w{Fr>fX7+%!BiW!qoh9%K8m#we|2uS zJ}@_2e=;{j?{eth)e$HlGGfoF-?=gKpS}8B*nfw9>gd!VJ~I@b^4DGYcA%QsMQI%g z5=O$-Q7znQTHmyu@|URfR=T%|VHU5fy=8D5OVcJQu$Y;d!D42X#b_~F%*@bYmc<@1 zlPzXuSsEE-8vB@-ZynIp?tNgm1GLv%%pLn!b!fxxzHOsG-AuX{KGyAuGwXD@ zYZisx0VhA_tUp#d++U(EqAy1E&NxKnU@tBzee??{G+Rb5%gC~=QB&Y&U9*8L3bTGb zjCw2L$|_ESniv1PTjbrhX6ziRPf>UHiKB)#_s-(A#yek>);ppFzxd41uNWDfRM%bFLy^Osm7yw zw>?xC#zqY>IP*xhUqArJzs%UAPpXE~*URQ7a?Jp~r|Mq$)>+stI%s#9sHRGi)Np@n7dRul$CsipG&-jyVt)ROFn7p zksR)X9zUh{=nBonA&{t08w^&p0AE*rSANrf;U*z6Y)#L@?us@GMe7>X^}iIS2@BSo 
zKyn*H+eCEyRvig^hcx(LKyGXPWJ$6<_ILm&(umizL+#mUA3gJjSm#>3Qi(ClYmf4g zKJ6RQAm&oaE+LO2(4;`R3LS>fAe4Ngn5z>OY;*1XLZj`Q=Dj9y*=tpNL@~*sUwCj( z&urg2yuWg#Fq@+G6|wddk|sNNG1KSk)c1VvN0oRtl}-tuRDyv@=SJ*z#jx}~AZk%S zof%)5oitbYpx;a3+Ib}X5}KrjF(JR0owIV#CEkp5-{ta+59|dd_YKU-GmaOQ0VeN_ z^aubs5-4AvkxuBsIBoDnTrG&s&}Da|J5oQn(9}_E-oic$Lxjb}zmlNYL%OS8$FX$u z52IQ0GrR)P5V??ce(?^eZ$f8d>6N?(ocb;+7*DE#SK4VArTtP=%!?BM*FJgHmF*an z7{4`BCTxZy_H)xjXnt7eY~IojZ>e^;7rv;r`KxUgTLwo4Wx@w`Bik^$f(wDZ->7m+ z>+wY*7_m)w_*dln0T#;d>&_)(9<(h}iym&E*%1K2x+!*q|Fr<&zUK~3SFH$FWN4mp^E~q20&3#A%^^1iXZdn_dWtA(+CL&WttAq`o0< zQK5Z*om_V7p9~HezcM7Z4O|m?gX*>GcAc zw-i%gH$|9&uYb=Aa}sh2(Hdxwi^2no2vG$?3>6lDW|UlwBxo%jx6&;|P(wNL<6&`` zt9p8X7|c2{{9|bs)J;?FEB4&4phsP00O8m{^rfP-3+tvSsK(tL=B6p@vvQVuBTlJQ z3-TWFNXgmm+vGAb;J6B|!QX8V&O;AdB%&e?!{J)(H`fU1&9~$~#5d%f3+@&pg0YgW zVh^qp$hAfU#-}_8w+!W4@D-*O=TJP7#2L&P)l*Q`KH!;kh#R7hHd3TGQ;AWpaJpH1 zY~vK!o%^9ZI0XW$e--%{6yev*@O6x6+gi*sz;_kAjX#14Ktt$;KUNYGg7yWYLdkoT zN%n>Jguk~+2B7abtg~SZz$S7hBsUq)(H)WRA+{77A!#QHvT)(}hgWkjevrNo6u?jl ze9 zo^!2r5Y!F-x#Y{d^XIlli95JP4S6hyC}b{cC^vY0pd-#IV^#XxRQs z1{`Tt<*zD}p5J(lVI6UpEopRS~?LN$sCX zRkbY>?n3OsTN4LAgY728D}hhuo?&#?bx%U(d=QF#JP5=Iz?nWqSKbvBrFbCfMa~@x zg8+C%!4)yJbg_)20@F&t*x^|UNEIcJa)O>&&RI9{EO`)ov8n#v$f20#gZ5yoyG-)R zq8mw>t7uav98ucG0!l!_i@Q47mafK~b}J73mDb8U(i4NVIK)BrQlZ=nMV44>xFi4t zlB92)eaZ#A7^(F2>-dr1!a7ezY~aF2VgE#@Tr4ApBsa<*NmmAp99ept}%6zE8B zPWbzk^W3Zkc`sI5jMd~i(ucBl-Lwd$B_D+%FK$5isHB~2fg^&C&LpB1=@M)z5S_-9 z+mzXqWLMM(^`15P7i>G6CnhdLm8OP;8OwymSsxO(8Ru3w_!(L+XP{A;GA4IHM-2V_-{ElFU zA__F7I8(9d#6$1B3qa*R1U@9Xc|+R*emO|`3l_0}^1m8et<|#*mz)WW z>nDKpfVOE77za5SP)0+V6Z%ZK%M6hdmTf^Q3?mSih|qoTT_P4sS>Z~(pGduOV5MY$ zNi4P_Ki&NBBqS-GB%imkIE{7$rQjJ+CktvK*B6^dLwYGr>yBdNCBj5_dnvB-uAt=Q zypE*vrmv}Qsfu{O|AxhvKSsbEw{5NO{Fvo(!1&Y-*#zW+OH3@K$S?2TDREDiED)2Z z1St&Zi+IoI2<>Jbm_G?(>V_u)@pr?c0tiF95Nss@@=T;j2D1at6S56|X}-G>+MTny?EhrqJI!%h5xPJ(qo_yXCEU@OqMb^-2m zqp<7oe%)XX0$}5iQ-6#4a5Ni55r5Qs$xZg|Dlo|x`^<&)h_3`CZrqRC#T${Aa6={U 
zH~XiS=ky4|Ylr2Yq-lrciKH&`_KBo#M?@&j#SM9n>hR5VVeZ8fL`(q3!=@lKRkBjr(3i zNaAAmMd;w7!BLPKYavoF8TV5mMjLCv^~vJ0!UGgi#{IMq3}`Xku;miD!VoWLF$1vI z5-j)>xUsBrk&qZMhh9Nwf=bS zz!d*NN7e2%I-}n)#xF#DkNjct7KjXga5Xkq%Y{s|yauZ;zxKU3-3ueX4Q{>hJd=dK z?VNW7xmCOi#5h*D&0n$14sgJjVh8_4ookHVwk&gMPE!+?)vxG=&Oa}6s!mhWm)4JL zp(>z@8ucH8+B*yUH~km={{`a6tOrpWOd?iTCC+eDdBaMHAld&lj7O~emCajoiL<`~ zIQp>XyT1#H>|}uQZJrwu;YIq@DGkTX1|BtLhKaf31;+zcFzIJ4sEQP za>rho5L?x@)sLEjVX5mYOW=K4@da`;#dGhI43-3vTyQW>j2Sw7IA+XaeowXAE6V)?81*W zZwZ8VZ3eBBWX|GZQ7tuT6r0_HFBr_}4Q+Fc=zN5@aEMnv&8}7O59gLGDwWl9ED${J z1zmQ9sBwT-KEbY}m1TkY{jc_}RP#+5xn=(^E;y{rOkWK2`rDf$yqwT7|0=MG|5{R^ z*z1hUvB$=O@O=A5w+Rfy5}0) z`Yt#m6boD_XNJCVUIIn4e8ky${b!4_1XLV4#{_sBB&TUyoKdG52plD+3l%1rE)zZa zks^4#A46tQdX3#?(Rxf{du)14gGd(5Tjbp72LQZPCCdbsbjyW0?L?@Qi(V;ee%Vkc zb+cmx!r&pd9FCu?Cao}WK3v#=y*&CJLsY4Al!^i^c_7mbusBrX-jFSk4c-V4?0z0 zy?~cJEx}gUK#QwoL?t8WD`hhWZ~WKLX4fdWk`~u6e;2*<VKsX9VdIoG)T9bk>tk7=5jl>~nae#^KL_1#Z1V%i~*ff6+Z zQIoU{>amDcr9-7sd6Sk2lWDv3Qz15n9_u)kiykYpR5g!mAfw|?a_(>Y5xp0i? 
zqfSMrF9Xf0*$gx-&O$vZw$uaI10=6SOIGIJc*pJ~ShJPQ#(p2>u4=>5KZQU@ROD1Yc$w$8qr4W>vS5t6CX;Qe*jsD4WD@p=d+X z4nM9OC#dc`Y$C=&43Zky{vAX88yH{u$fw_vqHJx#W;l{TkJ+c)Ky$1Vr;FJd^)pMw zGEq?FE}rjbB%Pegi;dj#q?`}5vP$kTzR}Wdp z`V*$`$Wm(;wk$t5i=`QZeVs^EB5*MztvK+GX5itfoLCB?_T__kM?f&)-avA%@-UiG zVK`B?#3w`9ALQRd*bcuQGD?AD4-$@J{1-($l`SsmFU`eU;!#?lda}lO^sGRGl`7^h z{%sQprQ_15p?1s)Tb%LR}li4$mKE6S@$Lu3F6Y;EeZ1?j8 zIwe)6L&%!#yhXz-b2~d~s_!}e688j`U6puF)WRQBhZ5*V#!^%iK2?Ik%{Ee*cHlFR z9+E$%pbUfsc`H$GyR?m^XlYCiVx~Eji5_rW_E|@-eQ!j%)Xrc{@-R&r2n=C6yMB^_ zh$5EM%4YT4r4Qvgb(iZ9s01&;bed_PbDUL<=K@i1l`2_yvtoxh9O1aV{T0#3@(akc}bG+d9d8pIY$m|3W zsZTz>)3xj-vivS1mC8&>$lsE;A{cbs<)WR!IkaVL+UbEdLvxOYSsnbV* zx;(ViUi7#w9D}e-(&`5any#olTp5!c>3fOW__F0v6^a6MGsX#!B2_;PQ422gxYAWJ zYluNN2pE(08BH+vM|4%GrI$q}u-vSt!CNrTR!CYC_;VWjLC2AQ_gnBlF0r>uJ#1qRVA zuCKvcuKsU=WTf+=dj8EC?d|2#kNb;e&%S!>q?&{Edu)3QqE^%A;{I8hf4)Jb6!*yY zvuvq`wci@g#=B0@j3S1Oa79Zgdu@NWbeJ;1cJ(#oNgwWS*em3u{)pVVCKEf#Lx}Pr z-?$0@O{S?*moR0ajj?Gr{UoiEV3V!-tb<#pK>N8T@yDT}?A~uv8IqY6=)NG9-#1G| z)K&xdY}CD#$+k;>X2RE6jBR|ytgD|!DXe1s=YGe1cVpcI)K=D8x7syxA|={;h`Vwbr4Wx}qzo@~ zV-?G&6;2fB3Zg-oLS0H{TnZm5@}eeqQyJWKoz2Sn?0!2x`7*_^<5eH~nR~35x$=bz ze#_e~WjFi!zF~5YV=q5pypKL4o!hXrQ#vEYNNtTeBd=_6(0mq&lRTxFlt;@KtU|v8 zK(+IesWk_51}_I$UBa|eosyXc-e*XEJL*km9Oz!&yy$OnYu(DMXXnc=)Ez4s$$T!N z9?DqUl$WA*tZ4hQ8ciJN7l`ywIm|0qiAX-}_+&@J@RGCfQ+w*PDHG}}$76@%WRff9 ztox%X1zuBWDhv=srfkJ3+QCA7T0GMQ0B1>*IQ|ArEPZ0O7GD;J<_H4G1@^I# zIxq-xe(3XM!0epOuvBl5RMj<&EK^szgi9&S@kPc!gS~do;qO^kq0d5xD(3n+DGT{E z7Pc5da+Fxpl51(x@j$atdOQ{)7LY=n*$^92UytG_EvDn8L!eGOzaWvvNI5310T}Vn z7SiZZWiXJIM&|{5#8ZOttPoa4HZ#4LlIS^0SbdB!`|Kg0Idil|o#hmSa@CnT`$f4$ zAu{0OriZmSFNpPUIt8ktmOOb@9L?%GIzD|ehyMl|+haY|#>pa8=K6rDBVfy-%ha5C zWZ92L$gSc(S45$q@Izu`9xZE4-Bo^f6isRR_`vR4psGm3Jx05nvC@fZBTo*lB5&;j zYdG~;{fHobF#Bet4+39SqcZG6FqXb7QKacOBywove!0V3S-f>n`?%E{w)(f8Tz+-z zUTQDn)Wsh#dav85gUT}m>p&z^*EFZ863hTZQGkKTC2ucW$3jAGs<$b%1>Lsm#gS@h zxf*XB{r94B7j2hzlWwc`aY(38-6{de5WSTdt~)+PBdd6$mixIRXpv|PioP|^NLE%( 
zIbVH73K2WhpJxmQl2~8N#Fal-!=isLel=NVdm{BS4r6=8p!x30J`lo;aqH7p>{@6< zvOUfvPL7)Z?a^36OV;P*){7dOM_4(?^4ulo@0|Y!Z}NfZw7f!%JDIAT1U9{S!z^S2;Wj ztz|Yz=8DQ?C2i)3T%1o7r0|uXPITiBvzdORSXJutb8!3ME^zsE`qd{B^w8JiY(>sv zdCZx7&lI=HP^W5CUr()VrfQ-*c*O_Q*e1Z`Y02uaQH7G}0PF?@j3gd#CS+&l@$O-F;xsx5pC1J%f?DqR3}GM{I<1P15{MMN;z9+xDEZD!dHOGBj1;r z&lS$l2u<4a&F!_|pz@&}^BIM-_hC()p<+yTUf$wpllQV>QI^>ciS!*#Rnds?@AzCW zWffCDjiNy==&SuHT$MgN@YaY7uo{Y2N4oHL1IZO!rPhMCQIwImlqh6ZbMgL2;jiHO z?o1w!1n|gd>(5cs4(eCB_GcHdGJsTw<;(gE{4)m}296WCE2(kMBbH^wb$ z$FelDVn40m2)rst+3$X;T2;)WPV*)D-UZa30*jWTeA47Z@C3e!O$=#lNS8qiMg+M# z#!?w67hAQijI=1XI!NFRUin6Ku=Hm}BUDI*b7X-^Z4Forqxqa(A7~7BU3`_6aO79l z5?CoF`D2~kmh)6%bzi7cHYYF{{SCMi7I=~TbvZUUQM5R@MwTaPzpJ|QW=@dV{M29r z0htuUS=JJobYGEqW>>)996d#*yh1`+ZND*`T}0XTD{Dx8Tv26Z9Bq3rWue2R<}fy4 z!)7bZ)B&}c%6u>A$qQQ5Nn9aH5&Nm3a_~`HR0k#qFII2S4Aka~Z9!JZntGbdeo7sy zX)*Uhwly!ip5pU4!5W;2R{Tx#XMr;&TfnPK@7vXJ*6(WW+yXzbocWzgl*cn`&>8_Y zedJQ^Vm7L58rg^i*{qS1z@aA9(HP-Ea(=M6hXFT>N)#M)#!87o3|_;;CpxlN{8_Ui z@%Ux(SFkgmv;BawLCPUJGP_7*vz5Y47RqPQU?$AWc&Lik+;pWM-fopMB6IOfI)EN{ zoJv{chxJDTm!i~JpTf#OvX6h14u;QmX=7I-xNva4(#X08#^G|TjA=32A;7jqP^jnM z7ltb!_k3mp9j#T=EOWB;6r#zBmgL7H-OplR7nJXEUzfsQsunpE<}QpBxR;R~BaG&I z;}~w61j&4N;-es|8a^eyBz|spD+I0rI~?9Nzh&4>K40ytk6lIA0J82tMoxa+(4u#v ze;n`qtg!#fx^Vl`sOsa%sQ-OB)5WCH*+Y$~^^E(=-doL)g7NNx+KJ-z8lQ=Kc(TLG zYZ-H_pV_z)%}Q|G5xJpP1(Vlxc+c{;@N4?loF?F4V?X>&#qVSlrF`vf4H!bm@l090 z3(mg7%3uO?8ybAWdk_4j9~(Uus~-LDpCsfCztME2JuW?G{CZ-z5gm-f_IR7+7d!z{ zf7w~Jtk&K(7WF!Od+ysBUU+IW8xXz&aT9Hr_`ba1KaUc<-BUavIVd{#+zhS~Jr$D} zvbj+jO=V3|DX!gBZr?Er18a?Ul?X-qRlp+yGw~kg9q1duEIZr5w!`Z$q>3yeg7z}> zPQc5lZ*vm3ime(G?(%utSEJeO9V1clt1Gx*Mxjmine-0AdEGBF9D6-$dgVEPPK5DR z!h5r7oP8OKtrDnzUFgc-bJ{(7+ju$@7I=KwD%NeyIo#OyDZU&6vecj-{Wo6FxIp$Ho8-QUHb{BUOqPDt7VP^)`nQ_<-9JC zqYp}LZ>mUT+Q)1qT0ZO%R3&!-<8>Mr2V)Dz04CEnPGG7bk_k)SxV69Q3H$BW227f_ zIohjtPLT(JK634u`LWHUeVH)&{z?v%gZ6V#*V6Biw`ji)cMcuJ<0h^|**R}BKza_) zsUdTHK(&7o^6R@7Z4Zfyrl5exB5P}>2s#4t2GI+e5Z^G$WOfNiNymiDtnd;8O!aK53C(DPRP 
zyHu_k2^tGjxCXBVd=L;3!NkIWAXQ6J_F5{;||R~pFeZMQiMP#bAFjl9oz7G+$On* zgKd}$pLy=-+uo*G=i9&_(?fn>r80@OAm-sQ!kp_0Mt#aj1fsqd^$=yo$9cU!Kz%vd z-InEw^@DbU4Dj!}-^NQRg;a&`!`^k}?SV4Xd$C(< zq+m@GlD`pD3p}K#S&U(lt1w;v`Z`C!rhzB64lVeJai&=C=ITy-x2fbT{)=;0Q)3Kcx(` zqV;wK!Vpjy#MiJVUdN!Ccpn(qlVOIqD(6cfW5#YPLmWrVs1VbONLmtmL!C*5M{tOK zJyVvR!?T_h*)D8c`n-BC9<6C*VujGUaLG)JBn?=sEdte$puPsMs(zny3r9h7KK6No zMbTy^mdPd3X6(8HUJUN9tCNBWZb|ba#6ysKyq6w(!nE61vzQLFzwgcND{g#BU1rD%uLwTGuM=i_?rv0sU5X!R4Y$EiM2b+R`_&_w{^;mWz#!m6vS8maUfK8-Ffl zRBAy&Oz5I+YNLE+ADS-qe4dXtM`W|x1e$SRpz&Bv#p_m04f5w;!#iZ6*;eVnLQxWW zbCM^zyN{Q_2B?j4SsYaATJxI3=5?XN7oI?RJ#^S`vqNb2(A3l?U(NPlXhH|Q?u*)N zT8tYSE&Zua>?#Q|GSHgzKS_HPK3U3A($Hv)QA3?1pp!K5FMZ&Wf0O!TWHcnij~Lgq ztQ5wok+l)lhnzG+@pzUO7?=mt%7xO z%F^gZ;X*88LA=^TA>~ODdK#_D8g?D9F{a+I^vKK}BDJO5yaHVTMAmJrMb}M;pOD&a zL0S^6Lo+-<))PZkT;(61+x-L|!eYaDq}8mVv!f_tN>eWrb@RGW|wb?0I&;<2%tyZm2RkKF2WibO>a`C!; z<%b2O$?<%=azS68@@Kl^JugKJMOzfpb#69 zy(PFP?W;dRnL0wv`ag4UVjAhyvGq&15xNYg8*nvX7k`n$3fZ)e8;iulsOv57^2uvkompMtI5_PeFz+ip9u zfn+j=E<_@A`(0u~9_KK(q^*p*Y?CM@&tmgD_<&xJx1)Y@VE+bQHAe#Lw_;Jawvp4x z)n0e!*J^U5+%FT8>R1AlQLN8-N-!+q({5Ol5C!qwcsX31054ud4hjTyAAPLkr%1ERynasPF;L^j;s-a}CB`Pn-8V`Ro=iGr zFzk2e3YdKk`M4eGfrB8eh|pyoJ(3v}kg_}^qWv^nTyHIGS zwdm%-llDZ)+*Is|V#fceijuO)358M3!NY(1-AEf|bi&0T1CUw|KZlC+*~IIbK59X6 zI1`PYL|<+2d3l-A^-Oc@;Z$0NkszK_YPplC%y(B zX@mYDm`F?BB9;hIx|S#76jy;lkiC`2;+82pqoZl5C2Dta^Hwsr?QVlb*As;VmyM>w z=sU-fqO%FJ~k(N;Q?9>QC;GBN)@hXfqv?fvNaz{hozp6M5R}l#O!6 z^lPGNWxO~Vd7`UK)}aq$Msjj#IQngRX=r0~Ksxlh-&5tqYE20vm4 zi?OM(hOxr)jL&H2mRK9wm~bex&g$+p?I9{Yduge|EASW>3SeQUjkQzR&sj=mlm|C> z!5o;lQzdLGZm!C8HyLT2csY$j;DDD|CKB#_RweIIyvNFvk-UBA@2d%3g;6rpY5nwx zf@; ziX78LB;av#JkYN;K}EHkVOfZtIodAEXGKf>qEz5)Y7viqkmD5dSsj}pi84g>BgTfq z4AmAiT?^?ob3P}EjNXS-cktr00J@qMj|9}BXV${A{8hA-W>Tvc@g<@U;a6A=@&+Rn zDh*+!_^wuThpPCZ63LX`5?uSqAILz~ksDyOgn$YI4ux5M$@ca@CcTdPRznXbispQk zYAWQX_g$q%na4A{d_S>CH9<3uV6YmoMDS6(MpyIF@%o$@S~pg9*J7#h6Ep;p(TLl9 z+gyvPU6kso-;ng`wQ=F0(d!p_lIU@L|fi4hk}#gbp;p1I5Mi$)Qy6n7fn@6rv5m?SF!$7IM;`+ 
z-50a__Ue3I>C8#IG+pYVX$13IPx(ax3??ZI2ZT0tCX)Eo?;Q6Fj6tx-hQ?oxMr;5? zaL3<-lJ0|4NCFY1!OBbWB`2<}mifF7{dd&72LpL}L@Iy>RPX^nq8D6Y}I2 zi#$Xg59vQl4M*x8 z;f@OWLE~gdUwR_{ldBh6MVpORPTT_^0eQy>?(8E4kBI7y_AFS&$Nu&cF+{6^WHwu1 zK*y8j5!e5g&0zRiT5M1{D09P!V&hDb?Rm$maVmHbf7E8!+(3Y{30D5|p)GV3hH102 z7qmzMtLa3t?Y0p-9UC&u+rO?9eLK%d-MVDY@ILQ@|Ia#kan!pdhKoVFTY-HXcn42x)i}y?^KCHBR zZ<)*0*B6=n)$A@8tW6KD_DfD~N4IG6W$d3Sou7KX_s;UGZ4fwbn&oAyz`eA$c-^L; zwI$9p3FlocIJ42ww2^aKu%~qliulBI&u@%t3iflR#gHLt>wl8-N!n!v=<O+9dVvd}`i3CETaf*Hku{ zb=f-te}DfLIf{HtX?HC05h+x30pr%wYWlPr?qdp@j|Ci*Afr_50ASgpUkb-N5@A(hTONK2ZpyPv-Z$rbBT3nmjP%L)vnQGpy*XC2iCikXXeC1~B z_tE?iQg7_HR>it-7J&YwH9>TwK7FTNWr%I2;Q@BOye6N?OIC~%cR!VThiuP7udYo} z3)+n~P5Yhqq>ZHo;&xmIKZ`AJ>#?sbd32H1uA*RTaYeojt_KTh^YnU zlsV>G;=;`V6Q4X}{i@Qv6NFn6Jn%4tRZ;?&1YW-|3wFqq{C-=j z1fETBK3Fx(lK`IRXU2iM+1Nv;En5%=-;!a?GX=$QbY1JHgxk07kGd98=1@3~Jmr?N zU2b|^P7UAa%HYTedGYWqt&+4~jApY(wU?z3e&rodj%8O2USoa%zxGERAQF4>a9T^C z^iV4BjJDZ@j!STf89H-oDmL_f*pp+g>{7f+S>)>UKLb`y2&$a!vX!W9H#~mtWz-$4 zrTQQP%*eiXHz#ew-FXhRauLA^tID|3WOd6H7Ia@R>G~BQaAvE2`5uUDEeaxnSKs`( zcHQFKf?mje!Y{kD)m*6RB8>1j<;}>>ZMbM>5<+QuxTO8G4`5gNK2^+17*_Q@fIO=AEYODze4Lnm5 zYcKh*6q-LEsl_0TRLEat_&ENRjk(SF@Zm~a^BXW4stG3e~=&S)#` zAp~g<^LyG6afd>h1^vuGuWm_Wr@ z0B`&|96l~W5tAvK^M|xv=mB;D8~XmhRr{Eb4KzRCRu@gg%S~CY|+|Zfe|py zxbwb9gdNj3iDG!l_JD%SXkntN_vO=|A_HgjOLihwMa_rE3|Jk1L#^)O znQy^G-_}2RXVVUK&$DyyGh=e}2eG9+JgmEfl+;7+6;c(tSwNb(9u+pQ<|R*-4PbQe zLk%rLY92)fR~2o561Cms|7<#v3Z!06T*LEwygi$>>xMl*gGg9TPM9qjdmvI+yd6QX z7uFc#7+35z0AIQS7C4Y5wX~=-f&f`W2pEk{}g z@7;u={g8hJs3dLUjGZ*hJw&6hLC*)p%0rsIM)2rwC+KNR&*GNAy+Pw#p`Hi}$0D^f zx&P^ugb;d;JjcB%8Cp#k%=!I%5KFY`utD01%!q<#r#rXIY4GJDqRGb1(Lk~4O}B0z z;1_qyZf-bc;A{R)-03jrkEv5M%9bALCpX5zPODYryH! 
z)_oe6Jca>NBHDDyA`aiBiHWgBpdufB zstQu@j5-_K2q(w0ph`MU{_?io9OD&sRvZ-|b6coiV^)p9&-#pUds2VnFuUHv#rB~w zqn*cj6AM`(1{Lg?PiMsnStsIn2-xUaNP$;IV1Di6oH2m5A- z7_rt5xd<>;;c>+#co?4ai$sZdoO5E9?a)7cD@d$;WpIh zeSV-rGybs;yg)y3)>o65iJY{G-(T|%*l>3w4%0HDzBp0rZ6&muQ)|I6GGXhk`gd}PufY0BX%(>8 zP%h3(@-U>!#A*kYY%vP!h{udjj76I$XS^6867*EshOu^k9sa8SM?^RDaL40JK9pq< z8@-e@?Wofdf1*$Pd*k~=<~cl*EMey2`=bpahJXz3SQ(jR-c&Em5OA;zM&zyIF0?Ns z5afxh#*4h|z1bnaS!Ds#ZPi4IJiWd!4;uUWgP{}rwTF9-OdR49dr3)BzczVlY*5k} zJaOid(wp;sWx^OT`Jw!IO`I{NX=10vmf|(gZPr#`bgj ziCvt_1M$tYeB<2jWlTNgTdRvkquDgRLoW>8(f!<`d*NLGY&lZ3D*<>WX5XJb7Rx@D zmm5Y^KZFJbSOrgYyC<+nwu&?x!bYS1tYd3ALHe+Mp%OB-|ACk#l#Y{2*ZqrmRR|Zh zTtCi)BpaMUD48JmFT{3Si1bDzQryVNjgCKDzW@mA)psGbnYn3oUC2jn>iSE>GK zFV{Jh*VvLYJ*rqFHPS#TrM?hnw^%&A|EIE0plZLE0*aQOdbi*E{HpK+i+nr|P3Swe zTz|Oj8W94mNGaJ1m&4myBaj&^^7=tk0{^&`F96@-~|HA`^(&MAt z$9Wc2c<~m(FF;|SR`fM0fhZI1!f6j`I5~emGA@pkm80<-1`}duJ?b{Aqw!@ZUNr0I z={{~hz_w#4pc}?I_#0FgVdCP#e&}~yuC&Ye&1pWzly<0N*1{hidAJ#kLdx{1Mhh9r zS{Qj)rUy2iHpo!HbD=eY?=rd!m~o6Ux9Md+?*H|>ok7WvZlvvUDW^~NLFZ9zG(lC! 
z_m9Ly-2P>@MkK&H=C3W~%Gr^p7iTC{$^OR*m`&72Rj~`s{*BTXSMh!XQO+PFwx+aS^J61Hq9u0a_}+~K+o^(l(fHo%IMkFu))H?=>|KEU4++trlWBGW}LY4j0Q zuCuSP^%N!gC;T^GxKrbVb3}pY9hT(k`Rr>#3Vc>YQY#1Vw3r!(+d>d4VFd_|@=Spl zr1t0`aI}Zsq4$>!4VuXj-__OP?kNB4Ei5eiRzM@zET&WCnpn@>p(=A|^|k;|dwZ2# z(_Jq%eQ!X&6M~hGP7}Q%~?dUDO0uI|7Uz2>HRcS)5?!E z+1L1y2D5)@aPVaRP8$CPdAi28%=hR99oaurhOzQg|3Wvuzpz50%1`m#SwQKiW4A2j zp-HxCQO%nfcz_;~dWq2Af{;i!*IMFH3w-< zD;*^goBBwI#ZXCyCfPj&LDZ$!J}R0S*hSLBFTWh?7WptFnU2o3+d_=DhO2RVTwFhEOv`YbK3 z01|nG#|EiC!1F+Ia2)LIBn z2uaGy3bOft@C3oh3qpB;zW|l+BOrm0Ug1qZzhB`A;J8?XNCj9}xOqWUdUq)q2&sq99fU6K%EZgr+VxrV=?}Sm#O1BE_gdZlre?@*#?qK9?ZUQ|7mmAd>}WbO zSL;MZj&@g;O*6B#@fo@Ry()_;TWh^UgQoRvMWfqmMyOM9RO?Us+G|tVo;&%{^CwNF zyQA7eWDaZ7fsW^BgBz(^3$%N&K%Xw?pmm{{9kljT+d(UY?3h;Bbg`;~HZzM7n%1ke zZu=h9w${GS_I1&^NSmCMqt6NWXL>ZLqjomk-BIfzUhb$R@r{}OCPw2rX|MEh^x&hL zJ~JcDXft!X>GQDk(8lvAn=N)tt8Pwl}h z>e)r>EsA?{s1!wqwt`xbor&H2o~?L3%9*BedX`Pr0?rqSb{x3%g-7glbkSI!;^b; z=E!S2&ok%En_Cs^-p9QQDD&sd(}TnH4JijfoB8wg>iMzY9F6K2t*_8(XhVN+CbNa| zfsmlAv$U!7%s}l$noyv1pwe<}ka#twP5&)H*Nrjop~Y9?B1hYlMQ`gEChBP;TGIpz($m~xJiTsV^1f%| zEWYi55x%uCdDo%Zhq$FKgrU!HZAlhYpNWItcaG-f=xHf7=8=B7Rz&SbXl_qPL7>Y5 z2F2{O;&Rh^PEX6ZNs~ru-E*w;caBXP%e12QY29R-bWT^TuvhqkG%CpSjj+>GxE?)% zrc_jtjAI#`o6H~7?aq3E1;SoOa~Xp1SoKW=@bo7R?I z9;x-ufdUM)zC)*v(pKeI87*2xn?`A$(&QoF{zH}8Q2Of-%}k_~V43ARfx#O%YQ(tn z#$Qxs$71QvX+4I|fpW|pm{vyk?4`d?--|L^H`1Sj8!-u=GvlSDqs%rk9 zxE1{v$usDlxAGdYY%%Ic-uLtA@cg_u{X8pHDxUN6_qC$8uE;a#v9t4c(!I5LQBgfK z|Mu_5Ih0o;=Gu`DbLfh<@@%?xSY$`G6}X&Y|C(oo~?o zs%QgE8=4;x*UX7lT~9w+k+>LkBKk}_svH(EsN$dazI1pbB0l(MbpPk{?Zwf!$a}ls zi}rL|RaB?5ju-5v1#_a2$j*;Ym3B_W-gcAFCO0?>Ph2@nlDWN$9opf4Wb!|Rf-KX>p@;h__ zr}~!O@jd-vU~#o5>E8M092#?Ou|en7SLV~fySj7=7Pxud=r(k$zOsnAOeii${#4#s z{8Zj$>>{elD~;pJ&H_zG<&xW{)S#ieyX>O;zNOLR*SoujuXmsM>?ibSx6X0V>E^Ci zl~8ecXM>)-w`&7^S>8D!mfY8M_y!t%R~LNmmu+i9`MbO5^vcF=8P+TrR#uv2Cf5%u z6YGa{J2;6Rxv6V4f!anqx3OD^M(gkGYLK?I8-yqM@YZhP;jP_28cPp*WpVMpPrA>@ zr^kktnY8Ac?mKDiu(GHa{B8G}f1xQGyVVH&+a80Tp#I@R<3c69MK-2>^&qfch^PV<94+gDLcXMNkfAi3_lo?_j1z22QqfBB|I 
zMrP5jG(rcndKxtTV4oi8Gk$tu?^4>9-7}K>cwukx@xne|PN&VMdd9`zgMH>kXwSE3 z{l?$=G|-CgdPc^>2Ml8nmxpX0r2V^)1Qv{N%z}eZ|6A{ie;OA?1B*=z>{&TZ@7F`rVU9 ze;wG@q_bYNG|D^N?+)4<>l+b&JlyYtpUJrtEx&r(E|K4KqLnYMvBYJE`=6=NrkQ=A z@Q(Fwpv!0VjU=o7-d|Mx{j3So>7^I?#YK@@{&OB3I@Hggb{7xGEadf3<)t{7Pe+xD zPe%>7w*%dJsDE7i_o4xl^XR%`{T*64cfj^mw(ll;%o`wj%&XXQKJ9hN7gDibRxBny zR&gSq3PzP%Qa$8R?Y@f5^vS65f@I-~6{7IPfxo+pHe57dfq<&GGM^^Q9bi(!6|tST zCu3`4cyuwg*66W>UVF5nhK8Srme502#4>yG-Z%cD(ZGEb23emO4Oymm_cNoHAet46 zi}@+@t9Dd(Ma-b&{bmD=sf|UF{$5k~d#%$7Y2k4rF8=teRhdUwpBa{9WQyMT%!nr6 zd({%}y=w1mPe1K27YOjNJcp*<4;Xa*`}R)izt@Z=vrgD}9FcXx{bo6hf7YrIGcWb_ zcU36dGu!!jX z$cV3hr2AG3uNKQ69QjKw<3lU+Y?H+@xK8A*OxKM~LQcrFm1 z)=!GRPFv=VuBPYfC$QXq|pvH+eU1z*$Qte0Z zE=b;R_#$z`;fp(dO7}i6wMJw=Ic;_^eS2`KPABUYL@4p)^gvC;sq2wxCG^@WQzOYS zN2cMq1ka^n%#rDTDy3tOUQ|tMz1l7U?c-hOo2K5yfw|BJ*lcp>#cidmXLpBS~PiJ?@T2YyKs_N|L%|M@P8rK)Pghz0R+Wpv^GDubR`6W>MU2dbjU zy0vjpx3*^RVLJ2ixp7f=>4JMZk^kCUlfI~1&_D&R&y6N`EM0)-ay$#gj-^+u+E0J4 ziPwnrUTs4UTDK-{(0zVgW>=?X)Ry4r+Rv=Tvk1>((SGKVv)-czE?E#4Tl~6Z-RP;h z1qKbDUKiNaIjfeqruATciT~oKLnqa^?Q?lyXjE0*zarp*~K1eRpsmrCycad1VERuZV z(&gffORu}Pl3sPMtrqWZzwVKI8e4U(O)ouo-3~f)&b86xvWKn{%O1LZ%=xt8yychE z_CtBaV(2f|pB|ylFIjHTjrl8fWjlUyKxBm&5UKxV7G1Oby12Nlwth^YfO zP)Gx6>*IgAX61%F8jz?r$je_Tf#LD|mGwvHlYiF7#l8<$ew0i5KF62uoLsq!?l@T= zO|Cn&63=h({0`45vF_CGJG8$&x-u?WefRs^)2L1UN`r>AzhM{sJ%431`JeVTi2t;| zarqv)^h30H`{5f0718dKD|H&S|3(<$Q!69MQ>#{sQ>)heFqF1_`}=A!@x?W5b1A?5 z4F;`^uHFfQQBkluaRn`U_(p)R6|4VWn`C7FjUuu?dHs00@%1$gR9>(;D*QuhQcutW z*RPIK!J##6#F*sT_ghoWs?{b9lp!jxKzCjZTAR zuiHgu9a<9+r)IDF$H(;EjccogJ7)a}je4(NtJ9b%>vz-t)~}5uf0(jf{4nLFrMaX{ zTURZP%)04hgmPxDGpOUXn=`y#zUZb>Ix%})B)N0ZO=9Pwn}4j3=D0?*+jg^)M@3WC zo3!Nqn|IRZQ`Q$Gd+fbg^w_)M^;xuV)=f2HTJ9}-qV!DdO%@IMeq(0Dr%P@rZsquX z@@UB|;%Ldn&yLg9tv4?aXMMl%{CpaA|IG$H(Dv4bEL}`$ck6qf(bqY*#KoqsZ@oK0 zStYlaXX#~oLZE80y{BcJN{#ZJLs}tcF^VM?9@{&W^r7rBIcNG%n-Y)QrBi<(6V_!!wvI74H5aNh`A6{ zTg;rF>Y7Cl--G4U#Z&Xq<(+uYWpqubi|e=*PRw)+LliBLU4rGDu`pQ9iiJU!lM7Q% 
z6*lj{@@;V2a$@4iD^d&OYI-aV+TFT1G_(;jDog|atBcu7P8?BI#}dz6lNu)*wp$)Fyn1=4q2u`# zzGcL4LZhyeC)D-H)JrsGMZl6jtO&Yvt`BvwUAMy3ZCCfiXZ1}iIlMAhxAohg%g*11 zx>zy0!iYKehqFZ8@8m2Husw9~?^AQJF#Kk>l!N{^gu0lzQ(>E)X&U0cH^^DW9!lL! zd8>nkEnOXS`EYgWse)$4V}=SlMs>HO3dOT)QXMjzS+*|db8KDEXWaVGGzha)m}X43 zOwsA4CJ8-qbFdk2L(pZwirYehU|TxYfPBJX55oX)(z=gDxlU47yC;66(@IKKtF()aA>&gDz9I z23=m<8tQ_`QH5<89z=TMw#;GdqD|WaF8yzN&}G36##!hRht9bVoE@D?T_WXadqdDN zx+^reVZ|yu%k&IW9N(1@j&CWoCm6i_y+N02?hSW|L7qGv!YV57OAGabdxM5u9tbtG zOuNE%JaAro{y_Sk&wnUr`1V7ghAo(qdRXodLcO1M|1n^S_Gr*$`J|=7>OHM#SXFeWu+4p$RMHitiriJC0x`osFMWp3m+*3irC!Pv%&%s4UAhT{CxOe-X zGje%=&U+>h|3{t)`jkBz8eE5v>5xm`6JI==IoFTqt>*(idM^ZB_Ph}4Vww(=g9r5{ zuG*K8%a?+N75hUCp^>2+Vphx)Kkk=fkHgj4d@$JR{DVP{w-1JU_)Z1Zf*7Mny^92(w>ShJ9B05xYzt@{M|M8(9i~b+Kb+h2?n`#4-6sW;I)>_N{<9 zZ@v|D8Ga3W5wBPbgax4$F1Km+Zf z3*HU*`S`m*mtMz0T^!qj@iOGX{&OrWs9f znb$AX(vsK(RH13!PO0< z&e&pf6G?cSe`tj|thmz9@i-R%Cro95P=CrK1~e`o&j2WDq$M)f4iF4UPPQ0;U}a*% zQ*MN7_%1&>7jy7)cTL0Ri%Gbqr3e>pvAP)3Rpi4pJw>Xas#W$|x~(k6G9ekUfd9<` zVp-~{Eg*8v84K`Pp+To{8uC^OXKX`nrX|u^&&9E;ooysi*^$_eq8Cus5mVdw7T1I> z47MVe3EVn`HI8X34IL!;`C=SdS;7_wEv3keN`e1WLilBE;!FPK;IU)KTF*cs!tB3{v= zp=<|3$IlZqlVfx#S{}RWpgCOCg62W~`6Rx{jk^v>Kp;;py6JjhB|1KFn#oNp+Syzd zblAg;yd_!;h?)_;7zsYOe86*zDea)#R2q~rTcBlIVVnp4>i{HNy}?&g^a5y#M+3{^ zC`W>QC`V$u0R3z+z!~5_C;GM`!N^Q=V<4;lIHuDbs79t%LGu+ka-an>$JyksLB%6Z z#ukIHCO7zK=PTC}!QB9Tw3H8M01mx%;9sWT5;5Je6@&?z&LI$S)xv-Kt^Ve|e6#S+`iE{f4nx*If7vUk!et=0&nHUhP@#Zx8YjCv@ z=;bSPfxa0HAVAqMTnObFd#1tNZ}>PC%9cZ1NPq|t2@`b>@fnq-_*{@8@Spc}ftff> zu8`4usnD3e2Cv`Hxe)q6X=G^Cy00rZ(|Bor4Md+KRv5OJvT9fpu5Pgs zILu802L2ytzS<5B1CxG3PXO38uO{8uFu+$fAOI`Za0UV*Jb+{vxM9rbNUYlua!CmV zE$yI6``@UMK(smJ0?ku6V|chHe1SeN4Wl88p==JSza@4I@SkDYU?T5qt6VU#KrRa~ z2i%hdl1E~RjfeanH&}+HClpw1Shj+KL35g8EE&NB2IdX}IBhDz3RA+pTeO3FD;J3U zH?iMFl`Iz|ygAp?a?zfm`%#K+%0giI*ThvI2p$hKT&vN? 
zZ^nNhB5=z)gI!Do8?NfI8LsQ{unTCg9DF>OjSvm|H(dBc{Jy&YIb4E0C_;Y4Ji}A8 zo@d4st%tyxA`2dpZPGA;SPdFb2^$Y@A}K}(PHLlJ>)v!uK5#l0 z8gR>t(K$b+3KM+e(61b!89p?IGK8rz5*ANkF5G6$TqK~(Ml9TfQ$e)xl2wRlCW7`{ z6N9Fx0{DT7lMEWpnvduE4A&%Lri6=pVi-6c$Ur+;>JVNabaPtYG+zv~Q?f9iAy~ls zVnC1KRBR3KL}>`1s?dVcf4Q2*lSW^~X8{dYNa+ii$j};bDzg8I!2}HkoOg!f%v6D_ z9N^{yGzD^irg(D+5CQu5Bse_=b`U2wxDzB}=cckiY0Q-I6n_VOJ%H=LPPSknES$oA zOIObrVGBiOELDIJc;KjB=60L8n!%|c`W%~>x~)n-E(wPcXr}~GK>HQJv|WWW37E1* zozW~vPK^5O$7#2k};sc z;bjYiOTyC{G8h$TahmXRDw^OX8DAr6#D~$9L=@V|H+bbCH{W&0v%(J)PN`dx4THXP z?B`VXgtBbpKRBfD|LQw(x{Zl{IFQ8{2F#4PYoPg>Z3rC;$U0p(4OJ z#WMg62};f+grpm7MLKf}iwW4S7%AjMxif@ziqj;~g(8_ik{pJjo#HKkhO|KuUm+yj zxqSyzgu)9TV?{WCwYhbGrBbIy9#1(`h_S*SgoZNhW;Lk?L}+-#LK-igmj#WXR_GwW zWyPcc8pmwfn1wxt&q6j=z%W!3Y8E(`kl|DIgK`CKJrSx^dFC~Rt6B)32e+F~0#l+e z2yt=c=J_bWQ~JWPDKs^#D0Abg)WcQfnn9;ZYZ4T7$G62#rOEXY`kGBNJ7 z+D=I=Brpm5G-VQB?TpnZtVS^zmjz@w6un`ZaQpZiP+~^e;IIle0nL=fSidrfWqB@T zwZR4A0*@F@6By2vrl6_

#)y+`MvsT?7|-J9yMQJAq^eV({tlnl2Ec)Tp)yPXZO; za}5j?0SAt?Ly;gCCmFcr8VmVsUP(f^k)3BG;I#4);kvQw=c1a7dtZoftE)CWWI_0N zp2tCnYr^da+5t>8>Pk!z%D4T;bsOFCGS~Sq`BMzJkz%KkqWPXQTC)bANUL7C5Oj!k8(C=&5D-r5MU3lV+XCbkot9FSJH9fFHISPwS=B(^{gNP;OY zBqyYI9zG}3L3`L>Xc8zW3Gf&Vz9KUS1_CSGZvhR<32zSJ9OPPAat=f!&cd=_ATEjA zZ+PMW6tMwK1g;t8fCqxCLzr7o=g^K#$CM6wfCvr8b_|1MHAhE+i~u|Vz#_I7ygFks z-4D+g0rD{Wu?r8Oknl#F1dfg=5cqH?m<=C^;W#mu71beQjCTscS`ReaY~EdZ(YT$^ z)!~U{f|4ia5dBihLP2w27sH)>gqj&Df^kPcGu#d^6{`+U44MyCH;oqr6M!JW*f9HL zaDtZ~pdubL)(*geN9gd=Hh>Ue_Ct9nrD(t@39ztvH8ME|B7$dOp+Rr~CYlXsBV)vZ zSO`wTHnDbyI-{zwX~)4FvV=Ck2Gag#RHi9wXxVkbAt5Tp2E@V1I%M>0?h7g6l_Q9X zLw$tX;VAJ&Rtq7N^Gn>r^)h^NMcqn29eMlE#EADGvJ^fN(2(&D-x6Ff6#ay`03B*4 zcIWESb-PCZM58A>+N9af# z`Qi&RPN2zB(QrGN{zGv9BSN-Vp`LuNl1FA8U>w}|a9^3-WYz!34nwhqxqxu1(HJdl z!ha|QMjVa}C~q;dVh)fy58a~%4(cMz1xTt4EQJ;*>k$BhThfr6v@DB5Xq*QWJ?o4d zBjU0xJ4;-ERAyNE;gcfS9oB@hUWLaLpa)?+*~>IA?no+yWdSr4^Mn zs)JWV!ZfIUgghAyvHV7i*@*uD++!XyM4%xJ9p0UMU(sM;J3A$d|bt20THN^~B*xPy~RITTX-X6Iu*z16-Z3#sm$oH#Ntc0{n-xE2;*=9*m^H zs5YKcAV{*z7v@Q!`7BZf7XqL)97B{epokC$;j6&^VHF`DqE;geKtKyJ-z|3x8sduK zD=Q5?-j56k9B4=|hV=qyShAEOY5y}8P!cgOTcK3o;hc2P4zq+8gLZ(0oaV5?eAyQ_ zqFELs{tJjr8Yyj#16sI_IcUdKuxuxlKcCoTNGfROg+Z&NC6M}K=LuZF?15~Eq6vjF zxW;N4B9bQETGBXn3Q7Fyb%WqlgkI7Xt;Dh_B9f$ayuYGmXrZma9g%R z2sfl?0vd}41T;Gwwh3rn8mM6ZP3#Az35VQdLk`0N8cVMRG=${&qIprCYzL&*Ed9AS z|A1(Q-Q$3U7s2^}I43UY;2~EkD6eApPmU+~rHtp#QH2KKU`62YBA+rb0#(W}0huX_ zrX5`o>;}FITXeZ5pjTxopb%wP0B&Vi`6~i^KPV>R7Y!bPJpD*mKIxpYorPy3g2t@_ ze#yfK<7BK8S3eASB#ky=hy)xysy*Y0p4 zVfBsheAwZT1Ikzg;}f7%7_`cE62$R#sITEd4KUxr%4XEJjK%YVMEF}}B;06zWr4rd ziQ#A}6Ql5w%b=UEk%;B1Mp#eTF}y`CE%N0`WfEvEr7!HKvN`Bmd5uHr+lA~Xli=XF zNTLKJ97B@qm#>rZjle}{sVk(aK(SoQU};)?;zt%G6^sAl@p^N zLXi(Yfe1ZcwTuoaG!|vSfN*J)8-Z-GqCe!@2z(CclC&~uslMS8DcVFv+_-!oN~mZP z08~Yr$gmkdI{1_!t_JEGa6u88Ujd%bcFYBrTtKXfvlXF)up=C_gJd$UB4^HOYh^== z<>&$$JQLm-c}QhqxJe4Dp;EZaK*?tWv(bH*lR$tKRwJRROaf*pJ3%RgG6|q8zY!>t z3j^MSoEREYnFKgd*$MQmGBG?XWnx6@_>Dj$G8{LM^TM4', e) log.exception('Error in process') log.error(f'Task 
processing failed {e} : {taskid}') else: diff --git a/backend/dataall/aws/handlers/stepfunction.py b/backend/dataall/aws/handlers/stepfunction.py index 9b2916cec..9d497d63d 100644 --- a/backend/dataall/aws/handlers/stepfunction.py +++ b/backend/dataall/aws/handlers/stepfunction.py @@ -18,7 +18,7 @@ def run_pipeline(state_machine_name, env: models.Environment, stage='Test'): ) response = client.start_execution(stateMachineArn=arn) - print(response) + return response['executionArn'] diff --git a/backend/dataall/aws/handlers/sts.py b/backend/dataall/aws/handlers/sts.py index fe0d21d8c..d55e23a06 100644 --- a/backend/dataall/aws/handlers/sts.py +++ b/backend/dataall/aws/handlers/sts.py @@ -81,9 +81,7 @@ def get_secret(cls, secret_name): try: session = SessionHelper.get_session() client = session.client('secretsmanager', region_name=region) - secret_string = client.get_secret_value(SecretId=secret_name).get( - 'SecretString' - ) + secret_string = client.get_secret_value(SecretId=secret_name).get('SecretString') log.debug(f'Found Secret {secret_name}|{secret_string}') except ClientError as e: log.warning(f'Secret {secret_name} not found: {e}') @@ -97,9 +95,7 @@ def get_external_id_secret(cls): :return: :rtype: """ - return SessionHelper.get_secret( - secret_name=f'dataall-externalId-{os.getenv("envname", "local")}' - ) + return SessionHelper.get_secret(secret_name=f'dataall-externalId-{os.getenv("envname", "local")}') @classmethod def get_delegation_role_name(cls): @@ -107,17 +103,10 @@ def get_delegation_role_name(cls): Returns: string: name of the assumed role """ - return ( - SessionHelper.get_secret( - secret_name=f'dataall-pivot-role-name-{os.getenv("envname", "local")}' - ) - or 'dataallPivotRole' - ) + return SessionHelper.get_secret(secret_name=f'dataall-pivot-role-name-{os.getenv("envname", "local")}') @classmethod - def get_console_access_url( - cls, boto3_session, region='eu-west-1', bucket=None, redshiftcluster=None - ): + def get_console_access_url(cls, 
boto3_session, region='eu-west-1', bucket=None, redshiftcluster=None): """Returns an AWS Console access url for the boto3 session Args: boto3_session(object): a boto3 session @@ -133,9 +122,7 @@ def get_console_access_url( request_parameters = '?Action=getSigninToken' # request_parameters = "&SessionDuration=43200" - request_parameters += '&Session=' + urllib.parse.quote_plus( - json_string_with_temp_credentials - ) + request_parameters += '&Session=' + urllib.parse.quote_plus(json_string_with_temp_credentials) request_url = 'https://signin.aws.amazon.com/federation' + request_parameters r = urllib.request.urlopen(request_url).read() @@ -145,20 +132,15 @@ def get_console_access_url( request_parameters += '&Issuer=Example.org' if bucket: request_parameters += '&Destination=' + quote_plus( - 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format( - region, bucket - ) + 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) ) elif redshiftcluster: request_parameters += '&Destination=' + quote_plus( - f'https://{region}.console.aws.amazon.com/redshiftv2/' - f'home?region={region}#query-editor:' + f'https://{region}.console.aws.amazon.com/redshiftv2/' f'home?region={region}#query-editor:' ) else: - request_parameters += '&Destination=' + urllib.parse.quote_plus( - f'https://{region}.console.aws.amazon.com/' - ) + request_parameters += '&Destination=' + urllib.parse.quote_plus(f'https://{region}.console.aws.amazon.com/') request_parameters += '&SigninToken=' + signin_token['SigninToken'] request_url = 'https://signin.aws.amazon.com/federation' + request_parameters @@ -173,9 +155,18 @@ def get_delegation_role_arn(cls, accountid): Returns: string : arn of the delegation role on the target aws account id """ - return 'arn:aws:iam::{}:role/{}'.format( - accountid, cls.get_delegation_role_name() - ) + return 'arn:aws:iam::{}:role/{}'.format(accountid, cls.get_delegation_role_name()) + + @classmethod + def get_cdk_look_up_role_arn(cls, accountid, region): 
+ """Returns the name that will be assumed to perform IAM actions on a given AWS accountid using CDK Toolkit role + Args: + accountid(string) : aws account id + Returns: + string : arn of the CDKToolkit role on the target aws account id + """ + log.info(f"Getting CDK look up role: arn:aws:iam::{accountid}:role/cdk-hnb659fds-lookup-role-{accountid}-{region}") + return 'arn:aws:iam::{}:role/cdk-hnb659fds-lookup-role-{}-{}'.format(accountid, accountid, region) @classmethod def get_delegation_role_id(cls, accountid): @@ -191,22 +182,27 @@ def get_delegation_role_id(cls, accountid): return response['Role']['RoleId'] @classmethod - def remote_session(cls, accountid): + def remote_session(cls, accountid, role=None): """Creates a remote boto3 session on the remote AWS account , assuming the delegation Role Args: accountid(string) : aws account id + role(string) : arn of the IAM role to assume in the boto3 session Returns : - boto3.session.Session: boto3 Session, on the target aws accountid, assuming the delegation role + boto3.session.Session: boto3 Session, on the target aws accountid, assuming the delegation role or a provided role """ base_session = cls.get_session() - session = SessionHelper.get_session( - base_session=base_session, role_arn=cls.get_delegation_role_arn(accountid) - ) + if role: + log.info(f"Remote boto3 session using role={role} for account={accountid}") + role_arn = role + else: + log.info(f"Remote boto3 session using pivot role for account= {accountid}") + role_arn = cls.get_delegation_role_arn(accountid=accountid) + session = SessionHelper.get_session(base_session=base_session, role_arn=role_arn) return session @classmethod def get_account(cls, session=None): - """Returns the aws account id associated with the default session, or the priovided session + """Returns the aws account id associated with the default session, or the provided session Args: session(object, optional) : boto3 session Returns : @@ -279,11 +275,7 @@ def 
filter_roles_in_account(accountid, arns): Return : list : list of all arns within the account """ - return [ - arn - for arn in arns - if SessionHelper.extract_account_from_role_arn(arn) == accountid - ] + return [arn for arn in arns if SessionHelper.extract_account_from_role_arn(arn) == accountid] @staticmethod def get_role_ids(accountid, arns): @@ -317,32 +309,20 @@ def get_session_by_access_key_and_secret_key(cls, access_key_id, secret_key): if not access_key_id or not secret_key: raise ValueError('Passed access_key_id and secret_key are invalid') - return boto3.Session( - aws_access_key_id=access_key_id, aws_secret_access_key=secret_key - ) + return boto3.Session(aws_access_key_id=access_key_id, aws_secret_access_key=secret_key) @staticmethod - def generate_console_url( - credentials, session_duration=None, region='eu-west-1', bucket=None - ): + def generate_console_url(credentials, session_duration=None, region='eu-west-1', bucket=None): json_string_with_temp_credentials = '{' - json_string_with_temp_credentials += ( - '"sessionId":"' + credentials['AccessKeyId'] + '",' - ) - json_string_with_temp_credentials += ( - '"sessionKey":"' + credentials['SecretAccessKey'] + '",' - ) - json_string_with_temp_credentials += ( - '"sessionToken":"' + credentials['SessionToken'] + '"' - ) + json_string_with_temp_credentials += '"sessionId":"' + credentials['AccessKeyId'] + '",' + json_string_with_temp_credentials += '"sessionKey":"' + credentials['SecretAccessKey'] + '",' + json_string_with_temp_credentials += '"sessionToken":"' + credentials['SessionToken'] + '"' json_string_with_temp_credentials += '}' request_parameters = '?Action=getSigninToken' if session_duration: request_parameters += '&SessionDuration={}'.format(session_duration) - request_parameters += '&Session=' + quote_plus( - json_string_with_temp_credentials - ) + request_parameters += '&Session=' + quote_plus(json_string_with_temp_credentials) request_url = 'https://signin.aws.amazon.com/federation' + 
request_parameters r = urlopen(request_url).read() @@ -352,14 +332,10 @@ def generate_console_url( request_parameters += '&Issuer=Example.org' if bucket: request_parameters += '&Destination=' + quote_plus( - 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format( - region, bucket - ) + 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) ) else: - request_parameters += '&Destination=' + quote_plus( - 'https://{}.console.aws.amazon.com/'.format(region) - ) + request_parameters += '&Destination=' + quote_plus('https://{}.console.aws.amazon.com/'.format(region)) request_parameters += '&SigninToken=' + signin_token['SigninToken'] request_url = 'https://signin.aws.amazon.com/federation' + request_parameters diff --git a/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/__init__.py b/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/__init__.py new file mode 100644 index 000000000..6c32d76cc --- /dev/null +++ b/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/__init__.py @@ -0,0 +1 @@ +from .index import * diff --git a/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/index.py b/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/index.py new file mode 100644 index 000000000..216c8b23f --- /dev/null +++ b/backend/dataall/cdkproxy/assets/datalakelocationcustomresource/index.py @@ -0,0 +1,89 @@ +import logging +import os +import json +import boto3 +from botocore.exceptions import ClientError + +logger = logging.getLogger() +logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) +log = logging.getLogger(__name__) + +lf_client = boto3.client("lakeformation", region_name=os.environ.get("AWS_REGION")) + + +def on_event(event, context): + request_type = event["RequestType"] + if request_type == "Create": + return on_create(event) + if request_type == "Update": + return on_update(event) + if request_type == "Delete": + return on_delete(event) + raise Exception(f"Invalid request type: 
{request_type}") + + +def on_create(event): + """ Checks if the S3 location is already registered in Lake Formation. + If already registered it updated the roleArn for the location. + If not registered, it registers the location. + """ + props = event["ResourceProperties"] + if not _is_resource_registered(props["ResourceArn"]): + register(props) + else: + update(props) + + +def _is_resource_registered(resource_arn: str): + try: + lf_client.describe_resource(ResourceArn=resource_arn) + log.info(f"LakeFormation Resource: {resource_arn} already registered") + return True + except ClientError as client_error: + if client_error.response["Error"]["Code"] == "EntityNotFoundException": + log.info(f"LakeFormation Resource: {resource_arn} not found") + return False + else: + raise client_error + + +def register(props): + resource_arn = props["ResourceArn"] + role_arn = props["RoleArn"] + log.info(f"Registering LakeFormation Resource: {resource_arn} and roleArn: {role_arn}") + try: + lf_client.register_resource( + ResourceArn=resource_arn, + UseServiceLinkedRole=props["UseServiceLinkedRole"] == "True", + RoleArn=role_arn, + ) + except ClientError as e: + log.exception(f"Could not register LakeFormation resource: {resource_arn}") + raise Exception(f"Could not register LakeFormation resource: {resource_arn} , received {str(e)}") + + +def on_update(event): + on_create(event) + + +def update(props): + resource_arn = props["ResourceArn"] + role_arn = props["RoleArn"] + log.info(f"Updating LakeFormation Resource: {resource_arn} with roleArn: {role_arn}") + try: + lf_client.update_resource(RoleArn=role_arn, ResourceArn=resource_arn) + except ClientError as e: + log.exception(f"Could not update LakeFormation resource: {resource_arn}") + raise Exception(f"Could not update LakeFormation resource: {resource_arn}, received {str(e)}") + + +def on_delete(event): + """ Deregisters the S3 location from Lake Formation + """ + resource_arn = event["ResourceProperties"]["ResourceArn"] + 
log.info(f"Unregistering LakeFormation Resource: {resource_arn}") + try: + lf_client.deregister_resource(ResourceArn=resource_arn) + except ClientError as e: + log.exception(f"Could not unregister LakeFormation resource: {resource_arn}") + raise Exception(f"Could not unregister LakeFormation Resource: {resource_arn}, received {str(e)}") diff --git a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py index 07e679b01..6c83b8e73 100644 --- a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py +++ b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py @@ -1,7 +1,21 @@ import os +import json import boto3 from botocore.exceptions import ClientError import uuid +import logging + +logger = logging.getLogger() +logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) +log = logging.getLogger(__name__) + +AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') +AWS_REGION = os.environ.get('AWS_REGION') +DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') +DEFAULT_CDK_ROLE_ARN = os.environ.get('DEFAULT_CDK_ROLE_ARN') + +glue_client = boto3.client('glue', region_name=AWS_REGION) +lf_client = boto3.client('lakeformation', region_name=AWS_REGION) def clean_props(**props): @@ -10,9 +24,6 @@ def clean_props(**props): def on_event(event, context): - AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') - AWS_REGION = os.environ.get('AWS_REGION') - DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') request_type = event['RequestType'] if request_type == 'Create': @@ -25,31 +36,28 @@ def on_event(event, context): def on_create(event): - AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') - AWS_REGION = os.environ.get('AWS_REGION') - DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') - DEFAULT_CDK_ROLE_ARN = os.environ.get('DEFAULT_CDK_ROLE_ARN') + """Creates if it does not exist Glue database for the data.all Dataset + Grants permissions to Database Administrators = 
dataset Admin team IAM role, pivotRole, dataset IAM role + """ props = clean_props(**event['ResourceProperties']) - print('Create new resource with props %s' % props) - glue = boto3.client('glue', region_name=AWS_REGION) - lf = boto3.client('lakeformation', region_name=AWS_REGION) + log.info('Create new resource with props %s' % props) + exists = False try: - glue.get_database(Name=props['DatabaseInput']['Name']) + glue_client.get_database(Name=props['DatabaseInput']['Name']) exists = True except ClientError as e: pass if not exists: try: - response = glue.create_database( + response = glue_client.create_database( CatalogId=props.get('CatalogId'), DatabaseInput=props.get('DatabaseInput'), ) except ClientError as e: - raise Exception( - f"Could not create Glue Database {props['DatabaseInput']['Name']} in aws://{AWS_ACCOUNT}/{AWS_REGION}, received {str(e)}" - ) + log.exception(f"Could not create Glue Database {props['DatabaseInput']['Name']} in aws://{AWS_ACCOUNT}/{AWS_REGION}, received {str(e)}") + raise Exception(f"Could not create Glue Database {props['DatabaseInput']['Name']} in aws://{AWS_ACCOUNT}/{AWS_REGION}, received {str(e)}") Entries = [] for i, role_arn in enumerate(props.get('DatabaseAdministrators')): @@ -67,11 +75,13 @@ def on_create(event): 'Alter'.upper(), 'Create_table'.upper(), 'Drop'.upper(), + 'Describe'.upper(), ], 'PermissionsWithGrantOption': [ 'Alter'.upper(), 'Create_table'.upper(), 'Drop'.upper(), + 'Describe'.upper(), ], } ) @@ -90,7 +100,7 @@ def on_create(event): 'PermissionsWithGrantOption': ['SELECT', 'ALTER', 'DESCRIBE'], } ) - lf.batch_grant_permissions(CatalogId=props['CatalogId'], Entries=Entries) + lf_client.batch_grant_permissions(CatalogId=props['CatalogId'], Entries=Entries) physical_id = props['DatabaseInput']['Name'] return {'PhysicalResourceId': physical_id} @@ -101,23 +111,20 @@ def on_update(event): def on_delete(event): - AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') - AWS_REGION = os.environ.get('AWS_REGION') - 
DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') + """ Deletes the created Glue database. + With this action, Lake Formation permissions are also deleted. + """ physical_id = event['PhysicalResourceId'] - print('delete resource %s' % physical_id) - glue = boto3.client('glue', region_name=AWS_REGION) + log.info('delete resource %s' % physical_id) try: - glue.get_database(Name=physical_id) + glue_client.get_database(Name=physical_id) except ClientError as e: + log.exception(f'Resource {physical_id} does not exists') raise Exception(f'Resource {physical_id} does not exists') try: - response = glue.delete_database(CatalogId=AWS_ACCOUNT, Name=physical_id) - print( - f'Successfully deleted database {physical_id} in aws://{AWS_ACCOUNT}/{AWS_REGION}' - ) + response = glue_client.delete_database(CatalogId=AWS_ACCOUNT, Name=physical_id) + log.info(f'Successfully deleted database {physical_id} in aws://{AWS_ACCOUNT}/{AWS_REGION}') except ClientError as e: - raise Exception( - f'Could not delete databse {physical_id} in aws://{AWS_ACCOUNT}/{AWS_REGION}' - ) + log.exception(f'Could not delete databse {physical_id} in aws://{AWS_ACCOUNT}/{AWS_REGION}') + raise Exception(f'Could not delete databse {physical_id} in aws://{AWS_ACCOUNT}/{AWS_REGION}') diff --git a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/__init__.py b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/__init__.py new file mode 100644 index 000000000..6c32d76cc --- /dev/null +++ b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/__init__.py @@ -0,0 +1 @@ +from .index import * diff --git a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/index.py b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/index.py new file mode 100644 index 000000000..69abcd85c --- /dev/null +++ b/backend/dataall/cdkproxy/assets/gluedatabasecustomresource_nodelete/index.py @@ -0,0 +1,118 @@ +import os +import json +import boto3 
+from botocore.exceptions import ClientError +import uuid +import logging + +logger = logging.getLogger() +logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) +log = logging.getLogger(__name__) + +AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') +AWS_REGION = os.environ.get('AWS_REGION') +DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') +DEFAULT_CDK_ROLE_ARN = os.environ.get('DEFAULT_CDK_ROLE_ARN') + +glue_client = boto3.client('glue', region_name=AWS_REGION) +lf_client = boto3.client('lakeformation', region_name=AWS_REGION) + + +def clean_props(**props): + data = {k: props[k] for k in props.keys() if k != 'ServiceToken'} + return data + + +def on_event(event, context): + + request_type = event['RequestType'] + if request_type == 'Create': + return on_create(event) + if request_type == 'Update': + return on_update(event) + if request_type == 'Delete': + return on_delete(event) + raise Exception('Invalid request type: %s' % request_type) + + +def on_create(event): + """Creates if it does not exist Glue database for the data.all Dataset + Grants permissions to Database Administrators = dataset Admin team IAM role, pivotRole, dataset IAM role + """ + props = clean_props(**event['ResourceProperties']) + log.info('Create new resource with props %s' % props) + + exists = False + try: + glue_client.get_database(Name=props['DatabaseInput']['Name']) + exists = True + except ClientError as e: + pass + + if not exists: + try: + response = glue_client.create_database( + CatalogId=props.get('CatalogId'), + DatabaseInput=props.get('DatabaseInput'), + ) + except ClientError as e: + log.exception(f"Could not create Glue Database {props['DatabaseInput']['Name']} in aws://{AWS_ACCOUNT}/{AWS_REGION}, received {str(e)}") + raise Exception(f"Could not create Glue Database {props['DatabaseInput']['Name']} in aws://{AWS_ACCOUNT}/{AWS_REGION}, received {str(e)}") + + Entries = [] + for i, role_arn in enumerate(props.get('DatabaseAdministrators')): + Entries.append( + { + 'Id': 
str(uuid.uuid4()), + 'Principal': {'DataLakePrincipalIdentifier': role_arn}, + 'Resource': { + 'Database': { + # 'CatalogId': AWS_ACCOUNT, + 'Name': props['DatabaseInput']['Name'] + } + }, + 'Permissions': [ + 'Alter'.upper(), + 'Create_table'.upper(), + 'Drop'.upper(), + 'Describe'.upper(), + ], + 'PermissionsWithGrantOption': [ + 'Alter'.upper(), + 'Create_table'.upper(), + 'Drop'.upper(), + 'Describe'.upper(), + ], + } + ) + Entries.append( + { + 'Id': str(uuid.uuid4()), + 'Principal': {'DataLakePrincipalIdentifier': role_arn}, + 'Resource': { + 'Table': { + 'DatabaseName': props['DatabaseInput']['Name'], + 'TableWildcard': {}, + 'CatalogId': props.get('CatalogId'), + } + }, + 'Permissions': ['SELECT', 'ALTER', 'DESCRIBE'], + 'PermissionsWithGrantOption': ['SELECT', 'ALTER', 'DESCRIBE'], + } + ) + lf_client.batch_grant_permissions(CatalogId=props['CatalogId'], Entries=Entries) + physical_id = props['DatabaseInput']['Name'] + + return {'PhysicalResourceId': physical_id} + + +def on_update(event): + return on_create(event) + + +def on_delete(event): + """ Does not Delete the created Glue database. 
+ This is a risky action which would be done manually by customers + """ + physical_id = event['PhysicalResourceId'] + log.info('Keeping resources %s' % physical_id) diff --git a/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py b/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py index bc9e9ad7e..c6f706fa8 100644 --- a/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py +++ b/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py @@ -1,6 +1,16 @@ import os import boto3 from botocore.exceptions import ClientError +import logging + +logger = logging.getLogger() +logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) +log = logging.getLogger(__name__) + +AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') +AWS_REGION = os.environ.get('AWS_REGION') +lf_client = boto3.client("lakeformation", region_name=os.environ.get("AWS_REGION")) +iam_client = boto3.client('iam') def clean_props(**props): @@ -8,10 +18,21 @@ def clean_props(**props): return data +def validate_principals(principals): + validated_principals = [] + for principal in principals: + if ":role/" in principal: + log.info(f'Principal {principal} is an IAM role, validating....') + try: + iam_client.get_role(RoleName=principal.split("/")[-1]) + log.info(f'Adding principal {principal} to validated principals') + validated_principals.append(principal) + except Exception as e: + log.exception(f'Failed to get role {principal} due to: {e}') + return validated_principals + + def on_event(event, context): - AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') - AWS_REGION = os.environ.get('AWS_REGION') - DEFAULT_ENV_ROLE_ARN = os.environ.get('DEFAULT_ENV_ROLE_ARN') request_type = event['RequestType'] if request_type == 'Create': @@ -24,14 +45,12 @@ def on_event(event, context): def on_create(event): - AWS_ACCOUNT = os.environ.get('AWS_ACCOUNT') - AWS_REGION = os.environ.get('AWS_REGION') + """"Adds the PivotRole to the existing Data Lake Administrators + Before 
adding any principal, it validates it exists if it is an IAM role + """ props = clean_props(**event['ResourceProperties']) try: - client = boto3.client('lakeformation', region_name=AWS_REGION) - - response = client.get_data_lake_settings(CatalogId=AWS_ACCOUNT) - + response = lf_client.get_data_lake_settings(CatalogId=AWS_ACCOUNT) existing_admins = response.get('DataLakeSettings', {}).get('DataLakeAdmins', []) if existing_admins: existing_admins = [ @@ -40,21 +59,22 @@ def on_create(event): new_admins = props.get('DataLakeAdmins', []) new_admins.extend(existing_admins or []) + validated_new_admins = validate_principals(new_admins) - response = client.put_data_lake_settings( + response = lf_client.put_data_lake_settings( CatalogId=AWS_ACCOUNT, DataLakeSettings={ 'DataLakeAdmins': [ {'DataLakePrincipalIdentifier': principal} - for principal in new_admins + for principal in validated_new_admins ] }, ) - print( - f'Successfully configured AWS LakeFormation data lake admins: {new_admins}| {response}' - ) + log.info(f'Successfully configured AWS LakeFormation data lake admins: {validated_new_admins}| {response}') + except ClientError as e: - print(f'Failed to setup AWS LakeFormation data lake admins due to: {e}') + log.exception(f'Failed to setup AWS LakeFormation data lake admins due to: {e}') + raise Exception(f'Failed to setup AWS LakeFormation data lake admins due to: {e}') return { 'PhysicalResourceId': f'LakeFormationDefaultSettings{AWS_ACCOUNT}{AWS_REGION}' @@ -66,4 +86,38 @@ def on_update(event): def on_delete(event): - pass + """"Removes the PivotRole from the existing Data Lake Administrators + Before adding any principal, it validates it exists if it is an IAM role + """ + props = clean_props(**event['ResourceProperties']) + try: + response = lf_client.get_data_lake_settings(CatalogId=AWS_ACCOUNT) + existing_admins = response.get('DataLakeSettings', {}).get('DataLakeAdmins', []) + if existing_admins: + existing_admins = [ + admin['DataLakePrincipalIdentifier'] 
for admin in existing_admins + ] + + added_admins = props.get('DataLakeAdmins', []) + for added_admin in added_admins: + existing_admins.remove(added_admin) + + validated_new_admins = validate_principals(existing_admins) + response = lf_client.put_data_lake_settings( + CatalogId=AWS_ACCOUNT, + DataLakeSettings={ + 'DataLakeAdmins': [ + {'DataLakePrincipalIdentifier': principal} + for principal in validated_new_admins + ] + }, + ) + log.info(f'Successfully configured AWS LakeFormation data lake admins: {validated_new_admins}| {response}') + + except ClientError as e: + log.exception(f'Failed to setup AWS LakeFormation data lake admins due to: {e}') + raise Exception(f'Failed to setup AWS LakeFormation data lake admins due to: {e}') + + return { + 'PhysicalResourceId': f'LakeFormationDefaultSettings{AWS_ACCOUNT}{AWS_REGION}' + } diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index 8066d9350..fccd192cf 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -29,17 +29,8 @@ def aws_configure(profile_name='default'): print(' Running configure ') print('..............................................') print(f"AWS_CONTAINER_CREDENTIALS_RELATIVE_URI: {os.getenv('AWS_CONTAINER_CREDENTIALS_RELATIVE_URI')}") - cmd = [ - 'curl', - '169.254.170.2$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI' - ] - process = subprocess.run( - ' '.join(cmd), - text=True, - shell=True, # nosec - encoding='utf-8', - capture_output=True - ) + cmd = ['curl', '169.254.170.2$AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'] + process = subprocess.run(' '.join(cmd), text=True, shell=True, encoding='utf-8', capture_output=True) # nosec creds = None if process.returncode == 0: creds = ast.literal_eval(process.stdout) @@ -55,9 +46,7 @@ def update_stack_output(session, stack): try: stack_outputs = cfn.Stack(f'{stack.name}').outputs except ClientError as e: - logger.warning( - f'Failed to retrieve stack output for 
stack {stack.name} due to: {e}' - ) + logger.warning(f'Failed to retrieve stack output for stack {stack.name} due to: {e}') if stack_outputs: for output in stack_outputs: outputs[output['OutputKey']] = output['OutputValue'] @@ -76,25 +65,29 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s with engine.scoped_session() as session: try: stack: models.Stack = session.query(models.Stack).get(stackid) - logger.warning(f"stackuri = {stack.stackUri}, stackId = {stack.stackid}") + logger.warning(f'stackuri = {stack.stackUri}, stackId = {stack.stackid}') stack.status = 'PENDING' session.commit() - if stack.stack == "cdkpipeline" or stack.stack == "template": + if stack.stack == 'cdkpipeline' or stack.stack == 'template': cdkpipeline = CDKPipelineStack(stack.targetUri) venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - path = f"./cdkpipeline/{pipeline.repo}/" + path = f'./cdkpipeline/{pipeline.repo}/' app_path = './app.py' if not venv_name: - logger.info("Successfully Updated CDK Pipeline") + logger.info('Successfully Updated CDK Pipeline') meta = describe_stack(stack) stack.stackid = meta['StackId'] stack.status = meta['StackStatus'] update_stack_output(session, stack) return - cwd = os.path.join(os.path.dirname(os.path.abspath(__file__)), path) if path else os.path.dirname(os.path.abspath(__file__)) + cwd = ( + os.path.join(os.path.dirname(os.path.abspath(__file__)), path) + if path + else os.path.dirname(os.path.abspath(__file__)) + ) python_path = '/:'.join(sys.path)[1:] + ':/code' logger.info(f'python path = {python_path}') @@ -113,7 +106,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s 'AWS_SESSION_TOKEN': creds.get('Token'), } ) - if stack.stack == "template": + if stack.stack == 'template': resp = subprocess.run( ['. 
~/.nvm/nvm.sh && cdk ls'], cwd=cwd, @@ -121,18 +114,16 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s shell=True, # nosec encoding='utf-8', stdout=subprocess.PIPE, - env=env + env=env, ) - logger.info(f"CDK Apps: {resp.stdout}") + logger.info(f'CDK Apps: {resp.stdout}') stack.name = resp.stdout.split('\n')[0] app_path = app_path or './app.py' logger.info(f'app_path: {app_path}') - cmd = [ - '' - '. ~/.nvm/nvm.sh &&', + '' '. ~/.nvm/nvm.sh &&', 'cdk', 'deploy --all', '--require-approval', @@ -159,9 +150,9 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s '--verbose', ] - if stack.stack == "template" or stack.stack == "cdkpipeline": - if stack.stack == "template": - cmd.insert(0, f"source {venv_name}/bin/activate;") + if stack.stack == 'template' or stack.stack == 'cdkpipeline': + if stack.stack == 'template': + cmd.insert(0, f'source {venv_name}/bin/activate;') aws = SessionHelper.remote_session(stack.accountid) creds = aws.get_credentials() env.update( @@ -172,7 +163,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s 'CDK_DEFAULT_ACCOUNT': stack.accountid, 'AWS_ACCESS_KEY_ID': creds.access_key, 'AWS_SECRET_ACCESS_KEY': creds.secret_key, - 'AWS_SESSION_TOKEN': creds.token + 'AWS_SESSION_TOKEN': creds.token, } ) @@ -186,8 +177,8 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s env=env, cwd=cwd, ) - if stack.stack == "cdkpipeline" or stack.stack == "template": - CDKPipelineStack.clean_up_repo(path=f"./{pipeline.repo}") + if stack.stack == 'cdkpipeline' or stack.stack == 'template': + CDKPipelineStack.clean_up_repo(path=f'./{pipeline.repo}') if process.returncode == 0: meta = describe_stack(stack) @@ -196,9 +187,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s update_stack_output(session, stack) else: stack.status = 'CREATE_FAILED' - logger.error( - f'Failed to deploy stack {stackid} due to 
{str(process.stderr)}' - ) + logger.error(f'Failed to deploy stack {stackid} due to {str(process.stderr)}') AlarmService().trigger_stack_deployment_failure_alarm(stack=stack) except Exception as e: @@ -219,9 +208,7 @@ def describe_stack(stack, engine: Engine = None, stackid: str = None): meta = resource.Stack(f'{stack.name}') return {'StackId': meta.stack_id, 'StackStatus': meta.stack_status} except ClientError as e: - logger.warning( - f'Failed to retrieve stack output for stack {stack.name} due to: {e}' - ) + logger.warning(f'Failed to retrieve stack output for stack {stack.name} due to: {e}') meta = resource.Stack(stack.stackid) return {'StackId': meta.stack_id, 'StackStatus': meta.stack_status} diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py index cd5fbb4c7..410d4b79d 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/cdkproxy/stacks/dataset.py @@ -17,12 +17,11 @@ Tags, ) from aws_cdk.aws_glue import CfnCrawler -from sqlalchemy import and_, or_ from .manager import stack from ... 
import db -from ...aws.handlers.quicksight import Quicksight from ...aws.handlers.lakeformation import LakeFormation +from ...aws.handlers.quicksight import Quicksight from ...aws.handlers.sts import SessionHelper from ...db import models from ...db.api import Environment @@ -34,6 +33,14 @@ @stack(stack='dataset') class Dataset(Stack): + """Deploy common dataset resources: + - dataset S3 Bucket + KMS key (If S3 Bucket not imported) + - dataset IAM role + - custom resource to create glue database and grant permissions + - custom resource to register S3 location in LF + - Glue crawler + - Glue profiling job + """ module_name = __file__ def get_engine(self) -> db.Engine: @@ -71,84 +78,6 @@ def get_target(self) -> models.Dataset: raise Exception('ObjectNotFound') return dataset - def get_shared_tables(self) -> typing.List[models.ShareObjectItem]: - engine = self.get_engine() - with engine.scoped_session() as session: - tables = ( - session.query( - models.DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), - models.DatasetTable.GlueTableName.label('GlueTableName'), - models.DatasetTable.AWSAccountId.label('SourceAwsAccountId'), - models.DatasetTable.region.label('SourceRegion'), - models.Environment.AwsAccountId.label('TargetAwsAccountId'), - models.Environment.region.label('TargetRegion'), - ) - .join( - models.ShareObjectItem, - and_( - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri - ), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.DatasetTable.datasetUri == self.target_uri, - models.DatasetTable.deleted.is_(None), - models.ShareObjectItem.status.in_(self.shared_states) - ) - ) - .all() - ) - logger.info(f'found {len(tables)} shared tables') - return tables - - def get_shared_folders(self) -> typing.List[models.DatasetStorageLocation]: - engine = 
self.get_engine() - with engine.scoped_session() as session: - locations = ( - session.query( - models.DatasetStorageLocation.locationUri.label('locationUri'), - models.DatasetStorageLocation.S3BucketName.label('S3BucketName'), - models.DatasetStorageLocation.S3Prefix.label('S3Prefix'), - models.Environment.AwsAccountId.label('AwsAccountId'), - models.Environment.region.label('region'), - ) - .join( - models.ShareObjectItem, - and_( - models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri - ), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.DatasetStorageLocation.datasetUri == self.target_uri, - models.DatasetStorageLocation.deleted.is_(None), - models.ShareObjectItem.status.in_(self.shared_states) - ) - ) - .all() - ) - logger.info(f'found {len(locations)} shared folders') - return locations - def __init__(self, scope, id, target_uri: str = None, **kwargs): super().__init__( scope, @@ -160,31 +89,19 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): )[:1024], **kwargs) - # Required for dynamic stack tagging + # Read input self.target_uri = target_uri - - self.shared_states = [ - models.Enums.ShareItemStatus.Share_Succeeded.value, - models.Enums.ShareItemStatus.Revoke_Approved.value, - models.Enums.ShareItemStatus.Revoke_In_Progress.value, - models.Enums.ShareItemStatus.Revoke_Failed.value - ] - - pivot_role_name = SessionHelper.get_delegation_role_name() - + self.pivot_role_name = SessionHelper.get_delegation_role_name() dataset = self.get_target() - env = self.get_env(dataset) - env_group = self.get_env_group(dataset) quicksight_default_group_arn = None if env.dashboardsEnabled: - quicksight_default_group = Quicksight.create_quicksight_group( - dataset.AwsAccountId, 'dataall' - ) + quicksight_default_group = 
Quicksight.create_quicksight_group(AwsAccountId=env.AwsAccountId) quicksight_default_group_arn = quicksight_default_group['Group']['Arn'] + # Dataset S3 Bucket and KMS key if dataset.imported and dataset.importedS3Bucket: dataset_bucket = s3.Bucket.from_bucket_name( self, f'ImportedBucket{dataset.datasetUri}', dataset.S3BucketName @@ -204,7 +121,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): principals=[ iam.AccountPrincipal(account_id=dataset.AwsAccountId), iam.ArnPrincipal( - f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}' + f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}' ), ], actions=['kms:*'], @@ -245,14 +162,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): bucket_key_enabled=True, ) - # dataset_bucket.add_to_resource_policy( - # permission=iam.PolicyStatement( - # actions=['s3:*'], - # resources=[dataset_bucket.bucket_arn], - # principals=[iam.AccountPrincipal(account_id=dataset.AwsAccountId)], - # ) - # ) - dataset_bucket.add_lifecycle_rule( abort_incomplete_multipart_upload_after=Duration.days(7), noncurrent_version_transitions=[ @@ -278,7 +187,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): enabled=True, ) - # Dataset Admin and ETL User + # Dataset IAM role - ETL policies dataset_admin_policy = iam.Policy( self, 'DatasetAdminPolicy', @@ -390,58 +299,61 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): iam.AccountPrincipal(os.environ.get('CURRENT_AWS_ACCOUNT')), iam.AccountPrincipal(dataset.AwsAccountId), iam.ArnPrincipal( - f'arn:aws:iam::{dataset.AwsAccountId}:role/{pivot_role_name}' + f'arn:aws:iam::{dataset.AwsAccountId}:role/{self.pivot_role_name}' ), ), ) dataset_admin_policy.attach_to_role(dataset_admin_role) - glue_db_handler_arn = ssm.StringParameter.from_string_parameter_name( - self, - 'GlueDbCRArnParameter', - string_parameter_name=f'/dataall/{dataset.environmentUri}/cfn/custom-resources/lambda/arn', - ) - - glue_db_handler = 
_lambda.Function.from_function_attributes( - self, - 'CustomGlueDatabaseHandler', - function_arn=glue_db_handler_arn.string_value, - same_environment=True, - ) - - GlueDatabase = cr.Provider( - self, - f'{env.resourcePrefix}GlueDbCustomResourceProvider', - on_event_handler=glue_db_handler, - ) - - existing_location = LakeFormation.describe_resource( + # Datalake location custom resource: registers the S3 location in LakeFormation + registered_location = LakeFormation.check_existing_lf_registered_location( resource_arn=f'arn:aws:s3:::{dataset.S3BucketName}', - role_arn=f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}', accountid=env.AwsAccountId, region=env.region ) - if not existing_location: + if not registered_location: storage_location = CfnResource( self, 'DatasetStorageLocation', type='AWS::LakeFormation::Resource', properties={ 'ResourceArn': f'arn:aws:s3:::{dataset.S3BucketName}', - 'RoleArn': f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}', + 'RoleArn': f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', 'UseServiceLinkedRole': False, }, ) + + # Define dataset admin groups (those with data access grant) dataset_admins = [ dataset_admin_role.role_arn, - f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}', + f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', env_group.environmentIAMRoleArn, ] if quicksight_default_group_arn: dataset_admins.append(quicksight_default_group_arn) - glue_db = CustomResource( + # Glue Database custom resource: creates the Glue database and grants the default permissions (dataset role, admin, pivotrole, QS group) + # Old provider, to be deleted in future release + glue_db_handler_arn = ssm.StringParameter.from_string_parameter_name( + self, + 'GlueDbCRArnParameter', + string_parameter_name=f'/dataall/{dataset.environmentUri}/cfn/custom-resources/lambda/arn', + ) + + glue_db_handler = _lambda.Function.from_function_attributes( + self, + 'CustomGlueDatabaseHandler', + 
function_arn=glue_db_handler_arn.string_value, + same_environment=True, + ) + + GlueDatabase = cr.Provider( + self, + f'{env.resourcePrefix}GlueDbCustomResourceProvider', + on_event_handler=glue_db_handler, + ) + old_glue_db = CustomResource( self, f'{env.resourcePrefix}DatasetDatabase', service_token=GlueDatabase.service_token, @@ -460,6 +372,33 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): }, ) + # Get the Provider service token from SSM, the Lambda and Provider are created as part of the environment stack + glue_db_provider_service_token = ssm.StringParameter.from_string_parameter_name( + self, + 'GlueDatabaseProviderServiceToken', + string_parameter_name=f'/dataall/{dataset.environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', + ) + + glue_db = CustomResource( + self, + f'{env.resourcePrefix}GlueDatabaseCustomResource', + service_token=glue_db_provider_service_token.string_value, + resource_type='Custom::GlueDatabase', + properties={ + 'CatalogId': dataset.AwsAccountId, + 'DatabaseInput': { + 'Description': 'dataall database {} '.format( + dataset.GlueDatabaseName + ), + 'LocationUri': f's3://{dataset.S3BucketName}/', + 'Name': f'{dataset.GlueDatabaseName}', + 'CreateTableDefaultPermissions': [], + }, + 'DatabaseAdministrators': dataset_admins + }, + ) + + # Support resources: GlueCrawler for the dataset, Profiling Job and Trigger crawler = glue.CfnCrawler( self, dataset.GlueCrawlerName, @@ -503,7 +442,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): 'DatasetGlueProfilingJob', name=dataset.GlueProfilingJobName, role=iam.ArnPrincipal( - f'arn:aws:iam::{env.AwsAccountId}:role/{pivot_role_name}' + f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}' ).arn, allocated_capacity=10, execution_property=glue.CfnJob.ExecutionPropertyProperty( diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index e54b24988..e004c1f6e 100644 --- 
a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -1,3 +1,4 @@ +import json import logging import os import pathlib @@ -5,6 +6,7 @@ from aws_cdk import ( custom_resources as cr, + aws_ec2 as ec2, aws_s3 as s3, aws_s3_deployment, aws_iam as iam, @@ -15,27 +17,23 @@ aws_sqs as sqs, aws_sns_subscriptions as sns_subs, aws_kms as kms, - aws_ec2 as ec2, - aws_sagemaker as sagemaker, aws_athena, RemovalPolicy, + CfnOutput, Stack, Duration, CustomResource, Tags, ) from constructs import DependencyGroup -from botocore.exceptions import ClientError from .manager import stack +from .pivot_role import PivotRole +from .sagemakerstudio import SageMakerDomain from .policies.data_policy import DataPolicy from .policies.service_policy import ServicePolicy from ... import db -from ...aws.handlers.quicksight import Quicksight from ...aws.handlers.parameter_store import ParameterStoreManager -from ...aws.handlers.sagemaker_studio import ( - SagemakerStudio, -) from ...aws.handlers.sts import SessionHelper from ...db import models from ...utils.cdk_nag_utils import CDKNagUtil @@ -46,11 +44,25 @@ @stack(stack='environment') class EnvironmentSetup(Stack): + """Deploy common environment resources: + - default environment S3 Bucket + - Lambda + Provider for dataset Glue Databases custom resource + - Lambda + Provider for dataset Data Lake location custom resource + - SSM parameters for the Lambdas and Providers + - pivotRole (if configured) + - SNS topic (if subscriptions are enabled) + - SM Studio domain (if ML studio is enabled) + - Deploy team specific resources: teams IAM roles, Athena workgroups + - Set PivotRole as Lake formation data lake Admin - lakeformationdefaultsettings custom resource + """ module_name = __file__ + @staticmethod + def get_env_name(): + return os.environ.get('envname', 'local') + def get_engine(self): - envname = os.environ.get('envname', 'local') - engine = db.get_engine(envname=envname) + engine = 
db.get_engine(envname=self.get_env_name()) return engine def get_target(self, target_uri) -> models.Environment: @@ -61,33 +73,6 @@ def get_target(self, target_uri) -> models.Environment: raise Exception('ObjectNotFound') return target - def get_environment_defautl_vpc(self, engine, environmentUri) -> models.Vpc: - with engine.scoped_session() as session: - return db.api.Vpc.get_environment_default_vpc(session, environmentUri) - - def init_quicksight(self, environment: models.Environment): - Quicksight.create_quicksight_group(environment.AwsAccountId, 'dataall') - - def check_sagemaker_studio(self, engine, environment: models.Environment): - logger.info('check sagemaker studio domain creation') - - try: - dataall_created_domain = ParameterStoreManager.client( - AwsAccountId=environment.AwsAccountId, - region=environment.region - ).get_parameter( - Name=f'/dataall/{environment.environmentUri}/sagemaker/sagemakerstudio/domain_id' - ) - return None - except ClientError as e: - logger.info(f'check sagemaker studio domain created outside of data.all. 
Parameter data.all not found: {e}') - existing_domain = SagemakerStudio.get_sagemaker_studio_domain( - environment.AwsAccountId, environment.region - ) - existing_domain_id = existing_domain.get('DomainId', False) - if existing_domain_id: - return existing_domain_id - @staticmethod def get_environment_group_permissions(engine, environmentUri, group): with engine.scoped_session() as session: @@ -103,9 +88,7 @@ def get_environment_group_permissions(engine, environmentUri, group): return permission_names @staticmethod - def get_environment_groups( - engine, environment: models.Environment - ) -> [models.EnvironmentGroup]: + def get_environment_groups(engine, environment: models.Environment) -> [models.EnvironmentGroup]: with engine.scoped_session() as session: return db.api.Environment.list_environment_invited_groups( session, @@ -117,9 +100,7 @@ def get_environment_groups( ) @staticmethod - def get_environment_admins_group( - engine, environment: models.Environment - ) -> [models.EnvironmentGroup]: + def get_environment_admins_group(engine, environment: models.Environment) -> [models.EnvironmentGroup]: with engine.scoped_session() as session: return db.api.Environment.get_environment_group( session, @@ -128,9 +109,7 @@ def get_environment_admins_group( ) @staticmethod - def get_environment_group_datasets( - engine, environment: models.Environment, group: str - ) -> [models.Dataset]: + def get_environment_group_datasets(engine, environment: models.Environment, group: str) -> [models.Dataset]: with engine.scoped_session() as session: return db.api.Environment.list_group_datasets( session, @@ -142,9 +121,7 @@ def get_environment_group_datasets( ) @staticmethod - def get_all_environment_datasets( - engine, environment: models.Environment - ) -> [models.Dataset]: + def get_all_environment_datasets(engine, environment: models.Environment) -> [models.Dataset]: with engine.scoped_session() as session: return ( session.query(models.Dataset) @@ -155,45 +132,41 @@ def 
get_all_environment_datasets( ) def __init__(self, scope, id, target_uri: str = None, **kwargs): - super().__init__(scope, - id, - description="Cloud formation stack of ENVIRONMENT: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[:1024], - **kwargs) - - # Required for dynamic stack tagging + super().__init__( + scope, + id, + description='Cloud formation stack of ENVIRONMENT: {}; URI: {}; DESCRIPTION: {}'.format( + self.get_target(target_uri=target_uri).label, + target_uri, + self.get_target(target_uri=target_uri).description, + )[:1024], + **kwargs, + ) + # Read input self.target_uri = target_uri - self.pivot_role_name = SessionHelper.get_delegation_role_name() - + self.external_id = SessionHelper.get_external_id_secret() + self.dataall_central_account = SessionHelper.get_account() + pivot_role_as_part_of_environment_stack = ParameterStoreManager.get_parameter_value( + region=os.getenv('AWS_REGION', 'eu-west-1'), + parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate" + ) + self.create_pivot_role = True if pivot_role_as_part_of_environment_stack == "True" else False self.engine = self.get_engine() self._environment = self.get_target(target_uri=target_uri) - self.environment_groups: [models.EnvironmentGroup] = self.get_environment_groups(self.engine, environment=self._environment) - - self.environment_admins_group: models.EnvironmentGroup = self.get_environment_admins_group(self.engine, self._environment) + self.environment_groups: [models.EnvironmentGroup] = self.get_environment_groups( + self.engine, environment=self._environment + ) - self.all_environment_datasets = self.get_all_environment_datasets( + self.environment_admins_group: models.EnvironmentGroup = self.get_environment_admins_group( self.engine, self._environment ) - roles_sagemaker_dependency_group = DependencyGroup() - - if 
self._environment.dashboardsEnabled: - logger.warning('ensure_quicksight_default_group') - self.init_quicksight(environment=self._environment) - - group_roles = self.create_or_import_environment_groups_roles() - - for group_role in group_roles: - roles_sagemaker_dependency_group.add(group_role) - - central_account = SessionHelper.get_account() + self.all_environment_datasets = self.get_all_environment_datasets(self.engine, self._environment) + # Environment S3 Bucket default_environment_bucket = s3.Bucket( self, 'EnvironmentDefaultBucket', @@ -252,11 +225,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): ) profiling_assetspath = self.zip_code( - os.path.realpath( - os.path.abspath( - os.path.join(__file__, '..', '..', 'assets', 'glueprofilingjob') - ) - ) + os.path.realpath(os.path.abspath(os.path.join(__file__, '..', '..', 'assets', 'glueprofilingjob'))) ) aws_s3_deployment.BucketDeployment( @@ -267,25 +236,37 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): destination_key_prefix='profiling/code', ) + # Create or import team IAM roles default_role = self.create_or_import_environment_default_role() - roles_sagemaker_dependency_group.add(default_role) + group_roles = self.create_or_import_environment_groups_roles() self.create_default_athena_workgroup( default_environment_bucket, self._environment.EnvironmentDefaultAthenaWorkGroup, ) + self.create_athena_workgroups(self.environment_groups, default_environment_bucket) + + # Create or import Pivot role + if self.create_pivot_role is True: + config = { + 'roleName': self.pivot_role_name, + 'accountId': self.dataall_central_account, + 'externalId': self.external_id, + 'resourcePrefix': self._environment.resourcePrefix, + } + pivot_role_stack = PivotRole(self, 'PivotRoleStack', config) + self.pivot_role = pivot_role_stack.pivot_role + else: + self.pivot_role = iam.Role.from_role_arn( + self, + f'PivotRole{self._environment.environmentUri}', + 
f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', + ) - pivot_role = iam.Role.from_role_arn( - self, - f'PivotRole{self._environment.environmentUri}', - f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', - ) - - # Lakeformation default settings + # Lakeformation default settings custom resource + # Set PivotRole as Lake Formation data lake admin entry_point = str( - pathlib.PosixPath( - os.path.dirname(__file__), '../assets/lakeformationdefaultsettings' - ).resolve() + pathlib.PosixPath(os.path.dirname(__file__), '../assets/lakeformationdefaultsettings').resolve() ) lakeformation_cr_dlq = self.set_dlq( @@ -295,7 +276,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): self, 'LakeformationDefaultSettingsHandler', function_name=f'{self._environment.resourcePrefix}-lf-settings-handler-{self._environment.environmentUri}', - role=pivot_role, + role=self.pivot_role, handler='index.on_event', code=_lambda.Code.from_asset(entry_point), memory_size=1664, @@ -313,6 +294,23 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): on_failure=lambda_destination.SqsDestination(lakeformation_cr_dlq), runtime=_lambda.Runtime.PYTHON_3_9, ) + LakeformationDefaultSettingsProvider = cr.Provider( + self, + f'{self._environment.resourcePrefix}LakeformationDefaultSettingsProvider', + on_event_handler=lf_default_settings_custom_resource, + ) + + default_lf_settings = CustomResource( + self, + f'{self._environment.resourcePrefix}DefaultLakeFormationSettings', + service_token=LakeformationDefaultSettingsProvider.service_token, + resource_type='Custom::LakeformationDefaultSettings', + properties={ + 'DataLakeAdmins': [ + f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', + ] + }, + ) ssm.StringParameter( self, @@ -327,23 +325,16 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): string_value=lf_default_settings_custom_resource.function_name, 
parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/lf/defaultsettings/lambda/name', ) - - # Glue database custom resource - + # Glue database custom resource - Old, to be deleted in future release entry_point = str( - pathlib.PosixPath( - os.path.dirname(__file__), '../assets/gluedatabasecustomresource' - ).resolve() - ) - - gluedb_cr_dlq = self.set_dlq( - f'{self._environment.resourcePrefix}-gluedbcr-{self._environment.environmentUri}' + pathlib.PosixPath(os.path.dirname(__file__), '../assets/gluedatabasecustomresource_nodelete').resolve() ) + gluedb_cr_dlq = self.set_dlq(f'{self._environment.resourcePrefix}-gluedbcr-{self._environment.environmentUri}') gluedb_custom_resource = _lambda.Function( self, 'GlueDatabaseCustomResourceHandler', function_name=f'{self._environment.resourcePrefix}-gluedb-handler-{self._environment.environmentUri}', - role=pivot_role, + role=self.pivot_role, handler='index.on_event', code=_lambda.Code.from_asset(entry_point), memory_size=1664, @@ -363,7 +354,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): tracing=_lambda.Tracing.ACTIVE, runtime=_lambda.Runtime.PYTHON_3_9, ) - ssm.StringParameter( self, 'GlueCustomResourceFunctionArn', @@ -377,29 +367,127 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): string_value=gluedb_custom_resource.function_name, parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/lambda/name', ) + # Glue database custom resource - New + # This Lambda is triggered with the creation of each dataset, it is not executed when the environment is created + entry_point = str( + pathlib.PosixPath(os.path.dirname(__file__), '../assets/gluedatabasecustomresource').resolve() + ) - LakeformationDefaultSettingsProvider = cr.Provider( + gluedb_lf_cr_dlq = self.set_dlq(f'{self._environment.resourcePrefix}-gluedb-lf-cr-{self._environment.environmentUri}') + gluedb_lf_custom_resource = _lambda.Function( self, - 
f'{self._environment.resourcePrefix}LakeformationDefaultSettingsProvider', - on_event_handler=lf_default_settings_custom_resource, + 'GlueDatabaseLFCustomResourceHandler', + function_name=f'{self._environment.resourcePrefix}-gluedb-lf-handler-{self._environment.environmentUri}', + role=self.pivot_role, + handler='index.on_event', + code=_lambda.Code.from_asset(entry_point), + memory_size=1664, + description='This Lambda function is a cloudformation custom resource provider for Glue database ' + 'as Cfn currently does not support the CreateTableDefaultPermissions parameter', + timeout=Duration.seconds(5 * 60), + environment={ + 'envname': self._environment.name, + 'LOG_LEVEL': 'DEBUG', + 'AWS_ACCOUNT': self._environment.AwsAccountId, + 'DEFAULT_ENV_ROLE_ARN': self._environment.EnvironmentDefaultIAMRoleArn, + 'DEFAULT_CDK_ROLE_ARN': self._environment.CDKRoleArn, + }, + dead_letter_queue_enabled=True, + dead_letter_queue=gluedb_lf_cr_dlq, + on_failure=lambda_destination.SqsDestination(gluedb_lf_cr_dlq), + tracing=_lambda.Tracing.ACTIVE, + runtime=_lambda.Runtime.PYTHON_3_9, ) - default_lf_settings = CustomResource( + glue_db_provider = cr.Provider( self, - f'{self._environment.resourcePrefix}DefaultLakeFormationSettings', - service_token=LakeformationDefaultSettingsProvider.service_token, - resource_type='Custom::LakeformationDefaultSettings', - properties={ - 'DataLakeAdmins': [ - f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', - ] + f'{self._environment.resourcePrefix}GlueDbCustomResourceProvider', + on_event_handler=gluedb_lf_custom_resource + ) + ssm.StringParameter( + self, + 'GlueLFCustomResourceFunctionArn', + string_value=gluedb_lf_custom_resource.function_arn, + parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/lambda/arn', + ) + + ssm.StringParameter( + self, + 'GlueLFCustomResourceFunctionName', + string_value=gluedb_lf_custom_resource.function_name, + 
parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/lambda/name', + ) + + ssm.StringParameter( + self, + 'GlueLFCustomResourceProviderServiceToken', + string_value=glue_db_provider.service_token, + parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', + ) + + # Data lake location custom resource + entry_point = str( + pathlib.PosixPath( + os.path.dirname(__file__), "../assets/datalakelocationcustomresource" + ).resolve() + ) + + datalakelocation_cr_dlq = self.set_dlq( + f'{self._environment.resourcePrefix}-datalakelocationcr-{self._environment.environmentUri}' + ) + datalake_location_custom_resource = _lambda.Function( + self, + "DatalakeLocationCustomResourceHandler", + function_name=f'{self._environment.resourcePrefix}-datalakelocation-handler-{self._environment.environmentUri}', + role=self.pivot_role, + handler="index.on_event", + code=_lambda.Code.from_asset(entry_point), + memory_size=1664, + description='This Lambda function is a cloudformation custom resource provider for LakeFormation Storage Locations ' + 'as the Cfn resource cannot handle pivotRole updates', + timeout=Duration.seconds(5 * 60), + environment={ + 'envname': self._environment.name, + 'LOG_LEVEL': 'DEBUG', + 'AWS_ACCOUNT': self._environment.AwsAccountId, + 'DEFAULT_ENV_ROLE_ARN': self._environment.EnvironmentDefaultIAMRoleArn, + 'DEFAULT_CDK_ROLE_ARN': self._environment.CDKRoleArn, }, + dead_letter_queue_enabled=True, + dead_letter_queue=datalakelocation_cr_dlq, + on_failure=lambda_destination.SqsDestination(datalakelocation_cr_dlq), + tracing=_lambda.Tracing.ACTIVE, + runtime=_lambda.Runtime.PYTHON_3_9, + ) + + datalake_location_provider = cr.Provider( + self, + f"{self._environment.resourcePrefix}DatalakeLocationProvider", + on_event_handler=datalake_location_custom_resource + ) + + ssm.StringParameter( + self, + "DatalakeLocationCustomResourceFunctionArn", + 
string_value=datalake_location_custom_resource.function_arn, + parameter_name=f"/dataall/{self._environment.environmentUri}/cfn/custom-resources/datalocationhandler/lambda/arn", ) - self.create_athena_workgroups( - self.environment_groups, default_environment_bucket + ssm.StringParameter( + self, + "DatalakeLocationCustomResourceFunctionName", + string_value=datalake_location_custom_resource.function_name, + parameter_name=f"/dataall/{self._environment.environmentUri}/cfn/custom-resources/datalocationhandler/lambda/name", + ) + + ssm.StringParameter( + self, + 'DataLocationCustomResourceProviderServiceToken', + string_value=datalake_location_provider.service_token, + parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/datalocationhandler/provider/servicetoken', ) + # Create SNS topics for subscriptions if self._environment.subscriptionsEnabled: queue_key = kms.Key( self, @@ -451,7 +539,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): else: topic = self.create_topic( self._environment.subscriptionsProducersTopicName, - central_account, + self.dataall_central_account, self._environment, ) @@ -465,7 +553,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): policy.document.add_statements( iam.PolicyStatement( - principals=[iam.AccountPrincipal(central_account)], + principals=[iam.AccountPrincipal(self.dataall_central_account)], effect=iam.Effect.ALLOW, actions=[ 'sqs:ReceiveMessage', @@ -502,93 +590,36 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): self.create_topic( self._environment.subscriptionsConsumersTopicName, - central_account, + self.dataall_central_account, self._environment, ) - self.sagemaker_domain_exists = self.check_sagemaker_studio(engine=self.engine, environment=self._environment) - - if self._environment.mlStudiosEnabled and not (self.sagemaker_domain_exists): - - sagemaker_domain_role = iam.Role( - self, - 'RoleForSagemakerStudioUsers', - 
assumed_by=iam.ServicePrincipal('sagemaker.amazonaws.com'), - role_name="RoleSagemakerStudioUsers", - managed_policies=[iam.ManagedPolicy.from_managed_policy_arn( - self, - id="SagemakerFullAccess", - managed_policy_arn="arn:aws:iam::aws:policy/AmazonSageMakerFullAccess"), - iam.ManagedPolicy.from_managed_policy_arn( - self, - id="S3FullAccess", - managed_policy_arn="arn:aws:iam::aws:policy/AmazonS3FullAccess") - ] - ) - - sagemaker_domain_key = kms.Key( - self, - 'SagemakerDomainKmsKey', - alias="SagemakerStudioDomain", - enable_key_rotation=True, - policy=iam.PolicyDocument( - assign_sids=True, - statements=[ - iam.PolicyStatement( - resources=['*'], - effect=iam.Effect.ALLOW, - principals=[ - iam.AccountPrincipal(account_id=self._environment.AwsAccountId), - sagemaker_domain_role, - default_role, - ] + group_roles, - actions=['kms:*'], - ) - ], - ), - ) - sagemaker_domain_key.node.add_dependency(roles_sagemaker_dependency_group) - - try: - default_vpc = ec2.Vpc.from_lookup(self, 'VPCStudio', is_default=True) - vpc_id = default_vpc.vpc_id - subnet_ids = [private_subnet.subnet_id for private_subnet in default_vpc.private_subnets] - subnet_ids += [public_subnet.subnet_id for public_subnet in default_vpc.public_subnets] - subnet_ids += [isolated_subnet.subnet_id for isolated_subnet in default_vpc.isolated_subnets] - except Exception as e: - logger.error(f"Default VPC not found, Exception: {e}. 
If you don't own a default VPC, modify the networking configuration, or disable ML Studio upon environment creation.") - - sagemaker_domain = sagemaker.CfnDomain( - self, - "SagemakerStudioDomain", - domain_name=f"SagemakerStudioDomain-{self._environment.region}-{self._environment.AwsAccountId}", - auth_mode="IAM", - - default_user_settings=sagemaker.CfnDomain.UserSettingsProperty( - execution_role=sagemaker_domain_role.role_arn, - - security_groups=[], - - sharing_settings=sagemaker.CfnDomain.SharingSettingsProperty( - notebook_output_option="Allowed", - s3_kms_key_id=sagemaker_domain_key.key_id, - s3_output_path=f"s3://sagemaker-{self._environment.region}-{self._environment.AwsAccountId}", - ) - ), + # Create or import SageMaker Studio domain if ML Studio enabled + domain = SageMakerDomain( + stack=self, + id='SageMakerDomain', + environment=self._environment + ) + self.existing_sagemaker_domain = domain.check_existing_sagemaker_studio_domain() + if self._environment.mlStudiosEnabled and not self.existing_sagemaker_domain: + # Create dependency group - Sagemaker depends on group IAM roles + sagemaker_dependency_group = DependencyGroup() + sagemaker_dependency_group.add(default_role) + for group_role in group_roles: + sagemaker_dependency_group.add(group_role) - vpc_id=vpc_id, - subnet_ids=subnet_ids, - app_network_access_type="VpcOnly", - kms_key_id=sagemaker_domain_key.key_id, - ) + sagemaker_domain = domain.create_sagemaker_domain_resources(sagemaker_principals=[default_role] + group_roles) - ssm.StringParameter( - self, - 'SagemakerStudioDomainId', - string_value=sagemaker_domain.attr_domain_id, - parameter_name=f'/dataall/{self._environment.environmentUri}/sagemaker/sagemakerstudio/domain_id', - ) + sagemaker_domain.node.add_dependency(sagemaker_dependency_group) + # print the IAM role arn for this service account + CfnOutput( + self, + 'pivotRoleName', + export_name='pivotRoleName', + value=self.pivot_role_name, + description='pivotRoleName', + ) 
TagsUtil.add_tags(self) CDKNagUtil.check_rules(self) @@ -606,8 +637,8 @@ def create_or_import_environment_default_role(self): tag_key='Team', tag_value=self._environment.SamlGroupName, resource_prefix=self._environment.resourcePrefix, - name=f'{self._environment.resourcePrefix}-{self._environment.SamlGroupName}-default-services-policy', - id=f'{self._environment.resourcePrefix}-{self._environment.SamlGroupName}-default-services-policy', + name=f'{self._environment.resourcePrefix}-{self._environment.SamlGroupName}-{self._environment.environmentUri}-default-services-policy', + id=f'{self._environment.resourcePrefix}-{self._environment.SamlGroupName}-{self._environment.environmentUri}-default-services-policy', account=self._environment.AwsAccountId, region=self._environment.region, role_name=self._environment.EnvironmentDefaultIAMRoleName, @@ -665,7 +696,7 @@ def create_or_import_environment_groups_roles(self): iam.Role.from_role_arn( self, f'{group.groupUri + group.environmentIAMRoleName}', - role_arn=f'arn:aws:iam::{self.environment.AwsAccountId}:role/{group.environmentIAMRoleName}', + role_arn=f'arn:aws:iam::{self._environment.AwsAccountId}:role/{group.environmentIAMRoleName}', ) return group_roles @@ -679,8 +710,8 @@ def create_group_environment_role(self, group): tag_key='Team', tag_value=group.groupUri, resource_prefix=self._environment.resourcePrefix, - name=f'{self._environment.resourcePrefix}-{group.groupUri}-services-policy', - id=f'{self._environment.resourcePrefix}-{group.groupUri}-services-policy', + name=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', + id=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', role_name=group.environmentIAMRoleName, account=self._environment.AwsAccountId, region=self._environment.region, @@ -698,9 +729,7 @@ def create_group_environment_role(self, group): region=self._environment.region, 
environment=self._environment, team=group, - datasets=self.get_environment_group_datasets( - self.engine, self._environment, group.groupUri - ), + datasets=self.get_environment_group_datasets(self.engine, self._environment, group.groupUri), ).generate_data_access_policy() group_role = iam.Role( @@ -730,9 +759,7 @@ def create_default_athena_workgroup(self, output_bucket, workgroup_name): def create_athena_workgroups(self, environment_groups, default_environment_bucket): for group in environment_groups: - self.create_athena_workgroup( - default_environment_bucket, group.environmentAthenaWorkGroup - ) + self.create_athena_workgroup(default_environment_bucket, group.environmentAthenaWorkGroup) def create_athena_workgroup(self, output_bucket, workgroup_name): athena_workgroup_output_location = ''.join( @@ -780,9 +807,7 @@ def create_topic(self, construct_id, central_account, environment): alias=f'{construct_id}-topic-key', enable_key_rotation=True, ) - topic = sns.Topic( - self, f'{construct_id}', topic_name=f'{construct_id}', master_key=topic_key - ) + topic = sns.Topic(self, f'{construct_id}', topic_name=f'{construct_id}', master_key=topic_key) topic.add_to_resource_policy( iam.PolicyStatement( principals=[iam.AccountPrincipal(central_account)], @@ -804,9 +829,7 @@ def create_topic(self, construct_id, central_account, environment): @staticmethod def zip_code(assetspath, s3_key='profiler'): logger.info('Zipping code') - shutil.make_archive( - base_name=f'{assetspath}/{s3_key}', format='zip', root_dir=f'{assetspath}' - ) + shutil.make_archive(base_name=f'{assetspath}/{s3_key}', format='zip', root_dir=f'{assetspath}') return assetspath def set_dlq(self, queue_name) -> sqs.Queue: diff --git a/backend/dataall/cdkproxy/stacks/pivot_role.py b/backend/dataall/cdkproxy/stacks/pivot_role.py new file mode 100644 index 000000000..b4c340d31 --- /dev/null +++ b/backend/dataall/cdkproxy/stacks/pivot_role.py @@ -0,0 +1,890 @@ +from constructs import Construct +from aws_cdk import 
Duration, aws_iam as iam, NestedStack + + +class PivotRole(NestedStack): + def __init__(self, scope: Construct, construct_id: str, config, **kwargs) -> None: + super().__init__(scope, construct_id, **kwargs) + # Create Pivot IAM Role + self.pivot_role = self.create_pivot_role( + name=config['roleName'], + principal_id=config['accountId'], + external_id=config['externalId'], + env_resource_prefix=config['resourcePrefix'], + ) + # Data.All IAM Lake Formation service role creation + self.lf_service_role = iam.CfnServiceLinkedRole( + self, 'LakeFormationSLR', aws_service_name='lakeformation.amazonaws.com' + ) + + def create_pivot_role(self, name: str, principal_id: str, external_id: str, env_resource_prefix: str) -> iam.Role: + """ + Creates an IAM Role that will enable data.all to interact with this Data Account + + :param str name: Role name + :param str principal_id: AWS Account ID of central data.all + :param str external_id: External ID provided by data.all + :param str env_resource_prefix: Environment Resource Prefix provided by data.all + :returns: Created IAM Role + :rtype: iam.Role + """ + + role = iam.Role( + self, + 'DataAllPivotRole-cdk', + role_name=name, + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal('glue.amazonaws.com'), + iam.ServicePrincipal('lakeformation.amazonaws.com'), + iam.ServicePrincipal('lambda.amazonaws.com'), + ), + path='/', + max_session_duration=Duration.hours(12), + managed_policies=[ + self._create_dataall_policy0(env_resource_prefix), + self._create_dataall_policy1(env_resource_prefix), + self._create_dataall_policy2(env_resource_prefix), + self._create_dataall_policy3(env_resource_prefix, name), + ], + ) + + role.assume_role_policy.add_statements( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + principals=[iam.AccountPrincipal(account_id=principal_id)], + actions=['sts:AssumeRole'], + conditions={'StringEquals': {'sts:ExternalId': external_id}}, + ) + ) + + return role + + def _create_dataall_policy0(self, 
env_resource_prefix: str) -> iam.ManagedPolicy: + """ + Creates the first managed IAM Policy required for the Pivot Role used by data.all + + :param str env_resource_prefix: Environment Resource Prefix provided by data.all + :returns: Created IAM Policy + :rtype: iam.ManagedPolicy + """ + return iam.ManagedPolicy( + self, + 'PivotRolePolicy0', + managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-0', + statements=[ + # Athena permissions + iam.PolicyStatement( + sid='Athena', + effect=iam.Effect.ALLOW, + resources=['*'], + actions=[ + 'athena:GetQuery*', + 'athena:StartQueryExecution', + 'athena:ListWorkGroups' + ], + ), + # Athena Workgroups permissions + iam.PolicyStatement( + sid='AthenaWorkgroups', + effect=iam.Effect.ALLOW, + actions=[ + 'athena:GetWorkGroup', + 'athena:CreateWorkGroup', + 'athena:UpdateWorkGroup', + 'athena:DeleteWorkGroup', + 'athena:TagResource', + 'athena:UntagResource', + 'athena:ListTagsForResource', + ], + resources=[f'arn:aws:athena:*:{self.account}:workgroup/{env_resource_prefix}*'], + ), + # AWS Glue Crawler Bucket + iam.PolicyStatement( + sid='AwsGlueCrawlerBucket', + effect=iam.Effect.ALLOW, + actions=['s3:GetObject'], + resources=['arn:aws:s3:::crawler-public*'], + ), + # S3 Access points + iam.PolicyStatement( + sid='ManagedAccessPoints', + effect=iam.Effect.ALLOW, + actions=[ + 's3:GetAccessPoint', + 's3:GetAccessPointPolicy', + 's3:ListAccessPoints', + 's3:CreateAccessPoint', + 's3:DeleteAccessPoint', + 's3:GetAccessPointPolicyStatus', + 's3:DeleteAccessPointPolicy', + 's3:PutAccessPointPolicy', + ], + resources=[f'arn:aws:s3:*:{self.account}:accesspoint/*'], + ), + # S3 Managed Buckets + iam.PolicyStatement( + sid='ManagedBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:List*', + 's3:Delete*', + 's3:Get*', + 's3:Put*' + ], + resources=[f'arn:aws:s3:::{env_resource_prefix}*'], + ), + # S3 Imported Buckets + iam.PolicyStatement( + sid='ImportedBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:List*', + 
's3:GetBucket*', + 's3:GetLifecycleConfiguration', + 's3:GetObject', + 's3:PutBucketPolicy', + 's3:PutBucketTagging', + 's3:PutObject', + 's3:PutObjectAcl', + 's3:PutBucketOwnershipControls', + ], + resources=['arn:aws:s3:::*'], + ), + # AWS Logging Buckets + iam.PolicyStatement( + sid='AWSLoggingBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:PutBucketAcl', + 's3:PutBucketNotification' + ], + resources=[f'arn:aws:s3:::{env_resource_prefix}-logging-*'], + ), + # Read Buckets + iam.PolicyStatement( + sid='ReadBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:ListAllMyBuckets', + 's3:GetBucketLocation', + 's3:PutBucketTagging' + ], + resources=['*'], + ), + # CloudWatch Metrics + iam.PolicyStatement( + sid='CWMetrics', + effect=iam.Effect.ALLOW, + actions=[ + 'cloudwatch:PutMetricData', + 'cloudwatch:GetMetricData', + 'cloudwatch:GetMetricStatistics' + ], + resources=['*'], + ), + # Logs + iam.PolicyStatement( + sid='Logs', + effect=iam.Effect.ALLOW, + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents' + ], + resources=[ + f'arn:aws:logs:*:{self.account}:log-group:/aws-glue/*', + f'arn:aws:logs:*:{self.account}:log-group:/aws/lambda/*', + f'arn:aws:logs:*:{self.account}:log-group:/{env_resource_prefix}*', + ], + ), + # Logging + iam.PolicyStatement( + sid='Logging', effect=iam.Effect.ALLOW, actions=['logs:PutLogEvents'], resources=['*'] + ), + # EventBridge (CloudWatch Events) + iam.PolicyStatement( + sid='CWEvents', + effect=iam.Effect.ALLOW, + actions=[ + 'events:DeleteRule', + 'events:List*', + 'events:PutRule', + 'events:PutTargets', + 'events:RemoveTargets', + ], + resources=['*'], + ), + # Glue + iam.PolicyStatement( + sid='Glue', + effect=iam.Effect.ALLOW, + actions=[ + 'glue:BatchCreatePartition', + 'glue:BatchDeletePartition', + 'glue:BatchDeleteTable', + 'glue:CreateCrawler', + 'glue:CreateDatabase', + 'glue:CreatePartition', + 'glue:CreateTable', + 'glue:DeleteCrawler', + 'glue:DeleteDatabase', + 
'glue:DeleteJob', + 'glue:DeletePartition', + 'glue:DeleteTable', + 'glue:DeleteTrigger', + 'glue:BatchGet*', + 'glue:Get*', + 'glue:List*', + 'glue:StartCrawler', + 'glue:StartJobRun', + 'glue:StartTrigger', + 'glue:SearchTables', + 'glue:UpdateDatabase', + 'glue:UpdatePartition', + 'glue:UpdateTable', + 'glue:UpdateTrigger', + 'glue:UpdateJob', + 'glue:TagResource', + 'glue:UpdateCrawler', + ], + resources=['*'], + ), + # KMS + iam.PolicyStatement( + sid='KMS', + effect=iam.Effect.ALLOW, + actions=[ + 'kms:Decrypt', + 'kms:Encrypt', + 'kms:GenerateDataKey*', + 'kms:PutKeyPolicy', + 'kms:ReEncrypt*', + 'kms:TagResource', + 'kms:UntagResource', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='KMSAlias', + effect=iam.Effect.ALLOW, + actions=['kms:DeleteAlias'], + resources=[f'arn:aws:kms:*:{self.account}:alias/{env_resource_prefix}*'], + ), + iam.PolicyStatement( + sid='KMSCreate', + effect=iam.Effect.ALLOW, + actions=[ + 'kms:List*', + 'kms:DescribeKey', + 'kms:CreateAlias', + 'kms:CreateKey' + ], + resources=['*'], + ), + # AWS Organizations + iam.PolicyStatement( + sid='Organizations', + effect=iam.Effect.ALLOW, + actions=['organizations:DescribeOrganization'], + resources=['*'], + ), + # Resource Tags + iam.PolicyStatement( + sid='ResourceGroupTags', + effect=iam.Effect.ALLOW, + actions=[ + 'tag:*', + 'resource-groups:*' + ], + resources=['*'], + ), + # SNS + iam.PolicyStatement( + sid='SNSPublish', + effect=iam.Effect.ALLOW, + actions=[ + 'sns:Publish', + 'sns:SetTopicAttributes', + 'sns:GetTopicAttributes', + 'sns:DeleteTopic', + 'sns:Subscribe', + 'sns:TagResource', + 'sns:UntagResource', + 'sns:CreateTopic', + ], + resources=[f'arn:aws:sns:*:{self.account}:{env_resource_prefix}*'], + ), + iam.PolicyStatement( + sid='SNSList', effect=iam.Effect.ALLOW, actions=['sns:ListTopics'], resources=['*'] + ), + # SQS + iam.PolicyStatement( + sid='SQSList', effect=iam.Effect.ALLOW, actions=['sqs:ListQueues'], resources=['*'] + ), + iam.PolicyStatement( + 
sid='SQS', + effect=iam.Effect.ALLOW, + actions=[ + 'sqs:ReceiveMessage', + 'sqs:SendMessage' + ], + resources=[f'arn:aws:sqs:*:{self.account}:{env_resource_prefix}*'], + ), + ], + ) + + def _create_dataall_policy1(self, env_resource_prefix: str) -> iam.ManagedPolicy: + """ + Creates the second managed IAM Policy required for the Pivot Role used by data.all + + :param str env_resource_prefix: Environment Resource Prefix provided by data.all + :returns: Created IAM Policy + :rtype: iam.ManagedPolicy + """ + return iam.ManagedPolicy( + self, + 'PivotRolePolicy1', + managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-1', + statements=[ + # Redshift + iam.PolicyStatement( + sid='Redshift', + effect=iam.Effect.ALLOW, + actions=[ + 'redshift:DeleteTags', + 'redshift:ModifyClusterIamRoles', + 'redshift:DescribeClusterSecurityGroups', + 'redshift:DescribeClusterSubnetGroups', + 'redshift:pauseCluster', + 'redshift:resumeCluster', + ], + resources=['*'], + conditions={'StringEquals': {'aws:ResourceTag/dataall': 'true'}}, + ), + iam.PolicyStatement( + sid='RedshiftRead', + effect=iam.Effect.ALLOW, + actions=[ + 'redshift:DescribeClusters', + 'redshift:CreateTags', + 'redshift:DescribeClusterSubnetGroups', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='RedshiftCreds', + effect=iam.Effect.ALLOW, + actions=['redshift:GetClusterCredentials'], + resources=[ + f'arn:aws:redshift:*:{self.account}:dbgroup:*/*', + f'arn:aws:redshift:*:{self.account}:dbname:*/*', + f'arn:aws:redshift:*:{self.account}:dbuser:*/*', + ], + ), + iam.PolicyStatement( + sid='AllowRedshiftSubnet', + effect=iam.Effect.ALLOW, + actions=['redshift:CreateClusterSubnetGroup'], + resources=['*'], + ), + iam.PolicyStatement( + sid='AllowRedshiftDataApi', + effect=iam.Effect.ALLOW, + actions=[ + 'redshift-data:ListTables', + 'redshift-data:GetStatementResult', + 'redshift-data:CancelStatement', + 'redshift-data:ListSchemas', + 'redshift-data:ExecuteStatement', + 
'redshift-data:ListStatements', + 'redshift-data:ListDatabases', + 'redshift-data:DescribeStatement', + ], + resources=['*'], + ), + # EC2 + iam.PolicyStatement( + sid='EC2SG', + effect=iam.Effect.ALLOW, + actions=[ + 'ec2:CreateSecurityGroup', + 'ec2:CreateNetworkInterface', + 'ec2:Describe*' + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='TagsforENI', + effect=iam.Effect.ALLOW, + actions=[ + 'ec2:CreateTags', + 'ec2:DeleteTags' + ], + resources=[f'arn:aws:ec2:*:{self.account}:network-interface/*'], + ), + iam.PolicyStatement( + sid='DeleteENI', + effect=iam.Effect.ALLOW, + actions=['ec2:DeleteNetworkInterface'], + resources=[f'arn:aws:ec2:*:{self.account}:network-interface/*'], + ), + # SageMaker + iam.PolicyStatement( + sid='SageMakerNotebookActions', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListTags', + 'sagemaker:DescribeUserProfile', + 'sagemaker:DeleteNotebookInstance', + 'sagemaker:StopNotebookInstance', + 'sagemaker:CreatePresignedNotebookInstanceUrl', + 'sagemaker:DescribeNotebookInstance', + 'sagemaker:StartNotebookInstance', + 'sagemaker:AddTags', + 'sagemaker:DescribeDomain', + 'sagemaker:CreatePresignedDomainUrl', + ], + resources=[ + f'arn:aws:sagemaker:*:{self.account}:notebook-instance/{env_resource_prefix}*', + f'arn:aws:sagemaker:*:{self.account}:domain/*', + f'arn:aws:sagemaker:*:{self.account}:user-profile/*/*', + ], + ), + iam.PolicyStatement( + sid='SageMakerNotebookInstances', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListNotebookInstances', + 'sagemaker:ListDomains', + 'sagemaker:ListApps', + 'sagemaker:DeleteApp', + ], + resources=['*'], + ), + # RAM + iam.PolicyStatement( + sid='RamTag', + effect=iam.Effect.ALLOW, + actions=['ram:TagResource'], + resources=['*'], + conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, + ), + iam.PolicyStatement( + sid='RamCreateResource', + effect=iam.Effect.ALLOW, + actions=['ram:CreateResourceShare'], + resources=['*'], + conditions={ + 
'ForAllValues:StringEquals': { + 'ram:RequestedResourceType': ['glue:Table', 'glue:Database', 'glue:Catalog'] + } + }, + ), + iam.PolicyStatement( + sid='RamUpdateResource', + effect=iam.Effect.ALLOW, + actions=['ram:UpdateResourceShare'], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], + conditions={ + 'StringEquals': {'aws:ResourceTag/dataall': 'true'}, + 'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}, + }, + ), + iam.PolicyStatement( + sid='RamAssociateResource', + effect=iam.Effect.ALLOW, + actions=[ + 'ram:AssociateResourceShare', + 'ram:DisassociateResourceShare' + ], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], + conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, + ), + iam.PolicyStatement( + sid='RamDeleteResource', + effect=iam.Effect.ALLOW, + actions=['ram:DeleteResourceShare'], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], + ), + iam.PolicyStatement( + sid='RamInvitations', + effect=iam.Effect.ALLOW, + actions=[ + 'ram:AcceptResourceShareInvitation', + 'ram:RejectResourceShareInvitation', + 'ec2:DescribeAvailabilityZones', + 'ram:EnableSharingWithAwsOrganization', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='RamReadGlue', + effect=iam.Effect.ALLOW, + actions=[ + 'glue:PutResourcePolicy', + 'glue:DeleteResourcePolicy', + 'ram:Get*', + 'ram:List*' + ], + resources=['*'], + ), + # Security Groups + iam.PolicyStatement( + sid='SGCreateTag', + effect=iam.Effect.ALLOW, + actions=['ec2:CreateTags'], + resources=[f'arn:aws:ec2:*:{self.account}:security-group/*'], + conditions={'StringEquals': {'aws:RequestTag/dataall': 'true'}}, + ), + iam.PolicyStatement( + sid='SGandRedshift', + effect=iam.Effect.ALLOW, + actions=[ + 'ec2:DeleteTags', + 'ec2:DeleteSecurityGroup', + 'redshift:DeleteClusterSubnetGroup' + ], + resources=['*'], + conditions={'ForAnyValue:StringEqualsIfExists': {'aws:ResourceTag/dataall': 'true'}}, + ), + # Redshift + 
iam.PolicyStatement( + sid='RedshiftDataApi', + effect=iam.Effect.ALLOW, + actions=[ + 'redshift-data:ListTables', + 'redshift-data:GetStatementResult', + 'redshift-data:CancelStatement', + 'redshift-data:ListSchemas', + 'redshift-data:ExecuteStatement', + 'redshift-data:ListStatements', + 'redshift-data:ListDatabases', + 'redshift-data:DescribeStatement', + ], + resources=['*'], + conditions={'StringEqualsIfExists': {'aws:ResourceTag/dataall': 'true'}}, + ), + # Dev Tools + iam.PolicyStatement( + sid='DevTools0', + effect=iam.Effect.ALLOW, + actions=['cloudformation:ValidateTemplate'], + resources=['*'], + ), + iam.PolicyStatement( + sid='DevTools1', + effect=iam.Effect.ALLOW, + actions=[ + 'secretsmanager:CreateSecret', + 'secretsmanager:DeleteSecret', + 'secretsmanager:TagResource', + 'codebuild:DeleteProject', + ], + resources=['*'], + conditions={'StringEquals': {'aws:ResourceTag/dataall': 'true'}}, + ), + iam.PolicyStatement( + sid='DevTools2', + effect=iam.Effect.ALLOW, + actions=[ + 'codebuild:CreateProject', + 'ecr:CreateRepository', + 'ssm:PutParameter', + 'ssm:AddTagsToResource', + ], + resources=['*'], + conditions={'StringEquals': {'aws:RequestTag/dataall': 'true'}}, + ), + iam.PolicyStatement( + sid='CloudFormation', + effect=iam.Effect.ALLOW, + actions=[ + 'cloudformation:DescribeStacks', + 'cloudformation:DescribeStackResources', + 'cloudformation:DescribeStackEvents', + 'cloudformation:DeleteStack', + 'cloudformation:CreateStack', + 'cloudformation:GetTemplate', + 'cloudformation:ListStackResources', + 'cloudformation:DescribeStackResource', + ], + resources=[ + f'arn:aws:cloudformation:*:{self.account}:stack/{env_resource_prefix}*/*', + f'arn:aws:cloudformation:*:{self.account}:stack/CDKToolkit/*', + f'arn:aws:cloudformation:*:{self.account}:stack/*/*', + ], + ), + ], + ) + + def _create_dataall_policy2(self, env_resource_prefix: str) -> iam.ManagedPolicy: + """ + Creates the third managed IAM Policy required for the Pivot Role used by data.all + 
+ :param str env_resource_prefix: Environment Resource Prefix provided by data.all + :returns: Created IAM Policy + :rtype: iam.ManagedPolicy + """ + return iam.ManagedPolicy( + self, + 'PivotRolePolicy2', + managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-2', + statements=[ + # LakeFormation + iam.PolicyStatement( + sid='LakeFormation', + effect=iam.Effect.ALLOW, + actions=[ + 'lakeformation:RegisterResource', + 'lakeformation:DeregisterResource', + 'lakeformation:UpdateResource', + 'lakeformation:DescribeResource', + 'lakeformation:AddLFTagsToResource', + 'lakeformation:RemoveLFTagsFromResource', + 'lakeformation:GetResourceLFTags', + 'lakeformation:ListLFTags', + 'lakeformation:CreateLFTag', + 'lakeformation:GetLFTag', + 'lakeformation:UpdateLFTag', + 'lakeformation:DeleteLFTag', + 'lakeformation:SearchTablesByLFTags', + 'lakeformation:SearchDatabasesByLFTags', + 'lakeformation:ListResources', + 'lakeformation:ListPermissions', + 'lakeformation:GrantPermissions', + 'lakeformation:BatchGrantPermissions', + 'lakeformation:RevokePermissions', + 'lakeformation:BatchRevokePermissions', + 'lakeformation:PutDataLakeSettings', + 'lakeformation:GetDataLakeSettings', + 'lakeformation:GetDataAccess', + 'lakeformation:GetWorkUnits', + 'lakeformation:StartQueryPlanning', + 'lakeformation:GetWorkUnitResults', + 'lakeformation:GetQueryState', + 'lakeformation:GetQueryStatistics', + 'lakeformation:StartTransaction', + 'lakeformation:CommitTransaction', + 'lakeformation:CancelTransaction', + 'lakeformation:ExtendTransaction', + 'lakeformation:DescribeTransaction', + 'lakeformation:ListTransactions', + 'lakeformation:GetTableObjects', + 'lakeformation:UpdateTableObjects', + 'lakeformation:DeleteObjectsOnCancel', + ], + resources=['*'], + ), + # Compute + iam.PolicyStatement( + sid='Compute', + effect=iam.Effect.ALLOW, + actions=[ + 'lambda:CreateFunction', + 'lambda:AddPermission', + 'lambda:InvokeFunction', + 'lambda:RemovePermission', + 'lambda:GetFunction', + 
'lambda:GetFunctionConfiguration', + 'lambda:DeleteFunction', + 'ecr:CreateRepository', + 'ecr:SetRepositoryPolicy', + 'ecr:DeleteRepository', + 'ecr:DescribeImages', + 'ecr:BatchDeleteImage', + 'codepipeline:GetPipelineState', + 'codepipeline:DeletePipeline', + 'codepipeline:GetPipeline', + 'codepipeline:CreatePipeline', + 'codepipeline:TagResource', + 'codepipeline:UntagResource', + ], + resources=[ + f'arn:aws:lambda:*:{self.account}:function:{env_resource_prefix}*', + f'arn:aws:s3:::{env_resource_prefix}*', + f'arn:aws:codepipeline:*:{self.account}:{env_resource_prefix}*', + f'arn:aws:ecr:*:{self.account}:repository/{env_resource_prefix}*', + ], + ), + # Databrew + iam.PolicyStatement( + sid='DatabrewList', effect=iam.Effect.ALLOW, actions=['databrew:List*'], resources=['*'] + ), + iam.PolicyStatement( + sid='DatabrewPermissions', + effect=iam.Effect.ALLOW, + actions=[ + 'databrew:BatchDeleteRecipeVersion', + 'databrew:Delete*', + 'databrew:Describe*', + 'databrew:PublishRecipe', + 'databrew:SendProjectSessionAction', + 'databrew:Start*', + 'databrew:Stop*', + 'databrew:TagResource', + 'databrew:UntagResource', + 'databrew:Update*', + ], + resources=[f'arn:aws:databrew:*:{self.account}:*/{env_resource_prefix}*'], + ), + # QuickSight + iam.PolicyStatement( + sid='QuickSight', + effect=iam.Effect.ALLOW, + actions=[ + 'quicksight:CreateGroup', + 'quicksight:DescribeGroup', + 'quicksight:ListDashboards', + 'quicksight:DescribeDataSource', + 'quicksight:DescribeDashboard', + 'quicksight:DescribeUser', + 'quicksight:SearchDashboards', + 'quicksight:GetDashboardEmbedUrl', + 'quicksight:GenerateEmbedUrlForAnonymousUser', + 'quicksight:UpdateUser', + 'quicksight:ListUserGroups', + 'quicksight:RegisterUser', + 'quicksight:DescribeDashboardPermissions', + 'quicksight:UpdateDashboardPermissions', + 'quicksight:GetAuthCode', + 'quicksight:CreateGroupMembership', + 'quicksight:DescribeAccountSubscription', + ], + resources=[ + 
f'arn:aws:quicksight:*:{self.account}:group/default/*', + f'arn:aws:quicksight:*:{self.account}:user/default/*', + f'arn:aws:quicksight:*:{self.account}:datasource/*', + f'arn:aws:quicksight:*:{self.account}:user/*', + f'arn:aws:quicksight:*:{self.account}:dashboard/*', + f'arn:aws:quicksight:*:{self.account}:namespace/default', + f'arn:aws:quicksight:*:{self.account}:account/*', + f'arn:aws:quicksight:*:{self.account}:*', + ], + ), + iam.PolicyStatement( + sid='QuickSightSession', + effect=iam.Effect.ALLOW, + actions=['quicksight:GetSessionEmbedUrl'], + resources=['*'], + ), + ], + ) + + def _create_dataall_policy3(self, env_resource_prefix: str, role_name: str) -> iam.ManagedPolicy: + """ + Creates the fourth managed IAM Policy required for the Pivot Role used by data.all + + :param str env_resource_prefix: Environment Resource Prefix provided by data.all + :param str role_name: IAM Role name + :returns: Created IAM Policy + :rtype: iam.ManagedPolicy + """ + return iam.ManagedPolicy( + self, + 'PivotRolePolicy3', + managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-3', + statements=[ + # SSM Parameter Store + iam.PolicyStatement( + sid='ParameterStore', + effect=iam.Effect.ALLOW, + actions=['ssm:GetParameter'], + resources=[ + f'arn:aws:ssm:*:{self.account}:parameter/{env_resource_prefix}/*', + f'arn:aws:ssm:*:{self.account}:parameter/dataall/*', + f'arn:aws:ssm:*:{self.account}:parameter/ddk/*', + ], + ), + # Secrets Manager + iam.PolicyStatement( + sid='SecretsManager', + effect=iam.Effect.ALLOW, + actions=[ + 'secretsmanager:DescribeSecret', + 'secretsmanager:GetSecretValue' + ], + resources=[ + f'arn:aws:secretsmanager:*:{self.account}:secret:{env_resource_prefix}*', + f'arn:aws:secretsmanager:*:{self.account}:secret:dataall*', + ], + ), + iam.PolicyStatement( + sid='SecretsManagerList', + effect=iam.Effect.ALLOW, + actions=['secretsmanager:ListSecrets'], + resources=['*'], + ), + # IAM + iam.PolicyStatement( + sid='IAMListGet', + 
effect=iam.Effect.ALLOW, + actions=[ + 'iam:ListRoles', + 'iam:Get*' + ], resources=['*'] + ), + iam.PolicyStatement( + sid='IAMRolePolicy', + effect=iam.Effect.ALLOW, + actions=[ + 'iam:PutRolePolicy', + 'iam:DeleteRolePolicy' + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='IAMPassRole', + effect=iam.Effect.ALLOW, + actions=['iam:PassRole'], + resources=[ + f'arn:aws:iam::{self.account}:role/{env_resource_prefix}*', + f'arn:aws:iam::{self.account}:role/{role_name}', + f'arn:aws:iam::{self.account}:role/cdk-*', + ], + ), + # STS + iam.PolicyStatement( + sid='STS', + effect=iam.Effect.ALLOW, + actions=['sts:AssumeRole'], + resources=[ + f'arn:aws:iam::{self.account}:role/{env_resource_prefix}*', + f'arn:aws:iam::{self.account}:role/ddk-*', + ], + ), + # Step Functions + iam.PolicyStatement( + sid='StepFunctions', + effect=iam.Effect.ALLOW, + actions=[ + 'states:DescribeStateMachine', + 'states:ListExecutions', + 'states:StartExecution' + ], + resources=[f'arn:aws:states:*:{self.account}:stateMachine:{env_resource_prefix}*'], + ), + # CodeCommit + iam.PolicyStatement( + sid='CodeCommit', + effect=iam.Effect.ALLOW, + actions=[ + 'codecommit:GetFile', + 'codecommit:ListBranches', + 'codecommit:GetFolder', + 'codecommit:GetCommit', + 'codecommit:GitPull', + 'codecommit:GetRepository', + 'codecommit:TagResource', + 'codecommit:UntagResource', + 'codecommit:CreateBranch', + 'codecommit:CreateCommit', + 'codecommit:CreateRepository', + 'codecommit:DeleteRepository', + 'codecommit:GitPush', + 'codecommit:PutFile', + 'codecommit:GetBranch', + ], + resources=[f'arn:aws:codecommit:*:{self.account}:{env_resource_prefix}*'], + ), + ], + ) diff --git a/backend/dataall/cdkproxy/stacks/sagemakerstudio.py b/backend/dataall/cdkproxy/stacks/sagemakerstudio.py index a858cdfc2..491a5359a 100644 --- a/backend/dataall/cdkproxy/stacks/sagemakerstudio.py +++ b/backend/dataall/cdkproxy/stacks/sagemakerstudio.py @@ -1,21 +1,193 @@ import logging import os - from aws_cdk import ( 
cloudformation_include as cfn_inc, - Stack, + aws_ec2 as ec2, + aws_iam as iam, + aws_kms as kms, + aws_logs as logs, + aws_sagemaker as sagemaker, + aws_ssm as ssm, + RemovalPolicy, + Stack ) - +from botocore.exceptions import ClientError from .manager import stack from ... import db from ...db import models from ...db.api import Environment +from ...aws.handlers.parameter_store import ParameterStoreManager +from ...aws.handlers.sts import SessionHelper +from ...aws.handlers.sagemaker_studio import SagemakerStudio +from ...aws.handlers.ec2 import EC2 from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil logger = logging.getLogger(__name__) +class SageMakerDomain: + def __init__( + self, + stack, + id, + environment: models.Environment + ): + self.stack = stack + self.id = id + self.environment = environment + + def check_existing_sagemaker_studio_domain(self): + logger.info('Check if there is an existing sagemaker studio domain in the account') + try: + logger.info('check sagemaker studio domain created as part of data.all environment stack.') + cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( + accountid=self.environment.AwsAccountId, region=self.environment.region + ) + dataall_created_domain = ParameterStoreManager.client( + AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn + ).get_parameter(Name=f'/dataall/{self.environment.environmentUri}/sagemaker/sagemakerstudio/domain_id') + return False + except ClientError as e: + logger.info(f'check sagemaker studio domain created outside of data.all. 
Parameter data.all not found: {e}') + existing_domain = SagemakerStudio.get_sagemaker_studio_domain( + AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn + ) + return existing_domain.get('DomainId', False) + + def create_sagemaker_domain_resources(self, sagemaker_principals): + logger.info('Creating SageMaker base resources..') + cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( + accountid=self.environment.AwsAccountId, region=self.environment.region + ) + existing_default_vpc = EC2.check_default_vpc_exists( + AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn + ) + if existing_default_vpc: + logger.info("Using default VPC for Sagemaker Studio domain") + # Use default VPC - initial configuration (to be migrated) + vpc = ec2.Vpc.from_lookup(self.stack, 'VPCStudio', is_default=True) + subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] + subnet_ids += [public_subnet.subnet_id for public_subnet in vpc.public_subnets] + subnet_ids += [isolated_subnet.subnet_id for isolated_subnet in vpc.isolated_subnets] + security_groups = [] + else: + logger.info("Default VPC not found, Exception. 
Creating a VPC for SageMaker resources...") + # Create VPC with 3 Public Subnets and 3 Private subnets wit NAT Gateways + log_group = logs.LogGroup( + self.stack, + f'SageMakerStudio{self.environment.name}', + log_group_name=f'/{self.environment.resourcePrefix}/{self.environment.name}/vpc/sagemakerstudio', + retention=logs.RetentionDays.ONE_MONTH, + removal_policy=RemovalPolicy.DESTROY, + ) + vpc_flow_role = iam.Role( + self.stack, 'FlowLog', + assumed_by=iam.ServicePrincipal('vpc-flow-logs.amazonaws.com') + ) + vpc = ec2.Vpc( + self.stack, + "SageMakerVPC", + max_azs=3, + cidr="10.10.0.0/16", + subnet_configuration=[ + ec2.SubnetConfiguration( + subnet_type=ec2.SubnetType.PUBLIC, + name="Public", + cidr_mask=24 + ), + ec2.SubnetConfiguration( + subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT, + name="Private", + cidr_mask=24 + ), + ], + enable_dns_hostnames=True, + enable_dns_support=True, + ) + ec2.FlowLog( + self.stack, "StudioVPCFlowLog", + resource_type=ec2.FlowLogResourceType.from_vpc(vpc), + destination=ec2.FlowLogDestination.to_cloud_watch_logs(log_group, vpc_flow_role) + ) + # setup security group to be used for sagemaker studio domain + sagemaker_sg = ec2.SecurityGroup( + self.stack, + "SecurityGroup", + vpc=vpc, + description="Security Group for SageMaker Studio", + ) + + sagemaker_sg.add_ingress_rule(sagemaker_sg, ec2.Port.all_traffic()) + security_groups = [sagemaker_sg.security_group_id] + subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] + + vpc_id = vpc.vpc_id + + sagemaker_domain_role = iam.Role( + self.stack, + 'RoleForSagemakerStudioUsers', + assumed_by=iam.ServicePrincipal('sagemaker.amazonaws.com'), + role_name='RoleSagemakerStudioUsers', + managed_policies=[ + iam.ManagedPolicy.from_managed_policy_arn( + self.stack, + id='SagemakerFullAccess', + managed_policy_arn='arn:aws:iam::aws:policy/AmazonSageMakerFullAccess', + ), + iam.ManagedPolicy.from_managed_policy_arn( + self.stack, id='S3FullAccess', 
managed_policy_arn='arn:aws:iam::aws:policy/AmazonS3FullAccess' + ), + ], + ) + + sagemaker_domain_key = kms.Key( + self.stack, + 'SagemakerDomainKmsKey', + alias='SagemakerStudioDomain', + enable_key_rotation=True, + policy=iam.PolicyDocument( + assign_sids=True, + statements=[ + iam.PolicyStatement( + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[iam.AccountPrincipal(account_id=self.environment.AwsAccountId), sagemaker_domain_role] + sagemaker_principals, + actions=['kms:*'], + ) + ], + ), + ) + + sagemaker_domain = sagemaker.CfnDomain( + self.stack, + 'SagemakerStudioDomain', + domain_name=f'SagemakerStudioDomain-{self.environment.region}-{self.environment.AwsAccountId}', + auth_mode='IAM', + default_user_settings=sagemaker.CfnDomain.UserSettingsProperty( + execution_role=sagemaker_domain_role.role_arn, + security_groups=security_groups, + sharing_settings=sagemaker.CfnDomain.SharingSettingsProperty( + notebook_output_option='Allowed', + s3_kms_key_id=sagemaker_domain_key.key_id, + s3_output_path=f's3://sagemaker-{self.environment.region}-{self.environment.AwsAccountId}', + ), + ), + vpc_id=vpc_id, + subnet_ids=subnet_ids, + app_network_access_type='VpcOnly', + kms_key_id=sagemaker_domain_key.key_id, + ) + + ssm.StringParameter( + self.stack, + 'SagemakerStudioDomainId', + string_value=sagemaker_domain.attr_domain_id, + parameter_name=f'/dataall/{self.environment.environmentUri}/sagemaker/sagemakerstudio/domain_id', + ) + return sagemaker_domain + + @stack(stack='sagemakerstudiouserprofile') class SagemakerStudioUserProfile(Stack): module_name = __file__ @@ -70,6 +242,7 @@ def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: env_group = self.get_env_group(sm_user_profile) + # SageMaker Studio User Profile cfn_template_user_profile = os.path.join( os.path.dirname(__file__), '..', 'cfnstacks', 'sagemaker-user-template.yaml' ) diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index 
765d1b68a..369e4faa9 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -10,7 +10,6 @@ from .environment import Environment from .glossary import Glossary from .vote import Vote -from .share_object import ShareObject, ShareObjectSM, ShareItemSM from .dataset import Dataset from .dataset_location import DatasetStorageLocation from .dataset_profiling_run import DatasetProfilingRun @@ -18,6 +17,7 @@ from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc +from .share_object import ShareObject, ShareObjectSM, ShareItemSM from .notebook import Notebook from .sgm_studio_notebook import SgmStudioNotebook from .dashboard import Dashboard diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py index 8fdbb72b7..a42b676ec 100644 --- a/backend/dataall/db/api/dataset.py +++ b/backend/dataall/db/api/dataset.py @@ -11,7 +11,7 @@ ResourcePolicy, KeyValueTag, Vote, - Stack, + Stack ) from . import Organization from .. import models, api, exceptions, permissions, paginate diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 91e687d2b..8dc3c8d63 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -4,7 +4,6 @@ from .. import models, api, exceptions, paginate, permissions from . 
import has_resource_perm, ResourcePolicy, DatasetTable, Environment, Dataset -from ..models.Enums import ShareItemStatus from ...utils.naming_convention import ( NamingConventionService, NamingConventionPattern, diff --git a/backend/dataall/searchproxy/connect.py b/backend/dataall/searchproxy/connect.py index a758b95b3..3c952f5c9 100644 --- a/backend/dataall/searchproxy/connect.py +++ b/backend/dataall/searchproxy/connect.py @@ -55,15 +55,17 @@ def connect(envname='local'): secret = creds.secret_key token = creds.token + host = utils.Parameter.get_parameter(env=envname, path='elasticsearch/endpoint') + service = utils.Parameter.get_parameter(env=envname, path='elasticsearch/service') or 'es' + awsauth = AWS4Auth( access_key, secret, os.getenv('AWS_REGION', 'eu-west-1'), - 'es', + service, session_token=token, ) - host = utils.Parameter.get_parameter(env=envname, path='elasticsearch/endpoint') es = opensearchpy.OpenSearch( hosts=[{'host': host, 'port': 443}], http_auth=awsauth, @@ -71,7 +73,11 @@ def connect(envname='local'): verify_certs=True, connection_class=opensearchpy.RequestsHttpConnection, ) - print(es.info()) + + # Avoid calling GET /info endpoint because it is not available in OpenSearch Serverless + if service != "aoss": + print(es.info()) + if not es.indices.exists(index='dataall-index'): es.indices.create(index='dataall-index', body=CREATE_INDEX_REQUEST_BODY) print('Create "dataall-index" for dev env') diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index b74e34e93..22bba64ca 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -60,11 +60,12 @@ def get_share_principals(self) -> [str]: """ principals = [f"arn:aws:iam::{self.target_environment.AwsAccountId}:role/{self.share.principalIAMRoleName}"] if self.target_environment.dashboardsEnabled: 
- q_group = Quicksight.get_quicksight_group_arn( - self.target_environment.AwsAccountId - ) - if q_group: - principals.append(q_group) + group = Quicksight.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) + if group and group.get('Group'): + group_arn = group.get('Group').get('Arn') + if group_arn: + principals.append(group_arn) + return principals def build_shared_db_name(self) -> str: diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 03bfd1005..d9f33332e 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -1,6 +1,7 @@ import logging import os import sys +import time from .. import db from ..db import models @@ -14,36 +15,46 @@ root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) +RETRIES = 30 +SLEEP_TIME = 30 + def update_stacks(engine, envname): with engine.scoped_session() as session: - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( - session - ) - all_environments: [ - models.Environment - ] = db.api.Environment.list_all_active_environments(session) - log.info(f'Found {len(all_environments)} environments') + all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets(session) + all_environments: [models.Environment] = db.api.Environment.list_all_active_environments(session) + + log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') environment: models.Environment for environment in all_environments: - update_stack(session, envname, environment.environmentUri) + update_stack(session=session, envname=envname, target_uri=environment.environmentUri, wait=True) log.info(f'Found {len(all_datasets)} datasets') dataset: models.Dataset for dataset in all_datasets: - update_stack(session, envname, dataset.datasetUri) + update_stack(session=session, envname=envname, target_uri=dataset.datasetUri, wait=False) return all_environments, 
all_datasets -def update_stack(session, envname, target_uri): +def update_stack(session, envname, target_uri, wait=False): stack: models.Stack = db.api.Stack.get_stack_by_target_uri( session, target_uri=target_uri ) cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - if not Ecs.is_task_running(cluster_name, f'awsworker-{stack.stackUri}'): - stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack.stackUri) + if not Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): + stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack_uri=stack.stackUri) + if wait: + retries = 1 + while Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): + log.info(f"Update for {stack.name}//{stack.stackUri} is not complete, waiting for {SLEEP_TIME} seconds...") + time.sleep(SLEEP_TIME) + retries = retries + 1 + if retries > RETRIES: + log.info(f"Maximum number of retries exceeded ({RETRIES} retries), continuing task...") + break + log.info(f"Update for {stack.name}//{stack.stackUri} COMPLETE or maximum number of retries exceeded ({RETRIES} retries)") else: log.info( f'Stack update is already running... 
Skipping stack {stack.name}//{stack.stackUri}' diff --git a/deploy/configs/frontend_config.py b/deploy/configs/frontend_config.py index 7b9c9c8d6..a32002099 100644 --- a/deploy/configs/frontend_config.py +++ b/deploy/configs/frontend_config.py @@ -10,7 +10,7 @@ def create_react_env_file( resource_prefix, internet_facing='True', custom_domain='False', - cw_rum_enabled='False', + cw_rum_enabled='False' ): ssm = boto3.client('ssm', region_name=region) user_pool_id = ssm.get_parameter(Name=f'/dataall/{envname}/cognito/userpool')[ @@ -33,6 +33,11 @@ def create_react_env_file( search_api_url = f'{api_url}search/api' print(f'Search API: {search_api_url}') + pivot_role_auto_create = ssm.get_parameter(Name=f"/dataall/{envname}/pivotRole/enablePivotRoleAutoCreate")['Parameter'][ + 'Value' + ] + print(f'PivotRole auto-create is enabled: {pivot_role_auto_create}') + if custom_domain == 'False' and internet_facing == 'True': print('Switching to us-east-1 region...') ssm = boto3.client('ssm', region_name='us-east-1') @@ -63,6 +68,7 @@ def create_react_env_file( REACT_APP_COGNITO_REDIRECT_SIGNIN=https://{signin_singout_link} REACT_APP_COGNITO_REDIRECT_SIGNOUT=https://{signin_singout_link} REACT_APP_USERGUIDE_LINK=https://{user_guide_link} +REACT_APP_ENABLE_PIVOT_ROLE_AUTO_CREATE={pivot_role_auto_create} """ print('.env content: \n', file_content) f.write(file_content) diff --git a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 601d30f70..3721ea81f 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -318,10 +318,7 @@ Resources: Effect: Allow Action: - 'ec2:DeleteNetworkInterface' - Resource: '*' - Condition: - StringEquals: - 'aws:ResourceTag/dataall': 'True' + Resource: !Sub 'arn:aws:ec2:*:${AWS::AccountId}:network-interface/*' - Sid: SageMakerNotebookActions Effect: Allow Action: @@ -347,7 +344,8 @@ Resources: - 'sagemaker:ListApps' - 'sagemaker:DeleteApp' Resource: '*' - - Effect: Allow + - Sid: RamTag + Effect: 
Allow Action: - 'ram:TagResource' Resource: '*' @@ -355,7 +353,8 @@ Resources: 'ForAllValues:StringLike': 'ram:ResourceShareName': - LakeFormation* - - Effect: Allow + - Sid: RamCreateResource + Effect: Allow Action: - 'ram:CreateResourceShare' Resource: '*' @@ -365,7 +364,8 @@ Resources: - 'glue:Table' - 'glue:Database' - 'glue:Catalog' - - Effect: Allow + - Sid: RamUpdateResource + Effect: Allow Action: - 'ram:UpdateResourceShare' Resource: !Sub 'arn:aws:ram:*:${AWS::AccountId}:resource-share/*' @@ -375,7 +375,8 @@ Resources: 'ForAllValues:StringLike': 'ram:ResourceShareName': - LakeFormation* - - Effect: Allow + - Sid: RamAssociateResource + Effect: Allow Action: - 'ram:AssociateResourceShare' - 'ram:DisassociateResourceShare' @@ -384,7 +385,8 @@ Resources: 'ForAllValues:StringLike': 'ram:ResourceShareName': - LakeFormation* - - Effect: Allow + - Sid: RamDeleteResource + Effect: Allow Action: - 'ram:DeleteResourceShare' Resource: !Sub 'arn:aws:ram:*:${AWS::AccountId}:resource-share/*' @@ -399,7 +401,7 @@ Resources: - "ec2:DescribeAvailabilityZones" - "ram:EnableSharingWithAwsOrganization" Resource: '*' - - Sid: RamRead + - Sid: RamReadGlue Effect: Allow Action: - 'glue:PutResourcePolicy' @@ -503,6 +505,10 @@ Resources: - Sid: LakeFormation Effect: Allow Action: + - "lakeformation:RegisterResource" + - "lakeformation:DeregisterResource" + - "lakeformation:UpdateResource" + - "lakeformation:DescribeResource" - "lakeformation:AddLFTagsToResource" - "lakeformation:RemoveLFTagsFromResource" - "lakeformation:GetResourceLFTags" @@ -536,7 +542,6 @@ Resources: - 'lakeformation:GetTableObjects' - 'lakeformation:UpdateTableObjects' - 'lakeformation:DeleteObjectsOnCancel' - - 'lakeformation:DescribeResource' Resource: '*' - Sid: Compute Effect: Allow @@ -611,6 +616,7 @@ Resources: - !Sub "arn:aws:quicksight:*:${AWS::AccountId}:dashboard/*" - !Sub "arn:aws:quicksight:*:${AWS::AccountId}:namespace/default" - !Sub "arn:aws:quicksight:*:${AWS::AccountId}:account/*" + - !Sub 
"arn:aws:quicksight:*:${AWS::AccountId}:*" - Sid: QuickSightSession Effect: Allow Action: @@ -652,7 +658,6 @@ Resources: Action: - 'iam:Get*' - 'iam:ListRoles' - - 'iam:Get*' Effect: Allow Resource: '*' - Sid: IAMRolePolicy diff --git a/deploy/pivot_role/pivotRoleCDK/README.md b/deploy/pivot_role/pivotRoleCDK/README.md deleted file mode 100644 index 6778d1aae..000000000 --- a/deploy/pivot_role/pivotRoleCDK/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# README - -This directory contains a CDK version of the pivotRole. -It is not used by data.all, but it might be useful if you are implementing -any automation around the creation of data.all pivotRole. - -This is just for reference, you still need to take the code and adapt it -to your specific CICD implementation. diff --git a/deploy/pivot_role/pivotRoleCDK/app.py b/deploy/pivot_role/pivotRoleCDK/app.py deleted file mode 100644 index 60e8d0bf6..000000000 --- a/deploy/pivot_role/pivotRoleCDK/app.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 - -from aws_cdk import ( - App, -) - -from .dataall_base_infra import dataAllBaseInfra - -config = { - "NAME": "someenvironment", - "DATAALL_ACCOUNT": "AWSAccountId", - "EXTERNAL_ID": "externalId", - "RESOURCE_PREFIX": "resourcePrefix" -} - -app = App() - -# data.all base resources: pivot role and LakeFormation service role -dataall_infra = dataAllBaseInfra( - app, - f"BaseInfra-dataall-{config.NAME}", - config=config, -) - -app.synth() diff --git a/deploy/pivot_role/pivotRoleCDK/dataall_base_infra.py b/deploy/pivot_role/pivotRoleCDK/dataall_base_infra.py deleted file mode 100644 index 7690543cd..000000000 --- a/deploy/pivot_role/pivotRoleCDK/dataall_base_infra.py +++ /dev/null @@ -1,846 +0,0 @@ -from constructs import Construct -from aws_cdk import ( - Duration, - Stack, - aws_iam as iam -) - -class dataAllBaseInfra(Stack): - def __init__(self, scope: Construct, construct_id: str, config, **kwargs) -> None: - super().__init__(scope, construct_id, **kwargs) - # Data.All IAM 
PivotRole creation - self.create_dataall_role(name="dataallPivotRole", principal_id=config.DATAALL_ACCOUNT, external_id=config.EXTERNAL_ID, env_resource_prefix=config.RESOURCE_PREFIX) - - # Data.All IAM Lake Formation service role creation - self.lf_service_role = iam.CfnServiceLinkedRole(self, "LakeFormationSLR", - aws_service_name="lakeformation.amazonaws.com" - ) - - def create_dataall_role(self, name: str, principal_id: str, external_id: str, - env_resource_prefix: str) -> iam.Role: - """ - Creates an IAM Role that will enable data.all to interact with this Data Account - - :param str name: Role name - :param str principal_id: AWS Account ID of central data.all - :param str external_id: External ID provided by data.all - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Role - :rtype: iam.Role - """ - - role = iam.Role(self, "DataAllPivotRole", - role_name=name, - assumed_by=iam.CompositePrincipal( - iam.ServicePrincipal("glue.amazonaws.com"), - iam.ServicePrincipal("lakeformation.amazonaws.com"), - iam.ServicePrincipal("lambda.amazonaws.com") - ), - path="/", - max_session_duration=Duration.hours(12), - managed_policies=[ - self._create_dataall_policy0(env_resource_prefix), - self._create_dataall_policy1(env_resource_prefix), - self._create_dataall_policy2(env_resource_prefix), - self._create_dataall_policy3(env_resource_prefix, name) - ] - ) - - role.assume_role_policy.add_statements(iam.PolicyStatement( - effect=iam.Effect.ALLOW, - principals=[ - iam.AccountPrincipal(account_id=principal_id) - ], - actions=["sts:AssumeRole"], - conditions={"StringEquals": {"sts:ExternalId": external_id}} - )) - - - return role - - def _create_dataall_policy0(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the first managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - 
:rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy(self, "PivotRolePolicy0", - managed_policy_name=f"{env_resource_prefix}-pivotrole-policy-0", - statements=[ - # Athena permissions - iam.PolicyStatement( - sid="Athena", effect=iam.Effect.ALLOW, resources=["*"], - actions=[ - "athena:GetQuery*", - "athena:StartQueryExecution", - "athena:ListWorkGroups" - ] - ), - # Athena Workgroups permissions - iam.PolicyStatement( - sid="AthenaWorkgroups", effect=iam.Effect.ALLOW, - actions=[ - "athena:GetWorkGroup", - "athena:CreateWorkGroup", - "athena:UpdateWorkGroup", - "athena:DeleteWorkGroup", - "athena:TagResource", - "athena:UntagResource", - "athena:ListTagsForResource" - ], - resources=[ - f"arn:aws:athena:*:{self.account}:workgroup/{env_resource_prefix}*"] - ), - # AWS Glue Crawler Bucket - iam.PolicyStatement( - sid="AwsGlueCrawlerBucket", effect=iam.Effect.ALLOW, - actions=["s3:GetObject"], - resources=["arn:aws:s3:::crawler-public*"] - ), - # S3 Access points - iam.PolicyStatement( - sid="ManagedAccessPoints", effect=iam.Effect.ALLOW, - actions=[ - "s3:GetAccessPoint", - "s3:GetAccessPointPolicy", - "s3:ListAccessPoints", - "s3:CreateAccessPoint", - "s3:DeleteAccessPoint", - "s3:GetAccessPointPolicyStatus", - "s3:DeleteAccessPointPolicy", - "s3:PutAccessPointPolicy" - ], - resources=[f"arn:aws:s3:*:{self.account}:accesspoint/*"] - ), - # S3 Managed Buckets - iam.PolicyStatement( - sid="ManagedBuckets", effect=iam.Effect.ALLOW, - actions=[ - "s3:List*", - "s3:Delete*", - "s3:Get*", - "s3:Put*" - ], - resources=[f"arn:aws:s3:::{env_resource_prefix}*"] - ), - # S3 Imported Buckets - iam.PolicyStatement( - sid="ImportedBuckets", effect=iam.Effect.ALLOW, - actions=[ - "s3:List*", - "s3:GetBucket*", - "s3:GetLifecycleConfiguration", - "s3:GetObject", - "s3:PutBucketPolicy", - "s3:PutBucketTagging", - "s3:PutObject", - "s3:PutObjectAcl", - "s3:PutBucketOwnershipControls" - ], - resources=["arn:aws:s3:::*"] - ), - # AWS Logging Buckets - iam.PolicyStatement( - 
sid="AWSLoggingBuckets", effect=iam.Effect.ALLOW, - actions=[ - "s3:PutBucketAcl", - "s3:PutBucketNotification" - ], - resources=[f"arn:aws:s3:::{env_resource_prefix}-logging-*"] - ), - # Read Buckets - iam.PolicyStatement( - sid="ReadBuckets", effect=iam.Effect.ALLOW, - actions=[ - "s3:ListAllMyBuckets", - "s3:GetBucketLocation", - "s3:PutBucketTagging" - ], - resources=["*"] - ), - # CloudWatch Metrics - iam.PolicyStatement( - sid="CWMetrics", effect=iam.Effect.ALLOW, - actions=[ - "cloudwatch:PutMetricData", - "cloudwatch:GetMetricData", - "cloudwatch:GetMetricStatistics" - ], - resources=["*"] - ), - # Logs - iam.PolicyStatement( - sid="Logs", effect=iam.Effect.ALLOW, - actions=[ - "logs:CreateLogGroup", - "logs:CreateLogStream", - "logs:PutLogEvents" - ], - resources=[ - f"arn:aws:logs:*:{self.account}:log-group:/aws-glue/*", - f"arn:aws:logs:*:{self.account}:log-group:/aws/lambda/*", - f"arn:aws:logs:*:{self.account}:log-group:/{env_resource_prefix}*", - ] - ), - # Logging - iam.PolicyStatement( - sid="Logging", effect=iam.Effect.ALLOW, - actions=["logs:PutLogEvents"], - resources=["*"] - ), - # EventBridge (CloudWatch Events) - iam.PolicyStatement( - sid="CWEvents", effect=iam.Effect.ALLOW, - actions=[ - "events:DeleteRule", - "events:List*", - "events:PutRule", - "events:PutTargets", - "events:RemoveTargets" - ], - resources=["*"] - ), - # Glue - iam.PolicyStatement( - sid="Glue", effect=iam.Effect.ALLOW, - actions=[ - "glue:BatchCreatePartition", - "glue:BatchDeletePartition", - "glue:BatchDeleteTable", - "glue:CreateCrawler", - "glue:CreateDatabase", - "glue:CreatePartition", - "glue:CreateTable", - "glue:DeleteCrawler", - "glue:DeleteDatabase", - "glue:DeleteJob", - "glue:DeletePartition", - "glue:DeleteTable", - "glue:DeleteTrigger", - "glue:BatchGet*", - "glue:Get*", - "glue:List*", - "glue:StartCrawler", - "glue:StartJobRun", - "glue:StartTrigger", - "glue:SearchTables", - "glue:UpdateDatabase", - "glue:UpdatePartition", - "glue:UpdateTable", - 
"glue:UpdateTrigger", - "glue:UpdateJob", - "glue:TagResource", - "glue:UpdateCrawler" - ], - resources=["*"] - ), - # KMS - iam.PolicyStatement( - sid="KMS", effect=iam.Effect.ALLOW, - actions=[ - "kms:Decrypt", - "kms:Encrypt", - "kms:GenerateDataKey*", - "kms:PutKeyPolicy", - "kms:ReEncrypt*", - "kms:TagResource", - "kms:UntagResource" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="KMSAlias", effect=iam.Effect.ALLOW, - actions=["kms:DeleteAlias"], - resources=[f"arn:aws:kms:*:{self.account}:alias/{env_resource_prefix}*"] - ), - iam.PolicyStatement( - sid="KMSCreate", effect=iam.Effect.ALLOW, - actions=[ - "kms:List*", - "kms:DescribeKey", - "kms:CreateAlias", - "kms:CreateKey" - ], - resources=["*"] - ), - # AWS Organizations - iam.PolicyStatement( - sid="Organizations", effect=iam.Effect.ALLOW, - actions=["organizations:DescribeOrganization"], - resources=["*"] - ), - # Resource Tags - iam.PolicyStatement( - sid="ResourceGroupTags", effect=iam.Effect.ALLOW, - actions=[ - "tag:*", - "resource-groups:*" - ], - resources=["*"] - ), - # SNS - iam.PolicyStatement( - sid="SNSPublish", effect=iam.Effect.ALLOW, - actions=[ - "sns:Publish", - "sns:SetTopicAttributes", - "sns:GetTopicAttributes", - "sns:DeleteTopic", - "sns:Subscribe", - "sns:TagResource", - "sns:UntagResource", - "sns:CreateTopic" - ], - resources=[f"arn:aws:sns:*:{self.account}:{env_resource_prefix}*"] - ), - iam.PolicyStatement( - sid="SNSList", effect=iam.Effect.ALLOW, - actions=["sns:ListTopics"], - resources=["*"] - ), - # SQS - iam.PolicyStatement( - sid="SQSList", effect=iam.Effect.ALLOW, - actions=["sqs:ListQueues"], - resources=["*"] - ), - iam.PolicyStatement( - sid="SQS", effect=iam.Effect.ALLOW, - actions=[ - "sqs:ReceiveMessage", - "sqs:SendMessage" - ], - resources=[f"arn:aws:sqs:*:{self.account}:{env_resource_prefix}*"] - ) - ] - ) - - def _create_dataall_policy1(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the second managed IAM Policy required for the 
Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy(self, "PivotRolePolicy1", - managed_policy_name=f"{env_resource_prefix}-pivotrole-policy-1", - statements=[ - # Redshift - iam.PolicyStatement( - sid="Redshift", effect=iam.Effect.ALLOW, - actions=[ - "redshift:DeleteTags", - "redshift:ModifyClusterIamRoles", - "redshift:DescribeClusterSecurityGroups", - "redshift:DescribeClusterSubnetGroups", - "redshift:pauseCluster", - "redshift:resumeCluster" - ], - resources=["*"], - conditions={"StringEquals": {"aws:ResourceTag/dataall": "true"}} - ), - iam.PolicyStatement( - sid="RedshiftRead", effect=iam.Effect.ALLOW, - actions=[ - "redshift:DescribeClusters", - "redshift:CreateTags", - "redshift:DescribeClusterSubnetGroups" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="RedshiftCreds", effect=iam.Effect.ALLOW, - actions=["redshift:GetClusterCredentials"], - resources=[ - f"arn:aws:redshift:*:{self.account}:dbgroup:*/*", - f"arn:aws:redshift:*:{self.account}:dbname:*/*", - f"arn:aws:redshift:*:{self.account}:dbuser:*/*" - ] - ), - iam.PolicyStatement( - sid="AllowRedshiftSubnet", effect=iam.Effect.ALLOW, - actions=["redshift:CreateClusterSubnetGroup"], - resources=["*"] - ), - iam.PolicyStatement( - sid="AllowRedshiftDataApi", effect=iam.Effect.ALLOW, - actions=[ - "redshift-data:ListTables", - "redshift-data:GetStatementResult", - "redshift-data:CancelStatement", - "redshift-data:ListSchemas", - "redshift-data:ExecuteStatement", - "redshift-data:ListStatements", - "redshift-data:ListDatabases", - "redshift-data:DescribeStatement" - ], - resources=["*"] - ), - # EC2 - iam.PolicyStatement( - sid="EC2SG", effect=iam.Effect.ALLOW, - actions=[ - "ec2:CreateSecurityGroup", - "ec2:CreateNetworkInterface", - "ec2:Describe*" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="TagsforENI", effect=iam.Effect.ALLOW, 
- actions=[ - "ec2:CreateTags", - "ec2:DeleteTags" - ], - resources=[f"arn:aws:ec2:*:{self.account}:network-interface/*"] - ), - iam.PolicyStatement( - sid="DeleteENI", effect=iam.Effect.ALLOW, - actions=["ec2:DeleteNetworkInterface"], - resources=["*"], - conditions={"StringEquals": {"aws:ResourceTag/dataall": "True"}} - ), - # SageMaker - iam.PolicyStatement( - sid="SageMakerNotebookActions", effect=iam.Effect.ALLOW, - actions=[ - "sagemaker:ListTags", - "sagemaker:DescribeUserProfile", - "sagemaker:DeleteNotebookInstance", - "sagemaker:StopNotebookInstance", - "sagemaker:CreatePresignedNotebookInstanceUrl", - "sagemaker:DescribeNotebookInstance", - "sagemaker:StartNotebookInstance", - "sagemaker:AddTags", - "sagemaker:DescribeDomain", - "sagemaker:CreatePresignedDomainUrl" - ], - resources=[ - f"arn:aws:sagemaker:*:{self.account}:notebook-instance/{env_resource_prefix}*", - f"arn:aws:sagemaker:*:{self.account}:domain/*", - f"arn:aws:sagemaker:*:{self.account}:user-profile/*/*" - ] - ), - iam.PolicyStatement( - sid="SageMakerNotebookInstances", effect=iam.Effect.ALLOW, - actions=[ - "sagemaker:ListNotebookInstances", - "sagemaker:ListDomains", - "sagemaker:ListApps", - "sagemaker:DeleteApp" - ], - resources=["*"] - ), - # RAM - iam.PolicyStatement( - sid="RamTag", effect=iam.Effect.ALLOW, - actions=["ram:TagResource"], - resources=["*"], - conditions={"ForAllValues:StringLike": { - "ram:ResourceShareName": ["LakeFormation*"]}} - ), - iam.PolicyStatement( - sid="RamCreateResource", effect=iam.Effect.ALLOW, - actions=["ram:CreateResourceShare"], - resources=["*"], - conditions={"ForAllValues:StringEquals": {"ram:RequestedResourceType": [ - "glue:Table", - "glue:Database", - "glue:Catalog" - ]}} - ), - iam.PolicyStatement( - sid="RamUpdateResource", effect=iam.Effect.ALLOW, - actions=["ram:UpdateResourceShare"], - resources=[f"arn:aws:ram:*:{self.account}:resource-share/*"], - conditions={ - "StringEquals": {"aws:ResourceTag/dataall": "true"}, - 
"ForAllValues:StringLike": { - "ram:ResourceShareName": ["LakeFormation*"]} - } - ), - iam.PolicyStatement( - sid="RamAssociateResource", effect=iam.Effect.ALLOW, - actions=[ - "ram:AssociateResourceShare", - "ram:DisassociateResourceShare" - ], - resources=[f"arn:aws:ram:*:{self.account}:resource-share/*"], - conditions={"ForAllValues:StringLike": { - "ram:ResourceShareName": ["LakeFormation*"]} - } - ), - iam.PolicyStatement( - sid="RamDeleteResource", effect=iam.Effect.ALLOW, - actions=["ram:DeleteResourceShare"], - resources=[f"arn:aws:ram:*:{self.account}:resource-share/*"] - ), - iam.PolicyStatement( - sid="RamInvitations", effect=iam.Effect.ALLOW, - actions=[ - "ram:AcceptResourceShareInvitation", - "ram:RejectResourceShareInvitation", - "ec2:DescribeAvailabilityZones", - "ram:EnableSharingWithAwsOrganization" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="RamRead", effect=iam.Effect.ALLOW, - actions=[ - "glue:PutResourcePolicy", - "glue:DeleteResourcePolicy", - "ram:Get*", - "ram:List*" - ], - resources=["*"] - ), - # Security Groups - iam.PolicyStatement( - sid="SGCreateTag", effect=iam.Effect.ALLOW, - actions=["ec2:CreateTags"], - resources=[f"arn:aws:ec2:*:{self.account}:security-group/*"], - conditions={"StringEquals": {"aws:RequestTag/dataall": "true"}} - ), - iam.PolicyStatement( - sid="SGandRedshift", effect=iam.Effect.ALLOW, - actions=[ - "ec2:DeleteTags", - "ec2:DeleteSecurityGroup", - "redshift:DeleteClusterSubnetGroup" - ], - resources=["*"], - conditions={"ForAnyValue:StringEqualsIfExists": { - "aws:ResourceTag/dataall": "true"}} - ), - # Redshift - iam.PolicyStatement( - sid="RedshiftDataApi", effect=iam.Effect.ALLOW, - actions=[ - "redshift-data:ListTables", - "redshift-data:GetStatementResult", - "redshift-data:CancelStatement", - "redshift-data:ListSchemas", - "redshift-data:ExecuteStatement", - "redshift-data:ListStatements", - "redshift-data:ListDatabases", - "redshift-data:DescribeStatement" - ], - resources=["*"], - 
conditions={"StringEqualsIfExists": {"aws:ResourceTag/dataall": "true"}} - ), - # Dev Tools - iam.PolicyStatement( - sid="DevTools0", effect=iam.Effect.ALLOW, - actions=["cloudformation:ValidateTemplate"], - resources=["*"] - ), - iam.PolicyStatement( - sid="DevTools1", effect=iam.Effect.ALLOW, - actions=[ - "secretsmanager:CreateSecret", - "secretsmanager:DeleteSecret", - "secretsmanager:TagResource", - "codebuild:DeleteProject" - ], - resources=["*"], - conditions={"StringEquals": {"aws:ResourceTag/dataall": "true"}} - ), - iam.PolicyStatement( - sid="DevTools2", effect=iam.Effect.ALLOW, - actions=[ - "codebuild:CreateProject", - "ecr:CreateRepository", - "ssm:PutParameter", - "ssm:AddTagsToResource" - ], - resources=["*"], - conditions={"StringEquals": {"aws:RequestTag/dataall": "true"}} - ), - iam.PolicyStatement( - sid="CloudFormation", effect=iam.Effect.ALLOW, - actions=[ - "cloudformation:DescribeStacks", - "cloudformation:DescribeStackResources", - "cloudformation:DescribeStackEvents", - "cloudformation:DeleteStack", - "cloudformation:CreateStack", - "cloudformation:GetTemplate", - "cloudformation:ListStackResources", - "cloudformation:DescribeStackResource" - ], - resources=[ - f"arn:aws:cloudformation:*:{self.account}:stack/{env_resource_prefix}*/*", - f"arn:aws:cloudformation:*:{self.account}:stack/CDKToolkit/*", - f"arn:aws:cloudformation:*:{self.account}:stack/*/*" - ] - ) - ] - ) - - def _create_dataall_policy2(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the third managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy(self, "PivotRolePolicy2", - managed_policy_name=f"{env_resource_prefix}-pivotrole-policy-2", - statements=[ - # LakeFormation - iam.PolicyStatement( - sid="LakeFormation", effect=iam.Effect.ALLOW, - actions=[ - 
"lakeformation:AddLFTagsToResource", - "lakeformation:RemoveLFTagsFromResource", - "lakeformation:GetResourceLFTags", - "lakeformation:ListLFTags", - "lakeformation:CreateLFTag", - "lakeformation:GetLFTag", - "lakeformation:UpdateLFTag", - "lakeformation:DeleteLFTag", - "lakeformation:SearchTablesByLFTags", - "lakeformation:SearchDatabasesByLFTags", - "lakeformation:ListResources", - "lakeformation:ListPermissions", - "lakeformation:GrantPermissions", - "lakeformation:BatchGrantPermissions", - "lakeformation:RevokePermissions", - "lakeformation:BatchRevokePermissions", - "lakeformation:PutDataLakeSettings", - "lakeformation:GetDataLakeSettings", - "lakeformation:GetDataAccess", - "lakeformation:GetWorkUnits", - "lakeformation:StartQueryPlanning", - "lakeformation:GetWorkUnitResults", - "lakeformation:GetQueryState", - "lakeformation:GetQueryStatistics", - "lakeformation:StartTransaction", - "lakeformation:CommitTransaction", - "lakeformation:CancelTransaction", - "lakeformation:ExtendTransaction", - "lakeformation:DescribeTransaction", - "lakeformation:ListTransactions", - "lakeformation:GetTableObjects", - "lakeformation:UpdateTableObjects", - "lakeformation:DeleteObjectsOnCancel", - "lakeformation:DescribeResource" - ], - resources=["*"] - ), - # Compute - iam.PolicyStatement( - sid="Compute", effect=iam.Effect.ALLOW, - actions=[ - "lambda:CreateFunction", - "lambda:AddPermission", - "lambda:InvokeFunction", - "lambda:RemovePermission", - "lambda:GetFunction", - "lambda:GetFunctionConfiguration", - "lambda:DeleteFunction", - "ecr:CreateRepository", - "ecr:SetRepositoryPolicy", - "ecr:DeleteRepository", - "ecr:DescribeImages", - "ecr:BatchDeleteImage", - "codepipeline:GetPipelineState", - "codepipeline:DeletePipeline", - "codepipeline:GetPipeline", - "codepipeline:CreatePipeline", - "codepipeline:TagResource", - "codepipeline:UntagResource" - ], - resources=[ - f"arn:aws:lambda:*:{self.account}:function:{env_resource_prefix}*", - 
f"arn:aws:s3:::{env_resource_prefix}*", - f"arn:aws:codepipeline:*:{self.account}:{env_resource_prefix}*", - f"arn:aws:ecr:*:{self.account}:repository/{env_resource_prefix}*" - ] - ), - # Databrew - iam.PolicyStatement( - sid="DatabrewList", effect=iam.Effect.ALLOW, - actions=["databrew:List*"], - resources=["*"] - ), - iam.PolicyStatement( - sid="DatabrewPermissions", effect=iam.Effect.ALLOW, - actions=[ - "databrew:BatchDeleteRecipeVersion", - "databrew:Delete*", - "databrew:Describe*", - "databrew:PublishRecipe", - "databrew:SendProjectSessionAction", - "databrew:Start*", - "databrew:Stop*", - "databrew:TagResource", - "databrew:UntagResource", - "databrew:Update*" - ], - resources=[f"arn:aws:databrew:*:{self.account}:*/{env_resource_prefix}*"] - ), - # QuickSight - iam.PolicyStatement( - sid="QuickSight", effect=iam.Effect.ALLOW, - actions=[ - "quicksight:CreateGroup", - "quicksight:DescribeGroup", - "quicksight:ListDashboards", - "quicksight:DescribeDataSource", - "quicksight:DescribeDashboard", - "quicksight:DescribeUser", - "quicksight:SearchDashboards", - "quicksight:GetDashboardEmbedUrl", - "quicksight:GenerateEmbedUrlForAnonymousUser", - "quicksight:UpdateUser", - "quicksight:ListUserGroups", - "quicksight:RegisterUser", - "quicksight:DescribeDashboardPermissions", - "quicksight:UpdateDashboardPermissions", - "quicksight:GetAuthCode", - "quicksight:CreateGroupMembership", - "quicksight:DescribeAccountSubscription" - ], - resources=[ - f"arn:aws:quicksight:*:{self.account}:group/default/*", - f"arn:aws:quicksight:*:{self.account}:user/default/*", - f"arn:aws:quicksight:*:{self.account}:datasource/*", - f"arn:aws:quicksight:*:{self.account}:user/*", - f"arn:aws:quicksight:*:{self.account}:dashboard/*", - f"arn:aws:quicksight:*:{self.account}:namespace/default", - f"arn:aws:quicksight:*:{self.account}:account/*" - ] - ), - iam.PolicyStatement( - sid="QuickSightSession", effect=iam.Effect.ALLOW, - actions=["quicksight:GetSessionEmbedUrl"], - resources=["*"] - 
) - ] - ) - - def _create_dataall_policy3(self, env_resource_prefix: str, role_name: str) -> iam.ManagedPolicy: - """ - Creates the fourth managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :param str role_name: IAM Role name - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy(self, "PivotRolePolicy3", - managed_policy_name=f"{env_resource_prefix}-pivotrole-policy-3", - statements=[ - # SSM Parameter Store - iam.PolicyStatement( - sid="ParameterStore", effect=iam.Effect.ALLOW, - actions=[ - "ssm:GetParameter" - ], - resources=[ - f"arn:aws:ssm:*:{self.account}:parameter/{env_resource_prefix}/*", - f"arn:aws:ssm:*:{self.account}:parameter/dataall/*", - f"arn:aws:ssm:*:{self.account}:parameter/ddk/*" - ] - ), - # Secrets Manager - iam.PolicyStatement( - sid="SecretsManager", effect=iam.Effect.ALLOW, - actions=[ - "secretsmanager:DescribeSecret", - "secretsmanager:GetSecretValue" - ], - resources=[ - f"arn:aws:secretsmanager:*:{self.account}:secret:{env_resource_prefix}*", - f"arn:aws:secretsmanager:*:{self.account}:secret:dataall*" - ] - ), - iam.PolicyStatement( - sid="SecretsManagerList", effect=iam.Effect.ALLOW, - actions=["secretsmanager:ListSecrets"], - resources=["*"] - ), - # IAM - iam.PolicyStatement( - sid="IAMListGet", effect=iam.Effect.ALLOW, - actions=[ - "iam:ListRoles", - "iam:Get*" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="IAMRolePolicy", effect=iam.Effect.ALLOW, - actions=[ - "iam:PutRolePolicy", - "iam:DeleteRolePolicy" - ], - resources=["*"] - ), - iam.PolicyStatement( - sid="IAMPassRole", effect=iam.Effect.ALLOW, - actions=[ - "iam:PassRole" - ], - resources=[ - f"arn:aws:iam::{self.account}:role/{env_resource_prefix}*", - f"arn:aws:iam::{self.account}:role/{role_name}", - f"arn:aws:iam::{self.account}:role/cdk-*" - ], - ), - # STS - iam.PolicyStatement( - sid="STS", effect=iam.Effect.ALLOW, - 
actions=["sts:AssumeRole"], - resources=[ - f"arn:aws:iam::{self.account}:role/{env_resource_prefix}*", - f"arn:aws:iam::{self.account}:role/ddk-*" - ] - ), - # Step Functions - iam.PolicyStatement( - sid="StepFunctions", effect=iam.Effect.ALLOW, - actions=[ - "states:DescribeStateMachine", - "states:ListExecutions", - "states:StartExecution" - ], - resources=[ - f"arn:aws:states:*:{self.account}:stateMachine:{env_resource_prefix}*"] - ), - # CodeCommit - iam.PolicyStatement( - sid="CodeCommit", effect=iam.Effect.ALLOW, - actions=[ - "codecommit:GetFile", - "codecommit:ListBranches", - "codecommit:GetFolder", - "codecommit:GetCommit", - "codecommit:GitPull", - "codecommit:GetRepository", - 'codecommit:TagResource', - "codecommit:UntagResource", - "codecommit:CreateBranch", - "codecommit:CreateCommit", - "codecommit:CreateRepository", - "codecommit:DeleteRepository", - "codecommit:GitPush", - "codecommit:PutFile", - "codecommit:GetBranch", - ], - resources=[f"arn:aws:codecommit:*:{self.account}:{env_resource_prefix}*"] - ) - ] - ) - - - - - - diff --git a/deploy/requirements.txt b/deploy/requirements.txt index 7cd815a9d..5ea72abd5 100644 --- a/deploy/requirements.txt +++ b/deploy/requirements.txt @@ -1,6 +1,6 @@ -aws-cdk-lib==2.14.0 +aws-cdk-lib==2.61.1 boto3-stubs==1.20.20 boto3==1.24.85 botocore==1.27.85 cdk-nag==2.7.2 -constructs==10.0.73 +constructs>=10.0.0,<11.0.0 diff --git a/deploy/stacks/backend_stack.py b/deploy/stacks/backend_stack.py index 6e49f0e27..78b920482 100644 --- a/deploy/stacks/backend_stack.py +++ b/deploy/stacks/backend_stack.py @@ -14,6 +14,7 @@ from .lambda_api import LambdaApiStack from .monitoring import MonitoringStack from .opensearch import OpenSearchStack +from .opensearch_serverless import OpenSearchServerlessStack from .param_store_stack import ParamStoreStack from .s3_resources import S3ResourcesStack from .secrets_stack import SecretsManagerStack @@ -43,11 +44,17 @@ def __init__( enable_cw_canaries=False, enable_cw_rum=False, 
shared_dashboard_sessions='anonymous', + enable_pivot_role_auto_create=False, + enable_opensearch_serverless=False, **kwargs, ): super().__init__(scope, id, **kwargs) - vpc_stack = VpcStack( + self.resource_prefix = resource_prefix + self.envname = envname + self.prod_sizing = prod_sizing + + self.vpc_stack = VpcStack( self, id='Vpc', envname=envname, @@ -56,8 +63,10 @@ def __init__( vpc_id=vpc_id, **kwargs, ) - vpc = vpc_stack.vpc - vpc_endpoints_sg = vpc_stack.vpce_security_group + vpc = self.vpc_stack.vpc + vpc_endpoints_sg = self.vpc_stack.vpce_security_group + + self.pivot_role_name = f"dataallPivotRole{'-cdk' if enable_pivot_role_auto_create else ''}" ParamStoreStack( self, @@ -68,6 +77,7 @@ def __init__( enable_cw_canaries=enable_cw_canaries, quicksight_enabled=quicksight_enabled, shared_dashboard_sessions=shared_dashboard_sessions, + enable_pivot_role_auto_create=enable_pivot_role_auto_create, **kwargs, ) @@ -77,6 +87,7 @@ def __init__( envname=envname, resource_prefix=resource_prefix, enable_cw_canaries=enable_cw_canaries, + pivot_role_name=self.pivot_role_name, **kwargs, ) @@ -112,7 +123,7 @@ def __init__( self, 'ECRREPO', repository_arn=ecr_repository ) - lambda_api_stack = LambdaApiStack( + self.lambda_api_stack = LambdaApiStack( self, f'Lambdas', envname=envname, @@ -126,10 +137,11 @@ def __init__( apig_vpce=apig_vpce, prod_sizing=prod_sizing, user_pool=cognito_stack.user_pool, + pivot_role_name=self.pivot_role_name, **kwargs, ) - ecs_stack = ContainerStack( + self.ecs_stack = ContainerStack( self, f'ECS', envname=envname, @@ -139,6 +151,8 @@ def __init__( ecr_repository=repo, image_tag=image_tag, prod_sizing=prod_sizing, + pivot_role_name=self.pivot_role_name, + tooling_account_id=tooling_account_id, **kwargs, ) @@ -241,51 +255,54 @@ def __init__( resource_prefix=resource_prefix, vpc=vpc, lambdas=[ - lambda_api_stack.aws_handler, - lambda_api_stack.api_handler, + self.lambda_api_stack.aws_handler, + self.lambda_api_stack.api_handler, ], - 
ecs_security_groups=ecs_stack.ecs_security_groups, + ecs_security_groups=self.ecs_stack.ecs_security_groups, codebuild_dbmigration_sg=dbmigration_stack.codebuild_sg, prod_sizing=prod_sizing, quicksight_monitoring_sg=quicksight_monitoring_sg, **kwargs, ) - opensearch_stack = OpenSearchStack( - self, - f'OpenSearch', - envname=envname, - resource_prefix=resource_prefix, - vpc=vpc, - lambdas=[ - lambda_api_stack.aws_handler, - lambda_api_stack.api_handler, - lambda_api_stack.elasticsearch_proxy_handler, - ], - ecs_security_groups=ecs_stack.ecs_security_groups, - prod_sizing=prod_sizing, - **kwargs, - ) - - monitoring_stack = MonitoringStack( + self.monitoring_stack = MonitoringStack( self, f'CWDashboards', envname=envname, resource_prefix=resource_prefix, lambdas=[ - lambda_api_stack.aws_handler, - lambda_api_stack.api_handler, - lambda_api_stack.elasticsearch_proxy_handler, + self.lambda_api_stack.aws_handler, + self.lambda_api_stack.api_handler, + self.lambda_api_stack.elasticsearch_proxy_handler, ], database=aurora_stack.cluster.cluster_identifier, - ecs_cluster=ecs_stack.ecs_cluster, - ecs_task_definitions=ecs_stack.ecs_task_definitions, - backend_api=lambda_api_stack.backend_api_name, - opensearch_domain=opensearch_stack.domain.domain_name, + ecs_cluster=self.ecs_stack.ecs_cluster, + ecs_task_definitions=self.ecs_stack.ecs_task_definitions, + backend_api=self.lambda_api_stack.backend_api_name, queue_name=sqs_stack.queue.queue_name, **kwargs, ) + self.opensearch_args = { + "envname": envname, + "resource_prefix": resource_prefix, + "vpc": vpc, + "vpc_endpoints_sg": vpc_endpoints_sg, + "lambdas": [ + self.lambda_api_stack.aws_handler, + self.lambda_api_stack.api_handler, + self.lambda_api_stack.elasticsearch_proxy_handler, + ], + "ecs_security_groups": self.ecs_stack.ecs_security_groups, + "ecs_task_role": self.ecs_stack.ecs_task_role, + "prod_sizing": prod_sizing, + **kwargs, + } + if enable_opensearch_serverless: + self.create_opensearch_serverless_stack() + 
else: + self.create_opensearch_stack() + if enable_cw_rum: CloudWatchRumStack( self, @@ -293,12 +310,10 @@ def __init__( envname=envname, resource_prefix=resource_prefix, tooling_account_id=tooling_account_id, - cw_alarm_action=monitoring_stack.cw_alarm_action, + cw_alarm_action=self.monitoring_stack.cw_alarm_action, cognito_identity_pool_id=cognito_stack.identity_pool.ref, cognito_identity_pool_role_arn=cognito_stack.identity_pool_role.role_arn, - custom_domain_name=custom_domain.get('hosted_zone_name') - if custom_domain - else None, + custom_domain_name=custom_domain.get('hosted_zone_name') if custom_domain else None, ) if enable_cw_canaries: @@ -309,6 +324,21 @@ def __init__( resource_prefix=resource_prefix, vpc=vpc, logs_bucket=s3_resources_stack.logs_bucket, - cw_alarm_action=monitoring_stack.cw_alarm_action, + cw_alarm_action=self.monitoring_stack.cw_alarm_action, internet_facing=internet_facing, ) + + def create_opensearch_stack(self): + os_stack = OpenSearchStack(self, 'OpenSearch', **self.opensearch_args) + self.monitoring_stack.set_es_alarms( + alarm_name=f'{self.resource_prefix}-{self.envname}-opensearch-alarm', + domain_name=os_stack.domain_name, + ) + + def create_opensearch_serverless_stack(self): + aoss_stack = OpenSearchServerlessStack(self, 'OpenSearchServerless', **self.opensearch_args) + self.monitoring_stack.set_aoss_alarms( + alarm_name=f'{self.resource_prefix}-{self.envname}-opensearch-serverless-alarm', + collection_id=aoss_stack.collection_id, + collection_name=aoss_stack.collection_name, + ) diff --git a/deploy/stacks/backend_stage.py b/deploy/stacks/backend_stage.py index 72ef3da37..86a21ba86 100644 --- a/deploy/stacks/backend_stage.py +++ b/deploy/stacks/backend_stage.py @@ -27,6 +27,8 @@ def __init__( enable_cw_canaries=False, enable_cw_rum=False, shared_dashboard_sessions='anonymous', + enable_opensearch_serverless=False, + enable_pivot_role_auto_create=False, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -51,6 +53,8 @@ def 
__init__( enable_cw_canaries=enable_cw_canaries, enable_cw_rum=enable_cw_rum, shared_dashboard_sessions=shared_dashboard_sessions, + enable_opensearch_serverless=enable_opensearch_serverless, + enable_pivot_role_auto_create=enable_pivot_role_auto_create, **kwargs, ) @@ -61,9 +65,7 @@ def __init__( NagSuppressions.add_stack_suppressions( backend_stack, suppressions=[ - NagPackSuppression( - id=rule_suppressed['id'], reason=rule_suppressed['reason'] - ) + NagPackSuppression(id=rule_suppressed['id'], reason=rule_suppressed['reason']) for rule_suppressed in BACKEND_STACK_CDK_NAG_EXCLUSIONS ], apply_to_nested_stacks=True, diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index dfc667c25..a667c6898 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -25,6 +25,8 @@ def __init__( ecr_repository=None, image_tag=None, prod_sizing=False, + pivot_role_name=None, + tooling_account_id=None, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -44,15 +46,16 @@ def __init__( container_insights=True, ) - task_role = self.create_task_role(envname, resource_prefix) + self.task_role = self.create_task_role(envname, resource_prefix, pivot_role_name) + self.cicd_stacks_updater_role = self.create_cicd_stacks_updater_role(envname, resource_prefix, tooling_account_id) cdkproxy_task_definition = ecs.FargateTaskDefinition( self, f'{resource_prefix}-{envname}-cdkproxy', cpu=1024, memory_limit_mib=2048, - task_role=task_role, - execution_role=task_role, + task_role=self.task_role, + execution_role=self.task_role, family=f'{resource_prefix}-{envname}-cdkproxy', ) @@ -94,7 +97,7 @@ def __init__( envname, resource_prefix, vpc, vpc_endpoints_sg ) - sync_tables_task = self.set_scheduled_task( + sync_tables_task, sync_tables_task_def = self.set_scheduled_task( cluster=cluster, command=['python3.8', '-m', 'dataall.tasks.tables_syncer'], container_id=f'container', @@ -111,14 +114,14 @@ def __init__( schedule_expression=Schedule.expression('rate(15 
minutes)'), scheduled_task_id=f'{resource_prefix}-{envname}-tables-syncer-schedule', task_id=f'{resource_prefix}-{envname}-tables-syncer', - task_role=task_role, + task_role=self.task_role, vpc=vpc, security_group=scheduled_tasks_sg, prod_sizing=prod_sizing, ) self.ecs_security_groups.extend(sync_tables_task.task.security_groups) - catalog_indexer_task = self.set_scheduled_task( + catalog_indexer_task, catalog_indexer_task_def = self.set_scheduled_task( cluster=cluster, command=['python3.8', '-m', 'dataall.tasks.catalog_indexer'], container_id=f'container', @@ -135,14 +138,14 @@ def __init__( schedule_expression=Schedule.expression('rate(6 hours)'), scheduled_task_id=f'{resource_prefix}-{envname}-catalog-indexer-schedule', task_id=f'{resource_prefix}-{envname}-catalog-indexer', - task_role=task_role, + task_role=self.task_role, vpc=vpc, security_group=scheduled_tasks_sg, prod_sizing=prod_sizing, ) self.ecs_security_groups.extend(catalog_indexer_task.task.security_groups) - stacks_updater = self.set_scheduled_task( + stacks_updater, stacks_updater_task_def = self.set_scheduled_task( cluster=cluster, command=['python3.8', '-m', 'dataall.tasks.stacks_updater'], container_id=f'container', @@ -159,14 +162,21 @@ def __init__( schedule_expression=Schedule.expression('cron(0 1 * * ? 
*)'), scheduled_task_id=f'{resource_prefix}-{envname}-stacks-updater-schedule', task_id=f'{resource_prefix}-{envname}-stacks-updater', - task_role=task_role, + task_role=self.task_role, vpc=vpc, security_group=scheduled_tasks_sg, prod_sizing=prod_sizing, ) self.ecs_security_groups.extend(stacks_updater.task.security_groups) - update_bucket_policies_task = self.set_scheduled_task( + ssm.StringParameter( + self, + f'StacksUpdaterTaskDefParam{envname}', + parameter_name=f'/dataall/{envname}/ecs/task_def_arn/stacks_updater', + string_value=stacks_updater_task_def.task_definition_arn, + ) + + update_bucket_policies_task, update_bucket_task_def = self.set_scheduled_task( cluster=cluster, command=['python3.8', '-m', 'dataall.tasks.bucket_policy_updater'], container_id=f'container', @@ -183,7 +193,7 @@ def __init__( schedule_expression=Schedule.expression('rate(15 minutes)'), scheduled_task_id=f'{resource_prefix}-{envname}-policies-updater-schedule', task_id=f'{resource_prefix}-{envname}-policies-updater', - task_role=task_role, + task_role=self.task_role, vpc=vpc, security_group=scheduled_tasks_sg, prod_sizing=prod_sizing, @@ -192,7 +202,7 @@ def __init__( update_bucket_policies_task.task.security_groups ) - subscriptions_task = self.set_scheduled_task( + subscriptions_task, subscription_task_def = self.set_scheduled_task( cluster=cluster, command=[ 'python3.8', @@ -213,7 +223,7 @@ def __init__( schedule_expression=Schedule.expression('rate(15 minutes)'), scheduled_task_id=f'{resource_prefix}-{envname}-subscriptions-schedule', task_id=f'{resource_prefix}-{envname}-subscriptions', - task_role=task_role, + task_role=self.task_role, vpc=vpc, security_group=scheduled_tasks_sg, prod_sizing=prod_sizing, @@ -225,8 +235,8 @@ def __init__( f'{resource_prefix}-{envname}-share-manager', cpu=1024, memory_limit_mib=2048, - task_role=task_role, - execution_role=task_role, + task_role=self.task_role, + execution_role=self.task_role, family=f'{resource_prefix}-{envname}-share-manager', ) 
@@ -286,9 +296,7 @@ def __init__( self, f'SecurityGroup{envname}', parameter_name=f'/dataall/{envname}/ecs/security_groups', - string_value=','.join( - [s.security_group_id for s in sync_tables_task.task.security_groups] - ), + string_value=','.join([s.security_group_id for s in sync_tables_task.task.security_groups]), ) self.ecs_cluster = cluster @@ -301,7 +309,44 @@ def __init__( subscriptions_task.task_definition, ] - def create_task_role(self, envname, resource_prefix): + def create_cicd_stacks_updater_role(self, envname, resource_prefix, tooling_account_id): + cicd_stacks_updater_role = iam.Role( + self, + id=f"StackUpdaterCBRole{envname}", + role_name=f"{resource_prefix}-{envname}-cb-stackupdater-role", + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal("codebuild.amazonaws.com"), + iam.AccountPrincipal(tooling_account_id), + ), + ) + cicd_stacks_updater_role.add_to_policy( + iam.PolicyStatement( + actions=[ + "secretsmanager:GetSecretValue", + "kms:Decrypt", + "secretsmanager:DescribeSecret", + "kms:Encrypt", + "kms:GenerateDataKey", + "ssm:GetParametersByPath", + "ssm:GetParameters", + "ssm:GetParameter", + "iam:PassRole", + "ecs:RunTask" + ], + resources=[ + f"arn:aws:secretsmanager:{self.region}:{self.account}:secret:*{resource_prefix}*", + f"arn:aws:secretsmanager:{self.region}:{self.account}:secret:*dataall*", + f"arn:aws:kms:{self.region}:{self.account}:key/*", + f"arn:aws:ssm:*:{self.account}:parameter/*dataall*", + f"arn:aws:ssm:*:{self.account}:parameter/*{resource_prefix}*", + f"arn:aws:ecs:*:{self.account}:task-definition/{resource_prefix}-{envname}-*", + f"arn:aws:iam::{self.account}:role/{resource_prefix}-{envname}-ecs-tasks-role", + ], + ) + ) + return cicd_stacks_updater_role + + def create_task_role(self, envname, resource_prefix, pivot_role_name): role_inline_policy = iam.Policy( self, f'ECSRolePolicy{envname}', @@ -343,7 +388,7 @@ def create_task_role(self, envname, resource_prefix): 'sts:AssumeRole', ], resources=[ - 
f"arn:aws:iam::*:role/{self.node.try_get_context('pivot_role_name') or 'dataallPivotRole'}", + f'arn:aws:iam::*:role/{pivot_role_name}', f'arn:aws:iam::*:role/cdk*', 'arn:aws:iam::*:role/ddk*', f'arn:aws:iam::{self.account}:role/{resource_prefix}-{envname}-ecs-tasks-role', @@ -387,9 +432,7 @@ def create_task_role(self, envname, resource_prefix): self, f'ECSTaskRole{envname}', role_name=f'{resource_prefix}-{envname}-ecs-tasks-role', - inline_policies={ - f'ECSRoleInlinePolicy{envname}': role_inline_policy.document - }, + inline_policies={f'ECSRoleInlinePolicy{envname}': role_inline_policy.document}, assumed_by=iam.ServicePrincipal('ecs-tasks.amazonaws.com'), ) task_role.grant_pass_role(task_role) @@ -511,4 +554,8 @@ def set_scheduled_task( rule_name=scheduled_task_id # security_groups=[security_group], ) - return scheduled_task + return scheduled_task, task + + @property + def ecs_task_role(self) -> iam.Role: + return self.task_role diff --git a/deploy/stacks/lambda_api.py b/deploy/stacks/lambda_api.py index 300397446..19b42e754 100644 --- a/deploy/stacks/lambda_api.py +++ b/deploy/stacks/lambda_api.py @@ -27,6 +27,7 @@ from .pyNestedStack import pyNestedClass + class LambdaApiStack(pyNestedClass): def __init__( self, @@ -44,6 +45,7 @@ def __init__( apig_vpce=None, prod_sizing=False, user_pool=None, + pivot_role_name=None, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -59,7 +61,7 @@ def __init__( 'ElasticSearchProxyHandler', function_name=f'{resource_prefix}-{envname}-esproxy', description='dataall es search function', - role=self.create_function_role(envname, resource_prefix, 'esproxy'), + role=self.create_function_role(envname, resource_prefix, 'esproxy', pivot_role_name), code=_lambda.DockerImageCode.from_ecr( repository=ecr_repository, tag=image_tag, cmd=['search_handler.handler'] ), @@ -79,7 +81,7 @@ def __init__( 'LambdaGraphQL', function_name=f'{resource_prefix}-{envname}-graphql', description='dataall graphql function', - 
role=self.create_function_role(envname, resource_prefix, 'graphql'), + role=self.create_function_role(envname, resource_prefix, 'graphql', pivot_role_name), code=_lambda.DockerImageCode.from_ecr( repository=ecr_repository, tag=image_tag, cmd=['api_handler.handler'] ), @@ -93,15 +95,13 @@ def __init__( tracing=_lambda.Tracing.ACTIVE, ) - self.aws_handler_dlq = self.set_dlq( - f'{resource_prefix}-{envname}-awsworker-dlq' - ) + self.aws_handler_dlq = self.set_dlq(f'{resource_prefix}-{envname}-awsworker-dlq') self.aws_handler = _lambda.DockerImageFunction( self, 'AWSWorker', function_name=f'{resource_prefix}-{envname}-awsworker', description='dataall aws worker for aws asynchronous tasks function', - role=self.create_function_role(envname, resource_prefix, 'awsworker'), + role=self.create_function_role(envname, resource_prefix, 'awsworker', pivot_role_name), code=_lambda.DockerImageCode.from_ecr( repository=ecr_repository, tag=image_tag, cmd=['aws_handler.handler'] ), @@ -142,7 +142,7 @@ def __init__( topic_name=f'{resource_prefix}-{envname}-backend-topic', ) - def create_function_role(self, envname, resource_prefix, fn_name): + def create_function_role(self, envname, resource_prefix, fn_name, pivot_role_name): role_name = f'{resource_prefix}-{envname}-{fn_name}-role' @@ -182,7 +182,8 @@ def create_function_role(self, envname, resource_prefix, fn_name): 'sts:AssumeRole', ], resources=[ - f"arn:aws:iam::*:role/{self.node.try_get_context('pivot_role_name') or 'dataallPivotRole'}" + f'arn:aws:iam::*:role/{pivot_role_name}', + 'arn:aws:iam::*:role/cdk-hnb659fds-lookup-role-*' ], ), iam.PolicyStatement( @@ -200,9 +201,7 @@ def create_function_role(self, envname, resource_prefix, fn_name): actions=[ 'iam:PassRole', ], - resources=[ - f'arn:aws:iam::{self.account}:role/{resource_prefix}-{envname}*' - ], + resources=[f'arn:aws:iam::{self.account}:role/{resource_prefix}-{envname}*'], ), iam.PolicyStatement( actions=[ @@ -240,7 +239,7 @@ def create_function_role(self, envname, 
resource_prefix, fn_name): 'xray:GetSamplingRules', 'xray:GetSamplingTargets', 'xray:GetSamplingStatisticSummaries', - 'cognito-idp:ListGroups' + 'cognito-idp:ListGroups', ], resources=['*'], ), @@ -250,9 +249,7 @@ def create_function_role(self, envname, resource_prefix, fn_name): self, role_name, role_name=role_name, - inline_policies={ - f'{resource_prefix}-{envname}-{fn_name}-inline': role_inline_policy.document - }, + inline_policies={f'{resource_prefix}-{envname}-{fn_name}-inline': role_inline_policy.document}, assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'), ) return role @@ -293,16 +290,16 @@ def create_api_gateway( ) # Create IP set if IP filtering enabled in CDK.json - ip_set_regional=None - if custom_waf_rules and custom_waf_rules.get("allowed_ip_list"): + ip_set_regional = None + if custom_waf_rules and custom_waf_rules.get('allowed_ip_list'): ip_set_regional = wafv2.CfnIPSet( self, - "DataallRegionalIPSet", - name=f"{resource_prefix}-{envname}-ipset-regional", - description=f"IP addresses allowed for Dataall {envname}", - addresses=custom_waf_rules.get("allowed_ip_list"), - ip_address_version="IPV4", - scope="REGIONAL" + 'DataallRegionalIPSet', + name=f'{resource_prefix}-{envname}-ipset-regional', + description=f'IP addresses allowed for Dataall {envname}', + addresses=custom_waf_rules.get('allowed_ip_list'), + ip_address_version='IPV4', + scope='REGIONAL', ) acl = wafv2.CfnWebACL( @@ -315,7 +312,7 @@ def create_api_gateway( metric_name='waf-apigw', sampled_requests_enabled=True, ), - rules=self.get_waf_rules(envname,custom_waf_rules,ip_set_regional), + rules=self.get_waf_rules(envname, custom_waf_rules, ip_set_regional), ) wafv2.CfnWebACLAssociation( @@ -360,13 +357,11 @@ def set_up_graphql_api_gateway( ) if not internet_facing: if apig_vpce: - api_vpc_endpoint = ( - InterfaceVpcEndpoint.from_interface_vpc_endpoint_attributes( - self, - f'APIVpcEndpoint{envname}', - vpc_endpoint_id=apig_vpce, - port=443, - ) + api_vpc_endpoint = 
InterfaceVpcEndpoint.from_interface_vpc_endpoint_attributes( + self, + f'APIVpcEndpoint{envname}', + vpc_endpoint_id=apig_vpce, + port=443, ) else: api_vpc_endpoint = InterfaceVpcEndpoint( @@ -388,9 +383,7 @@ def set_up_graphql_api_gateway( actions=['execute-api:Invoke'], resources=['execute-api:/*'], effect=iam.Effect.DENY, - conditions={ - 'StringNotEquals': {'aws:SourceVpce': api_vpc_endpoint_id} - }, + conditions={'StringNotEquals': {'aws:SourceVpce': api_vpc_endpoint_id}}, ), iam.PolicyStatement( principals=[iam.AnyPrincipal()], @@ -540,9 +533,7 @@ def set_up_graphql_api_gateway( self, f'{resource_prefix}-{envname}-apigatewaylogs-role', assumed_by=iam.ServicePrincipal('apigateway.amazonaws.com'), - inline_policies={ - f'{resource_prefix}-{envname}-apigateway-policy': iam_policy - }, + inline_policies={f'{resource_prefix}-{envname}-apigateway-policy': iam_policy}, ) stage: apigw.CfnStage = gw.deployment_stage.node.default_child stage.access_log_setting = apigw.CfnStage.AccessLogSettingProperty( @@ -596,11 +587,11 @@ def get_api_resource_policy(vpc, ip_ranges): return api_policy @staticmethod - def get_waf_rules(envname,custom_waf_rules=None,ip_set_regional=None): + def get_waf_rules(envname, custom_waf_rules=None, ip_set_regional=None): waf_rules = [] priority = 0 if custom_waf_rules: - if custom_waf_rules.get("allowed_geo_list"): + if custom_waf_rules.get('allowed_geo_list'): waf_rules.append( wafv2.CfnWebACL.RuleProperty( name='GeoMatch', @@ -608,7 +599,7 @@ def get_waf_rules(envname,custom_waf_rules=None,ip_set_regional=None): not_statement=wafv2.CfnWebACL.NotStatementProperty( statement=wafv2.CfnWebACL.StatementProperty( geo_match_statement=wafv2.CfnWebACL.GeoMatchStatementProperty( - country_codes=custom_waf_rules.get("allowed_geo_list") + country_codes=custom_waf_rules.get('allowed_geo_list') ) ) ) @@ -623,16 +614,14 @@ def get_waf_rules(envname,custom_waf_rules=None,ip_set_regional=None): ) ) priority += 1 - if custom_waf_rules.get("allowed_ip_list"): + 
if custom_waf_rules.get('allowed_ip_list'): waf_rules.append( wafv2.CfnWebACL.RuleProperty( name='IPMatch', statement=wafv2.CfnWebACL.StatementProperty( not_statement=wafv2.CfnWebACL.NotStatementProperty( statement=wafv2.CfnWebACL.StatementProperty( - ip_set_reference_statement={ - "arn" : ip_set_regional.attr_arn - } + ip_set_reference_statement={'arn': ip_set_regional.attr_arn} ) ) ), @@ -758,9 +747,7 @@ def get_waf_rules(envname,custom_waf_rules=None,ip_set_regional=None): wafv2.CfnWebACL.RuleProperty( name='APIGatewayRateLimit', statement=wafv2.CfnWebACL.StatementProperty( - rate_based_statement=wafv2.CfnWebACL.RateBasedStatementProperty( - aggregate_key_type='IP', limit=1000 - ) + rate_based_statement=wafv2.CfnWebACL.RateBasedStatementProperty(aggregate_key_type='IP', limit=1000) ), action=wafv2.CfnWebACL.RuleActionProperty(block={}), visibility_config=wafv2.CfnWebACL.VisibilityConfigProperty( @@ -774,9 +761,7 @@ def get_waf_rules(envname,custom_waf_rules=None,ip_set_regional=None): priority += 1 return waf_rules - def create_sns_topic( - self, construct_id, envname, lambda_function, param_name, topic_name=None - ): + def create_sns_topic(self, construct_id, envname, lambda_function, param_name, topic_name=None): key = kms.Key( self, topic_name, diff --git a/deploy/stacks/monitoring.py b/deploy/stacks/monitoring.py index adc3629d3..f6cd4f9f7 100644 --- a/deploy/stacks/monitoring.py +++ b/deploy/stacks/monitoring.py @@ -29,7 +29,6 @@ def __init__( ecs_cluster: ecs.Cluster = None, ecs_task_definitions: [ecs.FargateTaskDefinition] = None, backend_api=None, - opensearch_domain: str = None, queue_name: str = None, **kwargs, ): @@ -43,7 +42,6 @@ def __init__( backend_api, lambdas, database, - opensearch_domain, queue_name, envname, resource_prefix, @@ -107,7 +105,6 @@ def create_cw_alarms( backend_api, lambdas, database, - openseach_domain, queue_name, envname, resource_prefix, @@ -117,29 +114,21 @@ def create_cw_alarms( self.set_function_alarms( f'Alarm{index}', 
lambda_function, - self.cw_alarm_action, resource_prefix, ) self.set_waf_alarms( f'{resource_prefix}-{envname}-WafApiGatewayRateLimitBreached', Fn.import_value(f'{resource_prefix}-{envname}-api-webacl'), - self.cw_alarm_action, ) self.set_api_alarms( - f'{resource_prefix}-{envname}-api-alarm', backend_api, self.cw_alarm_action + f'{resource_prefix}-{envname}-api-alarm', backend_api ) self.set_aurora_alarms( - f'{resource_prefix}-{envname}-aurora-alarm', database, self.cw_alarm_action - ) - self.set_es_alarms( - f'{resource_prefix}-{envname}-opensearch-alarm', - openseach_domain, - self.cw_alarm_action, + f'{resource_prefix}-{envname}-aurora-alarm', database ) self.set_sqs_alarms( f'{resource_prefix}-{envname}-sqs-alarm', queue_name, - self.cw_alarm_action, ) def create_cw_dashboard( @@ -231,7 +220,7 @@ def create_cw_dashboard( ) def set_function_alarms( - self, alarm_name, lambda_function, cw_alarm_action, resource_prefix + self, alarm_name, lambda_function, resource_prefix ): error_metric = cw.Metric( namespace=resource_prefix, @@ -259,8 +248,8 @@ def set_function_alarms( comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - error_metric_alarm.add_alarm_action(cw_alarm_action) - error_metric_alarm.add_ok_action(cw_alarm_action) + error_metric_alarm.add_alarm_action(self.cw_alarm_action) + error_metric_alarm.add_ok_action(self.cw_alarm_action) lambda_error = cw.Alarm( self, @@ -272,8 +261,8 @@ def set_function_alarms( comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - lambda_error.add_alarm_action(cw_alarm_action) - lambda_error.add_ok_action(cw_alarm_action) + lambda_error.add_alarm_action(self.cw_alarm_action) + lambda_error.add_ok_action(self.cw_alarm_action) lambda_throttles = cw.Alarm( self, f'{alarm_name}-throttles', @@ -284,17 +273,17 @@ def set_function_alarms( 
comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - lambda_throttles.add_alarm_action(cw_alarm_action) - lambda_throttles.add_ok_action(cw_alarm_action) + lambda_throttles.add_alarm_action(self.cw_alarm_action) + lambda_throttles.add_ok_action(self.cw_alarm_action) - def set_api_alarms(self, alarm_name, api_name, cw_alarm_action): + def set_api_alarms(self, alarm_name, api_name): api_count = cw.Metric( namespace='AWS/ApiGateway', metric_name='Count', dimensions_map={'ApiName': api_name}, ) self._set_alarm( - f'{alarm_name}-max-calls', api_count, cw_alarm_action, threshold=100 + f'{alarm_name}-max-calls', api_count, threshold=100 ) api_5xx_errors = cw.Metric( namespace='AWS/ApiGateway', @@ -302,7 +291,7 @@ def set_api_alarms(self, alarm_name, api_name, cw_alarm_action): dimensions_map={'ApiName': api_name}, ) self._set_alarm( - f'{alarm_name}-5XXErrors', api_5xx_errors, cw_alarm_action, threshold=1 + f'{alarm_name}-5XXErrors', api_5xx_errors, threshold=1 ) api_4xx_errors = cw.Metric( namespace='AWS/ApiGateway', @@ -310,10 +299,10 @@ def set_api_alarms(self, alarm_name, api_name, cw_alarm_action): dimensions_map={'ApiName': api_name}, ) self._set_alarm( - f'{alarm_name}-4XXErrors', api_4xx_errors, cw_alarm_action, threshold=1 + f'{alarm_name}-4XXErrors', api_4xx_errors, threshold=1 ) - def set_aurora_alarms(self, alarm_name, db_identifier, cw_alarm_action): + def set_aurora_alarms(self, alarm_name, db_identifier): cpu_alarm = cw.Metric( namespace='AWS/RDS', metric_name='CPUUtilization', @@ -322,13 +311,13 @@ def set_aurora_alarms(self, alarm_name, db_identifier, cw_alarm_action): period=Duration.minutes(1), ) self._set_alarm( - f'{alarm_name}-CPUUtilization80', cpu_alarm, cw_alarm_action, threshold=80 + f'{alarm_name}-CPUUtilization80', cpu_alarm, threshold=80 ) self._set_alarm( - f'{alarm_name}-CPUUtilization90', cpu_alarm, cw_alarm_action, threshold=90 + 
f'{alarm_name}-CPUUtilization90', cpu_alarm, threshold=90 ) - def _set_alarm(self, alarm_name, api_count, cw_alarm_action, threshold=1): + def _set_alarm(self, alarm_name, api_count, threshold=1): api_error = cw.Alarm( self, alarm_name, @@ -339,10 +328,10 @@ def _set_alarm(self, alarm_name, api_count, cw_alarm_action, threshold=1): comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - api_error.add_alarm_action(cw_alarm_action) - api_error.add_ok_action(cw_alarm_action) + api_error.add_alarm_action(self.cw_alarm_action) + api_error.add_ok_action(self.cw_alarm_action) - def set_waf_alarms(self, alarm_name, web_acl_id, cw_alarm_action): + def set_waf_alarms(self, alarm_name, web_acl_id): waf_metric = cw.Metric( metric_name='BlockedRequests', namespace='AWS/WAFV2', @@ -364,64 +353,95 @@ def set_waf_alarms(self, alarm_name, web_acl_id, cw_alarm_action): comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - waf_alarm.add_alarm_action(cw_alarm_action) - waf_alarm.add_ok_action(cw_alarm_action) + waf_alarm.add_alarm_action(self.cw_alarm_action) + waf_alarm.add_ok_action(self.cw_alarm_action) - def set_es_alarms(self, alarm_name, domain_name, cw_alarm_action): + def set_es_alarms(self, alarm_name, domain_name): self._set_es_alarm( - domain_name, - f'{alarm_name}-cluster-red', - 'ClusterStatus.red', - 1, - cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, - 1, - 1, - 'max', - cw_alarm_action, + domain_name=domain_name, + alarm_name=f'{alarm_name}-cluster-red', + metric_name='ClusterStatus.red', + threshold=1, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=1, + evaluation_periods=1, + statistic='max', ) self._set_es_alarm( - domain_name, - f'{alarm_name}-cluster-yellow', - 'ClusterStatus.yellow', - 1, - cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, - 1, 
- 1, - 'max', - cw_alarm_action, + domain_name=domain_name, + alarm_name=f'{alarm_name}-cluster-yellow', + metric_name='ClusterStatus.yellow', + threshold=1, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=1, + evaluation_periods=1, + statistic='max', ) self._set_es_alarm( - domain_name, - f'{alarm_name}-cluster-IndexWritesBlocked', - 'ClusterIndexWritesBlocked', - 1, - cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, - 5, - 1, - 'max', - cw_alarm_action, + domain_name=domain_name, + alarm_name=f'{alarm_name}-cluster-IndexWritesBlocked', + metric_name='ClusterIndexWritesBlocked', + threshold=1, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=5, + evaluation_periods=1, + statistic='max', ) self._set_es_alarm( - domain_name, - f'{alarm_name}-cluster-CPUUtilization', - 'CPUUtilization', - 80, - cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, - 15, - 3, - 'avg', - cw_alarm_action, + domain_name=domain_name, + alarm_name=f'{alarm_name}-cluster-CPUUtilization', + metric_name='CPUUtilization', + threshold=80, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=15, + evaluation_periods=3, + statistic='avg', ) self._set_es_alarm( - domain_name, - f'{alarm_name}-cluster-JVMMemoryPressure', - 'JVMMemoryPressure', - 80, - cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, - 5, - 3, - 'max', - cw_alarm_action, + domain_name=domain_name, + alarm_name=f'{alarm_name}-cluster-JVMMemoryPressure', + metric_name='JVMMemoryPressure', + threshold=80, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=5, + evaluation_periods=3, + statistic='max', + ) + + def set_aoss_alarms(self, alarm_name, collection_id, collection_name): + self._set_aoss_alarm( + collection_id=collection_id, + collection_name=collection_name, + alarm_name=f'{alarm_name}-collection-ActiveCollection', + metric_name='ActiveCollection', + 
threshold=1, + comparison_operator=cw.ComparisonOperator.LESS_THAN_THRESHOLD, + period=1, + evaluation_periods=1, + statistic='max', + treat_missing_data=cw.TreatMissingData.BREACHING, + ) + self._set_aoss_alarm( + collection_id=collection_id, + collection_name=collection_name, + alarm_name=f'{alarm_name}-collection-IngestionRequestErrors', + metric_name='IngestionRequestErrors', + threshold=1, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=5, + evaluation_periods=1, + statistic='max', + ) + self._set_aoss_alarm( + collection_id=collection_id, + collection_name=collection_name, + alarm_name=f'{alarm_name}-collection-SearchRequestErrors', + metric_name='SearchRequestErrors', + threshold=1, + comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, + period=5, + evaluation_periods=1, + statistic='max', ) def _set_es_alarm( @@ -434,7 +454,6 @@ def _set_es_alarm( period, evaluation_periods, statistic, - cw_alarm_action, ) -> None: cw_alarm = cw.Alarm( self, @@ -452,10 +471,46 @@ def _set_es_alarm( evaluation_periods=evaluation_periods, treat_missing_data=cw.TreatMissingData.MISSING, ) - cw_alarm.add_alarm_action(cw_alarm_action) - cw_alarm.add_ok_action(cw_alarm_action) + cw_alarm.add_alarm_action(self.cw_alarm_action) + cw_alarm.add_ok_action(self.cw_alarm_action) + + def _set_aoss_alarm( + self, + collection_id, + collection_name, + alarm_name, + metric_name, + threshold, + comparison_operator, + period, + evaluation_periods, + statistic, + treat_missing_data=cw.TreatMissingData.MISSING, + ) -> None: + cw_alarm = cw.Alarm( + self, + alarm_name, + alarm_name=alarm_name, + metric=cw.Metric( + metric_name=metric_name, + namespace='AWS/AOSS', + dimensions_map={ + 'CollectionId': collection_id, + 'CollectionName': collection_name, + 'ClientId': self.account, + }, + period=Duration.minutes(period), + statistic=statistic, + ), + threshold=threshold, + comparison_operator=comparison_operator, + 
evaluation_periods=evaluation_periods, + treat_missing_data=treat_missing_data, + ) + cw_alarm.add_alarm_action(self.cw_alarm_action) + cw_alarm.add_ok_action(self.cw_alarm_action) - def set_sqs_alarms(self, alarm_name, queue_name, cw_alarm_action): + def set_sqs_alarms(self, alarm_name, queue_name): max_messages = cw.Metric( namespace='AWS/SQS', metric_name='NumberOfMessagesSent', @@ -473,5 +528,5 @@ def set_sqs_alarms(self, alarm_name, queue_name, cw_alarm_action): comparison_operator=cw.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD, treat_missing_data=cw.TreatMissingData.NOT_BREACHING, ) - queue_nb_msg_alarm.add_alarm_action(cw_alarm_action) - queue_nb_msg_alarm.add_ok_action(cw_alarm_action) + queue_nb_msg_alarm.add_alarm_action(self.cw_alarm_action) + queue_nb_msg_alarm.add_ok_action(self.cw_alarm_action) diff --git a/deploy/stacks/opensearch.py b/deploy/stacks/opensearch.py index 4868f5e8d..51ebbbaf2 100644 --- a/deploy/stacks/opensearch.py +++ b/deploy/stacks/opensearch.py @@ -25,7 +25,7 @@ def __init__( prod_sizing=False, **kwargs, ): - super().__init__(scope, id, **kwargs) + super().__init__(scope, id) db_security_group = ec2.SecurityGroup( self, @@ -135,3 +135,11 @@ def __init__( parameter_name=f'/dataall/{envname}/elasticsearch/security_group_id', string_value=db_security_group.security_group_id, ) + + @property + def domain_name(self) -> str: + return self.domain.domain_name + + @property + def domain_endpoint(self) -> str: + return self.domain.domain_endpoint diff --git a/deploy/stacks/opensearch_serverless.py b/deploy/stacks/opensearch_serverless.py new file mode 100644 index 000000000..e623cf462 --- /dev/null +++ b/deploy/stacks/opensearch_serverless.py @@ -0,0 +1,198 @@ +import json +from typing import Any, Dict, List, Optional +from aws_cdk import ( + aws_ec2 as ec2, + aws_iam as iam, + aws_lambda as _lambda, + aws_ssm as ssm, + aws_opensearchserverless as opensearchserverless, + aws_kms, + RemovalPolicy, +) + +from .pyNestedStack import 
pyNestedClass + + +class OpenSearchServerlessStack(pyNestedClass): + def __init__( + self, + scope, + id, + envname='dev', + resource_prefix='dataall', + vpc: ec2.Vpc = None, + vpc_endpoints_sg: ec2.SecurityGroup = None, + lambdas: Optional[List[_lambda.Function]] = None, + ecs_task_role: Optional[iam.Role] = None, + prod_sizing=False, + **kwargs, + ): + super().__init__(scope, id) + + self.cfn_collection = opensearchserverless.CfnCollection( + self, + f'OpenSearchCollection{envname}', + name=f'{resource_prefix}-{envname}-collection', + type="SEARCH", + ) + + key = aws_kms.Key( + self, + f'AOSSKMSKey', + removal_policy=RemovalPolicy.DESTROY + if not prod_sizing + else RemovalPolicy.RETAIN, + alias=f'{resource_prefix}-{envname}-opensearch-serverless', + enable_key_rotation=True, + ) + + cfn_encryption_policy = opensearchserverless.CfnSecurityPolicy( + self, + f'OpenSearchCollectionEncryptionPolicy{envname}', + name=f'{resource_prefix}-{envname}-encryption-policy', + type='encryption', + policy=self._get_encryption_policy( + collection_name=self.cfn_collection.name, + kms_key_arn=key.key_arn, + ), + ) + + cfn_vpc_endpoint = opensearchserverless.CfnVpcEndpoint( + self, + f'OpenSearchCollectionVpcEndpoint{envname}', + name=f'{resource_prefix}-{envname}-vpc-endpoint', + vpc_id=vpc.vpc_id, + security_group_ids=[vpc_endpoints_sg.security_group_id], + subnet_ids=[subnet.subnet_id for subnet in vpc.private_subnets], + ) if vpc and vpc_endpoints_sg else None + + cfn_network_policy = opensearchserverless.CfnSecurityPolicy( + self, + f'OpenSearchCollectionNetworkPolicy{envname}', + name=f'{resource_prefix}-{envname}-network-policy', + type='network', + policy=self._get_network_policy( + collection_name=self.cfn_collection.name, + vpc_endpoints=[cfn_vpc_endpoint.attr_id] if vpc else None, + ), + ) + + self.cfn_collection.add_depends_on(cfn_encryption_policy) + self.cfn_collection.add_depends_on(cfn_network_policy) + + principal_arns: List[str] = [fn.role.role_arn for fn in 
lambdas] if lambdas else [] + if ecs_task_role: + principal_arns.append(ecs_task_role.role_arn) + + opensearchserverless.CfnAccessPolicy( + self, + f'OpenSearchCollectionAccessPolicy{envname}', + name=f'{resource_prefix}-{envname}-access-policy', + type='data', + policy=self._get_access_policy( + collection_name=self.cfn_collection.name, + principal_arns=principal_arns, + ), + ) + + ssm.StringParameter( + self, + 'ElasticSearchEndpointParameter', + parameter_name=f'/dataall/{envname}/elasticsearch/endpoint', + string_value=f'{self.cfn_collection.attr_id}.{self.region}.aoss.amazonaws.com', + ) + + ssm.StringParameter( + self, + 'ElasticSearchDomainParameter', + parameter_name=f'/dataall/{envname}/elasticsearch/domain', + string_value=self.cfn_collection.name, + ) + + ssm.StringParameter( + self, + 'ElasticSearchServiceParameter', + parameter_name=f'/dataall/{envname}/elasticsearch/service', + string_value='aoss', + ) + + @property + def collection_id(self) -> str: + return self.cfn_collection.attr_id + + @property + def collection_name(self) -> str: + return self.cfn_collection.name + + @staticmethod + def _get_encryption_policy(collection_name: str, kms_key_arn: Optional[str] = None) -> str: + policy: Dict[str, Any] = { + "Rules": [ + { + "ResourceType": "collection", + "Resource": [ + f"collection/{collection_name}", + ] + } + ], + } + if kms_key_arn: + policy["KmsARN"] = kms_key_arn + else: + policy["AWSOwnedKey"] = True + return json.dumps(policy) + + @staticmethod + def _get_network_policy(collection_name: str, vpc_endpoints: Optional[List[str]] = None) -> str: + policy: List[Dict[str, Any]] = [ + { + "Rules": [ + { + "ResourceType": "dashboard", + "Resource": [ + f"collection/{collection_name}", + ], + }, + { + "ResourceType": "collection", + "Resource": [ + f"collection/{collection_name}", + ], + }, + ], + } + ] + if vpc_endpoints: + policy[0]["SourceVPCEs"] = vpc_endpoints + else: + policy[0]["AllowFromPublic"] = True + return json.dumps(policy) + + 
@staticmethod + def _get_access_policy(collection_name: str, principal_arns: List[str]) -> str: + policy = [ + { + "Rules": [ + { + "ResourceType": "index", + "Resource": [ + f"index/{collection_name}/*", + ], + "Permission": [ + "aoss:*", + ], + }, + { + "ResourceType": "collection", + "Resource": [ + f"collection/{collection_name}", + ], + "Permission": [ + "aoss:*", + ], + }, + ], + "Principal": principal_arns + } + ] + return json.dumps(policy) diff --git a/deploy/stacks/param_store_stack.py b/deploy/stacks/param_store_stack.py index 560451b6b..2419d1f66 100644 --- a/deploy/stacks/param_store_stack.py +++ b/deploy/stacks/param_store_stack.py @@ -16,6 +16,7 @@ def __init__( enable_cw_canaries=False, quicksight_enabled=False, shared_dashboard_sessions='anonymous', + enable_pivot_role_auto_create=False, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -79,4 +80,11 @@ def __init__( f'dataallQuicksightConfiguration{envname}', parameter_name=f"/dataall/{envname}/quicksight/sharedDashboardsSessions", string_value=shared_dashboard_sessions, + ) + + aws_ssm.StringParameter( + self, + f'dataallCreationPivotRole{envname}', + parameter_name=f"/dataall/{envname}/pivotRole/enablePivotRoleAutoCreate", + string_value=str(enable_pivot_role_auto_create), ) \ No newline at end of file diff --git a/deploy/stacks/pipeline.py b/deploy/stacks/pipeline.py index d00a7db13..ea5b2128a 100644 --- a/deploy/stacks/pipeline.py +++ b/deploy/stacks/pipeline.py @@ -46,6 +46,7 @@ def __init__( cidr='10.0.0.0/16', resource_prefix=resource_prefix, vpc_id=self.node.try_get_context('tooling_vpc_id'), + restricted_nacl=self.node.try_get_context('tooling_vpc_restricted_nacl'), **kwargs, ) self.vpc = self.vpc_stack.vpc @@ -182,21 +183,19 @@ def __init__( ) for policy in self.codebuild_policy: self.pipeline_iam_role.add_to_policy(policy) - - if self.source == "github": + + if self.source == 'github': source = CodePipelineSource.git_hub( - repo_string="awslabs/aws-dataall", + 
repo_string='awslabs/aws-dataall', branch=self.git_branch, - authentication=SecretValue.secrets_manager(secret_id="github-access-token-secret") + authentication=SecretValue.secrets_manager(secret_id='github-access-token-secret'), ) - + else: source = CodePipelineSource.code_commit( - repository=codecommit.Repository.from_repository_name( - self, 'sourcerepo', repository_name='dataall' - ), - branch=self.git_branch, - ) + repository=codecommit.Repository.from_repository_name(self, 'sourcerepo', repository_name='dataall'), + branch=self.git_branch, + ) self.pipeline = pipelines.CodePipeline( self, @@ -207,11 +206,9 @@ def __init__( 'Synth', input=source, build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ - 'n 16.15.1', - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel libffi-devel postgresql-devel', f'aws codeartifact login --tool npm --repository {self.codeartifact.npm_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', 'npm install -g aws-cdk', f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', @@ -287,6 +284,11 @@ def __init__( target_env, ) + if target_env.get('enable_update_dataall_stacks_in_cicd_pipeline', False): + self.set_stacks_updater_stage( + target_env + ) + if target_env.get('internet_facing', True): self.set_cloudfront_stage( target_env, @@ -357,10 +359,9 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='ValidateDBMigrations', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel 
libffi-devel postgresql-devel', f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', f'export envname={self.git_branch}', f'export schema_name=validation', @@ -376,13 +377,12 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='SecurityChecks', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel libffi-devel postgresql-devel', f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', 'pip install --upgrade pip', - "python -m venv env", + 'python -m venv env', '. env/bin/activate', 'make check-security', ], @@ -392,14 +392,16 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='Lint', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ + f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', 'pip install --upgrade pip', 'python -m venv env', '. 
env/bin/activate', 'make lint', 'cd frontend', + f'aws codeartifact login --tool npm --repository {self.codeartifact.npm_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', 'npm install', 'npm run lint', ], @@ -411,7 +413,7 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='IntegrationTests', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), partial_build_spec=codebuild.BuildSpec.from_object( dict( @@ -419,9 +421,7 @@ def set_quality_gate_stage(self): phases={ 'build': { 'commands': [ - 'n 16.15.1', 'set -eu', - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel libffi-devel postgresql-devel', f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', f'export envname={self.git_branch}', 'python -m venv env', @@ -447,7 +447,7 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='UploadCodeToS3', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ 'mkdir -p source_build', @@ -478,7 +478,7 @@ def set_quality_gate_stage(self): pipelines.CodeBuildStep( id='UploadCodeToS3', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ 'mkdir -p source_build', @@ -492,7 +492,6 @@ def set_quality_gate_stage(self): ), ) - def set_ecr_stage( self, target_env, @@ -516,19 +515,16 @@ def set_ecr_stage( pipelines.CodeBuildStep( id='LambdaImage', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + 
build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, privileged=True, environment_variables={ 'REPOSITORY_URI': codebuild.BuildEnvironmentVariable( value=f"{target_env['account']}.dkr.ecr.{target_env['region']}.amazonaws.com/{self.resource_prefix}-{target_env['envname']}-repository" ), - 'IMAGE_TAG': codebuild.BuildEnvironmentVariable( - value=f'lambdas-{self.image_tag}' - ), + 'IMAGE_TAG': codebuild.BuildEnvironmentVariable(value=f'lambdas-{self.image_tag}'), }, ), commands=[ - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel libffi-devel postgresql-devel', f"make deploy-image type=lambda image-tag=$IMAGE_TAG account={target_env['account']} region={target_env['region']} repo={repository_name}", ], role_policy_statements=self.codebuild_policy, @@ -537,19 +533,16 @@ def set_ecr_stage( pipelines.CodeBuildStep( id='ECSImage', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, privileged=True, environment_variables={ 'REPOSITORY_URI': codebuild.BuildEnvironmentVariable( value=f"{target_env['account']}.dkr.ecr.{target_env['region']}.amazonaws.com/{repository_name}" ), - 'IMAGE_TAG': codebuild.BuildEnvironmentVariable( - value=f'cdkproxy-{self.image_tag}' - ), + 'IMAGE_TAG': codebuild.BuildEnvironmentVariable(value=f'cdkproxy-{self.image_tag}'), }, ), commands=[ - 'yum -y install shadow-utils wget && yum -y install openssl-devel bzip2-devel libffi-devel postgresql-devel', f"make deploy-image type=ecs image-tag=$IMAGE_TAG account={target_env['account']} region={target_env['region']} repo={repository_name}", ], role_policy_statements=self.codebuild_policy, @@ -584,6 +577,8 @@ def set_backend_stage(self, target_env, repository_name): enable_cw_rum=target_env.get('enable_cw_rum', False), enable_cw_canaries=target_env.get('enable_cw_canaries', False), shared_dashboard_sessions=target_env.get('shared_dashboard_sessions', 'anonymous'), + 
enable_opensearch_serverless=target_env.get('enable_opensearch_serverless', False), + enable_pivot_role_auto_create=target_env.get('enable_pivot_role_auto_create', False), ) ) return backend_stage @@ -599,7 +594,7 @@ def set_db_migration_stage( pipelines.CodeBuildStep( id='MigrateDB', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ 'mkdir ~/.aws/ && touch ~/.aws/config', @@ -618,6 +613,38 @@ def set_db_migration_stage( ), ) + def set_stacks_updater_stage( + self, + target_env, + ): + wave = self.pipeline.add_wave( + f"{self.resource_prefix}-{target_env['envname']}-stacks-updater-stage" + ) + wave.add_post( + pipelines.CodeBuildStep( + id='StacksUpdater', + build_environment=codebuild.BuildEnvironment( + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, + ), + commands=[ + 'mkdir ~/.aws/ && touch ~/.aws/config', + 'echo "[profile buildprofile]" > ~/.aws/config', + f'echo "role_arn = arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-cb-stackupdater-role" >> ~/.aws/config', + 'echo "credential_source = EcsContainer" >> ~/.aws/config', + 'aws sts get-caller-identity --profile buildprofile', + f"export cluster_name=$(aws ssm get-parameter --name /dataall/{target_env['envname']}/ecs/cluster/name --profile buildprofile --output text --query 'Parameter.Value')", + f"export private_subnets=$(aws ssm get-parameter --name /dataall/{target_env['envname']}/ecs/private_subnets --profile buildprofile --output text --query 'Parameter.Value')", + f"export security_groups=$(aws ssm get-parameter --name /dataall/{target_env['envname']}/ecs/security_groups --profile buildprofile --output text --query 'Parameter.Value')", + f"export task_definition=$(aws ssm get-parameter --name /dataall/{target_env['envname']}/ecs/task_def_arn/stacks_updater --profile buildprofile --output text --query 'Parameter.Value')", + 
'network_config="awsvpcConfiguration={subnets=[$private_subnets],securityGroups=[$security_groups],assignPublicIp=DISABLED}"', + f'cluster_arn="arn:aws:ecs:{target_env["region"]}:{target_env["account"]}:cluster/$cluster_name"', + f'aws --profile buildprofile ecs run-task --task-definition $task_definition --cluster "$cluster_arn" --launch-type "FARGATE" --network-configuration "$network_config" --launch-type FARGATE --propagate-tags TASK_DEFINITION', + ], + role_policy_statements=self.codebuild_policy, + vpc=self.vpc, + ), + ) + def set_cloudfront_stage(self, target_env): cloudfront_stage = self.pipeline.add_stage( CloudfrontStage( @@ -637,10 +664,9 @@ def set_cloudfront_stage(self, target_env): pipelines.CodeBuildStep( id='DeployFrontEnd', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, compute_type=codebuild.ComputeType.LARGE, ), - install_commands=["n 14.18.3"], commands=[ f'export REACT_APP_STAGE={target_env["envname"]}', f'export envname={target_env["envname"]}', @@ -688,7 +714,7 @@ def set_cloudfront_stage(self, target_env): pipelines.CodeBuildStep( id='UpdateDocumentation', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ f'aws codeartifact login --tool pip --repository {self.codeartifact.pip_repo.attr_name} --domain {self.codeartifact.domain.attr_name} --domain-owner {self.codeartifact.domain.attr_owner}', @@ -713,7 +739,7 @@ def cw_rum_config_action(self, target_env): return pipelines.CodeBuildStep( id='ConfigureRUM', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ f'export envname={target_env["envname"]}', @@ -739,7 +765,7 @@ def cognito_config_action(self, target_env): return 
pipelines.CodeBuildStep( id='ConfigureCognito', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), commands=[ f'export envname={target_env["envname"]}', @@ -781,16 +807,14 @@ def set_albfront_stage(self, target_env, repository_name): pipelines.CodeBuildStep( id='FrontendImage', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, compute_type=codebuild.ComputeType.LARGE, privileged=True, environment_variables={ 'REPOSITORY_URI': codebuild.BuildEnvironmentVariable( value=f'{self.account}.dkr.ecr.{self.region}.amazonaws.com/{repository_name}' ), - 'IMAGE_TAG': codebuild.BuildEnvironmentVariable( - value=f'frontend-{self.image_tag}' - ), + 'IMAGE_TAG': codebuild.BuildEnvironmentVariable(value=f'frontend-{self.image_tag}'), }, ), commands=[ @@ -824,16 +848,14 @@ def set_albfront_stage(self, target_env, repository_name): pipelines.CodeBuildStep( id='UserGuideImage', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, compute_type=codebuild.ComputeType.LARGE, privileged=True, environment_variables={ 'REPOSITORY_URI': codebuild.BuildEnvironmentVariable( value=f'{self.account}.dkr.ecr.{self.region}.amazonaws.com/{repository_name}' ), - 'IMAGE_TAG': codebuild.BuildEnvironmentVariable( - value=f'userguide-{self.image_tag}' - ), + 'IMAGE_TAG': codebuild.BuildEnvironmentVariable(value=f'userguide-{self.image_tag}'), }, ), commands=[ @@ -901,7 +923,7 @@ def set_release_stage( pipelines.CodeBuildStep( id='GitRelease', build_environment=codebuild.BuildEnvironment( - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_4, ), partial_build_spec=codebuild.BuildSpec.from_object( dict( diff --git 
a/deploy/stacks/secrets_stack.py b/deploy/stacks/secrets_stack.py index b2c90378a..8d71129e1 100644 --- a/deploy/stacks/secrets_stack.py +++ b/deploy/stacks/secrets_stack.py @@ -18,6 +18,7 @@ def __init__( envname='dev', resource_prefix='dataall', enable_cw_canaries=False, + pivot_role_name=None, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -51,7 +52,7 @@ def __init__( self, f'PivotRoleNameSecret{envname}', name=f'dataall-pivot-role-name-{envname}', - secret_string='dataallPivotRole', + secret_string=pivot_role_name, kms_key_id=self.pivot_role_name_key.key_id, description=f'Stores dataall pivot role name for environment {envname}', ) @@ -70,9 +71,7 @@ def __init__( f'canary-user', secret_name=f'{resource_prefix}-{envname}-cognito-canary-user', generate_secret_string=sm.SecretStringGenerator( - secret_string_template=json.dumps( - {'username': f'cwcanary-{self.account}'} - ), + secret_string_template=json.dumps({'username': f'cwcanary-{self.account}'}), generate_string_key='password', include_space=False, password_length=12, diff --git a/deploy/stacks/vpc.py b/deploy/stacks/vpc.py index 8c7f16429..42f04b0fb 100644 --- a/deploy/stacks/vpc.py +++ b/deploy/stacks/vpc.py @@ -22,6 +22,7 @@ def __init__( vpc_endpoints_sg=None, cidr=None, resource_prefix=None, + restricted_nacl=False, **kwargs, ): super().__init__(scope, id, **kwargs) @@ -29,7 +30,7 @@ def __init__( if vpc_id: self.vpc = ec2.Vpc.from_lookup(self, f'vpc', vpc_id=vpc_id) else: - self.create_new_vpc(cidr, envname, resource_prefix) + self.create_new_vpc(cidr, envname, resource_prefix, restricted_nacl) if vpc_endpoints_sg: self.vpce_security_group = ec2.SecurityGroup.from_security_group_id( @@ -107,7 +108,7 @@ def __init__( description=f'{resource_prefix}-{envname}-cidrBlock', ) - def create_new_vpc(self, cidr, envname, resource_prefix): + def create_new_vpc(self, cidr, envname, resource_prefix, restricted_nacl): self.vpc = ec2.Vpc( self, 'VPC', @@ -125,6 +126,55 @@ def create_new_vpc(self, cidr, 
envname, resource_prefix): ], nat_gateways=1, ) + if restricted_nacl: + nacl = ec2.NetworkAcl( + self, "RestrictedNACL", + vpc=self.vpc, + network_acl_name=f'{resource_prefix}-{envname}-restrictedNACL', + subnet_selection=ec2.SubnetSelection(subnets=self.vpc.private_subnets + self.vpc.public_subnets), + ) + nacl.add_entry( + "entryOutbound", + cidr=ec2.AclCidr.any_ipv4(), + traffic=ec2.AclTraffic.all_traffic(), + rule_number=100, + direction=ec2.TrafficDirection.EGRESS, + rule_action=ec2.Action.ALLOW + ) + nacl.add_entry( + "entryInboundHTTPS", + cidr=ec2.AclCidr.any_ipv4(), + traffic=ec2.AclTraffic.tcp_port(443), + rule_number=100, + direction=ec2.TrafficDirection.INGRESS, + rule_action=ec2.Action.ALLOW + ) + nacl.add_entry( + "entryInboundHTTP", + cidr=ec2.AclCidr.any_ipv4(), + traffic=ec2.AclTraffic.tcp_port(80), + rule_number=101, + direction=ec2.TrafficDirection.INGRESS, + rule_action=ec2.Action.ALLOW + ) + nacl.add_entry( + "entryInboundCustomTCP", + cidr=ec2.AclCidr.any_ipv4(), + traffic=ec2.AclTraffic.tcp_port_range(start_port=1024, end_port=65535), + rule_number=102, + direction=ec2.TrafficDirection.INGRESS, + rule_action=ec2.Action.ALLOW + ) + nacl.add_entry( + "entryInboundAllInVPC", + cidr=ec2.AclCidr.ipv4(self.vpc.vpc_cidr_block), + traffic=ec2.AclTraffic.all_traffic(), + rule_number=103, + direction=ec2.TrafficDirection.INGRESS, + rule_action=ec2.Action.ALLOW + ) + + flowlog_log_group = logs.LogGroup( self, f'{resource_prefix}/{envname}/flowlogs', @@ -209,7 +259,6 @@ def _create_vpc_endpoints(self) -> None: private_dns_enabled=True, security_groups=[cast(ec2.ISecurityGroup, self.vpce_security_group)], ) - self.vpc.add_interface_endpoint( id='code_artifact_repo_endpoint', service=cast( @@ -217,7 +266,7 @@ def _create_vpc_endpoints(self) -> None: ec2.InterfaceVpcEndpointAwsService('codeartifact.repositories'), ), subnets=ec2.SubnetSelection(subnets=self.vpc.private_subnets), - private_dns_enabled=False, + private_dns_enabled=True, 
security_groups=[cast(ec2.ISecurityGroup, self.vpce_security_group)], ) self.vpc.add_interface_endpoint( @@ -227,6 +276,6 @@ def _create_vpc_endpoints(self) -> None: ec2.InterfaceVpcEndpointAwsService('codeartifact.api'), ), subnets=ec2.SubnetSelection(subnets=self.vpc.private_subnets), - private_dns_enabled=False, + private_dns_enabled=True, security_groups=[cast(ec2.ISecurityGroup, self.vpce_security_group)], ) diff --git a/documentation/userguide/docs/environments.md b/documentation/userguide/docs/environments.md index 9aabe13b6..233d90ef3 100644 --- a/documentation/userguide/docs/environments.md +++ b/documentation/userguide/docs/environments.md @@ -9,34 +9,29 @@ users store data and work with data.** To ensure correct data access and AWS resources isolation, onboard one environment in each AWS account. Despite being possible, **we strongly discourage users to use the same AWS account for multiple environments**. -## :material-hammer-screwdriver: **Bootstrap your AWS account** -*data.all*does not create AWS accounts. You need to provide an AWS account and complete the following bootstraping -steps on that AWS account in each region you want to use. +## :material-hammer-screwdriver: **AWS account Pre-requisites** +*data.all* does not create AWS accounts. You need to provide an AWS account and complete the following bootstraping +steps. -### 1. Create AWS IAM role -*data.all* assumes a IAM role named **PivotRole** to be able to call AWS SDK APIs on your account. You can download -the AWS CloudFormation stack from *data.all* environment creation form. (Navigate to an -organization and click on link an environment to see this form) - - -### 2. Setup AWS CDK +### 1. CDK Bootstrap *data.all* uses AWS CDK to deploy and manage resources on your AWS account. AWS CDK requires some resources to exist on the AWS account, and provides a command called `bootstrap` to deploy these -specific resources. +specific resources in a particular AWS region. 
-Moreover, we need to trust data.all infrastructure account. +In this step we establish a trust relationship between the data.all infrastructure account and the accounts to be linked as environments. data.all codebase and CI/CD resources are in the data.all **tooling account**, -while all the resources used by the platform -are located in a **infrastructure account**. From this last one we will deploy environments and other resources -inside each of our business accounts (the ones to be boostraped). +and all the application resources used by the platform +are located in an **infrastructure account**. From the infrastructure account we will deploy environments and other resources +inside each of our business accounts. We are granting permissions to the infrastructure account +by setting the `--trust` parameter in the cdk bootstrap command. - -To boostrap the AWS account using AWS CDK, you need : +To bootstrap the AWS account using AWS CDK, you need the following (which are already fulfilled if you open AWS CloudShell from the environment account). 1. to have AWS credentials configured in ~/.aws/credentials or as environment variables. -2. to install cdk : `npm install -g aws-cdk` -3. to run the following command : +2. to install cdk: `npm install -g aws-cdk` + +Then, you can copy/paste the following command from the UI and run from your local machine or CloudShell: ````bash cdk bootstrap --trust DATA.ALL_AWS_ACCOUNT_NUMBER -c @aws-cdk/core:newStyleStackSynthesis=true --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess aws://YOUR_ENVIRONMENT_AWS_ACCOUNT_NUMBER/ENVIRONMENT_REGION ```` @@ -51,8 +46,31 @@ cdk bootstrap --trust DATA.ALL_AWS_ACCOUNT_NUMBER -c @aws-cdk/core:newStyleStac ```` +### 2. (For manual) Pivot role +*data.all* assumes a certain IAM role to be able to call AWS SDK APIs on your account. 
+The Pivot Role is a super role in the environment account and thus, it is +protected to be assumed only by the data.all central account using an external Id. -### 3. Enable AWS Lake Formation +Since release V1.5.0, the Pivot Role can be created as part of the environment CDK stack, given that the trust between data.all and the environment account +is already explicitly granted in the bootstrapping of the account. To enable the creation of Pivot Roles as part +of the environment stack, the `cdk.json` parameter `enable_pivot_role_auto_create` needs to be set to `true`. +When an environment is linked to data.all a nested stack creates a role called **dataallPivotRole-cdk**. + +For versions prior to V1.5.0 or if `enable_pivot_role_auto_create` is `false` the Pivot Role needs to be created manually. +In this case, the AWS CloudFormation stack of the role can be downloaded from *data.all* environment creation form. +(Navigate to an organization and click on link an environment to see this form). Fill the CloudFormation stack with the parameters +available in data.all UI to create the role named **dataallPivotRole**. + +!!! note "Upgrading from manual to cdk-created Pivot Role" + If you have existing environments that were linked to data.all using a manually created Pivot Role you can + still benefit from V1.5.0 `enable_pivot_role_auto_create` feature. You just need to update that parameter in + the `cdk.json` configuration of your deployment. Once the CICD pipeline has completed: new linked environments + will contain the nested cdk-pivotRole stack (no actions needed) and existing environments can be updated by: a) manually, + by clicking on "update stack" in the environment>stack tab b) automatically, wait for the `stack-updater` ECS task that + runs daily overnight c) automatically, set the added `enable_update_dataall_stacks_in_cicd_pipeline` parameter to `true` in + the `cdk.json` config file. 
The `stack-updater` ECS task will be triggered from the CICD pipeline + +### 3. (For new accounts) AWS Lake Formation Service role *data.all* relies on AWS Lake Formation to manage access to your structured data. If AWS Lake Formation has never been activated on your AWS account, you need to create @@ -67,7 +85,7 @@ aws iam create-service-linked-role --aws-service-name lakeformation.amazonaws.co role name AWSServiceRoleForLakeFormationDataAccess has been taken in this account, please try a different suffix. You can skip this step, as this indicates the Lake formation service-linked role exists. -### 4. Amazon Quicksight +### 4. (For Dashboards) Subscribe to Amazon Quicksight This is an optional step. To link environments with Dashboards enabled , you will also need a running Amazon QuickSight subscription on the bootstraped account. If you have not subscribed to Quicksight before, go to your AWS account and choose the @@ -87,6 +105,27 @@ to enable Dashboard Embedding on *data.all* UI. ![quicksight_domain](pictures/environments/boot_qs_3.png#zoom#shadow) +### 5. (For ML Studio) Delete or adapt the default VPC +If ML Studio is enabled, data.all checks if there is an existing SageMaker Studio domain. If there is an existing domain +it will use it to create ML Studio profiles. If no pre-existing domain is found, data.all will create a new one. + +Prior to V1.5.0 data.all always used the default VPC to create a new SageMaker domain. The default VPC had then to be +customized to fulfill the networking requirements specified in the SageMaker +[documentation](https://docs.aws.amazon.com/sagemaker/latest/dg/studio-notebooks-and-internet-access.html) for VPCOnly +domains. + +In V1.5.0 we introduce the creation of a suitable VPC for SageMaker as part of the environment stack. However, it is not possible to edit the VPC used by a SageMaker domain; it requires deletion and re-creation. 
To allow backwards +compatibility and not delete the pre-existing domains, in V1.5.0 the default behavior is still to use the default VPC. + +Data.all will create a SageMaker VPC: +- For new environments: (link environment) + - if there is not a pre-existing SageMaker Studio domain + - if there is not a default VPC in the account +- For pre-existing environments: (update environment) + - if all ML Studio profiles have been deleted (from CloudFormation as well) + - if there is not a pre-existing SageMaker Studio domain + - if the default VPC has been deleted in the account + ## :material-new-box: **Link an environment** ### Necessary permissions !!! note "Environment permissions" diff --git a/documentation/userguide/docs/mlstudio.md b/documentation/userguide/docs/mlstudio.md index 1f295051d..cef153088 100644 --- a/documentation/userguide/docs/mlstudio.md +++ b/documentation/userguide/docs/mlstudio.md @@ -1,18 +1,19 @@ # **ML Studio** -With ML Studio Notebooks we can add users to our SageMaker domain and open Amazon SageMaker Studio +With ML Studio Profiles we can add users to our SageMaker domain and open Amazon SageMaker Studio. +The SageMaker Studio domain is created as part of the environment stack. -## :material-new-box: **Create a ML Notebook** -To create a new Notebook, go to ML Studio on the left side pane and click on Create. Then fill in the creation form +## :material-new-box: **Create an ML Studio profile** +To create a new ML Studio profile, go to ML Studio on the left side pane and click on Create. Then fill in the creation form with its corresponding information. 
-![notebooks](pictures/mlstudio/ml_studio.png#zoom#shadow) +![notebooks](pictures/mlstudio/ml_studio.png) | Field | Description | Required | Editable |Example |-------------------------------|---------------------------------------------|----------|----------|------------- | Sagemaker Studio profile name | Name of the user to add to SageMaker domain | Yes | No |johndoe -| Short description | Short description about the notebook | No | No |Notebook for Cannes exploration +| Short description | Short description about the user profile | No | No |Notebook for Cannes exploration | Tags | Tags | No | No |deleteme | Environment | Environment (and mapped AWS account) | Yes | No |Data Science | Region (auto-filled) | AWS region | Yes | No |Europe (Ireland) @@ -22,19 +23,19 @@ with its corresponding information. ## :material-cloud-check-outline: **Check CloudFormation stack** -In the **Stack** tab of the ML Studio Notebook, is where we check the AWS resources provisioned by data.all as well as its status. +In the **Stack** tab of the ML Studio Profile, is where we check the AWS resources provisioned by data.all as well as its status. As part of the CloudFormation stack deployed using CDK, data.all will deploy some CDK metadata and a SageMaker User Profile. -## :material-trash-can-outline: **Delete a Notebook** +## :material-trash-can-outline: **Delete an ML Studio user** -To delete a Notebook, simply select it and click on the **Delete** button in the top right corner. It is possible to -keep the CloudFormation stack associated with the Notebook by selecting this option in the confirmation +To delete a SageMaker user, simply select it and click on the **Delete** button in the top right corner. It is possible to +keep the CloudFormation stack associated with the User by selecting this option in the confirmation delete window that appears after clicking on delete. 
-![notebooks](pictures/mlstudio/ml_studio_3.png#zoom#shadow) +![notebooks](pictures/mlstudio/ml_studio_3.png) ## :material-file-code-outline: **Open Amazon SageMaker Studio** Click on the **Open ML Studio** button of the ML Studio notebook window to open Amazon SageMaker Studio. -![notebooks](pictures/mlstudio/ml_studio_2.png#zoom#shadow) +![notebooks](pictures/mlstudio/ml_studio_2.png) diff --git a/frontend/src/views/Environments/EnvironmentCreateForm.js b/frontend/src/views/Environments/EnvironmentCreateForm.js index 77bc5ee04..e9ec62afb 100644 --- a/frontend/src/views/Environments/EnvironmentCreateForm.js +++ b/frontend/src/views/Environments/EnvironmentCreateForm.js @@ -264,7 +264,7 @@ const EnvironmentCreateForm = (props) => { - 1. Bootstrap your AWS account with AWS CDK + Bootstrap your AWS account with AWS CDK { {`cdk bootstrap --trust ${trustedAccount} -c @aws-cdk/core:newStyleStackSynthesis=true --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess aws://ACCOUNT_ID/REGION`} - - - - 2. Create an IAM role named {pivotRoleName} using AWS - CloudFormation stack below - - - - - - - - - - + {process.env.REACT_APP_ENABLE_PIVOT_ROLE_AUTO_CREATE == 'True' ? ( + + + As part of the environment CloudFormation stack data.all will create an IAM role (Pivot Role) to manage AWS operations in the environment AWS Account. + + + ): ( + + + + Create an IAM role named {pivotRoleName} using the AWS + CloudFormation stack below + + + + + + + + + + + )} - 3. Manage your environment features + Make sure that the services needed for the selected environment features are available in your AWS Account. 
diff --git a/template_cdk.json b/template_cdk.json index c87fafafc..75a36cfbd 100644 --- a/template_cdk.json +++ b/template_cdk.json @@ -7,6 +7,7 @@ "@aws-cdk/core:stackRelativeExports": false, "tooling_region": "string_TOOLING_REGION|DEFAULT=eu-west-1", "tooling_vpc_id": "string_IMPORT_AN_EXISTING_VPC_FROM_TOOLING|DEFAULT=None", + "tooling_vpc_restricted_nacl": "boolean_CREATE_CUSTOM_NACL|DEFAULT=false", "git_branch": "string_GIT_BRANCH_NAME|DEFAULT=dataall", "git_release": "boolean_MANAGE_GIT_RELEASE|DEFAULT=false", "quality_gate": "boolean_MANAGE_QUALITY_GATE_STAGE|DEFAULT=true", @@ -31,7 +32,11 @@ "prod_sizing": "boolean_SET_INFRA_SIZING_TO_PROD_VALUES_IF_TRUE|DEFAULT=true", "enable_cw_rum": "boolean_SET_CLOUDWATCH_RUM_APP_MONITOR|DEFAULT=false", "enable_cw_canaries": "boolean_SET_CLOUDWATCH_CANARIES_FOR_FRONTEND_TESTING|DEFAULT=false", - "shared_dashboards_sessions": "string_TYPE_SESSION_SHARED_DASHBOARDS|(reader, anonymous) DEFAULT=anonymous" + "shared_dashboards_sessions": "string_TYPE_SESSION_SHARED_DASHBOARDS|(reader, anonymous) DEFAULT=anonymous", + "enable_quicksight_monitoring": "boolean_ENABLE_CONNECTION_QUICKSIGHT_RDS|DEFAULT=false", + "enable_opensearch_serverless": "boolean_USE_OPENSEARCH_SERVERLESS|DEFAULT=false", + "enable_pivot_role_auto_create": "boolean_ENABLE_PIVOT_ROLE_AUTO_CREATE_IN_ENVIRONMENT|DEFAULT=false", + "enable_update_dataall_stacks_in_cicd_pipeline": "boolean_ENABLE_UPDATE_DATAALL_STACKS_IN_CICD_PIPELINE|DEFAULT=false" } ] } diff --git a/tests/api/conftest.py b/tests/api/conftest.py index e2541ac72..fa3be8ade 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -15,11 +15,16 @@ def patch_check_env(module_mocker): 'dataall.api.Objects.Environment.resolvers.check_environment', return_value='CDKROLENAME', ) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False + ) @pytest.fixture(scope='module', autouse=True) -def patch_check_env(module_mocker): - 
module_mocker.patch('dataall.utils.Parameter.get_parameter', return_value='unknownvalue') +def patch_check_dataset(module_mocker): + module_mocker.patch( + 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True + ) @pytest.fixture(scope='module', autouse=True) @@ -605,12 +610,18 @@ def org_fixture(org, user, group, tenant): def env_fixture(env, org_fixture, user, group, tenant, module_mocker): module_mocker.patch('requests.post', return_value=True) module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False + ) env1 = env(org_fixture, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') yield env1 @pytest.fixture(scope='module') -def dataset_fixture(env_fixture, org_fixture, dataset, group) -> dataall.db.models.Dataset: +def dataset_fixture(env_fixture, org_fixture, dataset, group, module_mocker) -> dataall.db.models.Dataset: + module_mocker.patch( + 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True + ) yield dataset( org=org_fixture, env=env_fixture, diff --git a/tests/api/test_dashboards.py b/tests/api/test_dashboards.py index 8e83e3d54..b275bb72c 100644 --- a/tests/api/test_dashboards.py +++ b/tests/api/test_dashboards.py @@ -16,6 +16,9 @@ def env1(env, org1, user, group, tenant, module_mocker): module_mocker.patch( 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True ) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False + ) env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_datapipelines.py b/tests/api/test_datapipelines.py index a017eca38..81a8b4e03 100644 --- a/tests/api/test_datapipelines.py +++ b/tests/api/test_datapipelines.py @@ -8,20 +8,12 @@ def org1(org, 
user, group, tenant): @pytest.fixture(scope='module') -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'cicd', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 @pytest.fixture(scope='module') -def env2(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env2(env, org1, user, group): env2 = env(org1, 'dev', user.userName, group.name, '222222222222', 'eu-west-1') yield env2 diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 057ff66a3..dd1d81a86 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -12,11 +12,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') yield env1 @@ -104,7 +100,7 @@ def test_list_datasets(client, dataset1, group): assert response.data.listDatasets.nodes[0].datasetUri == dataset1.datasetUri -def test_update_dataset(dataset1, client, patch_es, group, group2): +def test_update_dataset(dataset1, client, group, group2): response = client.query( """ mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index 128d21bbb..32f876aa2 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ 
-12,11 +12,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index c5bed6d1e..ece463008 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -11,11 +11,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 6c30e77ea..66986a41a 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -12,11 +12,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index e961a445c..aca5a6bf1 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -11,11 +11,7 @@ def org1(org, user, group, 
tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 157c6cd2c..8821aadd0 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -11,12 +11,8 @@ def _org(db, org, tenant, user, group) -> models.Organization: @pytest.fixture(scope='module') def _env( - db, _org: models.Organization, user, group, module_mocker, env + db, _org: models.Organization, user, group, env ) -> models.Environment: - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) env1 = env(_org, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_group.py b/tests/api/test_group.py index 7cab78314..c02e7de29 100644 --- a/tests/api/test_group.py +++ b/tests/api/test_group.py @@ -11,11 +11,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py index 8d546cb3f..16e2827a7 100644 --- a/tests/api/test_keyvaluetag.py +++ b/tests/api/test_keyvaluetag.py @@ -17,16 +17,12 @@ def org1(db, org, tenant, user, group) -> models.Organization: def env1( 
db, org1: models.Organization, user, group, module_mocker, env ) -> models.Environment: - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 @pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> models.Dataset: +def dataset1(db, env1, org1, group, user, dataset, module_mocker) -> models.Dataset: with db.scoped_session() as session: yield dataset( org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name diff --git a/tests/api/test_organization.py b/tests/api/test_organization.py index 47a78bcda..fd414af31 100644 --- a/tests/api/test_organization.py +++ b/tests/api/test_organization.py @@ -15,25 +15,19 @@ def org2(org, user2, group2, tenant): @pytest.fixture(scope='module', autouse=True) -def env_dev(env, org2, user2, group2, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) +def env_dev(env, org2, user2, group2, tenant): env2 = env(org2, 'dev', user2.userName, group2.name, '222222222222', 'eu-west-1', 'description') yield env2 @pytest.fixture(scope='module', autouse=True) -def env_other(env, org2, user2, group2, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) +def env_other(env, org2, user2, group2, tenant): env2 = env(org2, 'other', user2.userName, group2.name, '222222222222', 'eu-west-1') yield env2 @pytest.fixture(scope='module', autouse=True) -def env_prod(env, org2, user2, group2, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - 
module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) +def env_prod(env, org2, user2, group2, tenant): env2 = env(org2, 'prod', user2.userName, group2.name, '111111111111', 'eu-west-1', 'description') yield env2 @@ -188,7 +182,7 @@ def test_list_organizations_anyone(client, org1): assert response.data.listOrganizations.count == 0 -def test_group_invitation(db, client, org1, group2, user, group3, group, dataset, env, module_mocker): +def test_group_invitation(db, client, org1, group2, user, group3, group, dataset, env): response = client.query( """ mutation inviteGroupToOrganization($input:InviteGroupToOrganizationInput){ @@ -266,8 +260,6 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset assert response.data.listOrganizationGroups.count == 2 - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) env2 = env(org1, 'devg2', user.userName, group2.name, '111111111112', 'eu-west-1') assert env2.environmentUri diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index c9a8fac73..3b26fb5e6 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -13,11 +13,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 @@ -86,8 +82,7 @@ def table2(table, dataset2): @pytest.fixture(scope='module') -def cluster(env1, org1, client, module_mocker, group): - module_mocker.patch('requests.post', return_value=True) +def cluster(env1, org1, client, 
group): ouri = org1.organizationUri euri = env1.environmentUri group_name = group.name diff --git a/tests/api/test_sagemaker_notebook.py b/tests/api/test_sagemaker_notebook.py index 1861936ad..a48f51c43 100644 --- a/tests/api/test_sagemaker_notebook.py +++ b/tests/api/test_sagemaker_notebook.py @@ -11,10 +11,6 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module') def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_sagemaker_studio.py b/tests/api/test_sagemaker_studio.py index 2a165e7ff..70f903c73 100644 --- a/tests/api/test_sagemaker_studio.py +++ b/tests/api/test_sagemaker_studio.py @@ -12,10 +12,6 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module', autouse=True) def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) env1 = env(org1, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') yield env1 diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index fd05557cc..1956802a9 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -11,12 +11,8 @@ def org1(db, org, tenant, user, group) -> models.Organization: @pytest.fixture(scope='module') def env1( - db, org1: models.Organization, user, group, module_mocker, env + db, org1: models.Organization, user, group, env ) -> models.Environment: - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git 
a/tests/api/test_vpc.py b/tests/api/test_vpc.py index 028c90f60..a223b7e4e 100644 --- a/tests/api/test_vpc.py +++ b/tests/api/test_vpc.py @@ -10,11 +10,7 @@ def org1(org, user, group, tenant): @pytest.fixture(scope='module') -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) +def env1(env, org1, user, group, tenant): env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') yield env1 diff --git a/tests/cdkproxy/test_dataset_stack.py b/tests/cdkproxy/test_dataset_stack.py index 19a30d513..14caf7942 100644 --- a/tests/cdkproxy/test_dataset_stack.py +++ b/tests/cdkproxy/test_dataset_stack.py @@ -17,7 +17,7 @@ def patch_methods(mocker, db, dataset, env, org): return_value="dataall-pivot-role-name-pytest", ) mocker.patch( - 'dataall.aws.handlers.lakeformation.LakeFormation.describe_resource', + 'dataall.aws.handlers.lakeformation.LakeFormation.check_existing_lf_registered_location', return_value=False, ) mocker.patch( @@ -49,7 +49,6 @@ def test_resources_created(template): assert 'AWS::S3::Bucket' in template assert 'AWS::KMS::Key' in template assert 'AWS::IAM::Role' in template - assert 'AWS::Lambda::Function' in template assert 'AWS::IAM::Policy' in template assert 'AWS::S3::BucketPolicy' in template assert 'AWS::Glue::Job' in template diff --git a/tests/cdkproxy/test_environment_stack.py b/tests/cdkproxy/test_environment_stack.py index fbb01d83a..f5dceccdf 100644 --- a/tests/cdkproxy/test_environment_stack.py +++ b/tests/cdkproxy/test_environment_stack.py @@ -14,7 +14,11 @@ def patch_methods(mocker, db, env, another_group, permissions): ) mocker.patch( 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", + return_value='dataall-pivot-role-name-pytest', + ) + mocker.patch( + 
'dataall.aws.handlers.parameter_store.ParameterStoreManager.get_parameter_value', + return_value='False', ) mocker.patch( 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_target', @@ -25,16 +29,14 @@ def patch_methods(mocker, db, env, another_group, permissions): return_value=[another_group], ) mocker.patch( - 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.check_sagemaker_studio', + 'dataall.cdkproxy.stacks.sagemakerstudio.SageMakerDomain.check_existing_sagemaker_studio_domain', return_value=True, ) mocker.patch( 'dataall.aws.handlers.sts.SessionHelper.get_account', return_value='012345678901x', ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) + mocker.patch('dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db) mocker.patch( 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', return_value=env, @@ -43,6 +45,10 @@ def patch_methods(mocker, db, env, another_group, permissions): 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_environment_group_permissions', return_value=[permission.name for permission in permissions], ) + mocker.patch( + 'dataall.aws.handlers.sts.SessionHelper.get_external_id_secret', + return_value='*****', + ) @pytest.fixture(scope='function', autouse=True) From 50ff85a1152a66dfdc70ff9e2f546bda289c3ebb Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 15:29:45 +0200 Subject: [PATCH 087/346] Move dataset related API to the dataset module --- .../api/Objects/Environment/queries.py | 12 ---- .../api/Objects/Environment/resolvers.py | 17 ----- backend/dataall/api/Objects/Group/queries.py | 14 +--- .../dataall/api/Objects/Group/resolvers.py | 17 ----- backend/dataall/db/api/environment.py | 69 ------------------- .../modules/datasets/api/dataset/queries.py | 27 +++++++- .../modules/datasets/api/dataset/resolvers.py | 33 +++++++++ .../datasets/services/dataset_service.py | 67 ++++++++++++++++++ 8 files changed, 127 
insertions(+), 129 deletions(-) diff --git a/backend/dataall/api/Objects/Environment/queries.py b/backend/dataall/api/Objects/Environment/queries.py index 34b397748..892b3d32b 100644 --- a/backend/dataall/api/Objects/Environment/queries.py +++ b/backend/dataall/api/Objects/Environment/queries.py @@ -48,18 +48,6 @@ ) -listDatasetsCreatedInEnvironment = gql.QueryField( - name='listDatasetsCreatedInEnvironment', - type=gql.Ref('DatasetSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), - ], - resolver=list_datasets_created_in_environment, - test_scope='Dataset', -) - - searchEnvironmentDataItems = gql.QueryField( name='searchEnvironmentDataItems', args=[ diff --git a/backend/dataall/api/Objects/Environment/resolvers.py b/backend/dataall/api/Objects/Environment/resolvers.py index 86f251f59..97c9f963e 100644 --- a/backend/dataall/api/Objects/Environment/resolvers.py +++ b/backend/dataall/api/Objects/Environment/resolvers.py @@ -370,23 +370,6 @@ def list_environment_group_permissions( check_perm=True, ) - -def list_datasets_created_in_environment( - context: Context, source, environmentUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_datasets( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - def list_shared_with_environment_data_items( context: Context, source, environmentUri: str = None, filter: dict = None ): diff --git a/backend/dataall/api/Objects/Group/queries.py b/backend/dataall/api/Objects/Group/queries.py index 5cbf484ff..afa0abf17 100644 --- a/backend/dataall/api/Objects/Group/queries.py +++ b/backend/dataall/api/Objects/Group/queries.py @@ -1,5 +1,5 @@ from ... 
import gql -from .resolvers import get_group, list_datasets_owned_by_env_group, list_data_items_shared_with_env_group, list_cognito_groups +from .resolvers import get_group, list_data_items_shared_with_env_group, list_cognito_groups getGroup = gql.QueryField( name='getGroup', @@ -9,18 +9,6 @@ ) -listDatasetsOwnedByEnvGroup = gql.QueryField( - name='listDatasetsOwnedByEnvGroup', - type=gql.Ref('DatasetSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), - ], - resolver=list_datasets_owned_by_env_group, - test_scope='Dataset', -) - listDataItemsSharedWithEnvGroup = gql.QueryField( name='listDataItemsSharedWithEnvGroup', diff --git a/backend/dataall/api/Objects/Group/resolvers.py b/backend/dataall/api/Objects/Group/resolvers.py index 11de0da1b..d29c5be2c 100644 --- a/backend/dataall/api/Objects/Group/resolvers.py +++ b/backend/dataall/api/Objects/Group/resolvers.py @@ -43,23 +43,6 @@ def get_group(context, source, groupUri): return Group(groupUri=groupUri, name=groupUri, label=groupUri) -def list_datasets_owned_by_env_group( - context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_group_datasets( - session=session, - username=context.username, - groups=context.groups, - envUri=environmentUri, - groupUri=groupUri, - data=filter, - check_perm=True, - ) - - def list_data_items_shared_with_env_group( context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None ): diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index a26650368..accee95f7 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -803,75 +803,6 @@ def 
find_consumption_roles_by_IAMArn( ) ).first() - @staticmethod - def query_environment_datasets(session, username, groups, uri, filter) -> Query: - query = session.query(Dataset).filter( - and_( - Dataset.environmentUri == uri, - Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - Dataset.label.ilike('%' + term + '%'), - Dataset.description.ilike('%' + term + '%'), - Dataset.tags.contains(f'{{{term}}}'), - Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - def query_environment_group_datasets(session, username, groups, envUri, groupUri, filter) -> Query: - query = session.query(Dataset).filter( - and_( - Dataset.environmentUri == envUri, - Dataset.SamlAdminGroupName == groupUri, - Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - Dataset.label.ilike('%' + term + '%'), - Dataset.description.ilike('%' + term + '%'), - Dataset.tags.contains(f'{{{term}}}'), - Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) - def paginated_environment_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_datasets( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def paginated_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_group_datasets( - session, username, groups, envUri, groupUri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - - - @staticmethod def query_environment_networks(session, username, groups, uri, filter) -> Query: query = 
session.query(models.Vpc).filter( diff --git a/backend/dataall/modules/datasets/api/dataset/queries.py b/backend/dataall/modules/datasets/api/dataset/queries.py index d48a78e90..1d1cdb137 100644 --- a/backend/dataall/modules/datasets/api/dataset/queries.py +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -7,7 +7,9 @@ get_dataset_etl_credentials, get_dataset_summary, get_file_upload_presigned_url, - list_dataset_share_objects + list_dataset_share_objects, + list_datasets_owned_by_env_group, + list_datasets_created_in_environment, ) from dataall.modules.datasets.api.dataset.schema import DatasetSearchResult @@ -88,3 +90,26 @@ ], type=gql.Ref('ShareSearchResult'), ) + +listDatasetsOwnedByEnvGroup = gql.QueryField( + name='listDatasetsOwnedByEnvGroup', + type=gql.Ref('DatasetSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), + ], + resolver=list_datasets_owned_by_env_group, + test_scope='Dataset', +) + +listDatasetsCreatedInEnvironment = gql.QueryField( + name='listDatasetsCreatedInEnvironment', + type=gql.Ref('DatasetSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), + ], + resolver=list_datasets_created_in_environment, + test_scope='Dataset', +) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 2938cd17f..29da121fc 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -643,3 +643,36 @@ def _deploy_dataset_stack(dataset: Dataset): """ stack_helper.deploy_stack(dataset.datasetUri) stack_helper.deploy_stack(dataset.environmentUri) + + +def list_datasets_created_in_environment( + context: 
Context, source, environmentUri: str = None, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return DatasetService.paginated_environment_datasets( + session=session, + username=context.username, + groups=context.groups, + uri=environmentUri, + data=filter, + check_perm=True, + ) + + +def list_datasets_owned_by_env_group( + context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return DatasetService.paginated_environment_group_datasets( + session=session, + username=context.username, + groups=context.groups, + envUri=environmentUri, + groupUri=groupUri, + data=filter, + check_perm=True, + ) \ No newline at end of file diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index f28d1b637..4f95b4582 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -623,3 +623,70 @@ def count_dataset_tables(session, dataset_uri): .filter(DatasetTable.datasetUri == dataset_uri) .count() ) + + @staticmethod + def query_environment_group_datasets(session, username, groups, envUri, groupUri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == envUri, + Dataset.SamlAdminGroupName == groupUri, + Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + def query_environment_datasets(session, username, groups, uri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == uri, + 
Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) + def paginated_environment_datasets( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + return paginate( + query=DatasetService.query_environment_datasets( + session, username, groups, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def paginated_environment_group_datasets( + session, username, groups, envUri, groupUri, data=None, check_perm=None + ) -> dict: + return paginate( + query=DatasetService.query_environment_group_datasets( + session, username, groups, envUri, groupUri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + From c4e9079d898f34e8fb9a191c48899af76763a800 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 25 Apr 2023 16:25:46 +0200 Subject: [PATCH 088/346] Got rid of datasets in votes --- backend/dataall/api/Objects/Vote/resolvers.py | 27 +++++++++++-------- backend/dataall/modules/datasets/__init__.py | 3 +++ 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index d9f739872..2302ef3d2 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -1,7 +1,15 @@ -from .... 
import db -from ....api.context import Context +from typing import Dict, Type + +from dataall import db +from dataall.api.context import Context from dataall.searchproxy.indexers import DashboardIndexer -from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.searchproxy.upsert import BaseIndexer + +_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} + + +def add_vote_type(target_type: str, indexer: Type[BaseIndexer]): + _VOTE_TYPES[target_type] = indexer def count_upvotes( @@ -28,15 +36,9 @@ def upvote(context: Context, source, input=None): data=input, check_perm=True, ) - reindex(session, vote) - return vote - -def reindex(session, vote): - if vote.targetType == 'dataset': - DatasetIndexer.upsert(session=session, dataset_uri=vote.targetUri) - elif vote.targetType == 'dashboard': - DashboardIndexer.upsert(session=session, dashboard_uri=vote.targetUri) + _VOTE_TYPES[vote.targetType].upsert(session, vote.targetUri) + return vote def get_vote(context: Context, source, targetUri: str = None, targetType: str = None): @@ -49,3 +51,6 @@ def get_vote(context: Context, source, targetUri: str = None, targetType: str = data={'targetType': targetType}, check_perm=True, ) + + +add_vote_type("dashboard", DashboardIndexer) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 1ac36b783..5d2444c4d 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,6 +2,7 @@ import logging from typing import List +from dataall.api.Objects.Vote.resolvers import add_vote_type from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer @@ -50,6 +51,8 @@ def __init__(self): reindexer=DatasetTableIndexer )) + add_vote_type("dataset", 
DatasetIndexer) + log.info("API of datasets has been imported") From 1227ca3327e65f1c4d4223ca7ff3c6c60ede8829 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 26 Apr 2023 10:53:24 +0200 Subject: [PATCH 089/346] Extract share notification from notification API --- backend/dataall/api/Objects/Group/queries.py | 2 - backend/dataall/db/api/notification.py | 99 ----------------- backend/dataall/db/api/share_object.py | 9 +- .../datasets/indexers/dataset_indexer.py | 2 +- .../datasets/services/dataset_location.py | 1 - .../services/share_notification_service.py | 101 ++++++++++++++++++ .../datasets/tasks/subscription_service.py | 3 +- 7 files changed, 109 insertions(+), 108 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/share_notification_service.py diff --git a/backend/dataall/api/Objects/Group/queries.py b/backend/dataall/api/Objects/Group/queries.py index afa0abf17..62233b5c5 100644 --- a/backend/dataall/api/Objects/Group/queries.py +++ b/backend/dataall/api/Objects/Group/queries.py @@ -8,8 +8,6 @@ resolver=get_group, ) - - listDataItemsSharedWithEnvGroup = gql.QueryField( name='listDataItemsSharedWithEnvGroup', args=[ diff --git a/backend/dataall/db/api/notification.py b/backend/dataall/db/api/notification.py index 1447892d3..0337e6a22 100644 --- a/backend/dataall/db/api/notification.py +++ b/backend/dataall/db/api/notification.py @@ -4,111 +4,12 @@ from .. 
import models from ...db import paginate -from dataall.modules.datasets.db.models import Dataset class Notification: def __init__(self): pass - @staticmethod - def notify_share_object_submission( - session, username: str, dataset: Dataset, share: models.ShareObject - ): - notifications = [] - # stewards = Notification.get_dataset_stewards(session, dataset) - # for steward in stewards: - notifications.append( - Notification.create( - session=session, - username=dataset.owner, - notification_type=models.NotificationType.SHARE_OBJECT_SUBMITTED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} submitted share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def get_dataset_stewards(session, dataset): - stewards = list() - stewards.append(dataset.SamlAdminGroupName) - stewards.append(dataset.stewards) - return stewards - - @staticmethod - def notify_share_object_approval( - session, username: str, dataset: Dataset, share: models.ShareObject - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - notification_type=models.NotificationType.SHARE_OBJECT_APPROVED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} approved share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def notify_share_object_rejection( - session, username: str, dataset: Dataset, share: models.ShareObject - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - notification_type=models.NotificationType.SHARE_OBJECT_REJECTED, - 
target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} approved share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def notify_new_data_available_from_owners( - session, dataset: Dataset, share: models.ShareObject, s3_prefix - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - notification_type=models.NotificationType.DATASET_VERSION, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} shared by owner {dataset.owner}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def get_share_object_targeted_users(session, dataset, share): - targeted_users = Notification.get_dataset_stewards( - session=session, dataset=dataset - ) - targeted_users.append(dataset.owner) - targeted_users.append(share.owner) - return targeted_users - @staticmethod def create( session, diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index d353c7825..455ee7296 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -12,6 +12,7 @@ from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService +from ...modules.datasets.services.share_notification_service import ShareNotificationService logger = logging.getLogger(__name__) @@ -562,7 +563,7 @@ def submit_share_object( Share_SM.update_state(session, share, new_share_state) - api.Notification.notify_share_object_submission( + 
ShareNotificationService.notify_share_object_submission( session, username, dataset, share ) return share @@ -609,7 +610,7 @@ def approve_share_object( resource_type=DatasetTable.__name__, ) - api.Notification.notify_share_object_approval(session, username, dataset, share) + ShareNotificationService.notify_share_object_approval(session, username, dataset, share) return share @staticmethod @@ -642,7 +643,7 @@ def reject_share_object( group=share.groupUri, resource_uri=dataset.datasetUri, ) - api.Notification.notify_share_object_rejection(session, username, dataset, share) + ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share @staticmethod @@ -684,7 +685,7 @@ def revoke_items_share_object( group=share.groupUri, resource_uri=dataset.datasetUri, ) - api.Notification.notify_share_object_rejection(session, username, dataset, share) + ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share @staticmethod diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index ba3754ee2..1936b66be 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -2,7 +2,7 @@ from dataall import db from dataall.db import models -from dataall.modules.datasets import Dataset +from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.upsert import BaseIndexer diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index f1d8b5eaf..8a4790838 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ 
b/backend/dataall/modules/datasets/services/dataset_location.py @@ -1,5 +1,4 @@ import logging -from typing import List from sqlalchemy import and_, or_ diff --git a/backend/dataall/modules/datasets/services/share_notification_service.py b/backend/dataall/modules/datasets/services/share_notification_service.py new file mode 100644 index 000000000..896094ab3 --- /dev/null +++ b/backend/dataall/modules/datasets/services/share_notification_service.py @@ -0,0 +1,101 @@ +from dataall.db import models +from dataall.db.api import Notification +from dataall.modules.datasets.db.models import Dataset + + +class ShareNotificationService: + @staticmethod + def notify_share_object_submission( + session, username: str, dataset: Dataset, share: models.ShareObject + ): + notifications = [] + # stewards = Notification.get_dataset_stewards(session, dataset) + # for steward in stewards: + notifications.append( + Notification.create( + session=session, + username=dataset.owner, + notification_type=models.NotificationType.SHARE_OBJECT_SUBMITTED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} submitted share request for dataset {dataset.label}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def notify_share_object_approval( + session, username: str, dataset: Dataset, share: models.ShareObject + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=models.NotificationType.SHARE_OBJECT_APPROVED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} approved share request for dataset {dataset.label}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def notify_share_object_rejection( + session, username: str, dataset: Dataset, share: 
models.ShareObject + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=models.NotificationType.SHARE_OBJECT_REJECTED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} approved share request for dataset {dataset.label}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def notify_new_data_available_from_owners( + session, dataset: Dataset, share: models.ShareObject, s3_prefix + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=models.NotificationType.DATASET_VERSION, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} shared by owner {dataset.owner}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def _get_share_object_targeted_users(session, dataset, share): + targeted_users = ShareNotificationService._get_dataset_stewards(dataset) + targeted_users.append(dataset.owner) + targeted_users.append(share.owner) + return targeted_users + + @staticmethod + def _get_dataset_stewards(dataset): + stewards = list() + stewards.append(dataset.SamlAdminGroupName) + stewards.append(dataset.stewards) + return stewards diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index ec2eec459..033b27bd2 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -13,6 +13,7 @@ from dataall.db import 
get_engine from dataall.db import models from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.services.share_notification_service import ShareNotificationService from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -261,7 +262,7 @@ def publish_sns_message( log.info(f'SNS update publish response {response}') - notifications = db.api.Notification.notify_new_data_available_from_owners( + notifications = ShareNotificationService.notify_new_data_available_from_owners( session=session, dataset=dataset, share=share_object, From 09f7de952138b0b162240872f9f3a1f6d2f55169 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 26 Apr 2023 11:02:00 +0200 Subject: [PATCH 090/346] Extracted dataset alarms --- .../services/dataset_alarm_service.py | 96 +++++++++++++++++++ backend/dataall/utils/alarm_service.py | 83 ---------------- 2 files changed, 96 insertions(+), 83 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/dataset_alarm_service.py diff --git a/backend/dataall/modules/datasets/services/dataset_alarm_service.py b/backend/dataall/modules/datasets/services/dataset_alarm_service.py new file mode 100644 index 000000000..9283f2265 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_alarm_service.py @@ -0,0 +1,96 @@ +import logging +from datetime import datetime + +from dataall.db import models +from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.utils.alarm_service import AlarmService + +log = logging.getLogger(__name__) + + +class DatasetAlarmService(AlarmService): + """Contains set of alarms for datasets""" + + def trigger_table_sharing_failure_alarm( + self, + table: DatasetTable, + share: models.ShareObject, + target_environment: models.Environment, + ): + log.info('Triggering share failure alarm...') + 
subject = ( + f'ALARM: DATAALL Table {table.GlueTableName} Sharing Failure Notification' + ) + message = f""" + You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation. + + Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: Lake Formation sharing failure + - Timestamp: {datetime.now()} + + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {table.AWSAccountId} + - Region: {table.region} + - Glue Database: {table.GlueDatabaseName} + - Glue Table: {table.GlueTableName} + + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} + - Glue Database: {table.GlueDatabaseName}shared + """ + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_revoke_table_sharing_failure_alarm( + self, + table: DatasetTable, + share: models.ShareObject, + target_environment: models.Environment, + ): + log.info('Triggering share failure alarm...') + subject = f'ALARM: DATAALL Table {table.GlueTableName} Revoking LF permissions Failure Notification' + message = f""" + You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation. 
+ + Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: Lake Formation sharing failure + - Timestamp: {datetime.now()} + + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {table.AWSAccountId} + - Region: {table.region} + - Glue Database: {table.GlueDatabaseName} + - Glue Table: {table.GlueTableName} + + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} + - Glue Database: {table.GlueDatabaseName}shared + """ + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): + log.info(f'Triggering dataset {dataset.name} tables sync failure alarm...') + subject = ( + f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog. 
+ +Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: {error} + - Timestamp: {datetime.now()} + Dataset + - Dataset URI: {dataset.datasetUri} + - AWS Account: {dataset.AwsAccountId} + - Region: {dataset.region} + - Glue Database: {dataset.GlueDatabaseName} + """ + return self.publish_message_to_alarms_topic(subject, message) + + diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py index 661eb852b..29b2e214e 100644 --- a/backend/dataall/utils/alarm_service.py +++ b/backend/dataall/utils/alarm_service.py @@ -11,7 +11,6 @@ from ..aws.handlers.sts import SessionHelper from ..db import models -from dataall.modules.datasets.db.models import DatasetTable, Dataset logger = logging.getLogger(__name__) @@ -38,68 +37,6 @@ def trigger_stack_deployment_failure_alarm(self, stack: models.Stack): - Reason for State Change: Stack Deployment Failure - Timestamp: {datetime.now()} - CW Log Group: {f"/dataall/{self.envname}/cdkproxy/{stack.EcsTaskArn.split('/')[-1]}"} -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_table_sharing_failure_alarm( - self, - table: DatasetTable, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Table {table.GlueTableName} Sharing Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation. 
- -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: Lake Formation sharing failure - - Timestamp: {datetime.now()} - - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {table.AWSAccountId} - - Region: {table.region} - - Glue Database: {table.GlueDatabaseName} - - Glue Table: {table.GlueTableName} - - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} - - Glue Database: {table.GlueDatabaseName}shared -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_revoke_table_sharing_failure_alarm( - self, - table: DatasetTable, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = f'ALARM: DATAALL Table {table.GlueTableName} Revoking LF permissions Failure Notification' - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation. 
- -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: Lake Formation sharing failure - - Timestamp: {datetime.now()} - - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {table.AWSAccountId} - - Region: {table.region} - - Glue Database: {table.GlueDatabaseName} - - Glue Table: {table.GlueTableName} - - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} - - Glue Database: {table.GlueDatabaseName}shared """ return self.publish_message_to_alarms_topic(subject, message) @@ -116,26 +53,6 @@ def trigger_catalog_indexing_failure_alarm(self, error: str): """ return self.publish_message_to_alarms_topic(subject, message) - def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): - logger.info(f'Triggering dataset {dataset.name} tables sync failure alarm...') - subject = ( - f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog. 
- -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: {error} - - Timestamp: {datetime.now()} - Dataset - - Dataset URI: {dataset.datasetUri} - - AWS Account: {dataset.AwsAccountId} - - Region: {dataset.region} - - Glue Database: {dataset.GlueDatabaseName} - """ - return self.publish_message_to_alarms_topic(subject, message) - def publish_message_to_alarms_topic(self, subject, message): if self.envname in ['local', 'pytest', 'dkrcompose']: logger.debug('Running in local mode...SNS topic not available') From d446ff0664393507a6707a42dc6f2ba7fd1420f5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 26 Apr 2023 16:28:56 +0200 Subject: [PATCH 091/346] Moved bucket_policy_updater to datasets --- .../{ => modules/datasets}/tasks/bucket_policy_updater.py | 6 +++--- deploy/stacks/container.py | 3 ++- tests/tasks/test_policies.py | 8 ++++---- 3 files changed, 9 insertions(+), 8 deletions(-) rename backend/dataall/{ => modules/datasets}/tasks/bucket_policy_updater.py (99%) diff --git a/backend/dataall/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py similarity index 99% rename from backend/dataall/tasks/bucket_policy_updater.py rename to backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index 12844aae8..e04855d65 100644 --- a/backend/dataall/tasks/bucket_policy_updater.py +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -7,9 +7,9 @@ from botocore.exceptions import ClientError from sqlalchemy import and_ -from ..aws.handlers.sts import SessionHelper -from ..db import get_engine -from ..db import models +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import get_engine +from dataall.db import models from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index aa7be04df..ed5b929f8 100644 --- 
a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -153,9 +153,10 @@ def __init__( ) self.ecs_security_groups.extend(stacks_updater.task.security_groups) + # TODO introduce the ability to change the deployment depending on config.json file update_bucket_policies_task = self.set_scheduled_task( cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.bucket_policy_updater'], + command=['python3.8', '-m', 'dataall.modules.datasets.tasks.bucket_policy_updater'], container_id=f'container', ecr_repository=ecr_repository, environment=self._create_env('DEBUG'), diff --git a/tests/tasks/test_policies.py b/tests/tasks/test_policies.py index c1018b35f..e84ed2d75 100644 --- a/tests/tasks/test_policies.py +++ b/tests/tasks/test_policies.py @@ -1,6 +1,6 @@ from dataall.api.constants import OrganisationUserRole from dataall.modules.datasets.db.models import DatasetTable, Dataset -from dataall.tasks.bucket_policy_updater import BucketPoliciesUpdater +from dataall.modules.datasets.tasks.bucket_policy_updater import BucketPoliciesUpdater import pytest import dataall @@ -138,15 +138,15 @@ def test_group_prefixes_by_accountid(db, mocker): def test_handler(org, env, db, sync_dataset, mocker): mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.init_s3_client', + 'dataall.modules.datasets.tasks.bucket_policy_updater.BucketPoliciesUpdater.init_s3_client', return_value=True, ) mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.get_bucket_policy', + 'dataall.modules.datasets.tasks.bucket_policy_updater.BucketPoliciesUpdater.get_bucket_policy', return_value={'Version': '2012-10-17', 'Statement': []}, ) mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.put_bucket_policy', + 'dataall.modules.datasets.tasks.bucket_policy_updater.BucketPoliciesUpdater.put_bucket_policy', return_value={'status': 'SUCCEEDED'}, ) updater = BucketPoliciesUpdater(db) From 1db0133cd32dd30961ed37a70f90bccc1e33cd02 Mon Sep 17 
00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 10:10:51 +0200 Subject: [PATCH 092/346] Moved MANAGE_DATASETS to modules --- backend/dataall/db/permissions.py | 3 --- .../modules/datasets/services/dataset_location.py | 11 ++++++----- .../modules/datasets/services/dataset_service.py | 7 ++++--- .../modules/datasets/services/dataset_table.py | 14 +++++++------- .../modules/datasets/services/permissions.py | 6 ++++++ tests/api/test_tenant.py | 5 +++-- tests/db/test_permission.py | 5 +++-- 7 files changed, 29 insertions(+), 22 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/permissions.py diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 6a26b2033..1d0921b55 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -22,7 +22,6 @@ """ TENANT PERMISSIONS """ -MANAGE_DATASETS = 'MANAGE_DATASETS' MANAGE_REDSHIFT_CLUSTERS = 'MANAGE_REDSHIFT_CLUSTERS' MANAGE_DASHBOARDS = 'MANAGE_DASHBOARDS' MANAGE_PIPELINES = 'MANAGE_PIPELINES' @@ -274,7 +273,6 @@ """ TENANT_ALL = [ - MANAGE_DATASETS, MANAGE_REDSHIFT_CLUSTERS, MANAGE_DASHBOARDS, MANAGE_PIPELINES, @@ -288,7 +286,6 @@ TENANT_ALL_WITH_DESC = {k: k for k in TENANT_ALL} TENANT_ALL_WITH_DESC[MANAGE_DASHBOARDS] = 'Manage dashboards' -TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' TENANT_ALL_WITH_DESC[MANAGE_REDSHIFT_CLUSTERS] = 'Manage Redshift clusters' TENANT_ALL_WITH_DESC[MANAGE_GLOSSARIES] = 'Manage glossaries' TENANT_ALL_WITH_DESC[MANAGE_WORKSHEETS] = 'Manage worksheets' diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index 8a4790838..2251c2ad4 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -6,13 +6,14 @@ from dataall.db import models, api, paginate, permissions, exceptions from 
dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS logger = logging.getLogger(__name__) class DatasetLocationService: @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET_FOLDER) def create_dataset_location( session, @@ -66,7 +67,7 @@ def create_dataset_location( return location @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.LIST_DATASET_FOLDERS) def list_dataset_locations( session, @@ -91,7 +92,7 @@ def list_dataset_locations( ).to_dict() @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.LIST_DATASET_FOLDERS) def get_dataset_location( session, @@ -104,7 +105,7 @@ def get_dataset_location( return DatasetLocationService.get_location_by_uri(session, data['locationUri']) @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.UPDATE_DATASET_FOLDER) def update_dataset_location( session, @@ -134,7 +135,7 @@ def update_dataset_location( return location @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.DELETE_DATASET_FOLDER) def delete_dataset_location( session, diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 4f95b4582..bac79d5b3 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -19,6 +19,7 @@ from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import 
DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -29,7 +30,7 @@ class DatasetService: @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET) def create_dataset( session, @@ -199,7 +200,7 @@ def create_dataset_stack(session, dataset: Dataset) -> models.Stack: ) @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) def get_dataset( session, username: str, @@ -292,7 +293,7 @@ def paginated_dataset_tables( ).to_dict() @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.UPDATE_DATASET) def update_dataset( session, username, groups, uri, data=None, check_perm=None diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 32fbf40fb..b04348c02 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -4,17 +4,17 @@ from dataall.db import models, api, permissions, exceptions, paginate from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment -from dataall.db.models import Dataset +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset logger = logging.getLogger(__name__) class DatasetTableService: @staticmethod - 
@has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.CREATE_DATASET_TABLE) def create_dataset_table( session, @@ -78,7 +78,7 @@ def create_dataset_table( return table @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) def list_dataset_tables( session, username: str, @@ -100,7 +100,7 @@ def list_dataset_tables( ).to_dict() @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) def get_dataset_table( session, username: str, @@ -112,7 +112,7 @@ def get_dataset_table( return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.UPDATE_DATASET_TABLE) def update_dataset_table( session, @@ -138,7 +138,7 @@ def update_dataset_table( return table @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) + @has_tenant_perm(MANAGE_DATASETS) @has_resource_perm(permissions.DELETE_DATASET_TABLE) def delete_dataset_table( session, diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py new file mode 100644 index 000000000..602343a2e --- /dev/null +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -0,0 +1,6 @@ +from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC + +MANAGE_DATASETS = 'MANAGE_DATASETS' + +TENANT_ALL.append(MANAGE_DATASETS) +TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' diff --git a/tests/api/test_tenant.py b/tests/api/test_tenant.py index a41eab9bd..8554c8de9 100644 --- a/tests/api/test_tenant.py +++ b/tests/api/test_tenant.py @@ -1,4 +1,5 @@ from dataall.db import permissions +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS def test_list_tenant_permissions(client, user, group, tenant): @@ -60,7 +61,7 @@ def 
test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, MANAGE_DATASETS], ), groups=[group.name, 'DAAdministrators'], ) @@ -92,7 +93,7 @@ def test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, MANAGE_DATASETS], ), groups=[group.name, 'DAAdministrators'], ) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index d40402836..a32e64085 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,6 +6,7 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS @pytest.fixture(scope='module') @@ -165,7 +166,7 @@ def test_attach_tenant_policy( dataall.db.api.TenantPolicy.attach_group_tenant_policy( session=session, group=group.name, - permissions=[dataall.db.permissions.MANAGE_DATASETS], + permissions=[MANAGE_DATASETS], tenant_name='dataall', ) @@ -173,7 +174,7 @@ def test_attach_tenant_policy( session=session, username=user.userName, groups=[group.name], - permission_name=dataall.db.permissions.MANAGE_DATASETS, + permission_name=MANAGE_DATASETS, tenant_name='dataall', ) From 8d8d952e3dd82222159750d2d4a072b100c9dbc1 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 10:27:24 +0200 Subject: [PATCH 093/346] Moved dataset read permissions to modules --- backend/dataall/db/api/target_type.py | 1 - backend/dataall/db/permissions.py | 16 ---------------- backend/dataall/modules/datasets/__init__.py | 4 ++++ .../modules/datasets/api/dataset/resolvers.py | 7 
++++--- .../datasets/services/dataset_location.py | 8 ++++---- .../datasets/services/dataset_service.py | 8 ++++---- .../modules/datasets/services/permissions.py | 19 +++++++++++++++++++ tests/db/test_permission.py | 6 +++--- 8 files changed, 38 insertions(+), 31 deletions(-) diff --git a/backend/dataall/db/api/target_type.py b/backend/dataall/db/api/target_type.py index 0eecb2569..a77e4e09a 100644 --- a/backend/dataall/db/api/target_type.py +++ b/backend/dataall/db/api/target_type.py @@ -36,7 +36,6 @@ def is_supported_target_type(target_type): ) -TargetType("dataset", permissions.GET_DATASET, permissions.UPDATE_DATASET) TargetType("environment", permissions.GET_ENVIRONMENT, permissions.UPDATE_ENVIRONMENT) TargetType("mlstudio", permissions.GET_SGMSTUDIO_NOTEBOOK, permissions.UPDATE_SGMSTUDIO_NOTEBOOK) TargetType("pipeline", permissions.GET_PIPELINE, permissions.UPDATE_PIPELINE) diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 1d0921b55..9a6726bbd 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -184,14 +184,10 @@ """ DATASET PERMISSIONS """ -GET_DATASET = 'GET_DATASET' -UPDATE_DATASET = 'UPDATE_DATASET' SYNC_DATASET = 'SYNC_DATASET' SUMMARY_DATASET = 'SUMMARY_DATASET' IMPORT_DATASET = 'IMPORT_DATASET' UPLOAD_DATASET = 'UPLOAD_DATASET' -LIST_DATASETS = 'LIST_DATASETS' -CREDENTIALS_DATASET = 'CREDENTIALS_DATASET' URL_DATASET = 'URL_DATASET' CRAWL_DATASET = 'CRAWL_DATASET' DELETE_DATASET = 'DELETE_DATASET' @@ -201,12 +197,9 @@ DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' -LIST_DATASET_TABLES = 'LIST_DATASET_TABLES' -LIST_DATASET_SHARES = 'LIST_DATASET_SHARES' CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' GET_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' -LIST_DATASET_FOLDERS = 'LIST_DATASET_FOLDERS' UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' 
DATASET_WRITE = [ UPDATE_DATASET, @@ -231,15 +224,6 @@ LIST_DATASET_FOLDERS, ] -DATASET_READ = [ - GET_DATASET, - LIST_DATASETS, - LIST_DATASET_TABLES, - LIST_DATASET_SHARES, - LIST_DATASET_FOLDERS, - CREDENTIALS_DATASET, -] - DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) """ diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 5d2444c4d..60349f8b4 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -3,10 +3,12 @@ from typing import List from dataall.api.Objects.Vote.resolvers import add_vote_type +from dataall.db.api import TargetType from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.permissions import GET_DATASET, UPDATE_DATASET from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) @@ -53,6 +55,8 @@ def __init__(self): add_vote_type("dataset", DatasetIndexer) + TargetType("dataset", GET_DATASET, UPDATE_DATASET) + log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 29da121fc..2a0c375fb 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -22,6 +22,7 @@ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.permissions import 
CREDENTIALS_DATASET log = logging.getLogger(__name__) @@ -256,7 +257,7 @@ def get_dataset_etl_credentials(context: Context, source, datasetUri: str = None username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, + permission_name=CREDENTIALS_DATASET, ) task = models.Task(targetUri=datasetUri, action='iam.dataset.user.credentials') session.add(task) @@ -273,7 +274,7 @@ def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, + permission_name=CREDENTIALS_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) if dataset.SamlAdminGroupName not in context.groups: @@ -407,7 +408,7 @@ def generate_dataset_access_token(context, source, datasetUri: str = None): username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, + permission_name=CREDENTIALS_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index 2251c2ad4..b13e66c6d 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -3,10 +3,10 @@ from sqlalchemy import and_, or_ from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary -from dataall.db import models, api, paginate, permissions, exceptions +from dataall.db import models, api, paginate, exceptions from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, 
LIST_DATASET_FOLDERS logger = logging.getLogger(__name__) @@ -68,7 +68,7 @@ def create_dataset_location( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.LIST_DATASET_FOLDERS) + @has_resource_perm(LIST_DATASET_FOLDERS) def list_dataset_locations( session, username: str, @@ -93,7 +93,7 @@ def list_dataset_locations( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.LIST_DATASET_FOLDERS) + @has_resource_perm(LIST_DATASET_FOLDERS) def get_dataset_location( session, username: str, diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index bac79d5b3..4488d03b8 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -19,7 +19,7 @@ from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -120,7 +120,7 @@ def create_dataset( ResourcePolicy.attach_resource_policy( session=session, group=dataset.stewards, - permissions=permissions.DATASET_READ, + permissions=DATASET_READ, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) @@ -294,7 +294,7 @@ def paginated_dataset_tables( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET) + @has_resource_perm(UPDATE_DATASET) def update_dataset( session, username, groups, uri, data=None, check_perm=None ) -> Dataset: @@ -363,7 +363,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, 
new_stewards): ResourcePolicy.attach_resource_policy( session=session, group=new_stewards, - permissions=permissions.DATASET_READ, + permissions=DATASET_READ, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py index 602343a2e..bd36e681b 100644 --- a/backend/dataall/modules/datasets/services/permissions.py +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -2,5 +2,24 @@ MANAGE_DATASETS = 'MANAGE_DATASETS' +GET_DATASET = 'GET_DATASET' +LIST_DATASETS = 'LIST_DATASETS' +LIST_DATASET_TABLES = 'LIST_DATASET_TABLES' +LIST_DATASET_SHARES = 'LIST_DATASET_SHARES' +LIST_DATASET_FOLDERS = 'LIST_DATASET_FOLDERS' +CREDENTIALS_DATASET = 'CREDENTIALS_DATASET' + +DATASET_READ = [ + GET_DATASET, + LIST_DATASETS, + LIST_DATASET_TABLES, + LIST_DATASET_SHARES, + LIST_DATASET_FOLDERS, + CREDENTIALS_DATASET, +] + + +UPDATE_DATASET = 'UPDATE_DATASET' + TENANT_ALL.append(MANAGE_DATASETS) TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index a32e64085..e066f1484 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,7 +6,7 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ @pytest.fixture(scope='module') @@ -14,7 +14,7 @@ def permissions(db): with db.scoped_session() as session: permissions = [] for p in ( - dataall.db.permissions.DATASET_READ + DATASET_READ + dataall.db.permissions.DATASET_WRITE + dataall.db.permissions.DATASET_TABLE_READ + dataall.db.permissions.ORGANIZATION_ALL @@ -153,7 +153,7 @@ def 
test_attach_resource_policy(db, user, group, group_user, dataset, permission session=session, username=user.userName, groups=[group.name], - permission_name=dataall.db.permissions.UPDATE_DATASET, + permission_name=UPDATE_DATASET, resource_uri=dataset.datasetUri, ) From d6ec387e560a6d2f6fbd3930234937a69543e779 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 10:44:19 +0200 Subject: [PATCH 094/346] Moved dataset write permissions to modules --- backend/dataall/db/permissions.py | 47 +----------------- .../modules/datasets/api/dataset/resolvers.py | 21 ++++---- .../datasets/api/profiling/resolvers.py | 5 +- .../api/storage_location/resolvers.py | 5 +- .../modules/datasets/api/table/resolvers.py | 5 +- .../datasets/api/table_column/resolvers.py | 7 +-- .../datasets/services/dataset_location.py | 11 +++-- .../datasets/services/dataset_service.py | 10 ++-- .../datasets/services/dataset_table.py | 15 +++--- .../modules/datasets/services/permissions.py | 49 +++++++++++++++++-- tests/db/test_permission.py | 7 ++- 11 files changed, 93 insertions(+), 89 deletions(-) diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 9a6726bbd..69a7b3d25 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -181,50 +181,6 @@ GET_SHARE_OBJECT, LIST_SHARED_ITEMS, ] -""" -DATASET PERMISSIONS -""" -SYNC_DATASET = 'SYNC_DATASET' -SUMMARY_DATASET = 'SUMMARY_DATASET' -IMPORT_DATASET = 'IMPORT_DATASET' -UPLOAD_DATASET = 'UPLOAD_DATASET' -URL_DATASET = 'URL_DATASET' -CRAWL_DATASET = 'CRAWL_DATASET' -DELETE_DATASET = 'DELETE_DATASET' -STACK_DATASET = 'STACK_DATASET' -SUBSCRIPTIONS_DATASET = 'SUBSCRIPTIONS_DATASET' -CREATE_DATASET_TABLE = 'CREATE_DATASET_TABLE' -DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' -UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' -PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' -CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' -DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' 
-GET_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' -UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' -DATASET_WRITE = [ - UPDATE_DATASET, - SYNC_DATASET, - SUMMARY_DATASET, - IMPORT_DATASET, - UPLOAD_DATASET, - CREDENTIALS_DATASET, - URL_DATASET, - CRAWL_DATASET, - DELETE_DATASET, - STACK_DATASET, - SUBSCRIPTIONS_DATASET, - UPDATE_DATASET_TABLE, - DELETE_DATASET_TABLE, - CREATE_DATASET_TABLE, - PROFILE_DATASET_TABLE, - LIST_DATASET_SHARES, - CREATE_DATASET_FOLDER, - DELETE_DATASET_FOLDER, - UPDATE_DATASET_FOLDER, - LIST_DATASET_FOLDERS, -] - -DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) """ DATASET TABLE PERMISSIONS @@ -386,8 +342,7 @@ RESOURCES_ALL """ RESOURCES_ALL = ( - DATASET_ALL - + DATASET_TABLE_READ + DATASET_TABLE_READ + ORGANIZATION_ALL + ENVIRONMENT_ALL + CONSUMPTION_ROLE_ALL diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 2a0c375fb..a75e6d414 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -22,7 +22,8 @@ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.permissions import CREDENTIALS_DATASET +from dataall.modules.datasets.services.permissions import CREDENTIALS_DATASET, SYNC_DATASET, SUMMARY_DATASET, \ + CRAWL_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET log = logging.getLogger(__name__) @@ -317,7 +318,7 @@ def sync_tables(context: Context, source, datasetUri: str = None): username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.SYNC_DATASET, + permission_name=SYNC_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) @@ -349,7 +350,7 @@ def start_crawler(context: Context, source, 
datasetUri: str, input: dict = None) username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.CRAWL_DATASET, + permission_name=CRAWL_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) @@ -363,7 +364,7 @@ def start_crawler(context: Context, source, datasetUri: str, input: dict = None) crawler = DatasetCrawler(dataset).get_crawler() if not crawler: raise exceptions.AWSResourceNotFound( - action=permissions.CRAWL_DATASET, + action=CRAWL_DATASET, message=f'Crawler {dataset.GlueCrawlerName} can not be found', ) @@ -464,7 +465,7 @@ def save_dataset_summary( username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.SUMMARY_DATASET, + permission_name=SUMMARY_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) environment = Environment.get_environment_by_uri( @@ -502,7 +503,7 @@ def get_crawler(context, source, datasetUri: str = None, name: str = None): username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.CRAWL_DATASET, + permission_name=CRAWL_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) @@ -527,7 +528,7 @@ def delete_dataset( username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.DELETE_DATASET, + permission_name=DELETE_DATASET, ) dataset: Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) env: models.Environment = Environment.get_environment_by_uri( @@ -536,7 +537,7 @@ def delete_dataset( shares = DatasetService.list_dataset_shares_with_existing_shared_items(session, datasetUri) if shares: raise exceptions.UnauthorizedOperation( - action=permissions.DELETE_DATASET, + action=DELETE_DATASET, message=f'Dataset {dataset.name} is shared with other teams. 
' 'Revoke all dataset shares before deletion.', ) @@ -545,7 +546,7 @@ def delete_dataset( ) if redshift_datasets: raise exceptions.UnauthorizedOperation( - action=permissions.DELETE_DATASET, + action=DELETE_DATASET, message='Dataset is used by Redshift clusters. ' 'Remove clusters associations first.', ) @@ -604,7 +605,7 @@ def publish_dataset_update( username=context.username, groups=context.groups, resource_uri=datasetUri, - permission_name=permissions.SUBSCRIPTIONS_DATASET, + permission_name=SUBSCRIPTIONS_DATASET, ) dataset = DatasetService.get_dataset_by_uri(session, datasetUri) env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index d1eeaf3c9..671b79c32 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -4,12 +4,13 @@ from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper -from dataall.db import api, permissions, models +from dataall.db import api, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets.services.permissions import PROFILE_DATASET_TABLE log = logging.getLogger(__name__) @@ -31,7 +32,7 @@ def start_profiling_run(context: Context, source, input: dict = None): username=context.username, groups=context.groups, resource_uri=input['datasetUri'], - permission_name=permissions.PROFILE_DATASET_TABLE, + permission_name=PROFILE_DATASET_TABLE, ) dataset = 
DatasetService.get_dataset_by_uri(session, input['datasetUri']) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 634e1239a..5e7351aa1 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -1,6 +1,6 @@ from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker -from dataall.db import permissions, models +from dataall.db import models from dataall.db.api import ( ResourcePolicy, Glossary, @@ -11,6 +11,7 @@ from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.services.permissions import UPDATE_DATASET_FOLDER def create_storage_location( @@ -107,7 +108,7 @@ def publish_location_update(context: Context, source, locationUri: str = None): username=context.username, groups=context.groups, resource_uri=location.datasetUri, - permission_name=permissions.UPDATE_DATASET_FOLDER, + permission_name=UPDATE_DATASET_FOLDER, ) dataset = DatasetService.get_dataset_by_uri(session, location.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index a45cee61e..2e0a46b2e 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -9,10 +9,11 @@ from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper -from dataall.db import permissions, models +from dataall.db import models from dataall.db.api import 
ResourcePolicy, Glossary from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -197,7 +198,7 @@ def publish_table_update(context: Context, source, tableUri: str = None): username=context.username, groups=context.groups, resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, + permission_name=UPDATE_DATASET_TABLE, ) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index b27a99dd3..951b2038f 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -3,10 +3,11 @@ from dataall import db from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker -from dataall.db import paginate, permissions, models +from dataall.db import paginate, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table import DatasetTableService from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE def list_table_columns( @@ -54,7 +55,7 @@ def sync_table_columns(context: Context, source, tableUri: str = None): username=context.username, groups=context.groups, resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, + permission_name=UPDATE_DATASET_TABLE, ) task = 
models.Task(action='glue.table.columns', targetUri=table.tableUri) session.add(task) @@ -89,7 +90,7 @@ def update_table_column( username=context.username, groups=context.groups, resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, + permission_name=UPDATE_DATASET_TABLE, ) column.description = input.get('description', 'No description provided') session.add(column) diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index b13e66c6d..73885eb51 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -6,7 +6,8 @@ from dataall.db import models, api, paginate, exceptions from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, CREATE_DATASET_FOLDER, \ + DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER logger = logging.getLogger(__name__) @@ -14,7 +15,7 @@ class DatasetLocationService: @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET_FOLDER) + @has_resource_perm(CREATE_DATASET_FOLDER) def create_dataset_location( session, username: str, @@ -106,7 +107,7 @@ def get_dataset_location( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET_FOLDER) + @has_resource_perm(UPDATE_DATASET_FOLDER) def update_dataset_location( session, username: str, @@ -136,7 +137,7 @@ def update_dataset_location( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.DELETE_DATASET_FOLDER) + @has_resource_perm(DELETE_DATASET_FOLDER) def delete_dataset_location( session, username: str, @@ -161,7 +162,7 @@ def 
delete_dataset_location( ) if share_item: raise exceptions.ResourceShared( - action=permissions.DELETE_DATASET_FOLDER, + action=DELETE_DATASET_FOLDER, message='Revoke all folder shares before deletion', ) session.query(models.ShareObjectItem).filter( diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 4488d03b8..558fa3a28 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -14,12 +14,12 @@ Stack, ) from dataall.db.api import Organization -from dataall.db import models, api, exceptions, permissions, paginate +from dataall.db import models, api, exceptions, paginate from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -112,7 +112,7 @@ def create_dataset( ResourcePolicy.attach_resource_policy( session=session, group=data['SamlAdminGroupName'], - permissions=permissions.DATASET_ALL, + permissions=DATASET_ALL, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) @@ -128,7 +128,7 @@ def create_dataset( ResourcePolicy.attach_resource_policy( session=session, group=environment.SamlGroupName, - permissions=permissions.DATASET_ALL, + permissions=DATASET_ALL, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) @@ -316,7 +316,7 @@ def update_dataset( ResourcePolicy.attach_resource_policy( 
session=session, group=dataset.SamlAdminGroupName, - permissions=permissions.DATASET_ALL, + permissions=DATASET_ALL, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index b04348c02..76d22015e 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -2,9 +2,10 @@ from sqlalchemy.sql import and_ -from dataall.db import models, api, permissions, exceptions, paginate +from dataall.db import models, api, exceptions, paginate from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ + UPDATE_DATASET_TABLE from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset @@ -15,7 +16,7 @@ class DatasetTableService: @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET_TABLE) + @has_resource_perm(CREATE_DATASET_TABLE) def create_dataset_table( session, username: str, @@ -71,7 +72,7 @@ def create_dataset_table( ResourcePolicy.attach_resource_policy( session=session, group=group, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=table.tableUri, resource_type=DatasetTable.__name__, ) @@ -113,7 +114,7 @@ def get_dataset_table( @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET_TABLE) + @has_resource_perm(UPDATE_DATASET_TABLE) def update_dataset_table( session, username: str, @@ -139,7 +140,7 @@ def update_dataset_table( @staticmethod 
@has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.DELETE_DATASET_TABLE) + @has_resource_perm(DELETE_DATASET_TABLE) def delete_dataset_table( session, username: str, @@ -162,7 +163,7 @@ def delete_dataset_table( ) if share_item: raise exceptions.ResourceShared( - action=permissions.DELETE_DATASET_TABLE, + action=DELETE_DATASET_TABLE, message='Revoke all table shares before deletion', ) session.query(models.ShareObjectItem).filter( diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py index bd36e681b..74f37544d 100644 --- a/backend/dataall/modules/datasets/services/permissions.py +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -1,7 +1,10 @@ -from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC +from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL MANAGE_DATASETS = 'MANAGE_DATASETS' +TENANT_ALL.append(MANAGE_DATASETS) +TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' + GET_DATASET = 'GET_DATASET' LIST_DATASETS = 'LIST_DATASETS' LIST_DATASET_TABLES = 'LIST_DATASET_TABLES' @@ -20,6 +23,46 @@ UPDATE_DATASET = 'UPDATE_DATASET' +SYNC_DATASET = 'SYNC_DATASET' +SUMMARY_DATASET = 'SUMMARY_DATASET' +IMPORT_DATASET = 'IMPORT_DATASET' +UPLOAD_DATASET = 'UPLOAD_DATASET' +URL_DATASET = 'URL_DATASET' +CRAWL_DATASET = 'CRAWL_DATASET' +DELETE_DATASET = 'DELETE_DATASET' +STACK_DATASET = 'STACK_DATASET' +SUBSCRIPTIONS_DATASET = 'SUBSCRIPTIONS_DATASET' +CREATE_DATASET_TABLE = 'CREATE_DATASET_TABLE' +DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' +UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' +PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' +CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' +DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' +GET_DATASET_FOLDER = 'GET_DATASET_FOLDER' +UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' -TENANT_ALL.append(MANAGE_DATASETS) -TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' 
+DATASET_WRITE = [ + UPDATE_DATASET, + SYNC_DATASET, + SUMMARY_DATASET, + IMPORT_DATASET, + UPLOAD_DATASET, + CREDENTIALS_DATASET, + URL_DATASET, + CRAWL_DATASET, + DELETE_DATASET, + STACK_DATASET, + SUBSCRIPTIONS_DATASET, + UPDATE_DATASET_TABLE, + DELETE_DATASET_TABLE, + CREATE_DATASET_TABLE, + PROFILE_DATASET_TABLE, + LIST_DATASET_SHARES, + CREATE_DATASET_FOLDER, + DELETE_DATASET_FOLDER, + UPDATE_DATASET_FOLDER, + LIST_DATASET_FOLDERS, +] + +DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) +RESOURCES_ALL.extend(DATASET_ALL) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index e066f1484..3b70757b6 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,7 +6,7 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE @pytest.fixture(scope='module') @@ -14,8 +14,7 @@ def permissions(db): with db.scoped_session() as session: permissions = [] for p in ( - DATASET_READ - + dataall.db.permissions.DATASET_WRITE + DATASET_READ + DATASET_WRITE + dataall.db.permissions.DATASET_TABLE_READ + dataall.db.permissions.ORGANIZATION_ALL + dataall.db.permissions.ENVIRONMENT_ALL @@ -145,7 +144,7 @@ def test_attach_resource_policy(db, user, group, group_user, dataset, permission dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=group.name, - permissions=dataall.db.permissions.DATASET_WRITE, + permissions=DATASET_WRITE, resource_uri=dataset.datasetUri, resource_type=Dataset.__name__, ) From 788e35fb76be6043e667fb3cd5ba48dc27ec2891 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 10:49:11 +0200 Subject: [PATCH 095/346] Moved dataset 
table permissions to modules --- backend/dataall/db/api/share_object.py | 3 ++- backend/dataall/db/permissions.py | 14 +------------- .../modules/datasets/api/table/resolvers.py | 4 ++-- .../datasets/services/dataset_service.py | 5 +++-- .../modules/datasets/services/dataset_table.py | 4 ++-- .../modules/datasets/services/permissions.py | 18 ++++++++++++++++++ ...b215e_backfill_dataset_table_permissions.py | 7 ++++--- tests/db/test_permission.py | 6 +++--- 8 files changed, 35 insertions(+), 26 deletions(-) diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index 455ee7296..79a82af43 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -12,6 +12,7 @@ from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService +from ...modules.datasets.services.permissions import DATASET_TABLE_READ from ...modules.datasets.services.share_notification_service import ShareNotificationService logger = logging.getLogger(__name__) @@ -605,7 +606,7 @@ def approve_share_object( ResourcePolicy.attach_resource_policy( session=session, group=share.principalId, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=table.itemUri, resource_type=DatasetTable.__name__, ) diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 69a7b3d25..2f2e59d00 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -182,17 +182,6 @@ LIST_SHARED_ITEMS, ] -""" -DATASET TABLE PERMISSIONS -""" -GET_DATASET_TABLE = 'GET_DATASET_TABLE' -PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' - -DATASET_TABLE_READ = [ - GET_DATASET_TABLE, - PREVIEW_DATASET_TABLE -] - """ GLOSSARIES """ @@ -342,8 
+331,7 @@ RESOURCES_ALL """ RESOURCES_ALL = ( - DATASET_TABLE_READ - + ORGANIZATION_ALL + ORGANIZATION_ALL + ENVIRONMENT_ALL + CONSUMPTION_ROLE_ALL + SHARE_OBJECT_ALL diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 2e0a46b2e..c5e6726a2 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -13,7 +13,7 @@ from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table import DatasetTableService @@ -119,7 +119,7 @@ def preview(context, source, tableUri: str = None): username=context.username, groups=context.groups, resource_uri=table.tableUri, - permission_name=permissions.PREVIEW_DATASET_TABLE, + permission_name=PREVIEW_DATASET_TABLE, ) env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) env_workgroup = {} diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 558fa3a28..f88022273 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -19,7 +19,8 @@ from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from 
dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ + DATASET_TABLE_READ from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -379,7 +380,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): ResourcePolicy.attach_resource_policy( session=session, group=new_stewards, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=tableUri, resource_type=DatasetTable.__name__, ) diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 76d22015e..437e9384e 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -5,7 +5,7 @@ from dataall.db import models, api, exceptions, paginate from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ - UPDATE_DATASET_TABLE + UPDATE_DATASET_TABLE, DATASET_TABLE_READ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset @@ -272,7 +272,7 @@ def sync_existing_tables(session, datasetUri, glue_tables=None): ResourcePolicy.attach_resource_policy( session=session, group=group, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=updated_table.tableUri, resource_type=DatasetTable.__name__, ) diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py index 74f37544d..09d41c26e 100644 --- 
a/backend/dataall/modules/datasets/services/permissions.py +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -5,6 +5,10 @@ TENANT_ALL.append(MANAGE_DATASETS) TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' +""" +DATASET PERMISSIONS +""" + GET_DATASET = 'GET_DATASET' LIST_DATASETS = 'LIST_DATASETS' LIST_DATASET_TABLES = 'LIST_DATASET_TABLES' @@ -66,3 +70,17 @@ DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) RESOURCES_ALL.extend(DATASET_ALL) + +""" +DATASET TABLE PERMISSIONS +""" + +GET_DATASET_TABLE = 'GET_DATASET_TABLE' +PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' + +DATASET_TABLE_READ = [ + GET_DATASET_TABLE, + PREVIEW_DATASET_TABLE +] + +RESOURCES_ALL.extend(DATASET_TABLE_READ) diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 6845c2484..4447d1429 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -10,10 +10,11 @@ from sqlalchemy.orm import query_expression from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions, utils, Resource +from dataall.db import api, utils, Resource from datetime import datetime from dataall.db.models.Enums import ShareObjectStatus, ShareableType from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. 
revision = 'd05f9a5b215e' @@ -93,7 +94,7 @@ def upgrade(): session=session, resource_uri=table.tableUri, group=group, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_type=DatasetTable.__name__, ) print('dataset table permissions updated successfully for owners/stewards') @@ -117,7 +118,7 @@ def upgrade(): api.ResourcePolicy.attach_resource_policy( session=session, group=share.principalId, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=shared_table.itemUri, resource_type=DatasetTable.__name__, ) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 3b70757b6..28524a77a 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,7 +6,8 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ + DATASET_TABLE_READ @pytest.fixture(scope='module') @@ -14,8 +15,7 @@ def permissions(db): with db.scoped_session() as session: permissions = [] for p in ( - DATASET_READ + DATASET_WRITE - + dataall.db.permissions.DATASET_TABLE_READ + DATASET_READ + DATASET_WRITE + DATASET_TABLE_READ + dataall.db.permissions.ORGANIZATION_ALL + dataall.db.permissions.ENVIRONMENT_ALL ): From 584a04b8e550ba45ab67e435af65772882777df0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 10:57:58 +0200 Subject: [PATCH 096/346] Moved dataset related policies --- backend/dataall/modules/datasets/cdk/__init__.py | 4 ++-- .../databrew.py => modules/datasets/cdk/databrew_policy.py} | 4 ++-- .../policies/glue.py => modules/datasets/cdk/glue_policy.py} | 4 ++-- 
.../datasets/cdk/lakeformation_policy.py} | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) rename backend/dataall/{cdkproxy/stacks/policies/databrew.py => modules/datasets/cdk/databrew_policy.py} (92%) rename backend/dataall/{cdkproxy/stacks/policies/glue.py => modules/datasets/cdk/glue_policy.py} (98%) rename backend/dataall/{cdkproxy/stacks/policies/lakeformation.py => modules/datasets/cdk/lakeformation_policy.py} (92%) diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py index 5642d8a40..9d1e205b8 100644 --- a/backend/dataall/modules/datasets/cdk/__init__.py +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -1,3 +1,3 @@ -from dataall.modules.datasets.cdk import dataset_stack +from dataall.modules.datasets.cdk import dataset_stack, databrew_policy, glue_policy, lakeformation_policy -__all__ = ["dataset_stack"] +__all__ = ["dataset_stack", "databrew_policy", "glue_policy", "lakeformation_policy"] diff --git a/backend/dataall/cdkproxy/stacks/policies/databrew.py b/backend/dataall/modules/datasets/cdk/databrew_policy.py similarity index 92% rename from backend/dataall/cdkproxy/stacks/policies/databrew.py rename to backend/dataall/modules/datasets/cdk/databrew_policy.py index 270879639..1ba1c7ee9 100644 --- a/backend/dataall/cdkproxy/stacks/policies/databrew.py +++ b/backend/dataall/modules/datasets/cdk/databrew_policy.py @@ -1,9 +1,9 @@ from dataall.db import permissions -from .service_policy import ServicePolicy +from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam -class Databrew(ServicePolicy): +class DatabrewPolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if permissions.CREATE_DATASET not in group_permissions: return [] diff --git a/backend/dataall/cdkproxy/stacks/policies/glue.py b/backend/dataall/modules/datasets/cdk/glue_policy.py similarity index 98% rename from 
backend/dataall/cdkproxy/stacks/policies/glue.py rename to backend/dataall/modules/datasets/cdk/glue_policy.py index aa1dbe479..5b915ee6d 100644 --- a/backend/dataall/cdkproxy/stacks/policies/glue.py +++ b/backend/dataall/modules/datasets/cdk/glue_policy.py @@ -1,9 +1,9 @@ from dataall.db import permissions -from .service_policy import ServicePolicy +from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam -class Glue(ServicePolicy): +class GluePolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if permissions.CREATE_DATASET not in group_permissions: return [] diff --git a/backend/dataall/cdkproxy/stacks/policies/lakeformation.py b/backend/dataall/modules/datasets/cdk/lakeformation_policy.py similarity index 92% rename from backend/dataall/cdkproxy/stacks/policies/lakeformation.py rename to backend/dataall/modules/datasets/cdk/lakeformation_policy.py index 3eb5d835c..d072b5382 100644 --- a/backend/dataall/cdkproxy/stacks/policies/lakeformation.py +++ b/backend/dataall/modules/datasets/cdk/lakeformation_policy.py @@ -1,10 +1,10 @@ from aws_cdk import aws_iam as iam from dataall.db import permissions -from .service_policy import ServicePolicy +from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy -class LakeFormation(ServicePolicy): +class LakeFormationPolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if permissions.CREATE_DATASET not in group_permissions: return [] From 4dd3a2b81181f51ac6feb72c0a0fe42a15a42686 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 12:33:25 +0200 Subject: [PATCH 097/346] Moved dataset permissions for env --- backend/dataall/db/api/environment.py | 4 --- backend/dataall/db/permissions.py | 8 ----- .../modules/datasets/cdk/databrew_policy.py | 5 +-- .../modules/datasets/cdk/glue_policy.py | 5 +-- .../datasets/cdk/lakeformation_policy.py | 4 +-- .../datasets/services/dataset_service.py | 10 
+++--- .../modules/datasets/services/permissions.py | 31 ++++++++++++++++++- tests/api/test_environment.py | 5 +-- 8 files changed, 46 insertions(+), 26 deletions(-) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index accee95f7..68b2d58ec 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -286,10 +286,6 @@ def invite_group( @staticmethod def validate_permissions(session, uri, g_permissions, group): - - if permissions.CREATE_DATASET in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_DATASETS) - if permissions.CREATE_REDSHIFT_CLUSTER in g_permissions: g_permissions.append(permissions.LIST_ENVIRONMENT_REDSHIFT_CLUSTERS) diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 2f2e59d00..bec9fd4cd 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -45,13 +45,11 @@ ADD_ENVIRONMENT_CONSUMPTION_ROLES = 'ADD_ENVIRONMENT_CONSUMPTION_ROLES' LIST_ENVIRONMENT_CONSUMPTION_ROLES = 'LIST_ENVIRONMENT_CONSUMPTION_ROLES' LIST_ENVIRONMENT_GROUP_PERMISSIONS = 'LIST_ENVIRONMENT_GROUP_PERMISSIONS' -LIST_ENVIRONMENT_DATASETS = 'LIST_ENVIRONMENT_DATASETS' LIST_ENVIRONMENT_GROUPS = 'LIST_ENVIRONMENT_GROUPS' CREDENTIALS_ENVIRONMENT = 'CREDENTIALS_ENVIRONMENT' ENABLE_ENVIRONMENT_SUBSCRIPTIONS = 'ENABLE_ENVIRONMENT_SUBSCRIPTIONS' DISABLE_ENVIRONMENT_SUBSCRIPTIONS = 'DISABLE_ENVIRONMENT_SUBSCRIPTIONS' RUN_ATHENA_QUERY = 'RUN_ATHENA_QUERY' -CREATE_DATASET = 'CREATE_DATASET' CREATE_SHARE_OBJECT = 'CREATE_SHARE_OBJECT' LIST_ENVIRONMENT_SHARED_WITH_OBJECTS = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' CREATE_REDSHIFT_CLUSTER = 'CREATE_REDSHIFT_CLUSTER' @@ -67,10 +65,8 @@ ENVIRONMENT_INVITED = [ - CREATE_DATASET, LIST_ENVIRONMENT_GROUP_PERMISSIONS, GET_ENVIRONMENT, - LIST_ENVIRONMENT_DATASETS, LIST_ENVIRONMENT_GROUPS, LIST_ENVIRONMENT_CONSUMPTION_ROLES, CREATE_SHARE_OBJECT, @@ -92,7 +88,6 @@ ENVIRONMENT_INVITATION_REQUEST = 
[ INVITE_ENVIRONMENT_GROUP, ADD_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_DATASET, CREATE_SHARE_OBJECT, CREATE_REDSHIFT_CLUSTER, CREATE_SGMSTUDIO_NOTEBOOK, @@ -110,13 +105,11 @@ LIST_ENVIRONMENT_GROUP_PERMISSIONS, ADD_ENVIRONMENT_CONSUMPTION_ROLES, LIST_ENVIRONMENT_CONSUMPTION_ROLES, - LIST_ENVIRONMENT_DATASETS, LIST_ENVIRONMENT_GROUPS, CREDENTIALS_ENVIRONMENT, ENABLE_ENVIRONMENT_SUBSCRIPTIONS, DISABLE_ENVIRONMENT_SUBSCRIPTIONS, RUN_ATHENA_QUERY, - CREATE_DATASET, CREATE_SHARE_OBJECT, CREATE_REDSHIFT_CLUSTER, LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, @@ -345,7 +338,6 @@ ) RESOURCES_ALL_WITH_DESC = {k: k for k in RESOURCES_ALL} -RESOURCES_ALL_WITH_DESC[CREATE_DATASET] = 'Create datasets on this environment' RESOURCES_ALL_WITH_DESC[CREATE_DASHBOARD] = 'Create dashboards on this environment' RESOURCES_ALL_WITH_DESC[CREATE_REDSHIFT_CLUSTER] = 'Create Redshift clusters on this environment' RESOURCES_ALL_WITH_DESC[CREATE_SGMSTUDIO_NOTEBOOK] = 'Create ML Studio profiles on this environment' diff --git a/backend/dataall/modules/datasets/cdk/databrew_policy.py b/backend/dataall/modules/datasets/cdk/databrew_policy.py index 1ba1c7ee9..ed2ef0b32 100644 --- a/backend/dataall/modules/datasets/cdk/databrew_policy.py +++ b/backend/dataall/modules/datasets/cdk/databrew_policy.py @@ -1,11 +1,12 @@ -from dataall.db import permissions from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam +from dataall.modules.datasets.services.permissions import CREATE_DATASET + class DatabrewPolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): - if permissions.CREATE_DATASET not in group_permissions: + if CREATE_DATASET not in group_permissions: return [] statements = [ diff --git a/backend/dataall/modules/datasets/cdk/glue_policy.py b/backend/dataall/modules/datasets/cdk/glue_policy.py index 5b915ee6d..a98b215bd 100644 --- a/backend/dataall/modules/datasets/cdk/glue_policy.py +++ 
b/backend/dataall/modules/datasets/cdk/glue_policy.py @@ -1,11 +1,12 @@ -from dataall.db import permissions from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam +from dataall.modules.datasets.services.permissions import CREATE_DATASET + class GluePolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): - if permissions.CREATE_DATASET not in group_permissions: + if CREATE_DATASET not in group_permissions: return [] statements = [ diff --git a/backend/dataall/modules/datasets/cdk/lakeformation_policy.py b/backend/dataall/modules/datasets/cdk/lakeformation_policy.py index d072b5382..bbeef17dc 100644 --- a/backend/dataall/modules/datasets/cdk/lakeformation_policy.py +++ b/backend/dataall/modules/datasets/cdk/lakeformation_policy.py @@ -1,12 +1,12 @@ from aws_cdk import aws_iam as iam -from dataall.db import permissions from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy +from dataall.modules.datasets.services.permissions import CREATE_DATASET class LakeFormationPolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): - if permissions.CREATE_DATASET not in group_permissions: + if CREATE_DATASET not in group_permissions: return [] return [ diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index f88022273..f32828cfe 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -14,13 +14,13 @@ Stack, ) from dataall.db.api import Organization -from dataall.db import models, api, exceptions, paginate +from dataall.db import models, api, exceptions, paginate, permissions from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset 
from dataall.modules.datasets.services.dataset_location import DatasetLocationService from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ - DATASET_TABLE_READ + DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -32,7 +32,7 @@ class DatasetService: @staticmethod @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET) + @has_resource_perm(CREATE_DATASET) def create_dataset( session, username: str, @@ -60,7 +60,7 @@ def create_dataset( groups=groups, uri=uri, group=data['SamlAdminGroupName'], - permission_name=permissions.CREATE_DATASET, + permission_name=CREATE_DATASET, ) environment = Environment.get_environment_by_uri(session, uri) @@ -668,7 +668,7 @@ def query_environment_datasets(session, username, groups, uri, filter) -> Query: return query @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) + @has_resource_perm(LIST_ENVIRONMENT_DATASETS) def paginated_environment_datasets( session, username, groups, uri, data=None, check_perm=None ) -> dict: diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py index 09d41c26e..be65f56e6 100644 --- a/backend/dataall/modules/datasets/services/permissions.py +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -1,4 +1,7 @@ -from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL +from itertools import chain + +from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC, \ + ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL MANAGE_DATASETS = 'MANAGE_DATASETS' @@ -84,3 +87,29 @@ ] RESOURCES_ALL.extend(DATASET_TABLE_READ) + +""" +DATASET PERMISSIONS FOR ENVIRONMENT +""" + +CREATE_DATASET = 'CREATE_DATASET' 
+LIST_ENVIRONMENT_DATASETS = 'LIST_ENVIRONMENT_DATASETS' + +ENVIRONMENT_INVITED.append(CREATE_DATASET) +ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_DATASETS) + +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_DATASET) +ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_DATASETS) + +ENVIRONMENT_ALL.append(CREATE_DATASET) +ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_DATASETS) + +RESOURCES_ALL.append(CREATE_DATASET) +RESOURCES_ALL.append(LIST_ENVIRONMENT_DATASETS) + + +for perm in chain(DATASET_ALL, DATASET_TABLE_READ): + RESOURCES_ALL_WITH_DESC[perm] = perm + +RESOURCES_ALL_WITH_DESC[CREATE_DATASET] = 'Create datasets on this environment' +RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_DATASETS] = "List datasets on this environment" diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index 774797108..a29cd1cae 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -3,6 +3,7 @@ import dataall from dataall.db import permissions from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets.services.permissions import CREATE_DATASET @pytest.fixture(scope='module', autouse=True) @@ -436,7 +437,7 @@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d env_permissions = [ p.name for p in response.data.listEnvironmentGroupInvitationPermissions ] - assert permissions.CREATE_DATASET in env_permissions + assert CREATE_DATASET in env_permissions response = client.query( """ @@ -474,7 +475,7 @@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d environmentUri=env1.environmentUri, ) env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] - assert permissions.CREATE_DATASET in env_permissions + assert CREATE_DATASET in env_permissions response = client.query( """ From 95cbf814ae96557761a77ec88945a8e8198192de Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 12:56:48 +0200 Subject: [PATCH 098/346] Added migration 
script for group environments --- ...fc49baecea4_add_enviromental_parameters.py | 43 +++++++++++++++++-- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 061d9b81a..349361b98 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -10,11 +10,13 @@ from alembic import op from sqlalchemy import Boolean, Column, String, orm from sqlalchemy.ext.declarative import declarative_base -from dataall.db import Resource +from dataall.db import Resource, models +from dataall.db.api import ResourcePolicy from dataall.db.api.permission import Permission -from dataall.db.models import TenantPolicy, TenantPolicyPermission, PermissionType +from dataall.db.models import TenantPolicy, TenantPolicyPermission, PermissionType, EnvironmentGroup from dataall.db.permissions import MANAGE_SGMSTUDIO_NOTEBOOKS -from dataall.modules.notebooks.services.permissions import MANAGE_NOTEBOOKS +from dataall.modules.datasets.services.permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.notebooks.services.permissions import MANAGE_NOTEBOOKS, LIST_ENVIRONMENT_NOTEBOOKS, CREATE_NOTEBOOK # revision identifiers, used by Alembic. revision = "5fc49baecea4" @@ -109,6 +111,8 @@ def upgrade(): )) session.commit() + migrate_groups_permissions(session) + except Exception as ex: print(f"Failed to execute the migration script due to: {ex}") @@ -151,3 +155,36 @@ def create_foreign_key_to_env(op, table: str): table, "environment", ["environmentUri"], ["environmentUri"], ) + + +def find_all_groups(session): + return session.query(EnvironmentGroup).all() + + +def migrate_groups_permissions(session): + """ + Adds new permission if the old exist. 
needed to get rid of old hacks in the code + """ + permissions = [(CREATE_DATASET, LIST_ENVIRONMENT_DATASETS), + (CREATE_NOTEBOOK, LIST_ENVIRONMENT_NOTEBOOKS)] + + groups = find_all_groups(session) + for group in groups: + new_perms = [] + for existed, to_add in permissions: + if not ResourcePolicy.has_group_resource_permission( + session, + group_uri=group, + permission_name=existed, + resource_uri=group.environmentUri, + ): + new_perms.append(to_add) + + if new_perms: + ResourcePolicy.attach_resource_policy( + session=session, + group=group.groupUri, + permissions=new_perms, + resource_uri=group.environmentUri, + resource_type=models.Environment.__name__ + ) From 781bb087176bb2b9a4a528362e98af7c9f78b2e8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 13:43:09 +0200 Subject: [PATCH 099/346] Extracted data policy for dataset Did the same trick that was done for ServicePolicy --- .../dataall/cdkproxy/stacks/environment.py | 61 ++++--------------- .../cdkproxy/stacks/policies/data_policy.py | 17 ++---- backend/dataall/db/api/environment.py | 18 ------ .../modules/datasets/cdk/dataset_policy.py | 33 ++++++++++ .../datasets/services/dataset_service.py | 20 ++++++ 5 files changed, 72 insertions(+), 77 deletions(-) create mode 100644 backend/dataall/modules/datasets/cdk/dataset_policy.py diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index 8c9440933..20646b7fb 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -40,7 +40,6 @@ from ...db import models from ...utils.cdk_nag_utils import CDKNagUtil from ...utils.runtime_stacks_tagging import TagsUtil -from dataall.modules.datasets.db.models import Dataset logger = logging.getLogger(__name__) @@ -128,33 +127,6 @@ def get_environment_admins_group( group_uri=environment.SamlGroupName, ) - @staticmethod - def get_environment_group_datasets( - engine, environment: 
models.Environment, group: str - ) -> [Dataset]: - with engine.scoped_session() as session: - return db.api.Environment.list_group_datasets( - session, - username='cdk', - groups=[], - uri=environment.environmentUri, - data={'groupUri': group}, - check_perm=False, - ) - - @staticmethod - def get_all_environment_datasets( - engine, environment: models.Environment - ) -> [Dataset]: - with engine.scoped_session() as session: - return ( - session.query(Dataset) - .filter( - Dataset.environmentUri == environment.environmentUri, - ) - .all() - ) - def __init__(self, scope, id, target_uri: str = None, **kwargs): super().__init__(scope, id, @@ -178,10 +150,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): self.environment_admins_group: models.EnvironmentGroup = self.get_environment_admins_group(self.engine, self._environment) - self.all_environment_datasets = self.get_all_environment_datasets( - self.engine, self._environment - ) - roles_sagemaker_dependency_group = DependencyGroup() if self._environment.dashboardsEnabled: @@ -630,7 +598,6 @@ def create_or_import_environment_default_role(self): region=self._environment.region, environment=self._environment, team=self.environment_admins_group, - datasets=self.all_environment_datasets, ).generate_admins_data_access_policy() default_role = iam.Role( @@ -688,21 +655,19 @@ def create_group_environment_role(self, group): permissions=group_permissions, ).generate_policies() - data_policy = DataPolicy( - stack=self, - tag_key='Team', - tag_value=group.groupUri, - resource_prefix=self._environment.resourcePrefix, - name=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', - id=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', - account=self._environment.AwsAccountId, - region=self._environment.region, - environment=self._environment, - team=group, - datasets=self.get_environment_group_datasets( - self.engine, self._environment, group.groupUri - ), - 
).generate_data_access_policy() + with self.engine.scoped_session() as session: + data_policy = DataPolicy( + stack=self, + tag_key='Team', + tag_value=group.groupUri, + resource_prefix=self._environment.resourcePrefix, + name=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', + id=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', + account=self._environment.AwsAccountId, + region=self._environment.region, + environment=self._environment, + team=group, + ).generate_data_access_policy(session=session) group_role = iam.Role( self, diff --git a/backend/dataall/cdkproxy/stacks/policies/data_policy.py b/backend/dataall/cdkproxy/stacks/policies/data_policy.py index b5842afea..a0791954d 100644 --- a/backend/dataall/cdkproxy/stacks/policies/data_policy.py +++ b/backend/dataall/cdkproxy/stacks/policies/data_policy.py @@ -4,7 +4,6 @@ from aws_cdk import aws_iam as iam from ....db import models -from dataall.modules.datasets.db.models import Dataset logger = logging.getLogger() @@ -22,7 +21,6 @@ def __init__( resource_prefix, environment: models.Environment, team: models.EnvironmentGroup, - datasets: [Dataset], ): self.stack = stack self.id = id @@ -34,7 +32,6 @@ def __init__( self.resource_prefix = resource_prefix self.environment = environment self.team = team - self.datasets = datasets def generate_admins_data_access_policy(self) -> iam.Policy: """ @@ -87,11 +84,14 @@ def generate_admins_data_access_policy(self) -> iam.Policy: return policy - def generate_data_access_policy(self) -> iam.Policy: + def generate_data_access_policy(self, session) -> iam.Policy: """ Creates aws_iam.Policy based on team datasets """ - statements: List[iam.PolicyStatement] = self.get_statements() + statements: List[iam.PolicyStatement] = self.get_statements(session) + + for extension in DataPolicy.__subclasses__(): + statements.extend(extension.get_statements(self, session=session)) policy: iam.Policy = iam.Policy( self.stack, @@ -103,7 +103,7 @@ def 
generate_data_access_policy(self) -> iam.Policy: return policy - def get_statements(self): + def get_statements(self, session): statements = [ iam.PolicyStatement( actions=[ @@ -147,11 +147,6 @@ def set_allowed_s3_buckets_statements(self, statements): f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}', f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}/*', ] - if self.datasets: - dataset: Dataset - for dataset in self.datasets: - allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}/*') - allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}') statements.extend( [ iam.PolicyStatement( diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 68b2d58ec..9a3d2d6f3 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -946,24 +946,6 @@ def paginated_environment_redshift_clusters( page=data.get('page', 1), ).to_dict() - @staticmethod - def list_group_datasets(session, username, groups, uri, data=None, check_perm=None): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - - return ( - session.query(Dataset) - .filter( - and_( - Dataset.environmentUri == uri, - Dataset.SamlAdminGroupName == data['groupUri'], - ) - ) - .all() - ) - @staticmethod @has_resource_perm(permissions.GET_ENVIRONMENT) def get_stack( diff --git a/backend/dataall/modules/datasets/cdk/dataset_policy.py b/backend/dataall/modules/datasets/cdk/dataset_policy.py new file mode 100644 index 000000000..5c847fe14 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/dataset_policy.py @@ -0,0 +1,33 @@ +from typing import List +from aws_cdk import aws_iam as iam + +from dataall.cdkproxy.stacks.policies.data_policy import DataPolicy +from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets.services.dataset_service import DatasetService + + +class 
DatasetDataPolicy(DataPolicy): + + def get_statements(self, session): + datasets = DatasetService.list_group_datasets( + session, + environment_id=self.environment.environmentUri, + group_uri=self.team.groupUri, + ) + return self._generate_dataset_statements(datasets) + + @staticmethod + def _generate_dataset_statements(datasets: List[Dataset]): + buckets = [] + if datasets: + for dataset in datasets: + buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}/*') + buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}') + + return [ + iam.PolicyStatement( + actions=['s3:*'], + resources=buckets, + ) + ] + diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index f32828cfe..63a57dcdb 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -692,3 +692,23 @@ def paginated_environment_group_datasets( page_size=data.get('pageSize', 10), ).to_dict() + @staticmethod + def list_group_datasets(session, environment_id, group_uri): + return ( + session.query(Dataset) + .filter( + and_( + Dataset.environmentUri == environment_id, + Dataset.SamlAdminGroupName == group_uri, + ) + ) + .all() + ) + + @staticmethod + def list_env_datasets(session, environment_uri): + return ( + session.query(Dataset) + .filter(Dataset.environmentUri == environment_uri) + .all() + ) From 1ec1235910a48a2be9b6593e515be0d52ace7e60 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 15:32:40 +0200 Subject: [PATCH 100/346] Introduced GroupResourceManager --- backend/dataall/core/group/__init__.py | 0 .../dataall/core/group/services/__init__.py | 0 .../group/services/group_resource_manager.py | 24 +++++++++++++++++++ backend/dataall/db/api/environment.py | 13 +++++----- .../modules/datasets/db/dataset_repository.py | 17 ++++++++++++- .../services/dataset_group_resource.py | 11 +++++++++ 6 files changed, 58 
insertions(+), 7 deletions(-) create mode 100644 backend/dataall/core/group/__init__.py create mode 100644 backend/dataall/core/group/services/__init__.py create mode 100644 backend/dataall/core/group/services/group_resource_manager.py create mode 100644 backend/dataall/modules/datasets/services/dataset_group_resource.py diff --git a/backend/dataall/core/group/__init__.py b/backend/dataall/core/group/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/core/group/services/__init__.py b/backend/dataall/core/group/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/core/group/services/group_resource_manager.py b/backend/dataall/core/group/services/group_resource_manager.py new file mode 100644 index 000000000..905770735 --- /dev/null +++ b/backend/dataall/core/group/services/group_resource_manager.py @@ -0,0 +1,24 @@ +from typing import Protocol, List + + +class GroupResource(Protocol): + def count_resources(self, session, environment_uri, group_uri) -> int: + ... 
+ + +class GroupResourceManager: + """ + API for managing group resources + """ + _resources: List[GroupResource] = [] + + @staticmethod + def register(resource: GroupResource): + GroupResourceManager._resources.append(resource) + + @staticmethod + def count_group_resources(session, environment_uri, group_uri) -> int: + counter = 0 + for resource in GroupResourceManager._resources: + counter += resource.count_resources(session, environment_uri, group_uri) + return counter diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 9a3d2d6f3..734e3dcbb 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -23,11 +23,11 @@ from ..paginator import paginate from dataall.core.environment.models import EnvironmentParameter from dataall.core.environment.db.repositories import EnvironmentParameterRepository -from dataall.modules.datasets.db.models import Dataset from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) +from dataall.core.group.services.group_resource_manager import GroupResourceManager log = logging.getLogger(__name__) @@ -352,10 +352,6 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): group_env_objects_count = ( session.query(models.Environment) - .outerjoin( - Dataset, - Dataset.environmentUri == models.Environment.environmentUri, - ) .outerjoin( models.SagemakerStudioUserProfile, models.SagemakerStudioUserProfile.environmentUri @@ -384,7 +380,6 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): models.Environment.environmentUri == environment.environmentUri, or_( models.RedshiftCluster.SamlGroupName == group, - Dataset.SamlAdminGroupName == group, models.SagemakerStudioUserProfile.SamlAdminGroupName == group, models.DataPipeline.SamlGroupName == group, models.Dashboard.SamlGroupName == group, @@ -394,6 +389,12 @@ def remove_group(session, username, groups, uri, data=None, 
check_perm=None): .count() ) + group_env_objects_count += GroupResourceManager.count_group_resources( + session=session, + environment_uri=environment.environmentUri, + group_uri=group + ) + if group_env_objects_count > 0: raise exceptions.EnvironmentResourcesFound( action='Remove Team', diff --git a/backend/dataall/modules/datasets/db/dataset_repository.py b/backend/dataall/modules/datasets/db/dataset_repository.py index d58c3a7a1..95aef6102 100644 --- a/backend/dataall/modules/datasets/db/dataset_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_repository.py @@ -1,5 +1,7 @@ +from operator import and_ + from dataall.db import exceptions -from dataall.db.models import Dataset +from dataall.modules.datasets.db.models import Dataset class DatasetRepository: @@ -11,3 +13,16 @@ def get_dataset_by_uri(session, dataset_uri) -> Dataset: if not dataset: raise exceptions.ObjectNotFound('Dataset', dataset_uri) return dataset + + @staticmethod + def count_group_datasets(session, environment_uri, group_uri) -> int: + return ( + session.query(Dataset) + .filter( + and_( + Dataset.environmentUri == environment_uri, + Dataset.SamlAdminGroupName == group_uri + )) + .count() + ) + diff --git a/backend/dataall/modules/datasets/services/dataset_group_resource.py b/backend/dataall/modules/datasets/services/dataset_group_resource.py new file mode 100644 index 000000000..e05d06e23 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_group_resource.py @@ -0,0 +1,11 @@ +from dataall.core.group.services.group_resource_manager import GroupResource, GroupResourceManager +from dataall.modules.datasets.db.dataset_repository import DatasetRepository + + +class DatasetGroupResource(GroupResource): + def count_resources(self, session, environment_uri, group_uri) -> int: + return DatasetRepository.count_group_datasets(session, environment_uri, group_uri) + + +GroupResourceManager.register(DatasetGroupResource()) + From 9f2748e8869bfccd4264fb41112b1c01ff9ca510 Mon 
Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 18:21:46 +0200 Subject: [PATCH 101/346] Added dataset data policy to import it with other stacks --- backend/dataall/modules/datasets/cdk/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py index 9d1e205b8..eaa9ad99f 100644 --- a/backend/dataall/modules/datasets/cdk/__init__.py +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -1,3 +1,4 @@ -from dataall.modules.datasets.cdk import dataset_stack, databrew_policy, glue_policy, lakeformation_policy +from dataall.modules.datasets.cdk import dataset_stack, databrew_policy, glue_policy, lakeformation_policy, \ + dataset_policy -__all__ = ["dataset_stack", "databrew_policy", "glue_policy", "lakeformation_policy"] +__all__ = ["dataset_stack", "databrew_policy", "glue_policy", "lakeformation_policy", "dataset_policy"] From 23e9c9453b116ce757809110828b9679eac47c78 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 16:10:02 +0200 Subject: [PATCH 102/346] Introduced module dependencies --- backend/dataall/modules/datasets/__init__.py | 17 +++---- backend/dataall/modules/loader.py | 47 +++++++++++++++++-- backend/dataall/modules/notebooks/__init__.py | 8 ++-- 3 files changed, 55 insertions(+), 17 deletions(-) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 60349f8b4..36f84a969 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,8 +2,6 @@ import logging from typing import List -from dataall.api.Objects.Vote.resolvers import add_vote_type -from dataall.db.api import TargetType from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from 
dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer @@ -17,11 +15,14 @@ class DatasetApiModuleInterface(ModuleInterface): """Implements ModuleInterface for dataset GraphQl lambda""" - @classmethod - def is_supported(cls, modes): + @staticmethod + def is_supported(modes): return ImportMode.API in modes def __init__(self): + # these imports are placed inside the method because they are only related to GraphQL api. + from dataall.db.api import TargetType + from dataall.api.Objects.Vote.resolvers import add_vote_type from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition @@ -63,8 +64,8 @@ def __init__(self): class DatasetAsyncHandlersModuleInterface(ModuleInterface): """Implements ModuleInterface for dataset async lambda""" - @classmethod - def is_supported(cls, modes: List[ImportMode]): + @staticmethod + def is_supported(modes: List[ImportMode]): return ImportMode.HANDLERS in modes def __init__(self): @@ -75,8 +76,8 @@ def __init__(self): class DatasetCdkModuleInterface(ModuleInterface): """Loads dataset cdk stacks """ - @classmethod - def is_supported(cls, modes: List[ImportMode]): + @staticmethod + def is_supported(modes: List[ImportMode]): return ImportMode.CDK in modes def __init__(self): diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index e48583c13..63cbb524d 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -3,7 +3,7 @@ import logging from abc import ABC, abstractmethod from enum import Enum, auto -from typing import List +from typing import List, Type from dataall.core.config import config @@ -29,10 +29,32 @@ class ModuleInterface(ABC): An interface of the module. 
The implementation should be part of __init__.py of the module Contains an API that will be called from core part """ - @classmethod + @staticmethod @abstractmethod - def is_supported(cls, modes: List[ImportMode]): - pass + def is_supported(modes: List[ImportMode]) -> bool: + """ + Return True if the module interface supports any of the ImportMode and should be loaded + """ + raise NotImplementedError("is_supported is not implemented") + + @staticmethod + @abstractmethod + def name() -> str: + """ + Returns name of the module. Should be the same if it's specified in the config file + """ + raise NotImplementedError("name is not implemented") + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + """ + It describes on what modules this ModuleInterface depends on. + It will be used to eventually load these module dependencies. Even if a dependency module is not active + in the config file. + + The default value is no dependencies + """ + return [] def load_modules(modes: List[ImportMode]) -> None: @@ -47,7 +69,11 @@ def load_modules(modes: List[ImportMode]) -> None: return log.info("Found %d modules that have been found in the config", len(modules)) + + inactive = set() + in_config = set() for name, props in modules.items(): + in_config.add(name) if "active" not in props: raise ValueError(f"Status is not defined for {name} module") @@ -55,6 +81,7 @@ def load_modules(modes: List[ImportMode]) -> None: if not active: log.info(f"Module {name} is not active. Skipping...") + inactive.add(name) continue if not _import_module(name): @@ -62,10 +89,20 @@ def load_modules(modes: List[ImportMode]) -> None: log.info(f"Module {name} is loaded") - for module in ModuleInterface.__subclasses__(): + modules = ModuleInterface.__subclasses__() + for module in modules: if module.is_supported(modes): module() + modules = ModuleInterface.__subclasses__() # reload modules. 
Can load a new modules + for module in modules: + if module.name() in inactive: + log.info(f"There is a module that depends on {module.name()}. " + + "The module has been loaded despite it's inactive.") + elif module.name() not in in_config: + log.info(f"There is a module that depends on {module.name()}. " + + "The module has been loaded despite it's not specified in the configuration file.") + log.info("All modules have been imported") diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index b63e0fa51..347abff53 100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -11,8 +11,8 @@ class NotebookApiModuleInterface(ModuleInterface): """Implements ModuleInterface for notebook GraphQl lambda""" - @classmethod - def is_supported(cls, modes): + @staticmethod + def is_supported(modes): return ImportMode.API in modes def __init__(self): @@ -27,8 +27,8 @@ def __init__(self): class NotebookCdkModuleInterface(ModuleInterface): """Implements ModuleInterface for notebook ecs tasks""" - @classmethod - def is_supported(cls, modes): + @staticmethod + def is_supported(modes): return ImportMode.CDK in modes def __init__(self): From 50a2486853992e6af3bf57fd8142efccc0cbd826 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 16:45:05 +0200 Subject: [PATCH 103/346] Added a few check for loading --- backend/dataall/modules/loader.py | 52 +++++++++++++++++++++++++------ 1 file changed, 43 insertions(+), 9 deletions(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 63cbb524d..7e6f24169 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -1,9 +1,10 @@ """Load modules that are specified in the configuration file""" import importlib import logging +import sys from abc import ABC, abstractmethod from enum import Enum, auto -from typing import List, Type +from typing import List, 
Type, Set from dataall.core.config import config @@ -94,14 +95,8 @@ def load_modules(modes: List[ImportMode]) -> None: if module.is_supported(modes): module() - modules = ModuleInterface.__subclasses__() # reload modules. Can load a new modules - for module in modules: - if module.name() in inactive: - log.info(f"There is a module that depends on {module.name()}. " + - "The module has been loaded despite it's inactive.") - elif module.name() not in in_config: - log.info(f"There is a module that depends on {module.name()}. " + - "The module has been loaded despite it's not specified in the configuration file.") + _check_loading_correct() + _describe_loading(in_config, inactive) log.info("All modules have been imported") @@ -112,3 +107,42 @@ def _import_module(name): return True except ModuleNotFoundError: return False + + +def _check_loading_correct(in_config: Set[str]): + """ + To avoid unintentional loading (without ModuleInterface) we can check all loaded modules. + Unintentional/incorrect loading might happen if module A has a direct reference to module B without declaring it + in ModuleInterface. Doing so, this might lead to a problem when a module interface require to load something during + initialization. But since ModuleInterface is not initializing properly (using depends_on) + some functionality may work wrongly. 
+ """ + + expected_load = set(in_config) + for module in ModuleInterface.__subclasses__(): + for dependency in module.depends_on(): + expected_load.add(dependency.name()) + + for module in ModuleInterface.__subclasses__(): + if module.name() not in expected_load: + raise ImportError(f"ModuleInterface has not been initialized for module {module.name()}") + + for module in sys.modules.keys(): + if module.startswith(_MODULE_PREFIX): + name = module.lstrip(_MODULE_PREFIX).split(".")[0] + if name not in expected_load: + raise ImportError(f"The package {module} has been imported, but it doesn't contain ModuleInterface") + + +def _describe_loading(in_config: Set[str], inactive: Set[str]): + modules = ModuleInterface.__subclasses__() + for module in modules: + name = module.name() + log.debug(f"The {name} module was loaded") + if name in inactive: + log.info(f"There is a module that depends on {module.name()}. " + + "The module has been loaded despite it's inactive.") + elif name not in in_config: + log.info(f"There is a module that depends on {module.name()}. 
" + + "The module has been loaded despite it's not specified in the configuration file.") + From 400492a17251906191ebbb3ee91f6c1dc2b3be28 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 27 Apr 2023 18:15:47 +0200 Subject: [PATCH 104/346] Added order for initialization Added test for the module loader --- .../cdkproxy/stacks/policies/__init__.py | 5 +- .../modules/dataset_sharing/__init__.py | 16 ++ backend/dataall/modules/datasets/__init__.py | 7 +- backend/dataall/modules/loader.py | 124 +++++++++++---- backend/dataall/modules/notebooks/__init__.py | 2 +- tests/modules/test_loader.py | 145 ++++++++++++++++++ 6 files changed, 266 insertions(+), 33 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/__init__.py create mode 100644 tests/modules/test_loader.py diff --git a/backend/dataall/cdkproxy/stacks/policies/__init__.py b/backend/dataall/cdkproxy/stacks/policies/__init__.py index 964bb37a5..4391d0287 100644 --- a/backend/dataall/cdkproxy/stacks/policies/__init__.py +++ b/backend/dataall/cdkproxy/stacks/policies/__init__.py @@ -1,9 +1,8 @@ """Contains the code for creating environment policies""" from dataall.cdkproxy.stacks.policies import ( - _lambda, cloudformation, codestar, databrew, glue, - lakeformation, quicksight, redshift, stepfunctions, data_policy, service_policy + _lambda, cloudformation, codestar, quicksight, redshift, stepfunctions, data_policy, service_policy ) -__all__ = ["_lambda", "cloudformation", "codestar", "databrew", "glue", "lakeformation", "quicksight", +__all__ = ["_lambda", "cloudformation", "codestar", "quicksight", "redshift", "stepfunctions", "data_policy", "service_policy", "mlstudio"] diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py new file mode 100644 index 000000000..ed6b62297 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -0,0 +1,16 @@ +import logging +from typing import List + +from 
dataall.modules.loader import ModuleInterface, ImportMode + + +log = logging.getLogger(__name__) + + +class SharingApiModuleInterface(ModuleInterface): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.API in modes + + def __init__(self): + log.info("API pf dataset sharing has been imported") diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 36f84a969..e6696b534 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -1,7 +1,8 @@ """Contains the code related to datasets""" import logging -from typing import List +from typing import List, Type +from dataall.modules.dataset_sharing import SharingApiModuleInterface from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer @@ -19,6 +20,10 @@ class DatasetApiModuleInterface(ModuleInterface): def is_supported(modes): return ImportMode.API in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [SharingApiModuleInterface] + def __init__(self): # these imports are placed inside the method because they are only related to GraphQL api. 
from dataall.db.api import TargetType diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 7e6f24169..034ccee3c 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -3,6 +3,7 @@ import logging import sys from abc import ABC, abstractmethod +from collections import defaultdict, deque from enum import Enum, auto from typing import List, Type, Set @@ -38,13 +39,12 @@ def is_supported(modes: List[ImportMode]) -> bool: """ raise NotImplementedError("is_supported is not implemented") - @staticmethod - @abstractmethod - def name() -> str: + @classmethod + def name(cls) -> str: """ Returns name of the module. Should be the same if it's specified in the config file """ - raise NotImplementedError("name is not implemented") + return _remove_module_prefix(cls.__module__) @staticmethod def depends_on() -> List[Type['ModuleInterface']]: @@ -63,14 +63,19 @@ def load_modules(modes: List[ImportMode]) -> None: Loads all modules from the config Loads only requested functionality (submodules) using the mode parameter """ - try: - modules = config.get_property("modules") - except KeyError: - log.info('"modules" has not been found in the config file. 
Nothing to load') - return + in_config, inactive = _load_modules() + _check_loading_correct(in_config, modes) + _initialize_modules(modes) + _describe_loading(in_config, inactive) - log.info("Found %d modules that have been found in the config", len(modules)) + log.info("All modules have been imported") + +def _load_modules(): + """ + Loads modules but not initializing them + """ + modules = _get_modules_from_config() inactive = set() in_config = set() for name, props in modules.items(): @@ -85,23 +90,27 @@ def load_modules(modes: List[ImportMode]) -> None: inactive.add(name) continue - if not _import_module(name): + if not _load_module(name): raise ValueError(f"Couldn't find module {name} under modules directory") log.info(f"Module {name} is loaded") + return in_config, inactive - modules = ModuleInterface.__subclasses__() - for module in modules: - if module.is_supported(modes): - module() - _check_loading_correct() - _describe_loading(in_config, inactive) +def _get_modules_from_config(): + try: + modules = config.get_property("modules") + except KeyError as e: + raise KeyError('"modules" has not been found in the config file. Nothing to load') from e - log.info("All modules have been imported") + log.info("Found %d modules that have been found in the config", len(modules)) + return modules -def _import_module(name): +def _load_module(name: str): + """ + Loads a module but not initializing it + """ try: importlib.import_module(f"{_MODULE_PREFIX}.{name}") return True @@ -109,7 +118,50 @@ def _import_module(name): return False -def _check_loading_correct(in_config: Set[str]): +def _initialize_modules(modes: List[ImportMode]): + """ + Initialize all modules for supported modes. This method is using topological sorting for a graph of module + dependencies. It's needed to load module in a specific order: first modules to load are without dependencies. 
+ It might help to avoid possible issues if there is a load in the module constructor (which can be the case + if a module supports a few importing modes). + """ + modules = _all_modules() + dependencies = defaultdict(list) + degrees = defaultdict(int) + supported = [] + for module in modules: + if module.is_supported(modes): + supported.append(module) + degrees[module] += len(module.depends_on()) + for dependency in module.depends_on(): + dependencies[dependency].append(module) + + queue = deque() + for module in supported: + if degrees[module] == 0: + queue.append(module) + + initialized = 0 + while queue: + to_init = queue.popleft() + _initialize_module(to_init) + initialized += 1 + + for dependant in dependencies[to_init]: + degrees[dependant] -= 1 + if degrees[dependant] == 0: + queue.append(dependant) + + if initialized < len(degrees): + # We shouldn't reach this point since it should already be covered by python. But just in case + raise ImportError("Not all modules have been initialized. Most likely circular dependency") + + +def _initialize_module(module): + module() # call a constructor for initialization + + +def _check_loading_correct(in_config: Set[str], modes: List[ImportMode]): """ To avoid unintentional loading (without ModuleInterface) we can check all loaded modules. 
Unintentional/incorrect loading might happen if module A has a direct reference to module B without declaring it @@ -119,23 +171,26 @@ def _check_loading_correct(in_config: Set[str]): """ expected_load = set(in_config) - for module in ModuleInterface.__subclasses__(): + for module in _all_modules(): for dependency in module.depends_on(): expected_load.add(dependency.name()) - for module in ModuleInterface.__subclasses__(): - if module.name() not in expected_load: - raise ImportError(f"ModuleInterface has not been initialized for module {module.name()}") + for module in _all_modules(): + if module.is_supported(modes) and module.name() not in expected_load: + raise ImportError( + f"ModuleInterface has not been initialized for module {module.name()}. " + "Declare the module in depends_on" + ) for module in sys.modules.keys(): - if module.startswith(_MODULE_PREFIX): - name = module.lstrip(_MODULE_PREFIX).split(".")[0] - if name not in expected_load: + if module.startswith(_MODULE_PREFIX) and module != __name__: # skip loader + name = _get_module_name(module) + if name and name not in expected_load: raise ImportError(f"The package {module} has been imported, but it doesn't contain ModuleInterface") def _describe_loading(in_config: Set[str], inactive: Set[str]): - modules = ModuleInterface.__subclasses__() + modules = _all_modules() for module in modules: name = module.name() log.debug(f"The {name} module was loaded") @@ -146,3 +201,16 @@ def _describe_loading(in_config: Set[str], inactive: Set[str]): log.info(f"There is a module that depends on {module.name()}. 
" + "The module has been loaded despite it's not specified in the configuration file.") + +def _remove_module_prefix(module: str): + if module.startswith(_MODULE_PREFIX): + return module[len(_MODULE_PREFIX) + 1:] + raise ValueError(f"Module {module} should always starts with {_MODULE_PREFIX}") + + +def _get_module_name(module): + return module[len(_MODULE_PREFIX) + 1:].split(".")[0] # gets only top level module name + + +def _all_modules(): + return ModuleInterface.__subclasses__() diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index 347abff53..127a1eb31 100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -33,4 +33,4 @@ def is_supported(modes): def __init__(self): import dataall.modules.notebooks.cdk - log.info("API of sagemaker notebooks has been imported") + log.info("API of sagemaker notebooks has been imported") \ No newline at end of file diff --git a/tests/modules/test_loader.py b/tests/modules/test_loader.py new file mode 100644 index 000000000..e737c556d --- /dev/null +++ b/tests/modules/test_loader.py @@ -0,0 +1,145 @@ +from abc import ABC +from typing import List, Type + +import pytest + +from dataall.modules.loader import ModuleInterface, ImportMode +from dataall.modules import loader + +order = [] + + +class TestModule(ModuleInterface, ABC): + def __init__(self): + order.append(self.__class__) + + +class TestApiModule(TestModule): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.API in modes + + +class AModule(TestApiModule): + pass + + +class BModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [AModule] + + +class CModule(TestModule): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.CDK in modes + + +class DModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + 
return [BModule] + + +class EModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [BModule] + + +class FModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [EModule] + + +class GModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [AModule, BModule] + + +class IModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [EModule, DModule] + + +class JModule(TestApiModule): + pass + + +class KModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [JModule, EModule] + + +@pytest.fixture(scope='module', autouse=True) +def patch_prefix(): + prefix = loader._MODULE_PREFIX + loader._MODULE_PREFIX = 'tests.modules.test_loader' + yield + + loader._MODULE_PREFIX = prefix + + +@pytest.fixture(scope='function', autouse=True) +def clean_order(): + yield + order.clear() + + +def patch_loading(mocker, all_modules, in_config): + mocker.patch( + 'dataall.modules.loader._all_modules', + return_value=all_modules, + ) + mocker.patch( + 'dataall.modules.loader._load_modules', + return_value=(in_config, {}) + ) + + +def test_nothing_to_load(mocker): + patch_loading(mocker, [], set()) + loader.load_modules([ImportMode.API, ImportMode.CDK]) + assert len(order) == 0 + + +def test_import_with_one_dependency(mocker): + patch_loading(mocker, [AModule, BModule], {BModule}) + loader.load_modules([ImportMode.API]) + assert order == [AModule, BModule] + + +def test_load_with_cdk_mode(mocker): + patch_loading(mocker, [DModule, CModule, BModule], {BModule, CModule}) + loader.load_modules([ImportMode.CDK]) + assert order == [CModule] + + +def test_many_nested_layers(mocker): + patch_loading(mocker, [BModule, CModule, AModule, DModule], {DModule, CModule}) + loader.load_modules([ImportMode.API]) + correct_order = [AModule, BModule, DModule] + assert order == correct_order 
+ assert CModule not in correct_order + + +def test_complex_loading(mocker): + patch_loading(mocker, [ + AModule, BModule, CModule, DModule, EModule, FModule, GModule, IModule, JModule, KModule + ], {AModule, CModule, JModule}) + + loader.load_modules([ImportMode.API]) + assert order == [AModule, JModule, BModule, DModule, EModule, GModule, FModule, IModule, KModule] + + +def test_incorrect_loading(mocker): + patch_loading(mocker, [AModule], set()) # A is not specified in config, but was found + with pytest.raises(ImportError): + loader.load_modules([ImportMode.API]) + + From 068e29aee4b37de9f2478b1f31557c7e57919e8c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 14:08:21 +0200 Subject: [PATCH 105/346] Changed the error message and comments --- backend/dataall/modules/loader.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 034ccee3c..73b78f344 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -153,8 +153,9 @@ def _initialize_modules(modes: List[ImportMode]): queue.append(dependant) if initialized < len(degrees): - # We shouldn't reach this point since it should already be covered by python. But just in case - raise ImportError("Not all modules have been initialized. Most likely circular dependency") + # Happens if the ModuleInterface for dependency doesn't support import mode + # The case when there is circular imports should already be covered by python loader + raise ImportError("Not all modules have been initialized. 
Check if your import modes are correct") def _initialize_module(module): From 86f4529d53fcbdaed07e502db7c8c6be1a3665f9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 14:38:33 +0200 Subject: [PATCH 106/346] Fixed error and moved api import to API ModuleInterface --- backend/dataall/cdkproxy/stacks/policies/__init__.py | 5 ++--- backend/dataall/modules/datasets/__init__.py | 4 ++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/backend/dataall/cdkproxy/stacks/policies/__init__.py b/backend/dataall/cdkproxy/stacks/policies/__init__.py index 964bb37a5..4391d0287 100644 --- a/backend/dataall/cdkproxy/stacks/policies/__init__.py +++ b/backend/dataall/cdkproxy/stacks/policies/__init__.py @@ -1,9 +1,8 @@ """Contains the code for creating environment policies""" from dataall.cdkproxy.stacks.policies import ( - _lambda, cloudformation, codestar, databrew, glue, - lakeformation, quicksight, redshift, stepfunctions, data_policy, service_policy + _lambda, cloudformation, codestar, quicksight, redshift, stepfunctions, data_policy, service_policy ) -__all__ = ["_lambda", "cloudformation", "codestar", "databrew", "glue", "lakeformation", "quicksight", +__all__ = ["_lambda", "cloudformation", "codestar", "quicksight", "redshift", "stepfunctions", "data_policy", "service_policy", "mlstudio"] diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 60349f8b4..69e5fb86d 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,8 +2,6 @@ import logging from typing import List -from dataall.api.Objects.Vote.resolvers import add_vote_type -from dataall.db.api import TargetType from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import 
DatasetLocationIndexer @@ -22,6 +20,8 @@ def is_supported(cls, modes): return ImportMode.API in modes def __init__(self): + from dataall.api.Objects.Vote.resolvers import add_vote_type + from dataall.db.api import TargetType from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition From f4b1f673fe37eb2d501357942335e5c5a568514e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:15:33 +0200 Subject: [PATCH 107/346] Removed unused method --- .../dataall/modules/datasets/services/dataset_service.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 63a57dcdb..4d54b5573 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -704,11 +704,3 @@ def list_group_datasets(session, environment_id, group_uri): ) .all() ) - - @staticmethod - def list_env_datasets(session, environment_uri): - return ( - session.query(Dataset) - .filter(Dataset.environmentUri == environment_uri) - .all() - ) From a52c145ffa48cbc4f200d1b048d80d64d4832434 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:21:16 +0200 Subject: [PATCH 108/346] Changed method for checking permissions --- .../api/storage_location/resolvers.py | 2 -- .../datasets/services/dataset_location.py | 27 +++++++++---------- .../datasets/services/dataset_service.py | 15 +++++------ .../datasets/services/dataset_table.py | 19 ++++++------- 4 files changed, 30 insertions(+), 33 deletions(-) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 5e7351aa1..35386dacb 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -21,10 +21,8 @@ def create_storage_location( location = DatasetLocationService.create_dataset_location( session=session, username=context.username, - groups=context.groups, uri=datasetUri, data=input, - check_perm=True, ) S3DatasetLocationHandler.create_bucket_prefix(location) diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index 73885eb51..a8c0eb61d 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -2,7 +2,8 @@ from sqlalchemy import and_, or_ -from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission +from dataall.db.api import Glossary from dataall.db import models, api, paginate, exceptions from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation @@ -14,15 +15,13 @@ class DatasetLocationService: @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(CREATE_DATASET_FOLDER) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET_FOLDER) def create_dataset_location( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, + data: dict = None ) -> DatasetStorageLocation: dataset = DatasetRepository.get_dataset_by_uri(session, uri) exists = ( @@ -68,8 +67,8 @@ def create_dataset_location( return location @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(LIST_DATASET_FOLDERS) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS) def list_dataset_locations( session, username: str, @@ -93,8 +92,8 @@ def list_dataset_locations( ).to_dict() @staticmethod - 
@has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(LIST_DATASET_FOLDERS) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS) def get_dataset_location( session, username: str, @@ -106,8 +105,8 @@ def get_dataset_location( return DatasetLocationService.get_location_by_uri(session, data['locationUri']) @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(UPDATE_DATASET_FOLDER) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_FOLDER) def update_dataset_location( session, username: str, @@ -136,8 +135,8 @@ def update_dataset_location( return location @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(DELETE_DATASET_FOLDER) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_FOLDER) def delete_dataset_location( session, username: str, diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 4d54b5573..710e2cc1b 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -4,10 +4,9 @@ from sqlalchemy import and_, or_ from sqlalchemy.orm import Query +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import ( Environment, - has_tenant_perm, - has_resource_perm, ResourcePolicy, KeyValueTag, Vote, @@ -31,8 +30,8 @@ class DatasetService: @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(CREATE_DATASET) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET) def create_dataset( session, username: str, @@ -201,7 +200,7 @@ def create_dataset_stack(session, dataset: Dataset) -> models.Stack: ) @staticmethod - @has_tenant_perm(MANAGE_DATASETS) + @has_tenant_permission(MANAGE_DATASETS) def get_dataset( session, username: str, @@ -294,8 +293,8 @@ def 
paginated_dataset_tables( ).to_dict() @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(UPDATE_DATASET) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET) def update_dataset( session, username, groups, uri, data=None, check_perm=None ) -> Dataset: @@ -668,7 +667,7 @@ def query_environment_datasets(session, username, groups, uri, filter) -> Query: return query @staticmethod - @has_resource_perm(LIST_ENVIRONMENT_DATASETS) + @has_resource_permission(LIST_ENVIRONMENT_DATASETS) def paginated_environment_datasets( session, username, groups, uri, data=None, check_perm=None ) -> dict: diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table.py index 437e9384e..50f543005 100644 --- a/backend/dataall/modules/datasets/services/dataset_table.py +++ b/backend/dataall/modules/datasets/services/dataset_table.py @@ -2,8 +2,9 @@ from sqlalchemy.sql import and_ +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db import models, api, exceptions, paginate -from dataall.db.api import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment +from dataall.db.api import Glossary, ResourcePolicy, Environment from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ UPDATE_DATASET_TABLE, DATASET_TABLE_READ from dataall.modules.datasets.services.dataset_service import DatasetService @@ -15,8 +16,8 @@ class DatasetTableService: @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(CREATE_DATASET_TABLE) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET_TABLE) def create_dataset_table( session, username: str, @@ -79,7 +80,7 @@ def create_dataset_table( return table @staticmethod - @has_tenant_perm(MANAGE_DATASETS) + @has_tenant_permission(MANAGE_DATASETS) def list_dataset_tables( 
session, username: str, @@ -101,7 +102,7 @@ def list_dataset_tables( ).to_dict() @staticmethod - @has_tenant_perm(MANAGE_DATASETS) + @has_tenant_permission(MANAGE_DATASETS) def get_dataset_table( session, username: str, @@ -113,8 +114,8 @@ def get_dataset_table( return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(UPDATE_DATASET_TABLE) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_TABLE) def update_dataset_table( session, username: str, @@ -139,8 +140,8 @@ def update_dataset_table( return table @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(DELETE_DATASET_TABLE) + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_TABLE) def delete_dataset_table( session, username: str, From 19cc9aa7967fbf0c9380223ca3595c46b2adfd65 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:30:18 +0200 Subject: [PATCH 109/346] Reduce number of parameters for dataset location service --- .../datasets/api/storage_location/resolvers.py | 12 +----------- .../datasets/services/dataset_location.py | 18 +++--------------- 2 files changed, 4 insertions(+), 26 deletions(-) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 35386dacb..e47ce1007 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -20,7 +20,6 @@ def create_storage_location( with context.engine.scoped_session() as session: location = DatasetLocationService.create_dataset_location( session=session, - username=context.username, uri=datasetUri, data=input, ) @@ -38,7 +37,7 @@ def list_dataset_locations(context, source, filter: dict = None): filter = {} with context.engine.scoped_session() as session: return 
DatasetLocationService.list_dataset_locations( - session=session, uri=source.datasetUri, data=filter, check_perm=True + session=session, uri=source.datasetUri, data=filter ) @@ -47,11 +46,8 @@ def get_storage_location(context, source, locationUri=None): location = DatasetLocationService.get_location_by_uri(session, locationUri) return DatasetLocationService.get_dataset_location( session=session, - username=context.username, - groups=context.groups, uri=location.datasetUri, data={'locationUri': location.locationUri}, - check_perm=True, ) @@ -64,11 +60,8 @@ def update_storage_location( input['locationUri'] = location.locationUri DatasetLocationService.update_dataset_location( session=session, - username=context.username, - groups=context.groups, uri=location.datasetUri, data=input, - check_perm=True, ) DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) @@ -80,11 +73,8 @@ def remove_storage_location(context, source, locationUri: str = None): location = DatasetLocationService.get_location_by_uri(session, locationUri) DatasetLocationService.delete_dataset_location( session=session, - username=context.username, - groups=context.groups, uri=location.datasetUri, data={'locationUri': location.locationUri}, - check_perm=True, ) DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) return True diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location.py index a8c0eb61d..240c52f0e 100644 --- a/backend/dataall/modules/datasets/services/dataset_location.py +++ b/backend/dataall/modules/datasets/services/dataset_location.py @@ -2,6 +2,7 @@ from sqlalchemy import and_, or_ +from dataall.core.context import get_context from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import Glossary from dataall.db import models, api, paginate, exceptions @@ -19,7 +20,6 @@ class DatasetLocationService: 
@has_resource_permission(CREATE_DATASET_FOLDER) def create_dataset_location( session, - username: str, uri: str, data: dict = None ) -> DatasetStorageLocation: @@ -58,7 +58,7 @@ def create_dataset_location( if 'terms' in data.keys(): Glossary.set_glossary_terms_links( session, - username, + get_context().username, location.locationUri, 'DatasetStorageLocation', data.get('terms', []), @@ -71,11 +71,8 @@ def create_dataset_location( @has_resource_permission(LIST_DATASET_FOLDERS) def list_dataset_locations( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> dict: query = ( session.query(DatasetStorageLocation) @@ -96,11 +93,8 @@ def list_dataset_locations( @has_resource_permission(LIST_DATASET_FOLDERS) def get_dataset_location( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> DatasetStorageLocation: return DatasetLocationService.get_location_by_uri(session, data['locationUri']) @@ -109,11 +103,8 @@ def get_dataset_location( @has_resource_permission(UPDATE_DATASET_FOLDER) def update_dataset_location( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> DatasetStorageLocation: location = data.get( @@ -127,7 +118,7 @@ def update_dataset_location( if 'terms' in data.keys(): Glossary.set_glossary_terms_links( session, - username, + get_context().username, location.locationUri, 'DatasetStorageLocation', data.get('terms', []), @@ -139,11 +130,8 @@ def update_dataset_location( @has_resource_permission(DELETE_DATASET_FOLDER) def delete_dataset_location( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ): location = DatasetLocationService.get_location_by_uri( session, data['locationUri'] From cd218e2579471d98a5a07c923b2d2ac142ef8dc9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:31:04 +0200 Subject: [PATCH 110/346] Renamed files --- 
backend/dataall/aws/handlers/redshift.py | 2 +- backend/dataall/db/api/redshift_cluster.py | 2 +- backend/dataall/modules/datasets/api/dataset/resolvers.py | 2 +- backend/dataall/modules/datasets/api/profiling/resolvers.py | 2 +- .../modules/datasets/api/storage_location/resolvers.py | 2 +- backend/dataall/modules/datasets/api/table/resolvers.py | 2 +- .../dataall/modules/datasets/api/table_column/resolvers.py | 2 +- .../dataall/modules/datasets/handlers/glue_column_handler.py | 2 +- .../dataall/modules/datasets/handlers/glue_table_handler.py | 2 +- .../dataall/modules/datasets/handlers/s3_location_handler.py | 2 +- backend/dataall/modules/datasets/indexers/dataset_indexer.py | 2 +- .../{dataset_location.py => dataset_location_service.py} | 0 backend/dataall/modules/datasets/services/dataset_service.py | 2 +- .../services/{dataset_table.py => dataset_table_service.py} | 0 .../dataall/modules/datasets/tasks/subscription_service.py | 4 ++-- backend/dataall/modules/datasets/tasks/tables_syncer.py | 2 +- tests/api/test_dataset_table.py | 2 +- 17 files changed, 16 insertions(+), 16 deletions(-) rename backend/dataall/modules/datasets/services/{dataset_location.py => dataset_location_service.py} (100%) rename backend/dataall/modules/datasets/services/{dataset_table.py => dataset_table_service.py} (100%) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index f606b9a79..0ace9d4f3 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -10,7 +10,7 @@ from ... 
import db from ...db import models # TODO should be migrated in the redshift module -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index ca7a69515..f43614421 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -497,7 +497,7 @@ def enable_copy_table( cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) # TODO this dirty hack should be removed in the redshift module or after pipeline migration (circular import) - from dataall.modules.datasets.services.dataset_table import DatasetTableService + from dataall.modules.datasets.services.dataset_table_service import DatasetTableService table = DatasetTableService.get_dataset_table_by_uri( session, data['tableUri'] ) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index a75e6d414..ab6e4b0d3 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -18,7 +18,7 @@ from dataall.db.api.organization import Organization from dataall.modules.datasets import Dataset from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import 
DatasetTableIndexer diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 671b79c32..d156eee95 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -7,7 +7,7 @@ from dataall.db import api, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.db.models import DatasetProfilingRun from dataall.modules.datasets.services.permissions import PROFILE_DATASET_TABLE diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index e47ce1007..4b1ae1726 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -9,7 +9,7 @@ from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import UPDATE_DATASET_FOLDER diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index c5e6726a2..1ae69e7ae 
100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -16,7 +16,7 @@ from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 951b2038f..591459f87 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import paginate, models from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index 59dca4528..07a1c41b5 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -6,7 +6,7 @@ from dataall.modules.datasets.aws.glue_table_client import GlueTableClient from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable -from dataall.modules.datasets.services.dataset_table 
import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index d41b71ebc..f648dc330 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -5,7 +5,7 @@ from dataall.db import models from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_location_handler.py index ba8cf6eda..1990e7a9a 100644 --- a/backend/dataall/modules/datasets/handlers/s3_location_handler.py +++ b/backend/dataall/modules/datasets/handlers/s3_location_handler.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 1936b66be..8af54600f 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -3,7 +3,7 @@ from dataall import db from dataall.db import models from dataall.modules.datasets.db.models import Dataset -from 
dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.upsert import BaseIndexer diff --git a/backend/dataall/modules/datasets/services/dataset_location.py b/backend/dataall/modules/datasets/services/dataset_location_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/dataset_location.py rename to backend/dataall/modules/datasets/services/dataset_location_service.py diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 710e2cc1b..ddf5fc92e 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -17,7 +17,7 @@ from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET from dataall.utils.naming_convention import ( diff --git a/backend/dataall/modules/datasets/services/dataset_table.py b/backend/dataall/modules/datasets/services/dataset_table_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/dataset_table.py rename to backend/dataall/modules/datasets/services/dataset_table_service.py diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py 
b/backend/dataall/modules/datasets/tasks/subscription_service.py index 033b27bd2..a261fad8a 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -16,8 +16,8 @@ from dataall.modules.datasets.services.share_notification_service import ShareNotificationService from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils -from dataall.modules.datasets.services.dataset_table import DatasetTableService -from dataall.modules.datasets.services.dataset_location import DatasetLocationService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index c17951916..3a399fe14 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -13,7 +13,7 @@ from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils.alarm_service import AlarmService -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService root = logging.getLogger() root.setLevel(logging.INFO) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 9894569ae..dc8cdbccf 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.services.dataset_table import DatasetTableService +from 
dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset From 3e24acb1d99bcb14f58684e11d83667a1dc0bd67 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:34:26 +0200 Subject: [PATCH 111/346] Reduced number of parameters for the table service --- .../modules/datasets/api/table/resolvers.py | 15 -------------- .../services/dataset_table_service.py | 20 +++---------------- 2 files changed, 3 insertions(+), 32 deletions(-) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 1ae69e7ae..8e72bb8df 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -25,11 +25,8 @@ def create_table(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: table = DatasetTableService.create_dataset_table( session=session, - username=context.username, - groups=context.groups, uri=datasetUri, data=input, - check_perm=True, ) DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @@ -43,11 +40,8 @@ def list_dataset_tables(context, source, filter: dict = None): with context.engine.scoped_session() as session: return DatasetTableService.list_dataset_tables( session=session, - username=context.username, - groups=context.groups, uri=source.datasetUri, data=filter, - check_perm=True, ) @@ -56,13 +50,10 @@ def get_table(context, source: Dataset, tableUri: str = None): table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) return DatasetTableService.get_dataset_table( session=session, - username=context.username, - groups=context.groups, uri=table.datasetUri, data={ 'tableUri': tableUri, }, - check_perm=True, ) @@ -77,11 +68,8 @@ def update_table(context, source, tableUri: str = None, input: dict = None): 
DatasetTableService.update_dataset_table( session=session, - username=context.username, - groups=context.groups, uri=dataset.datasetUri, data=input, - check_perm=True, ) DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @@ -92,13 +80,10 @@ def delete_table(context, source, tableUri: str = None): table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) DatasetTableService.delete_dataset_table( session=session, - username=context.username, - groups=context.groups, uri=table.datasetUri, data={ 'tableUri': tableUri, }, - check_perm=True, ) DatasetTableIndexer.delete_doc(doc_id=tableUri) return True diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 50f543005..ddfde9c5b 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -2,6 +2,7 @@ from sqlalchemy.sql import and_ +from dataall.core.context import get_context from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db import models, api, exceptions, paginate from dataall.db.api import Glossary, ResourcePolicy, Environment @@ -20,11 +21,8 @@ class DatasetTableService: @has_resource_permission(CREATE_DATASET_TABLE) def create_dataset_table( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> DatasetTable: dataset = DatasetService.get_dataset_by_uri(session, uri) exists = ( @@ -62,7 +60,7 @@ def create_dataset_table( session.add(table) if data.get('terms') is not None: Glossary.set_glossary_terms_links( - session, username, table.tableUri, 'DatasetTable', data.get('terms', []) + session, get_context().username, table.tableUri, 'DatasetTable', data.get('terms', []) ) session.commit() @@ -83,11 +81,8 @@ def create_dataset_table( @has_tenant_permission(MANAGE_DATASETS) def 
list_dataset_tables( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> dict: query = ( session.query(DatasetTable) @@ -105,11 +100,8 @@ def list_dataset_tables( @has_tenant_permission(MANAGE_DATASETS) def get_dataset_table( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> DatasetTable: return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) @@ -118,11 +110,8 @@ def get_dataset_table( @has_resource_permission(UPDATE_DATASET_TABLE) def update_dataset_table( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ): table = data.get( 'table', @@ -134,7 +123,7 @@ def update_dataset_table( if data.get('terms') is not None: Glossary.set_glossary_terms_links( - session, username, table.tableUri, 'DatasetTable', data.get('terms', []) + session, get_context().username, table.tableUri, 'DatasetTable', data.get('terms', []) ) return table @@ -144,11 +133,8 @@ def update_dataset_table( @has_resource_permission(DELETE_DATASET_TABLE) def delete_dataset_table( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ): table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() From b5eb774f8b238383fef386d02df09640d01301b7 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 28 Apr 2023 16:42:16 +0200 Subject: [PATCH 112/346] Reduced number of parameters for the dataset service --- .../modules/datasets/api/dataset/resolvers.py | 16 +---------- .../datasets/services/dataset_service.py | 27 +++++++------------ 2 files changed, 11 insertions(+), 32 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index ab6e4b0d3..52797fd35 100644 --- 
a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -91,12 +91,7 @@ def import_dataset(context: Context, source, input=None): def get_dataset(context, source, datasetUri=None): with context.engine.scoped_session() as session: - dataset = DatasetService.get_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - ) + dataset = DatasetService.get_dataset(session, uri=datasetUri) if dataset.SamlAdminGroupName in context.groups: dataset.userRoleForDataset = DatasetRole.Admin.value return dataset @@ -220,11 +215,8 @@ def update_dataset(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: updated_dataset = DatasetService.update_dataset( session=session, - username=context.username, - groups=context.groups, uri=datasetUri, data=input, - check_perm=True, ) DatasetIndexer.upsert(session, dataset_uri=datasetUri) @@ -655,11 +647,8 @@ def list_datasets_created_in_environment( with context.engine.scoped_session() as session: return DatasetService.paginated_environment_datasets( session=session, - username=context.username, - groups=context.groups, uri=environmentUri, data=filter, - check_perm=True, ) @@ -671,10 +660,7 @@ def list_datasets_owned_by_env_group( with context.engine.scoped_session() as session: return DatasetService.paginated_environment_group_datasets( session=session, - username=context.username, - groups=context.groups, envUri=environmentUri, groupUri=groupUri, data=filter, - check_perm=True, ) \ No newline at end of file diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index ddf5fc92e..75cc89047 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -4,6 +4,7 @@ from sqlalchemy import and_, or_ from sqlalchemy.orm 
import Query +from dataall.core.context import get_context from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import ( Environment, @@ -201,14 +202,7 @@ def create_dataset_stack(session, dataset: Dataset) -> models.Stack: @staticmethod @has_tenant_permission(MANAGE_DATASETS) - def get_dataset( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> Dataset: + def get_dataset(session, uri: str) -> Dataset: return DatasetService.get_dataset_by_uri(session, uri) @staticmethod @@ -295,9 +289,8 @@ def paginated_dataset_tables( @staticmethod @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(UPDATE_DATASET) - def update_dataset( - session, username, groups, uri, data=None, check_perm=None - ) -> Dataset: + def update_dataset(session, uri, data=None) -> Dataset: + username = get_context().username dataset: Dataset = DatasetService.get_dataset_by_uri(session, uri) if data and isinstance(data, dict): for k in data.keys(): @@ -626,7 +619,7 @@ def count_dataset_tables(session, dataset_uri): ) @staticmethod - def query_environment_group_datasets(session, username, groups, envUri, groupUri, filter) -> Query: + def query_environment_group_datasets(session, envUri, groupUri, filter) -> Query: query = session.query(Dataset).filter( and_( Dataset.environmentUri == envUri, @@ -647,7 +640,7 @@ def query_environment_group_datasets(session, username, groups, envUri, groupUri return query @staticmethod - def query_environment_datasets(session, username, groups, uri, filter) -> Query: + def query_environment_datasets(session, uri, filter) -> Query: query = session.query(Dataset).filter( and_( Dataset.environmentUri == uri, @@ -669,11 +662,11 @@ def query_environment_datasets(session, username, groups, uri, filter) -> Query: @staticmethod @has_resource_permission(LIST_ENVIRONMENT_DATASETS) def paginated_environment_datasets( - session, username, groups, uri, 
data=None, check_perm=None + session, uri, data=None, ) -> dict: return paginate( query=DatasetService.query_environment_datasets( - session, username, groups, uri, data + session, uri, data ), page=data.get('page', 1), page_size=data.get('pageSize', 10), @@ -681,11 +674,11 @@ def paginated_environment_datasets( @staticmethod def paginated_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None + session, envUri, groupUri, data=None ) -> dict: return paginate( query=DatasetService.query_environment_group_datasets( - session, username, groups, envUri, groupUri, data + session, envUri, groupUri, data ), page=data.get('page', 1), page_size=data.get('pageSize', 10), From 7ad099ff77c76b92c8604fa618090e7c15b1291a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 11:53:58 +0200 Subject: [PATCH 113/346] Bump flask from 2.0.3 to 2.3.2 in /backend (#439) Bumps [flask](https://github.com/pallets/flask) from 2.0.3 to 2.3.2.

Release notes

Sourced from flask's releases.

2.3.2

This is a security fix release for the 2.3.x release branch.

2.3.1

This is a fix release for the 2.3.x release branch.

2.3.0

This is a feature release, which includes new features, removes previously deprecated code, and adds new deprecations. The 2.3.x branch is now the supported fix branch, the 2.2.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as pip-tools to pin all dependencies and control upgrades. Test with warnings treated as errors to be able to adapt to deprecation warnings early.

2.2.4

This is a fix release for the 2.2.x release branch.

2.2.3

This is a fix release for the 2.2.x release branch.

2.2.2

This is a fix release for the 2.2.0 feature release.

2.2.1

This is a fix release for the 2.2.0 feature release.

2.2.0

This is a feature release, which includes new features and removes previously deprecated code. The 2.2.x branch is now the supported bug fix branch, the 2.1.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as pip-tools to pin all dependencies and control upgrades.

2.1.3

... (truncated)

Changelog

Sourced from flask's changelog.

Version 2.3.2

Released 2023-05-01

  • Set Vary: Cookie header when the session is accessed, modified, or refreshed.
  • Update Werkzeug requirement to >=2.3.3 to apply recent bug fixes.

Version 2.3.1

Released 2023-04-25

  • Restore deprecated from flask import Markup. :issue:5084

Version 2.3.0

Released 2023-04-25

  • Drop support for Python 3.7. :pr:5072

  • Update minimum requirements to the latest versions: Werkzeug>=2.3.0, Jinja2>=3.1.2, itsdangerous>=2.1.2, click>=8.1.3.

  • Remove previously deprecated code. :pr:4995

    • The push and pop methods of the deprecated _app_ctx_stack and _request_ctx_stack objects are removed. top still exists to give extensions more time to update, but it will be removed.
    • The FLASK_ENV environment variable, ENV config key, and app.env property are removed.
    • The session_cookie_name, send_file_max_age_default, use_x_sendfile, propagate_exceptions, and templates_auto_reload properties on app are removed.
    • The JSON_AS_ASCII, JSON_SORT_KEYS, JSONIFY_MIMETYPE, and JSONIFY_PRETTYPRINT_REGULAR config keys are removed.
    • The app.before_first_request and bp.before_app_first_request decorators are removed.
    • json_encoder and json_decoder attributes on app and blueprint, and the corresponding json.JSONEncoder and JSONDecoder classes, are removed.
    • The json.htmlsafe_dumps and htmlsafe_dump functions are removed.
    • Calling setup methods on blueprints after registration is an error instead of a warning. :pr:4997
  • Importing escape and Markup from flask is deprecated. Import them directly from markupsafe instead. :pr:4996

  • The app.got_first_request property is deprecated. :pr:4997

  • The locked_cached_property decorator is deprecated. Use a lock inside the decorated function if locking is needed. :issue:4993

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=flask&package-manager=pip&previous-version=2.0.3&new-version=2.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/awslabs/aws-dataall/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/requirements.txt b/backend/requirements.txt index 7429e61c9..09d8a7abe 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -3,7 +3,7 @@ aws-xray-sdk==2.4.3 boto3==1.26.95 botocore==1.29.95 fastapi == 0.92.0 -Flask==2.0.3 +Flask==2.3.2 flask-cors==3.0.10 nanoid==2.0.0 opensearch-py==1.0.0 From e4b3e733d32bc179a2fce9493d9c5d4a7bb08fed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 May 2023 13:41:27 +0200 Subject: [PATCH 114/346] Bump flask from 2.0.3 to 2.3.2 in /backend/dataall/cdkproxy (#438) Bumps [flask](https://github.com/pallets/flask) from 2.0.3 to 2.3.2.
Release notes

Sourced from flask's releases.

2.3.2

This is a security fix release for the 2.3.x release branch.

2.3.1

This is a fix release for the 2.3.x release branch.

2.3.0

This is a feature release, which includes new features, removes previously deprecated code, and adds new deprecations. The 2.3.x branch is now the supported fix branch, the 2.2.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as pip-tools to pin all dependencies and control upgrades. Test with warnings treated as errors to be able to adapt to deprecation warnings early.

2.2.4

This is a fix release for the 2.2.x release branch.

2.2.3

This is a fix release for the 2.2.x release branch.

2.2.2

This is a fix release for the 2.2.0 feature release.

2.2.1

This is a fix release for the 2.2.0 feature release.

2.2.0

This is a feature release, which includes new features and removes previously deprecated code. The 2.2.x branch is now the supported bug fix branch, the 2.1.x branch will become a tag marking the end of support for that branch. We encourage everyone to upgrade, and to use a tool such as pip-tools to pin all dependencies and control upgrades.

2.1.3

... (truncated)

Changelog

Sourced from flask's changelog.

Version 2.3.2

Released 2023-05-01

  • Set Vary: Cookie header when the session is accessed, modified, or refreshed.
  • Update Werkzeug requirement to >=2.3.3 to apply recent bug fixes.

Version 2.3.1

Released 2023-04-25

  • Restore deprecated from flask import Markup. :issue:5084

Version 2.3.0

Released 2023-04-25

  • Drop support for Python 3.7. :pr:5072

  • Update minimum requirements to the latest versions: Werkzeug>=2.3.0, Jinja2>=3.1.2, itsdangerous>=2.1.2, click>=8.1.3.

  • Remove previously deprecated code. :pr:4995

    • The push and pop methods of the deprecated _app_ctx_stack and _request_ctx_stack objects are removed. top still exists to give extensions more time to update, but it will be removed.
    • The FLASK_ENV environment variable, ENV config key, and app.env property are removed.
    • The session_cookie_name, send_file_max_age_default, use_x_sendfile, propagate_exceptions, and templates_auto_reload properties on app are removed.
    • The JSON_AS_ASCII, JSON_SORT_KEYS, JSONIFY_MIMETYPE, and JSONIFY_PRETTYPRINT_REGULAR config keys are removed.
    • The app.before_first_request and bp.before_app_first_request decorators are removed.
    • json_encoder and json_decoder attributes on app and blueprint, and the corresponding json.JSONEncoder and JSONDecoder classes, are removed.
    • The json.htmlsafe_dumps and htmlsafe_dump functions are removed.
    • Calling setup methods on blueprints after registration is an error instead of a warning. :pr:4997
  • Importing escape and Markup from flask is deprecated. Import them directly from markupsafe instead. :pr:4996

  • The app.got_first_request property is deprecated. :pr:4997

  • The locked_cached_property decorator is deprecated. Use a lock inside the decorated function if locking is needed. :issue:4993

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=flask&package-manager=pip&previous-version=2.0.3&new-version=2.3.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/awslabs/aws-dataall/network/alerts).
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Nikita Podshivalov --- backend/dataall/cdkproxy/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/cdkproxy/requirements.txt index f2da84ebe..ccd390c61 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/cdkproxy/requirements.txt @@ -7,13 +7,13 @@ cdk-nag==2.7.2 constructs==10.0.73 starlette==0.25.0 fastapi == 0.92.0 -Flask==2.0.3 +Flask==2.3.2 PyYAML==6.0 requests==2.27.1 tabulate==0.8.9 uvicorn==0.15.0 -jinja2==3.0.3 -werkzeug==2.2.3 +jinja2==3.1.2 +werkzeug==2.3.3 constructs>=10.0.0,<11.0.0 git-remote-codecommit==1.16 aws-ddk==0.5.1 From 3ae1eca83df494e392786350a9e8fb941c08746e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 3 May 2023 14:26:48 +0200 Subject: [PATCH 115/346] Fixed all tests --- backend/dataall/db/api/share_object.py | 11 ++++++++++- backend/dataall/modules/datasets/__init__.py | 4 ++++ .../modules/datasets/api/dataset/resolvers.py | 3 +-- .../dataall/modules/datasets/cdk/dataset_policy.py | 2 +- .../datasets/handlers/glue_dataset_handler.py | 2 +- .../datasets/services/dataset_group_resource.py | 3 --- .../modules/datasets/services/dataset_service.py | 6 ++++-- .../modules/datasets/tasks/tables_syncer.py | 4 ++-- .../share_managers/lf_share_manager.py | 7 ++++--- tests/api/test_dataset.py | 5 +++-- tests/api/test_vote.py | 3 +-- tests/tasks/test_catalog_indexer.py | 2 +- tests/tasks/test_lf_share_manager.py | 7 +++++-- tests/tasks/test_tables_sync.py | 14 +++++++++----- 14 files changed, 46 insertions(+), 27 deletions(-) diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index 79a82af43..4917664b9 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -13,7 +13,6 @@ from 
dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from ...modules.datasets.services.permissions import DATASET_TABLE_READ -from ...modules.datasets.services.share_notification_service import ShareNotificationService logger = logging.getLogger(__name__) @@ -564,6 +563,8 @@ def submit_share_object( Share_SM.update_state(session, share, new_share_state) + # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. + from dataall.modules.datasets.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_submission( session, username, dataset, share ) @@ -611,6 +612,8 @@ def approve_share_object( resource_type=DatasetTable.__name__, ) + # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. + from dataall.modules.datasets.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_approval(session, username, dataset, share) return share @@ -644,6 +647,9 @@ def reject_share_object( group=share.groupUri, resource_uri=dataset.datasetUri, ) + + # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. + from dataall.modules.datasets.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share @@ -686,6 +692,9 @@ def revoke_items_share_object( group=share.groupUri, resource_uri=dataset.datasetUri, ) + + # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. 
+ from dataall.modules.datasets.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 69e5fb86d..08b1dffd1 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,10 +2,12 @@ import logging from typing import List +from dataall.core.group.services.group_resource_manager import GroupResourceManager from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.dataset_group_resource import DatasetGroupResource from dataall.modules.datasets.services.permissions import GET_DATASET, UPDATE_DATASET from dataall.modules.loader import ModuleInterface, ImportMode @@ -57,6 +59,8 @@ def __init__(self): TargetType("dataset", GET_DATASET, UPDATE_DATASET) + GroupResourceManager.register(DatasetGroupResource()) + log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 52797fd35..3cef6e21a 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -10,10 +10,9 @@ DatasetRole, ) from dataall.api.context import Context -from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper -from dataall.db import paginate, exceptions, permissions, models +from dataall.db import paginate, 
exceptions, models from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization from dataall.modules.datasets import Dataset diff --git a/backend/dataall/modules/datasets/cdk/dataset_policy.py b/backend/dataall/modules/datasets/cdk/dataset_policy.py index 5c847fe14..64c0c53a4 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_policy.py @@ -14,7 +14,7 @@ def get_statements(self, session): environment_id=self.environment.environmentUri, group_uri=self.team.groupUri, ) - return self._generate_dataset_statements(datasets) + return DatasetDataPolicy._generate_dataset_statements(datasets) @staticmethod def _generate_dataset_statements(datasets: List[Dataset]): diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index ff360a2f1..7a43b6ac6 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -26,4 +26,4 @@ def start_crawler(engine, task: models.Task): crawler = DatasetCrawler(dataset) if location: crawler.update_crawler(targets) - return crawler.start_crawler() \ No newline at end of file + return crawler.start_crawler() diff --git a/backend/dataall/modules/datasets/services/dataset_group_resource.py b/backend/dataall/modules/datasets/services/dataset_group_resource.py index e05d06e23..8d58ac3ad 100644 --- a/backend/dataall/modules/datasets/services/dataset_group_resource.py +++ b/backend/dataall/modules/datasets/services/dataset_group_resource.py @@ -6,6 +6,3 @@ class DatasetGroupResource(GroupResource): def count_resources(self, session, environment_uri, group_uri) -> int: return DatasetRepository.count_group_datasets(session, environment_uri, group_uri) - -GroupResourceManager.register(DatasetGroupResource()) - diff --git 
a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 75cc89047..388a14746 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -12,6 +12,8 @@ KeyValueTag, Vote, Stack, + has_tenant_perm, + has_resource_perm, ) from dataall.db.api import Organization from dataall.db import models, api, exceptions, paginate, permissions @@ -31,8 +33,8 @@ class DatasetService: @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(CREATE_DATASET) + @has_tenant_perm(MANAGE_DATASETS) + @has_resource_perm(CREATE_DATASET) def create_dataset( session, username: str, diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 3a399fe14..b56b57ddd 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -11,8 +11,8 @@ from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.services.dataset_table_service import DatasetTableService root = logging.getLogger() @@ -94,7 +94,7 @@ def sync_tables(engine): f'{dataset.AwsAccountId}/{dataset.GlueDatabaseName} ' f'due to: {e}' ) - AlarmService().trigger_dataset_sync_failure_alarm(dataset, str(e)) + DatasetAlarmService().trigger_dataset_sync_failure_alarm(dataset, str(e)) return processed_tables diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py 
b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index 997dc830f..1cf649f01 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -10,9 +10,10 @@ from ....aws.handlers.quicksight import Quicksight from ....aws.handlers.sts import SessionHelper from ....aws.handlers.ram import Ram -from ....db import api, exceptions, models +from ....db import exceptions, models from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.utils.alarm_service import AlarmService +from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService logger = logging.getLogger(__name__) @@ -526,7 +527,7 @@ def handle_share_failure( f'due to: {error}' ) - AlarmService().trigger_table_sharing_failure_alarm( + DatasetAlarmService().trigger_table_sharing_failure_alarm( table, self.share, self.target_environment ) return True @@ -549,7 +550,7 @@ def handle_revoke_failure( f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_revoke_table_sharing_failure_alarm( + DatasetAlarmService().trigger_revoke_table_sharing_failure_alarm( table, self.share, self.target_environment ) return True diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index ee2d6047e..c6909c28b 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -1,8 +1,10 @@ import typing +from unittest.mock import MagicMock import pytest import dataall +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -180,8 +182,7 @@ def test_update_dataset(dataset1, client, patch_es, group, group2): def test_start_crawler(org1, env1, dataset1, client, group, 
module_mocker): module_mocker.patch( - 'dataall.modules.datasets.aws.glue_dataset_client.DatasetCrawler.get_crawler', - return_value={'crawler_name': dataset1.GlueCrawlerName}, + 'dataall.modules.datasets.api.dataset.resolvers.DatasetCrawler', MagicMock() ) mutation = """ mutation StartGlueCrawler($datasetUri:String, $input:CrawlerInput){ diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index 4701c5609..e241e27f7 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -110,8 +110,7 @@ def get_vote_query(client, target_uri, target_type, group): return response -def test_upvote(patch_es, client, dataset1, module_mocker, dashboard): - module_mocker.patch('dataall.api.Objects.Vote.resolvers.reindex', return_value={}) +def test_upvote(patch_es, client, dataset1, dashboard): response = upvote_mutation( client, dataset1.datasetUri, 'dataset', True, dataset1.SamlAdminGroupName ) diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 32c8873e3..1ffac0b6c 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -1,6 +1,6 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 89373d96e..bbe2e21ca 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -11,6 +11,7 @@ from dataall.db import models from dataall.api import constants from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import 
ProcessLFSameAccountShare @@ -644,6 +645,7 @@ def test_revoke_external_account_access_on_source_account( # Then lf_mock.assert_called_once() + def test_handle_share_failure( db, processor_same_account: ProcessLFSameAccountShare, @@ -655,7 +657,7 @@ def test_handle_share_failure( ): # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_table_sharing_failure_alarm") + alarm_service_mock = mocker.patch.object(DatasetAlarmService, "trigger_table_sharing_failure_alarm") error = Exception # When @@ -673,6 +675,7 @@ def test_handle_share_failure( # Then alarm_service_mock.assert_called_once() + def test_handle_revoke_failure( db, processor_same_account: ProcessLFSameAccountShare, @@ -683,7 +686,7 @@ def test_handle_revoke_failure( mocker, ): # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_revoke_table_sharing_failure_alarm") + alarm_service_mock = mocker.patch.object(DatasetAlarmService, "trigger_revoke_table_sharing_failure_alarm") error = Exception # When diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 5d0322f94..5c9937c72 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -1,7 +1,11 @@ +from unittest.mock import MagicMock + import pytest import dataall from dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets.tasks.tables_syncer import sync_tables @pytest.fixture(scope='module', autouse=True) @@ -156,12 +160,12 @@ def test_tables_sync(db, org, env, sync_dataset, table, mocker): mocker.patch( 'dataall.modules.datasets.tasks.tables_syncer.is_assumable_pivot_role', return_value=True ) - mocker.patch( - 'dataall.aws.handlers.glue.Glue.grant_principals_all_table_permissions', - return_value=True, - ) - processed_tables = 
dataall.modules.datasets.tasks.tables_syncer.sync_tables(engine=db) + mock_client = MagicMock() + mocker.patch("dataall.modules.datasets.tasks.tables_syncer.LakeFormationTableClient", mock_client) + mock_client.grant_principals_all_table_permissions = True + + processed_tables = sync_tables(engine=db) assert len(processed_tables) == 2 with db.scoped_session() as session: saved_table: DatasetTable = ( From 745f71011f1e47cd26fb86fa56229e5cf5115517 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 3 May 2023 15:34:34 +0200 Subject: [PATCH 116/346] Fixed all tests --- backend/dataall/cdkproxy/stacks/environment.py | 2 -- .../modules/datasets/services/dataset_share_service.py | 8 +++++--- tests/api/conftest.py | 5 +---- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index a2d40af86..e9d604f47 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -141,8 +141,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): self.engine, self._environment ) - self.all_environment_datasets = self.get_all_environment_datasets(self.engine, self._environment) - # Environment S3 Bucket default_environment_bucket = s3.Bucket( self, diff --git a/backend/dataall/modules/datasets/services/dataset_share_service.py b/backend/dataall/modules/datasets/services/dataset_share_service.py index 74e64c951..90a5334d5 100644 --- a/backend/dataall/modules/datasets/services/dataset_share_service.py +++ b/backend/dataall/modules/datasets/services/dataset_share_service.py @@ -4,7 +4,7 @@ from sqlalchemy import or_, case, func from sqlalchemy.sql import and_ -from dataall.api.constants import ShareableType +from dataall.api.constants import ShareableType, PrincipalType from dataall.db import models, permissions from dataall.db.api import has_resource_perm, ShareItemSM from dataall.db.paginator import paginate @@ 
-29,6 +29,7 @@ def paginated_shared_with_environment_datasets( models.Environment.name.label('environmentName'), models.ShareObject.created.label('created'), models.ShareObject.principalId.label('principalId'), + models.ShareObject.principalType.label('principalType'), models.ShareObjectItem.itemType.label('itemType'), models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), models.ShareObjectItem.GlueTableName.label('GlueTableName'), @@ -99,8 +100,9 @@ def paginated_shared_with_environment_datasets( or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) ) - if data.get("uniqueDatasets", False): - q = q.distinct(models.ShareObject.datasetUri) + if data.get("uniqueShares", False): + q = q.filter(models.ShareObject.principalType != PrincipalType.ConsumptionRole.value) + q = q.distinct(models.ShareObject.shareUri) if data.get('term'): term = data.get('term') diff --git a/tests/api/conftest.py b/tests/api/conftest.py index a0b184504..983b011fe 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -25,7 +25,7 @@ def patch_check_env(module_mocker): @pytest.fixture(scope='module', autouse=True) def patch_check_dataset(module_mocker): module_mocker.patch( - 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True + 'dataall.modules.datasets.api.dataset.resolvers.check_dataset_account', return_value=True ) @@ -635,9 +635,6 @@ def env_fixture(env, org_fixture, user, group, tenant, module_mocker): @pytest.fixture(scope='module') def dataset_fixture(env_fixture, org_fixture, dataset, group, module_mocker) -> Dataset: - module_mocker.patch( - 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True - ) yield dataset( org=org_fixture, env=env_fixture, From f382a6897b8cb2b366c1d098dcf71d2dd2ea0d72 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 4 May 2023 10:58:15 +0200 Subject: [PATCH 117/346] Review remarks --- .../dataall/api/Objects/Glossary/registry.py | 2 +- 
.../datasets/aws/s3_location_client.py | 31 +++++++++++++++++++ .../datasets/handlers/s3_location_handler.py | 30 ++---------------- .../datasets/indexers/dataset_indexer.py | 2 +- .../datasets/indexers/location_indexer.py | 2 +- .../datasets/indexers/table_indexer.py | 2 +- backend/dataall/searchproxy/__init__.py | 1 - .../{upsert.py => base_indexer.py} | 7 +++-- backend/dataall/searchproxy/indexers.py | 3 +- .../share_managers/s3_share_manager.py | 14 +++------ .../share_processors/s3_process_share.py | 4 +-- 11 files changed, 51 insertions(+), 47 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/s3_location_client.py rename backend/dataall/searchproxy/{upsert.py => base_indexer.py} (88%) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 36fea6cf0..fb7e6edf7 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -7,7 +7,7 @@ from dataall.api.gql.graphql_union_type import UnionTypeRegistry from dataall.db import Resource, models from dataall.searchproxy.indexers import DashboardIndexer -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer class Identifiable(Protocol): diff --git a/backend/dataall/modules/datasets/aws/s3_location_client.py b/backend/dataall/modules/datasets/aws/s3_location_client.py new file mode 100644 index 000000000..45385743d --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_location_client.py @@ -0,0 +1,31 @@ +import logging + +from dataall.aws.handlers.sts import SessionHelper +from dataall.modules.datasets.db.models import DatasetStorageLocation + +log = logging.getLogger(__name__) + + +class S3LocationClient: + + def __init__(self, location: DatasetStorageLocation): + session = SessionHelper.remote_session(accountid=location.AWSAccountId) + self._client = session.client('s3', region_name=location.region) + self._location = 
location + + def create_bucket_prefix(self): + location = self._location + try: + response = self._client.put_object( + Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' + ) + log.info( + 'Creating S3 Prefix `{}`({}) on AWS #{}'.format( + location.S3BucketName, location.AWSAccountId, response + ) + ) + except Exception as e: + log.error( + f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' + ) + raise e diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_location_handler.py index ba8cf6eda..296b7e33c 100644 --- a/backend/dataall/modules/datasets/handlers/s3_location_handler.py +++ b/backend/dataall/modules/datasets/handlers/s3_location_handler.py @@ -3,6 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.services.dataset_location import DatasetLocationService log = logging.getLogger(__name__) @@ -11,11 +12,6 @@ class S3DatasetLocationHandler: """Handles async requests related to s3 for dataset storage location""" - @staticmethod - def client(account_id: str, region: str, client_type: str): - session = SessionHelper.remote_session(accountid=account_id) - return session.client(client_type, region_name=region) - @staticmethod @Worker.handler(path='s3.prefix.create') def create_dataset_location(engine, task: models.Task): @@ -23,26 +19,6 @@ def create_dataset_location(engine, task: models.Task): location = DatasetLocationService.get_location_by_uri( session, task.targetUri ) - S3DatasetLocationHandler.create_bucket_prefix(location) - return location - - @staticmethod - def create_bucket_prefix(location): - try: - account_id = location.AWSAccountId - region = location.region - s3cli = 
S3DatasetLocationHandler.client(account_id=account_id, region=region, client_type='s3') - response = s3cli.put_object( - Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' - ) - log.info( - 'Creating S3 Prefix `{}`({}) on AWS #{}'.format( - location.S3BucketName, account_id, response - ) - ) + S3LocationClient(location).create_bucket_prefix() location.locationCreated = True - except Exception as e: - log.error( - f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' - ) - raise e + return location diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 8cb0b7873..35de32e1c 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -3,7 +3,7 @@ from dataall import db from dataall.db import models from dataall.modules.datasets.services.dataset_location import DatasetLocationService -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer class DatasetIndexer(BaseIndexer): diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 72495b51c..f649a244b 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -3,7 +3,7 @@ from dataall.db import models from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer class DatasetLocationIndexer(BaseIndexer): diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index 1eab70a87..fec9e4f7c 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ 
b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -3,7 +3,7 @@ from dataall.db import models from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer class DatasetTableIndexer(BaseIndexer): diff --git a/backend/dataall/searchproxy/__init__.py b/backend/dataall/searchproxy/__init__.py index 78493adb6..8dab74fea 100644 --- a/backend/dataall/searchproxy/__init__.py +++ b/backend/dataall/searchproxy/__init__.py @@ -4,5 +4,4 @@ __all__ = [ 'connect', 'run_query', - 'upsert', ] diff --git a/backend/dataall/searchproxy/upsert.py b/backend/dataall/searchproxy/base_indexer.py similarity index 88% rename from backend/dataall/searchproxy/upsert.py rename to backend/dataall/searchproxy/base_indexer.py index 9eb2e3125..fd0cb5e0e 100644 --- a/backend/dataall/searchproxy/upsert.py +++ b/backend/dataall/searchproxy/base_indexer.py @@ -21,7 +21,10 @@ class BaseIndexer(ABC): def es(cls): """Lazy creation of the OpenSearch connection""" if cls._es is None: - cls._es = connect(envname=os.getenv('envname', 'local')) + es = connect(envname=os.getenv('envname', 'local')) + if not es: + raise Exception('Failed to create ES connection') + cls._es = es return cls._es @@ -35,7 +38,7 @@ def _index(cls, doc_id, doc): es = cls.es() doc['_indexed'] = datetime.now() if es: - res = es.index(index=BaseIndexer._INDEX, id=doc_id, body=doc) + res = es.index(index=cls._INDEX, id=doc_id, body=doc) log.info(f'doc {doc} for id {doc_id} indexed with response {res}') return True else: diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py index 13ba44eea..ce4145fc5 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/searchproxy/indexers.py @@ -4,11 +4,12 @@ from .. 
import db from ..db import models -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer log = logging.getLogger(__name__) +# TODO Should be moved to dashboard module class DashboardIndexer(BaseIndexer): @classmethod def upsert(cls, session, dashboard_uri: str): diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 30c72a60e..fad1e801f 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -398,12 +398,9 @@ def delete_dataset_bucket_key_policy( json.dumps(policy) ) - def handle_share_failure(self, error: Exception) -> None: + def log_share_failure(self, error: Exception) -> None: """ - Handles share failure by raising an alarm to alarmsTopic - Returns - ------- - True if alarm published successfully + Writes a log if the failure happened while sharing """ logger.error( f'Failed to share folder {self.s3_prefix} ' @@ -412,12 +409,9 @@ def handle_share_failure(self, error: Exception) -> None: f'due to: {error}' ) - def handle_revoke_failure(self, error: Exception) -> None: + def log_revoke_failure(self, error: Exception) -> None: """ - Handles share failure by raising an alarm to alarmsTopic - Returns - ------- - True if alarm published successfully + Writes a log if the failure happened while revoking share """ logger.error( f'Failed to revoke S3 permissions to folder {self.s3_prefix} ' diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 96b608338..860aa8a69 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -92,7 +92,7 @@ def process_approved_shares( 
shared_item_SM.update_state_single_item(session, sharing_item, new_state) except Exception as e: - sharing_folder.handle_share_failure(e) + sharing_folder.log_share_failure(e) new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(session, sharing_item, new_state) success = False @@ -155,7 +155,7 @@ def process_revoked_shares( revoked_item_SM.update_state_single_item(session, removing_item, new_state) except Exception as e: - removing_folder.handle_revoke_failure(e) + removing_folder.log_revoke_failure(e) new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(session, removing_item, new_state) success = False From 532ff0d678c1d6b842a3ad1e39e03321bcef9b5d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 4 May 2023 11:56:36 +0200 Subject: [PATCH 118/346] Added TODO --- .../dataall/modules/datasets/handlers/glue_profiling_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index d15607733..be0915331 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -68,6 +68,7 @@ def start_profiling_run(engine, task: models.Task): ) return run + # TODO move to client once dataset is migrated @staticmethod def get_job_run(**data): accountid = data['accountid'] From 2a319ca2c8fdf67251974ecab6f00e1c392c5426 Mon Sep 17 00:00:00 2001 From: Abdulrahman Kaitoua Date: Fri, 5 May 2023 09:47:52 +0200 Subject: [PATCH 119/346] solve deployment bug #433 CloudFront logs does not enable ACL access (#437) ### Feature or Bugfix - Bugfix ### Detail Solved bug 433, starting from April 2023 S3 default configurations changed, the default for s3 is set to disable ACL. 
Which is giving an issue for cloudfront logging on s3. The solution was to change the ownership of the object to object writer (enabling ACL for object writer as stated in cloudfront documentation). ### Relates [- ](https://github.com/awslabs/aws-dataall/issues/433) By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. Co-authored-by: akaitoua-sa <126820454+akaitoua-sa@users.noreply.github.com> --- deploy/stacks/cloudfront.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/deploy/stacks/cloudfront.py b/deploy/stacks/cloudfront.py index 5fe398423..382b91d8e 100644 --- a/deploy/stacks/cloudfront.py +++ b/deploy/stacks/cloudfront.py @@ -226,6 +226,7 @@ def __init__( block_public_access=s3.BlockPublicAccess.BLOCK_ALL, enforce_ssl=True, versioned=True, + object_ownership=s3.ObjectOwnership.OBJECT_WRITER, ) frontend_alternate_domain = None @@ -240,6 +241,7 @@ def __init__( removal_policy=RemovalPolicy.DESTROY, block_public_access=s3.BlockPublicAccess.BLOCK_ALL, enforce_ssl=True, + object_ownership=s3.ObjectOwnership.OBJECT_WRITER, ) origin_access_identity = cloudfront.OriginAccessIdentity( @@ -545,6 +547,7 @@ def build_static_site( removal_policy=RemovalPolicy.DESTROY, block_public_access=s3.BlockPublicAccess.BLOCK_ALL, enforce_ssl=True, + object_ownership=s3.ObjectOwnership.OBJECT_WRITER, ) origin_access_identity = cloudfront.OriginAccessIdentity( From 8d71ab544bf4875e524631e79b5cacb696a26981 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 10:32:06 +0200 Subject: [PATCH 120/346] Moved Share API --- .../dataset_sharing/api}/__init__.py | 2 +- .../dataset_sharing/api}/input_types.py | 2 +- .../dataset_sharing/api}/mutations.py | 2 +- .../dataset_sharing/api}/queries.py | 2 +- .../dataset_sharing/api}/resolvers.py | 13 ++++++------- .../dataset_sharing/api}/schema.py | 2 +- 6 files changed, 11 insertions(+), 12 deletions(-) rename backend/dataall/{api/Objects/ShareObject => 
modules/dataset_sharing/api}/__init__.py (74%) rename backend/dataall/{api/Objects/ShareObject => modules/dataset_sharing/api}/input_types.py (98%) rename backend/dataall/{api/Objects/ShareObject => modules/dataset_sharing/api}/mutations.py (97%) rename backend/dataall/{api/Objects/ShareObject => modules/dataset_sharing/api}/queries.py (91%) rename backend/dataall/{api/Objects/ShareObject => modules/dataset_sharing/api}/resolvers.py (98%) rename backend/dataall/{api/Objects/ShareObject => modules/dataset_sharing/api}/schema.py (99%) diff --git a/backend/dataall/api/Objects/ShareObject/__init__.py b/backend/dataall/modules/dataset_sharing/api/__init__.py similarity index 74% rename from backend/dataall/api/Objects/ShareObject/__init__.py rename to backend/dataall/modules/dataset_sharing/api/__init__.py index dfa46b264..d6574a49b 100644 --- a/backend/dataall/api/Objects/ShareObject/__init__.py +++ b/backend/dataall/modules/dataset_sharing/api/__init__.py @@ -1,4 +1,4 @@ -from . import ( +from dataall.modules.dataset_sharing.api import ( input_types, mutations, queries, diff --git a/backend/dataall/api/Objects/ShareObject/input_types.py b/backend/dataall/modules/dataset_sharing/api/input_types.py similarity index 98% rename from backend/dataall/api/Objects/ShareObject/input_types.py rename to backend/dataall/modules/dataset_sharing/api/input_types.py index 04f7269ec..fba467b67 100644 --- a/backend/dataall/api/Objects/ShareObject/input_types.py +++ b/backend/dataall/modules/dataset_sharing/api/input_types.py @@ -1,4 +1,4 @@ -from ....api.constants import * +from dataall.api.constants import * NewShareObjectInput = gql.InputType( diff --git a/backend/dataall/api/Objects/ShareObject/mutations.py b/backend/dataall/modules/dataset_sharing/api/mutations.py similarity index 97% rename from backend/dataall/api/Objects/ShareObject/mutations.py rename to backend/dataall/modules/dataset_sharing/api/mutations.py index d8247837d..472ba2764 100644 --- 
a/backend/dataall/api/Objects/ShareObject/mutations.py +++ b/backend/dataall/modules/dataset_sharing/api/mutations.py @@ -1,4 +1,4 @@ -from .resolvers import * +from dataall.modules.dataset_sharing.api.resolvers import * createShareObject = gql.MutationField( name='createShareObject', diff --git a/backend/dataall/api/Objects/ShareObject/queries.py b/backend/dataall/modules/dataset_sharing/api/queries.py similarity index 91% rename from backend/dataall/api/Objects/ShareObject/queries.py rename to backend/dataall/modules/dataset_sharing/api/queries.py index e74be6b03..1033d4408 100644 --- a/backend/dataall/api/Objects/ShareObject/queries.py +++ b/backend/dataall/modules/dataset_sharing/api/queries.py @@ -1,4 +1,4 @@ -from .resolvers import * +from dataall.modules.dataset_sharing.api.resolvers import * getShareObject = gql.QueryField( name='getShareObject', diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py similarity index 98% rename from backend/dataall/api/Objects/ShareObject/resolvers.py rename to backend/dataall/modules/dataset_sharing/api/resolvers.py index f2e58fa14..46f0261a1 100644 --- a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -1,12 +1,11 @@ import logging - -from .... import db -from .... 
import utils -from ....api.constants import * -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....db import models +from dataall import db +from dataall import utils +from dataall.api.constants import * +from dataall.api.context import Context +from dataall.aws.handlers.service_handlers import Worker +from dataall.db import models from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/api/Objects/ShareObject/schema.py b/backend/dataall/modules/dataset_sharing/api/schema.py similarity index 99% rename from backend/dataall/api/Objects/ShareObject/schema.py rename to backend/dataall/modules/dataset_sharing/api/schema.py index 7a26154e3..c99382205 100644 --- a/backend/dataall/api/Objects/ShareObject/schema.py +++ b/backend/dataall/modules/dataset_sharing/api/schema.py @@ -1,4 +1,4 @@ -from .resolvers import * +from dataall.modules.dataset_sharing.api.resolvers import * from dataall.api.Objects.Environment.resolvers import resolve_environment ShareableObject = gql.Union( From e881a487b570af1394c2fda1fcccaefdac55b338 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 10:39:37 +0200 Subject: [PATCH 121/346] Moved code of Share --- .../api/Objects/Environment/resolvers.py | 3 +-- .../dataall/api/Objects/Group/resolvers.py | 2 +- backend/dataall/api/Objects/__init__.py | 1 - backend/dataall/db/api/__init__.py | 1 - .../modules/dataset_sharing/__init__.py | 3 ++- .../services/dataset_share_service.py | 0 .../services/share_notification_service.py | 0 .../dataset_sharing/services}/share_object.py | 22 ++++++++----------- .../datasets/tasks/subscription_service.py | 2 +- 9 files changed, 14 insertions(+), 20 deletions(-) rename backend/dataall/modules/{datasets => dataset_sharing}/services/dataset_share_service.py (100%) rename backend/dataall/modules/{datasets => 
dataset_sharing}/services/share_notification_service.py (100%) rename backend/dataall/{db/api => modules/dataset_sharing/services}/share_object.py (97%) diff --git a/backend/dataall/api/Objects/Environment/resolvers.py b/backend/dataall/api/Objects/Environment/resolvers.py index b22ee0c5a..b241f6dbb 100644 --- a/backend/dataall/api/Objects/Environment/resolvers.py +++ b/backend/dataall/api/Objects/Environment/resolvers.py @@ -11,7 +11,6 @@ from ..Stack import stack_helper from ...constants import * from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.quicksight import Quicksight from ....aws.handlers.cloudformation import CloudFormation from ....aws.handlers.iam import IAM from ....aws.handlers.parameter_store import ParameterStoreManager @@ -22,7 +21,7 @@ NamingConventionPattern, ) -from dataall.modules.datasets.services.dataset_share_service import DatasetShareService +from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService log = logging.getLogger() diff --git a/backend/dataall/api/Objects/Group/resolvers.py b/backend/dataall/api/Objects/Group/resolvers.py index d29c5be2c..eb7b04ce7 100644 --- a/backend/dataall/api/Objects/Group/resolvers.py +++ b/backend/dataall/api/Objects/Group/resolvers.py @@ -4,7 +4,7 @@ from ....db import exceptions from ....db.models import Group from ....aws.handlers.cognito import Cognito -from ....modules.datasets.services.dataset_share_service import DatasetShareService +from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService log = logging.getLogger() diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 94a2ed2ba..798f3636b 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -20,7 +20,6 @@ Group, Principal, Dashboard, - ShareObject, Organization, Stack, Test, diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py 
index ed19787aa..cfc41776d 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -10,7 +10,6 @@ from .environment import Environment from .glossary import Glossary from .vote import Vote -from .share_object import ShareObject, ShareObjectSM, ShareItemSM from .notification import Notification from .redshift_cluster import RedshiftCluster from .vpc import Vpc diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index ed6b62297..793662cc7 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -13,4 +13,5 @@ def is_supported(modes: List[ImportMode]) -> bool: return ImportMode.API in modes def __init__(self): - log.info("API pf dataset sharing has been imported") + from dataall.modules.dataset_sharing import api + log.info("API of dataset sharing has been imported") diff --git a/backend/dataall/modules/datasets/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/dataset_share_service.py rename to backend/dataall/modules/dataset_sharing/services/dataset_share_service.py diff --git a/backend/dataall/modules/datasets/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/share_notification_service.py rename to backend/dataall/modules/dataset_sharing/services/share_notification_service.py diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py similarity index 97% rename from backend/dataall/db/api/share_object.py rename to backend/dataall/modules/dataset_sharing/services/share_object.py index 4917664b9..264534ff2 100644 --- a/backend/dataall/db/api/share_object.py +++ 
b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -2,17 +2,18 @@ from sqlalchemy import and_, or_, func, case -from . import ( +from dataall.db.api import ( has_resource_perm, ResourcePolicy, Environment, ) -from .. import api, utils -from .. import models, exceptions, permissions, paginate -from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType +from dataall.db import api, utils +from dataall.db import models, exceptions, permissions, paginate +from dataall.db.models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from ...modules.datasets.services.permissions import DATASET_TABLE_READ +from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService logger = logging.getLogger(__name__) @@ -563,8 +564,6 @@ def submit_share_object( Share_SM.update_state(session, share, new_share_state) - # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. - from dataall.modules.datasets.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_submission( session, username, dataset, share ) @@ -612,8 +611,7 @@ def approve_share_object( resource_type=DatasetTable.__name__, ) - # TODO Temporary, to solve cyclic imports. It will go away when shares are in a dedicated module. 
- from dataall.modules.datasets.services.share_notification_service import ShareNotificationService + from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_approval(session, username, dataset, share) return share @@ -648,8 +646,7 @@ def reject_share_object( resource_uri=dataset.datasetUri, ) - # TODO Temporary, to solve cyclic imports. - from dataall.modules.datasets.services.share_notification_service import ShareNotificationService + from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share @@ -693,8 +690,7 @@ def revoke_items_share_object( resource_uri=dataset.datasetUri, ) - # TODO Temporary, to solve cyclic imports. - from dataall.modules.datasets.services.share_notification_service import ShareNotificationService + from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) return share diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index a261fad8a..4bbe7c34b 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -13,7 +13,7 @@ from dataall.db import get_engine from dataall.db import models from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService -from dataall.modules.datasets.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService from
dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table_service import DatasetTableService From e0aac6aaa30a64d7adb207f2139f791f2357a14b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 11:53:56 +0200 Subject: [PATCH 122/346] Moved Share models --- backend/dataall/db/api/environment.py | 10 +- backend/dataall/db/api/redshift_cluster.py | 38 +- backend/dataall/db/models/Enums.py | 51 --- backend/dataall/db/models/ShareObjectItem.py | 27 -- backend/dataall/db/models/__init__.py | 2 - .../modules/dataset_sharing/api/resolvers.py | 64 ++-- .../modules/dataset_sharing/db/Enums.py | 52 +++ .../modules/dataset_sharing/db/__init__.py | 0 .../dataset_sharing/db/models.py} | 26 +- .../dataset_sharing/services/__init__.py | 0 .../services/dataset_share_service.py | 88 ++--- .../services/share_notification_service.py | 9 +- .../dataset_sharing/services/share_object.py | 348 +++++++++--------- .../modules/datasets/api/dataset/resolvers.py | 7 +- .../datasets/handlers/glue_dataset_handler.py | 3 - .../services/dataset_alarm_service.py | 5 +- .../services/dataset_location_service.py | 16 +- .../datasets/services/dataset_service.py | 64 ++-- .../services/dataset_table_service.py | 33 +- .../datasets/tasks/bucket_policy_updater.py | 30 +- .../datasets/tasks/subscription_service.py | 23 +- .../data_sharing/data_sharing_service.py | 51 +-- .../share_managers/lf_share_manager.py | 12 +- .../share_managers/s3_share_manager.py | 19 +- .../lf_process_cross_account_share.py | 32 +- .../lf_process_same_account_share.py | 27 +- .../share_processors/s3_process_share.py | 33 +- ...215e_backfill_dataset_table_permissions.py | 6 +- tests/api/conftest.py | 19 +- tests/api/test_share.py | 118 +++--- tests/tasks/conftest.py | 20 +- tests/tasks/test_lf_share_manager.py | 77 ++-- tests/tasks/test_s3_share_manager.py | 70 ++-- tests/tasks/test_subscriptions.py | 11 +- 34 files changed, 712 
insertions(+), 679 deletions(-) delete mode 100644 backend/dataall/db/models/ShareObjectItem.py create mode 100644 backend/dataall/modules/dataset_sharing/db/Enums.py create mode 100644 backend/dataall/modules/dataset_sharing/db/__init__.py rename backend/dataall/{db/models/ShareObject.py => modules/dataset_sharing/db/models.py} (51%) create mode 100644 backend/dataall/modules/dataset_sharing/services/__init__.py diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 734e3dcbb..0e5c1cb86 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -28,6 +28,8 @@ NamingConventionPattern, ) from dataall.core.group.services.group_resource_manager import GroupResourceManager +# TODO get rid of it +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject log = logging.getLogger(__name__) @@ -988,14 +990,14 @@ def delete_environment(session, username, groups, uri, data=None, check_perm=Non ) env_shared_with_objects = ( - session.query(models.ShareObject) - .filter(models.ShareObject.environmentUri == environment.environmentUri) + session.query(ShareObject) + .filter(ShareObject.environmentUri == environment.environmentUri) .all() ) for share in env_shared_with_objects: ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.shareUri == share.shareUri) + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) .delete() ) session.delete(share) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index f43614421..6f57e38fc 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -11,6 +11,8 @@ ) from dataall.utils.slugify import slugify from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem +from 
dataall.modules.dataset_sharing.services.share_object import ShareItemSM log = logging.getLogger(__name__) @@ -185,29 +187,29 @@ def list_available_datasets( cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( session, uri ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() shared = ( session.query( - models.ShareObject.datasetUri.label('datasetUri'), + ShareObject.datasetUri.label('datasetUri'), literal(cluster.clusterUri).label('clusterUri'), ) .join( models.RedshiftCluster, models.RedshiftCluster.environmentUri - == models.ShareObject.environmentUri, + == ShareObject.environmentUri, ) .filter( and_( models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObjectItem.status.in_(share_item_shared_states), or_( - models.ShareObject.owner == username, - models.ShareObject.principalId.in_(groups), + ShareObject.owner == username, + ShareObject.principalId.in_(groups), ), ) ) - .group_by(models.ShareObject.datasetUri, models.RedshiftCluster.clusterUri) + .group_by(ShareObject.datasetUri, models.RedshiftCluster.clusterUri) ) created = ( session.query( @@ -300,36 +302,36 @@ def list_available_cluster_tables( cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( session, uri ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() shared = ( session.query( - models.ShareObject.datasetUri.label('datasetUri'), - models.ShareObjectItem.itemUri.label('tableUri'), + ShareObject.datasetUri.label('datasetUri'), + ShareObjectItem.itemUri.label('tableUri'), literal(cluster.clusterUri).label('clusterUri'), ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) 
.join( models.RedshiftCluster, models.RedshiftCluster.environmentUri - == models.ShareObject.environmentUri, + == ShareObject.environmentUri, ) .filter( and_( models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObjectItem.status.in_(share_item_shared_states), or_( - models.ShareObject.owner == username, - models.ShareObject.principalId.in_(groups), + ShareObject.owner == username, + ShareObject.principalId.in_(groups), ), ) ) .group_by( - models.ShareObject.datasetUri, - models.ShareObjectItem.itemUri, + ShareObject.datasetUri, + ShareObjectItem.itemUri, models.RedshiftCluster.clusterUri, ) ) diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py index 8e981242b..f5b5200cf 100644 --- a/backend/dataall/db/models/Enums.py +++ b/backend/dataall/db/models/Enums.py @@ -109,57 +109,6 @@ class PrincipalType(Enum): ConsumptionRole = 'ConsumptionRole' -class ShareObjectPermission(Enum): - Approvers = '999' - Requesters = '800' - DatasetAdmins = '700' - NoPermission = '000' - - -class ShareObjectStatus(Enum): - Deleted = 'Deleted' - Approved = 'Approved' - Rejected = 'Rejected' - Revoked = 'Revoked' - Draft = 'Draft' - Submitted = 'Submitted' - Revoke_In_Progress = 'Revoke_In_Progress' - Share_In_Progress = 'Share_In_Progress' - Processed = 'Processed' - - -class ShareItemStatus(Enum): - Deleted = 'Deleted' - PendingApproval = 'PendingApproval' - Share_Approved = 'Share_Approved' - Share_Rejected = 'Share_Rejected' - Share_In_Progress = 'Share_In_Progress' - Share_Succeeded = 'Share_Succeeded' - Share_Failed = 'Share_Failed' - Revoke_Approved = 'Revoke_Approved' - Revoke_In_Progress = 'Revoke_In_Progress' - Revoke_Failed = 'Revoke_Failed' - Revoke_Succeeded = 'Revoke_Succeeded' - - -class ShareObjectActions(Enum): - Submit = 'Submit' - Approve = 'Approve' - Reject = 'Reject' - RevokeItems = 'RevokeItems' - Start = 'Start' - Finish = 'Finish' - FinishPending = 
'FinishPending' - Delete = 'Delete' - - -class ShareItemActions(Enum): - AddItem = 'AddItem' - RemoveItem = 'RemoveItem' - Failure = 'Failure' - Success = 'Success' - - class ConfidentialityClassification(Enum): Unclassified = 'Unclassified' Official = 'Official' diff --git a/backend/dataall/db/models/ShareObjectItem.py b/backend/dataall/db/models/ShareObjectItem.py deleted file mode 100644 index dac037687..000000000 --- a/backend/dataall/db/models/ShareObjectItem.py +++ /dev/null @@ -1,27 +0,0 @@ -from datetime import datetime - -from sqlalchemy import Column, DateTime, String - -from .Enums import ShareItemStatus -from .. import Base, utils - - -class ShareObjectItem(Base): - __tablename__ = 'share_object_item' - shareUri = Column(String, nullable=False) - shareItemUri = Column( - String, default=utils.uuid('shareitem'), nullable=False, primary_key=True - ) - itemType = Column(String, nullable=False) - itemUri = Column(String, nullable=False) - itemName = Column(String, nullable=False) - permission = Column(String, nullable=True) - created = Column(DateTime, nullable=False, default=datetime.now) - updated = Column(DateTime, nullable=True, onupdate=datetime.now) - deleted = Column(DateTime, nullable=True) - owner = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=True) - GlueTableName = Column(String, nullable=True) - S3AccessPointName = Column(String, nullable=True) - status = Column(String, nullable=False, default=ShareItemStatus.PendingApproval.value) - action = Column(String, nullable=True) diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index fff02245e..fbed60b1e 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -21,8 +21,6 @@ from .ResourcePolicy import ResourcePolicy from .ResourcePolicyPermission import ResourcePolicyPermission from .SagemakerStudio import SagemakerStudio, SagemakerStudioUserProfile -from .ShareObject import ShareObject 
-from .ShareObjectItem import ShareObjectItem from .DataPipeline import DataPipeline from .DataPipelineEnvironment import DataPipelineEnvironment from .Stack import Stack diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 46f0261a1..ec634bc41 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -2,10 +2,13 @@ from dataall import db from dataall import utils +from dataall.api.Objects.Principal.resolvers import get_principal from dataall.api.constants import * from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -16,7 +19,7 @@ def get_share_object_dataset(context, source, **kwargs): if not source: return None with context.engine.scoped_session() as session: - share: models.ShareObject = session.query(models.ShareObject).get( + share: ShareObject = session.query(ShareObject).get( source.shareUri ) return session.query(Dataset).get(share.datasetUri) @@ -41,7 +44,7 @@ def create_share_object( input['itemUri'] = itemUri input['itemType'] = itemType input['datasetUri'] = datasetUri - return db.api.ShareObject.create_share_object( + return ShareObjectService.create_share_object( session=session, username=context.username, groups=context.groups, @@ -53,7 +56,7 @@ def create_share_object( def submit_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return db.api.ShareObject.submit_share_object( + return 
ShareObjectService.submit_share_object( session=session, username=context.username, groups=context.groups, @@ -65,7 +68,7 @@ def submit_share_object(context: Context, source, shareUri: str = None): def approve_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - share = db.api.ShareObject.approve_share_object( + share = ShareObjectService.approve_share_object( session=session, username=context.username, groups=context.groups, @@ -88,7 +91,7 @@ def approve_share_object(context: Context, source, shareUri: str = None): def reject_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return db.api.ShareObject.reject_share_object( + return ShareObjectService.reject_share_object( session=session, username=context.username, groups=context.groups, @@ -100,7 +103,7 @@ def reject_share_object(context: Context, source, shareUri: str = None): def revoke_items_share_object(context: Context, source, input): with context.engine.scoped_session() as session: - share = db.api.ShareObject.revoke_items_share_object( + share = ShareObjectService.revoke_items_share_object( session=session, username=context.username, groups=context.groups, @@ -123,11 +126,11 @@ def revoke_items_share_object(context: Context, source, input): def delete_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - share = db.api.ShareObject.get_share_by_uri(session, shareUri) + share = ShareObjectService.get_share_by_uri(session, shareUri) if not share: raise db.exceptions.ObjectNotFound('ShareObject', shareUri) - db.api.ShareObject.delete_share_object( + ShareObjectService.delete_share_object( session=session, username=context.username, groups=context.groups, @@ -140,7 +143,7 @@ def delete_share_object(context: Context, source, shareUri: str = None): def add_shared_item(context, source, shareUri: str = None, input: dict = None): with 
context.engine.scoped_session() as session: - share_item = db.api.ShareObject.add_share_object_item( + share_item = ShareObjectService.add_share_object_item( session=session, username=context.username, groups=context.groups, @@ -153,13 +156,13 @@ def add_shared_item(context, source, shareUri: str = None, input: dict = None): def remove_shared_item(context, source, shareItemUri: str = None): with context.engine.scoped_session() as session: - share_item: models.ShareObjectItem = session.query(models.ShareObjectItem).get( + share_item: ShareObjectItem = session.query(ShareObjectItem).get( shareItemUri ) if not share_item: raise db.exceptions.ObjectNotFound('ShareObjectItem', shareItemUri) - share = db.api.ShareObject.get_share_by_uri(session, share_item.shareUri) - db.api.ShareObject.remove_share_object_item( + share = ShareObjectService.get_share_by_uri(session, share_item.shareUri) + ShareObjectService.remove_share_object_item( session=session, username=context.username, groups=context.groups, @@ -175,14 +178,14 @@ def remove_shared_item(context, source, shareItemUri: str = None): def list_shared_items( - context: Context, source: models.ShareObject, filter: dict = None + context: Context, source: ShareObject, filter: dict = None ): if not source: return None if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.ShareObject.list_shared_items( + return ShareObjectService.list_shared_items( session=session, username=context.username, groups=context.groups, @@ -192,11 +195,11 @@ def list_shared_items( ) -def resolve_shared_item(context, source: models.ShareObjectItem, **kwargs): +def resolve_shared_item(context, source: ShareObjectItem, **kwargs): if not source: return None with context.engine.scoped_session() as session: - return db.api.ShareObject.get_share_item( + return ShareObjectService.get_share_item( session=session, username=context.username, groups=context.groups, @@ -208,7 +211,7 @@ def resolve_shared_item(context, 
source: models.ShareObjectItem, **kwargs): def get_share_object(context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return db.api.ShareObject.get_share_object( + return ShareObjectService.get_share_object( session=session, username=context.username, groups=context.groups, @@ -218,7 +221,7 @@ def get_share_object(context, source, shareUri: str = None): ) -def resolve_user_role(context: Context, source: models.ShareObject, **kwargs): +def resolve_user_role(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -246,7 +249,7 @@ def resolve_user_role(context: Context, source: models.ShareObject, **kwargs): return ShareObjectPermission.NoPermission.value -def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): +def resolve_dataset(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -271,10 +274,9 @@ def union_resolver(object, *_): return 'DatasetStorageLocation' -def resolve_principal(context: Context, source: models.ShareObject, **kwargs): +def resolve_principal(context: Context, source: ShareObject, **kwargs): if not source: return None - from ..Principal.resolvers import get_principal with context.engine.scoped_session() as session: return get_principal( @@ -282,13 +284,13 @@ def resolve_principal(context: Context, source: models.ShareObject, **kwargs): ) -def resolve_group(context: Context, source: models.ShareObject, **kwargs): +def resolve_group(context: Context, source: ShareObject, **kwargs): if not source: return None return source.groupUri -def resolve_consumption_data(context: Context, source: models.ShareObject, **kwargs): +def resolve_consumption_data(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: @@ -304,33 +306,33 @@ def resolve_consumption_data(context: Context, source: 
models.ShareObject, **kwa } -def resolve_share_object_statistics(context: Context, source: models.ShareObject, **kwargs): +def resolve_share_object_statistics(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - return db.api.ShareObject.resolve_share_object_statistics( + return ShareObjectService.resolve_share_object_statistics( session, source.shareUri ) -def resolve_existing_shared_items(context: Context, source: models.ShareObject, **kwargs): +def resolve_existing_shared_items(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - return db.api.ShareObject.check_existing_shared_items( + return ShareObjectService.check_existing_shared_items( session, source.shareUri ) def list_shareable_objects( - context: Context, source: models.ShareObject, filter: dict = None + context: Context, source: ShareObject, filter: dict = None ): if not source: return None if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return db.api.ShareObject.list_shareable_items( + return ShareObjectService.list_shareable_items( session=session, username=context.username, groups=context.groups, @@ -344,7 +346,7 @@ def list_shares_in_my_inbox(context: Context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.ShareObject.list_user_received_share_requests( + return ShareObjectService.list_user_received_share_requests( session=session, username=context.username, groups=context.groups, @@ -358,7 +360,7 @@ def list_shares_in_my_outbox(context: Context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.ShareObject.list_user_sent_share_requests( + return ShareObjectService.list_user_sent_share_requests( session=session, username=context.username, 
groups=context.groups, diff --git a/backend/dataall/modules/dataset_sharing/db/Enums.py b/backend/dataall/modules/dataset_sharing/db/Enums.py new file mode 100644 index 000000000..e76485bd2 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/db/Enums.py @@ -0,0 +1,52 @@ +from enum import Enum + + +class ShareObjectStatus(Enum): + Deleted = 'Deleted' + Approved = 'Approved' + Rejected = 'Rejected' + Revoked = 'Revoked' + Draft = 'Draft' + Submitted = 'Submitted' + Revoke_In_Progress = 'Revoke_In_Progress' + Share_In_Progress = 'Share_In_Progress' + Processed = 'Processed' + + +class ShareObjectPermission(Enum): + Approvers = '999' + Requesters = '800' + DatasetAdmins = '700' + NoPermission = '000' + + +class ShareItemStatus(Enum): + Deleted = 'Deleted' + PendingApproval = 'PendingApproval' + Share_Approved = 'Share_Approved' + Share_Rejected = 'Share_Rejected' + Share_In_Progress = 'Share_In_Progress' + Share_Succeeded = 'Share_Succeeded' + Share_Failed = 'Share_Failed' + Revoke_Approved = 'Revoke_Approved' + Revoke_In_Progress = 'Revoke_In_Progress' + Revoke_Failed = 'Revoke_Failed' + Revoke_Succeeded = 'Revoke_Succeeded' + + +class ShareObjectActions(Enum): + Submit = 'Submit' + Approve = 'Approve' + Reject = 'Reject' + RevokeItems = 'RevokeItems' + Start = 'Start' + Finish = 'Finish' + FinishPending = 'FinishPending' + Delete = 'Delete' + + +class ShareItemActions(Enum): + AddItem = 'AddItem' + RemoveItem = 'RemoveItem' + Failure = 'Failure' + Success = 'Success' diff --git a/backend/dataall/modules/dataset_sharing/db/__init__.py b/backend/dataall/modules/dataset_sharing/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/db/models/ShareObject.py b/backend/dataall/modules/dataset_sharing/db/models.py similarity index 51% rename from backend/dataall/db/models/ShareObject.py rename to backend/dataall/modules/dataset_sharing/db/models.py index 220099fd5..43436c290 100644 --- a/backend/dataall/db/models/ShareObject.py 
+++ b/backend/dataall/modules/dataset_sharing/db/models.py @@ -4,8 +4,8 @@ from sqlalchemy import Boolean, Column, String, DateTime from sqlalchemy.orm import query_expression -from .Enums import ShareObjectStatus -from .. import Base, utils +from dataall.db import Base, utils +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareItemStatus def in_one_month(): @@ -35,3 +35,25 @@ class ShareObject(Base): confirmed = Column(Boolean, default=False) userRoleForShareObject = query_expression() existingSharedItems = query_expression() + + +class ShareObjectItem(Base): + __tablename__ = 'share_object_item' + shareUri = Column(String, nullable=False) + shareItemUri = Column( + String, default=utils.uuid('shareitem'), nullable=False, primary_key=True + ) + itemType = Column(String, nullable=False) + itemUri = Column(String, nullable=False) + itemName = Column(String, nullable=False) + permission = Column(String, nullable=True) + created = Column(DateTime, nullable=False, default=datetime.now) + updated = Column(DateTime, nullable=True, onupdate=datetime.now) + deleted = Column(DateTime, nullable=True) + owner = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=True) + GlueTableName = Column(String, nullable=True) + S3AccessPointName = Column(String, nullable=True) + status = Column(String, nullable=False, default=ShareItemStatus.PendingApproval.value) + action = Column(String, nullable=True) + diff --git a/backend/dataall/modules/dataset_sharing/services/__init__.py b/backend/dataall/modules/dataset_sharing/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index 90a5334d5..e2996d746 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py 
@@ -6,8 +6,10 @@ from dataall.api.constants import ShareableType, PrincipalType from dataall.db import models, permissions -from dataall.db.api import has_resource_perm, ShareItemSM +from dataall.db.api import has_resource_perm from dataall.db.paginator import paginate +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -21,25 +23,25 @@ def paginated_shared_with_environment_datasets( share_item_shared_states = ShareItemSM.get_share_item_shared_states() q = ( session.query( - models.ShareObjectItem.shareUri.label('shareUri'), + ShareObjectItem.shareUri.label('shareUri'), Dataset.datasetUri.label('datasetUri'), Dataset.name.label('datasetName'), Dataset.description.label('datasetDescription'), models.Environment.environmentUri.label('environmentUri'), models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObject.principalType.label('principalType'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + ShareObject.created.label('created'), + ShareObject.principalId.label('principalId'), + ShareObject.principalType.label('principalType'), + ShareObjectItem.itemType.label('itemType'), + ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + ShareObjectItem.GlueTableName.label('GlueTableName'), + ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), models.Organization.organizationUri.label('organizationUri'), models.Organization.name.label('organizationName'), case( [ ( - models.ShareObjectItem.itemType + ShareObjectItem.itemType == 
ShareableType.Table.value, func.concat( DatasetTable.GlueDatabaseName, @@ -48,7 +50,7 @@ def paginated_shared_with_environment_datasets( ), ), ( - models.ShareObjectItem.itemType + ShareObjectItem.itemType == ShareableType.StorageLocation.value, func.concat(DatasetStorageLocation.name), ), @@ -57,12 +59,12 @@ def paginated_shared_with_environment_datasets( ).label('itemAccess'), ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .join( Dataset, - models.ShareObject.datasetUri == Dataset.datasetUri, + ShareObject.datasetUri == Dataset.datasetUri, ) .join( models.Environment, @@ -75,38 +77,38 @@ def paginated_shared_with_environment_datasets( ) .outerjoin( DatasetTable, - models.ShareObjectItem.itemUri == DatasetTable.tableUri, + ShareObjectItem.itemUri == DatasetTable.tableUri, ) .outerjoin( DatasetStorageLocation, - models.ShareObjectItem.itemUri + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, ) .filter( and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == uri, + ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.environmentUri == uri, ) ) ) if data.get('datasetUri'): datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) + q = q.filter(ShareObject.datasetUri == datasetUri) if data.get('itemTypes', None): itemTypes = data.get('itemTypes') q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) + or_(*[ShareObjectItem.itemType == t for t in itemTypes]) ) if data.get("uniqueShares", False): - q = q.filter(models.ShareObject.principalType != PrincipalType.ConsumptionRole.value) - q = q.distinct(models.ShareObject.shareUri) + q = q.filter(ShareObject.principalType != PrincipalType.ConsumptionRole.value) + q = q.distinct(ShareObject.shareUri) if data.get('term'): term = data.get('term') - q = 
q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) + q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) return paginate( query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) @@ -119,24 +121,24 @@ def paginated_shared_with_environment_group_datasets( share_item_shared_states = ShareItemSM.get_share_item_shared_states() q = ( session.query( - models.ShareObjectItem.shareUri.label('shareUri'), + ShareObjectItem.shareUri.label('shareUri'), Dataset.datasetUri.label('datasetUri'), Dataset.name.label('datasetName'), Dataset.description.label('datasetDescription'), models.Environment.environmentUri.label('environmentUri'), models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + ShareObject.created.label('created'), + ShareObject.principalId.label('principalId'), + ShareObjectItem.itemType.label('itemType'), + ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + ShareObjectItem.GlueTableName.label('GlueTableName'), + ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), models.Organization.organizationUri.label('organizationUri'), models.Organization.name.label('organizationName'), case( [ ( - models.ShareObjectItem.itemType + ShareObjectItem.itemType == ShareableType.Table.value, func.concat( DatasetTable.GlueDatabaseName, @@ -145,7 +147,7 @@ def paginated_shared_with_environment_group_datasets( ), ), ( - models.ShareObjectItem.itemType + ShareObjectItem.itemType == ShareableType.StorageLocation.value, func.concat(DatasetStorageLocation.name), ), @@ -154,12 +156,12 @@ def paginated_shared_with_environment_group_datasets( ).label('itemAccess'), ) .join( - 
models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .join( Dataset, - models.ShareObject.datasetUri == Dataset.datasetUri, + ShareObject.datasetUri == Dataset.datasetUri, ) .join( models.Environment, @@ -172,34 +174,34 @@ def paginated_shared_with_environment_group_datasets( ) .outerjoin( DatasetTable, - models.ShareObjectItem.itemUri == DatasetTable.tableUri, + ShareObjectItem.itemUri == DatasetTable.tableUri, ) .outerjoin( DatasetStorageLocation, - models.ShareObjectItem.itemUri + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, ) .filter( and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == envUri, - models.ShareObject.principalId == groupUri, + ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.environmentUri == envUri, + ShareObject.principalId == groupUri, ) ) ) if data.get('datasetUri'): datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) + q = q.filter(ShareObject.datasetUri == datasetUri) if data.get('itemTypes', None): itemTypes = data.get('itemTypes') q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) + or_(*[ShareObjectItem.itemType == t for t in itemTypes]) ) if data.get('term'): term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) + q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) return paginate( query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) diff --git a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py index 896094ab3..59c12fef5 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py @@ 
-1,12 +1,13 @@ from dataall.db import models from dataall.db.api import Notification +from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.datasets.db.models import Dataset class ShareNotificationService: @staticmethod def notify_share_object_submission( - session, username: str, dataset: Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: ShareObject ): notifications = [] # stewards = Notification.get_dataset_stewards(session, dataset) @@ -25,7 +26,7 @@ def notify_share_object_submission( @staticmethod def notify_share_object_approval( - session, username: str, dataset: Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: ShareObject ): notifications = [] targeted_users = ShareNotificationService._get_share_object_targeted_users( @@ -46,7 +47,7 @@ def notify_share_object_approval( @staticmethod def notify_share_object_rejection( - session, username: str, dataset: Dataset, share: models.ShareObject + session, username: str, dataset: Dataset, share: ShareObject ): notifications = [] targeted_users = ShareNotificationService._get_share_object_targeted_users( @@ -67,7 +68,7 @@ def notify_share_object_rejection( @staticmethod def notify_new_data_available_from_owners( - session, dataset: Dataset, share: models.ShareObject, s3_prefix + session, dataset: Dataset, share: ShareObject, s3_prefix ): notifications = [] targeted_users = ShareNotificationService._get_share_object_targeted_users( diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index 264534ff2..0fa291cbb 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -9,7 +9,10 @@ ) from dataall.db import api, utils from dataall.db import models, exceptions, permissions, paginate -from dataall.db.models.Enums import 
ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType +from dataall.db.models.Enums import ShareableType, PrincipalType +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ + ShareItemStatus +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ @@ -146,7 +149,7 @@ def run_transition(self, transition): def update_state(self, session, share, new_state): logger.info(f"Updating share object {share.shareUri} in DB from {self._state} to state {new_state}") - ShareObject.update_share_object_status( + ShareObjectService.update_share_object_status( session=session, shareUri=share.shareUri, status=new_state @@ -276,14 +279,14 @@ def update_state(self, session, share_uri, new_state): if share_uri and (new_state != self._state): if new_state == ShareItemStatus.Deleted.value: logger.info(f"Deleting share items in DB in {self._state} state") - ShareObject.delete_share_item_status_batch( + ShareObjectService.delete_share_item_status_batch( session=session, share_uri=share_uri, status=self._state ) else: logger.info(f"Updating share items in DB from {self._state} to state {new_state}") - ShareObject.update_share_item_status_batch( + ShareObjectService.update_share_item_status_batch( session=session, share_uri=share_uri, old_status=self._state, @@ -296,7 +299,7 @@ def update_state(self, session, share_uri, new_state): def update_state_single_item(self, session, share_item, new_state): logger.info(f"Updating share item in DB {share_item.shareItemUri} status to {new_state}") - ShareObject.update_share_item_status( + ShareObjectService.update_share_item_status( session=session, uri=share_item.shareItemUri, 
status=new_state @@ -322,7 +325,7 @@ def get_share_item_revokable_states(): ] -class ShareObject: +class ShareObjectService: @staticmethod @has_resource_perm(permissions.CREATE_SHARE_OBJECT) def create_share_object( @@ -332,7 +335,7 @@ def create_share_object( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObject: + ) -> ShareObject: if not data: raise exceptions.RequiredParameter(data) if not data.get('principalId'): @@ -385,7 +388,7 @@ def create_share_object( message=f'Team: {groupUri} is managing the dataset {dataset.name}', ) - ShareObject.validate_group_membership( + ShareObjectService.validate_group_membership( session=session, username=username, groups=groups, @@ -393,20 +396,20 @@ def create_share_object( environment_uri=uri, ) - share: models.ShareObject = ( - session.query(models.ShareObject) + share: ShareObject = ( + session.query(ShareObject) .filter( and_( - models.ShareObject.datasetUri == datasetUri, - models.ShareObject.principalId == principalId, - models.ShareObject.environmentUri == environmentUri, - models.ShareObject.groupUri == groupUri, + ShareObject.datasetUri == datasetUri, + ShareObject.principalId == principalId, + ShareObject.environmentUri == environmentUri, + ShareObject.groupUri == groupUri, ) ) .first() ) if not share: - share = models.ShareObject( + share = ShareObject( datasetUri=dataset.datasetUri, environmentUri=environment.environmentUri, owner=username, @@ -428,11 +431,11 @@ def create_share_object( item = session.query(DatasetTable).get(itemUri) share_item = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.itemUri == itemUri, + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.itemUri == itemUri, ) ) .first() @@ -443,7 +446,7 @@ def create_share_object( ) if not share_item and item: - new_share_item: models.ShareObjectItem = models.ShareObjectItem( + new_share_item: 
ShareObjectItem = ShareObjectItem( shareUri=share.shareUri, itemUri=itemUri, itemType=itemType, @@ -481,14 +484,14 @@ def create_share_object( group=groupUri, permissions=permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, permissions=permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) if dataset.SamlAdminGroupName != environment.SamlGroupName: ResourcePolicy.attach_resource_policy( @@ -496,7 +499,7 @@ def create_share_object( group=environment.SamlGroupName, permissions=permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) # Attaching REQUESTER permissions to: # dataset.stewards (includes the dataset Admins) @@ -505,7 +508,7 @@ def create_share_object( group=dataset.stewards, permissions=permissions.SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) return share @@ -540,10 +543,10 @@ def submit_share_object( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObject: - share = ShareObject.get_share_by_uri(session, uri) + ) -> ShareObject: + share = ShareObjectService.get_share_by_uri(session, uri) dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) + share_items_states = ShareObjectService.get_share_items_states(session, uri) valid_states = [ShareItemStatus.PendingApproval.value] valid_share_items_states = [x for x in valid_states if x in share_items_states] @@ -578,10 +581,10 @@ def approve_share_object( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObject: - share 
= ShareObject.get_share_by_uri(session, uri) + ) -> ShareObject: + share = ShareObjectService.get_share_by_uri(session, uri) dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) + share_items_states = ShareObjectService.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Approve.value) @@ -594,11 +597,11 @@ def approve_share_object( Share_SM.update_state(session, share, new_share_state) # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE - share_table_items = session.query(models.ShareObjectItem).filter( + share_table_items = session.query(ShareObjectItem).filter( ( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == ShareableType.Table.value + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemType == ShareableType.Table.value ) ) ).all() @@ -624,11 +627,11 @@ def reject_share_object( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObject: + ) -> ShareObject: - share = ShareObject.get_share_by_uri(session, uri) + share = ShareObjectService.get_share_by_uri(session, uri) dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) + share_items_states = ShareObjectService.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Reject.value) @@ -659,12 +662,12 @@ def revoke_items_share_object( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObject: + ) -> ShareObject: - share = ShareObject.get_share_by_uri(session, uri) + share = ShareObjectService.get_share_by_uri(session, uri) dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) - revoked_items_states = ShareObject.get_share_items_states(session, uri, 
data.get("revokedItemUris")) - revoked_items = [ShareObject.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] + revoked_items_states = ShareObjectService.get_share_items_states(session, uri, data.get("revokedItemUris")) + revoked_items = [ShareObjectService.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] if revoked_items_states == []: raise exceptions.ShareItemsFound( @@ -704,7 +707,7 @@ def get_share_object( data: dict = None, check_perm: bool = False, ): - share = session.query(models.ShareObject).get(uri) + share = session.query(ShareObject).get(uri) if not share: raise exceptions.ObjectNotFound('Share', uri) @@ -720,9 +723,9 @@ def get_share_item( data: dict = None, check_perm: bool = False, ): - share_item: models.ShareObjectItem = data.get( + share_item: ShareObjectItem = data.get( 'share_item', - ShareObject.get_share_item_by_uri(session, data['shareItemUri']), + ShareObjectService.get_share_item_by_uri(session, data['shareItemUri']), ) if share_item.itemType == ShareableType.Table.value: return session.query(DatasetTable).get(share_item.itemUri) @@ -731,17 +734,17 @@ def get_share_item( @staticmethod def get_share_by_uri(session, uri): - share = session.query(models.ShareObject).get(uri) + share = session.query(ShareObject).get(uri) if not share: raise exceptions.ObjectNotFound('Share', uri) return share @staticmethod def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): - share: models.ShareObject = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset_uri) - .filter(models.ShareObject.owner == dataset_owner) + share: ShareObject = ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset_uri) + .filter(ShareObject.owner == dataset_owner) .first() ) return share @@ -755,11 +758,11 @@ def add_share_object_item( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.ShareObjectItem: + ) -> ShareObjectItem: itemType = 
data.get('itemType') itemUri = data.get('itemUri') item = None - share: models.ShareObject = session.query(models.ShareObject).get(uri) + share: ShareObject = session.query(ShareObject).get(uri) dataset: Dataset = session.query(Dataset).get(share.datasetUri) target_environment: models.Environment = session.query(models.Environment).get( share.environmentUri @@ -785,12 +788,12 @@ def add_share_object_item( if not item: raise exceptions.ObjectNotFound('ShareObjectItem', itemUri) - shareItem: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) + shareItem: ShareObjectItem = ( + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemUri == itemUri, + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemUri == itemUri, ) ) .first() @@ -802,7 +805,7 @@ def add_share_object_item( logger.info(f"S3AccessPointName={S3AccessPointName}") if not shareItem: - shareItem = models.ShareObjectItem( + shareItem = ShareObjectItem( shareUri=uri, itemUri=itemUri, itemType=itemType, @@ -834,13 +837,13 @@ def remove_share_object_item( check_perm: bool = False, ) -> bool: - share_item: models.ShareObjectItem = data.get( + share_item: ShareObjectItem = data.get( 'share_item', - ShareObject.get_share_item_by_uri(session, data['shareItemUri']), + ShareObjectService.get_share_item_by_uri(session, data['shareItemUri']), ) - share: models.ShareObject = data.get( + share: ShareObject = data.get( 'share', - ShareObject.get_share_by_uri(session, uri), + ShareObjectService.get_share_by_uri(session, uri), ) Item_SM = ShareItemSM(share_item.status) @@ -852,8 +855,8 @@ def remove_share_object_item( @staticmethod @has_resource_perm(permissions.DELETE_SHARE_OBJECT) def delete_share_object(session, username, groups, uri, data=None, check_perm=None): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - share_items_states = ShareObject.get_share_items_states(session, uri) + share: ShareObject = 
ShareObjectService.get_share_by_uri(session, uri) + share_items_states = ShareObjectService.get_share_items_states(session, uri) shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in share_items_states] Share_SM = ShareObjectSM(share.status) @@ -876,12 +879,12 @@ def delete_share_object(session, username, groups, uri, data=None, check_perm=No @staticmethod def check_existing_shared_items(session, uri): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) share_item_shared_states = ShareItemSM.get_share_item_shared_states() - shared_items = session.query(models.ShareObjectItem).filter( + shared_items = session.query(ShareObjectItem).filter( and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.status.in_(share_item_shared_states) + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.status.in_(share_item_shared_states) ) ).all() if shared_items: @@ -890,13 +893,13 @@ def check_existing_shared_items(session, uri): @staticmethod def check_existing_shared_items_of_type(session, uri, item_type): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) share_item_shared_states = ShareItemSM.get_share_item_shared_states() - shared_items = session.query(models.ShareObjectItem).filter( + shared_items = session.query(ShareObjectItem).filter( and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.itemType == item_type, - models.ShareObjectItem.status.in_(share_item_shared_states) + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.itemType == item_type, + ShareObjectItem.status.in_(share_item_shared_states) ) ).all() if shared_items: @@ -905,11 +908,11 @@ def check_existing_shared_items_of_type(session, uri, item_type): @staticmethod def check_pending_share_items(session, uri): - 
share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - shared_items = session.query(models.ShareObjectItem).filter( + share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + shared_items = session.query(ShareObjectItem).filter( and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.status.in_([ShareItemStatus.PendingApproval.value]) + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.status.in_([ShareItemStatus.PendingApproval.value]) ) ).all() if shared_items: @@ -918,7 +921,7 @@ def check_pending_share_items(session, uri): @staticmethod def get_share_item_by_uri(session, uri): - share_item: models.ShareObjectItem = session.query(models.ShareObjectItem).get( + share_item: ShareObjectItem = session.query(ShareObjectItem).get( uri ) if not share_item: @@ -929,9 +932,9 @@ def get_share_item_by_uri(session, uri): @staticmethod @has_resource_perm(permissions.LIST_SHARED_ITEMS) def list_shared_items(session, username, groups, uri, data=None, check_perm=None): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - query = session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.shareUri == share.shareUri, + share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + query = session.query(ShareObjectItem).filter( + ShareObjectItem.shareUri == share.shareUri, ) return paginate( query, page=data.get('page', 1), page_size=data.get('pageSize', 5) @@ -942,8 +945,8 @@ def list_shareable_items( session, username, groups, uri, data=None, check_perm=None ): - share: models.ShareObject = data.get( - 'share', ShareObject.get_share_by_uri(session, uri) + share: ShareObject = data.get( + 'share', ShareObjectService.get_share_by_uri(session, uri) ) share_item_revokable_states = ShareItemSM.get_share_item_revokable_states() datasetUri = share.datasetUri @@ -956,25 +959,25 @@ def list_shareable_items( func.coalesce('DatasetTable').label('itemType'), 
DatasetTable.GlueTableName.label('itemName'), DatasetTable.description.label('description'), - models.ShareObjectItem.shareItemUri.label('shareItemUri'), - models.ShareObjectItem.status.label('status'), + ShareObjectItem.shareItemUri.label('shareItemUri'), + ShareObjectItem.status.label('status'), case( - [(models.ShareObjectItem.shareItemUri.isnot(None), True)], + [(ShareObjectItem.shareItemUri.isnot(None), True)], else_=False, ).label('isShared'), ) .outerjoin( - models.ShareObjectItem, + ShareObjectItem, and_( - models.ShareObjectItem.shareUri == share.shareUri, - DatasetTable.tableUri == models.ShareObjectItem.itemUri, + ShareObjectItem.shareUri == share.shareUri, + DatasetTable.tableUri == ShareObjectItem.itemUri, ), ) .filter(DatasetTable.datasetUri == datasetUri) ) if data: if data.get("isRevokable"): - tables = tables.filter(models.ShareObjectItem.status.in_(share_item_revokable_states)) + tables = tables.filter(ShareObjectItem.status.in_(share_item_revokable_states)) # All folders from the dataset with a column isShared # marking the folder as part of the shareObject @@ -984,26 +987,26 @@ def list_shareable_items( func.coalesce('DatasetStorageLocation').label('itemType'), DatasetStorageLocation.S3Prefix.label('itemName'), DatasetStorageLocation.description.label('description'), - models.ShareObjectItem.shareItemUri.label('shareItemUri'), - models.ShareObjectItem.status.label('status'), + ShareObjectItem.shareItemUri.label('shareItemUri'), + ShareObjectItem.status.label('status'), case( - [(models.ShareObjectItem.shareItemUri.isnot(None), True)], + [(ShareObjectItem.shareItemUri.isnot(None), True)], else_=False, ).label('isShared'), ) .outerjoin( - models.ShareObjectItem, + ShareObjectItem, and_( - models.ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.shareUri == share.shareUri, DatasetStorageLocation.locationUri - == models.ShareObjectItem.itemUri, + == ShareObjectItem.itemUri, ), ) .filter(DatasetStorageLocation.datasetUri == datasetUri) ) 
if data: if data.get("isRevokable"): - locations = locations.filter(models.ShareObjectItem.status.in_(share_item_revokable_states)) + locations = locations.filter(ShareObjectItem.status.in_(share_item_revokable_states)) shareable_objects = tables.union(locations).subquery('shareable_objects') query = session.query(shareable_objects) @@ -1028,10 +1031,10 @@ def list_user_received_share_requests( session, username, groups, uri, data=None, check_perm=None ): query = ( - session.query(models.ShareObject) + session.query(ShareObject) .join( Dataset, - Dataset.datasetUri == models.ShareObject.datasetUri, + Dataset.datasetUri == ShareObject.datasetUri, ) .filter( or_( @@ -1050,17 +1053,17 @@ def list_user_sent_share_requests( session, username, groups, uri, data=None, check_perm=None ): query = ( - session.query(models.ShareObject) + session.query(ShareObject) .join( models.Environment, - models.Environment.environmentUri == models.ShareObject.environmentUri, + models.Environment.environmentUri == ShareObject.environmentUri, ) .filter( or_( - models.ShareObject.owner == username, + ShareObject.owner == username, and_( - models.ShareObject.groupUri.in_(groups), - models.ShareObject.principalType.in_([PrincipalType.Group.value, PrincipalType.ConsumptionRole.value]) + ShareObject.groupUri.in_(groups), + ShareObject.principalType.in_([PrincipalType.Group.value, PrincipalType.ConsumptionRole.value]) ), ) ) @@ -1073,11 +1076,11 @@ def get_share_by_dataset_and_environment(session, dataset_uri, environment_uri): models.EnvironmentGroup.environmentUri == environment_uri ) groups = [g.groupUri for g in environment_groups] - share = session.query(models.ShareObject).filter( + share = session.query(ShareObject).filter( and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.environmentUri == environment_uri, - models.ShareObject.groupUri.in_(groups), + ShareObject.datasetUri == dataset_uri, + ShareObject.environmentUri == environment_uri, + 
ShareObject.groupUri.in_(groups), ) ) if not share: @@ -1089,9 +1092,9 @@ def update_share_object_status( session, shareUri: str, status: str, - ) -> models.ShareObject: + ) -> ShareObject: - share = ShareObject.get_share_by_uri(session, shareUri) + share = ShareObjectService.get_share_by_uri(session, shareUri) share.status = status session.commit() return share @@ -1101,9 +1104,9 @@ def update_share_item_status( session, uri: str, status: str, - ) -> models.ShareObjectItem: + ) -> ShareObjectItem: - share_item = ShareObject.get_share_item_by_uri(session, uri) + share_item = ShareObjectService.get_share_item_by_uri(session, uri) share_item.status = status session.commit() return share_item @@ -1115,11 +1118,11 @@ def delete_share_item_status_batch( status: str, ): ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == share_uri, - models.ShareObjectItem.status == status + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.status == status ) ) .delete() @@ -1134,16 +1137,16 @@ def update_share_item_status_batch( ) -> bool: ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == share_uri, - models.ShareObjectItem.status == old_status + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.status == old_status ) ) .update( { - models.ShareObjectItem.status: new_status, + ShareObjectItem.status: new_status, } ) ) @@ -1152,15 +1155,15 @@ def update_share_item_status_batch( @staticmethod def find_share_item_by_table( session, - share: models.ShareObject, + share: ShareObject, table: DatasetTable, - ) -> models.ShareObjectItem: - share_item: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) + ) -> ShareObjectItem: + share_item: ShareObjectItem = ( + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.itemUri == table.tableUri, - models.ShareObjectItem.shareUri == share.shareUri, + 
ShareObjectItem.itemUri == table.tableUri, + ShareObjectItem.shareUri == share.shareUri, ) ) .first() @@ -1170,15 +1173,15 @@ def find_share_item_by_table( @staticmethod def find_share_item_by_folder( session, - share: models.ShareObject, + share: ShareObject, folder: DatasetStorageLocation, - ) -> models.ShareObjectItem: - share_item: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) + ) -> ShareObjectItem: + share_item: ShareObjectItem = ( + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.itemUri == folder.locationUri, - models.ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.itemUri == folder.locationUri, + ShareObjectItem.shareUri == share.shareUri, ) ) .first() @@ -1187,7 +1190,7 @@ def find_share_item_by_folder( @staticmethod def get_share_data(session, share_uri): - share: models.ShareObject = session.query(models.ShareObject).get(share_uri) + share: ShareObject = session.query(ShareObject).get(share_uri) if not share: raise exceptions.ObjectNotFound('Share', share_uri) @@ -1250,27 +1253,27 @@ def get_share_data(session, share_uri): @staticmethod def get_share_data_items(session, share_uri, status): - share: models.ShareObject = session.query(models.ShareObject).get(share_uri) + share: ShareObject = session.query(ShareObject).get(share_uri) if not share: raise exceptions.ObjectNotFound('Share', share_uri) tables = ( session.query(DatasetTable) .join( - models.ShareObjectItem, - models.ShareObjectItem.itemUri == DatasetTable.tableUri, + ShareObjectItem, + ShareObjectItem.itemUri == DatasetTable.tableUri, ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .filter( and_( - models.ShareObject.datasetUri == share.datasetUri, - models.ShareObject.environmentUri + ShareObject.datasetUri == share.datasetUri, + ShareObject.environmentUri == share.environmentUri, - models.ShareObject.shareUri == 
share_uri, - models.ShareObjectItem.status == status, + ShareObject.shareUri == share_uri, + ShareObjectItem.status == status, ) ) .all() @@ -1279,20 +1282,20 @@ def get_share_data_items(session, share_uri, status): folders = ( session.query(DatasetStorageLocation) .join( - models.ShareObjectItem, - models.ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, + ShareObjectItem, + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .filter( and_( - models.ShareObject.datasetUri == share.datasetUri, - models.ShareObject.environmentUri + ShareObject.datasetUri == share.datasetUri, + ShareObject.environmentUri == share.environmentUri, - models.ShareObject.shareUri == share_uri, - models.ShareObjectItem.status == status, + ShareObject.shareUri == share_uri, + ShareObjectItem.status == status, ) ) .all() @@ -1306,13 +1309,12 @@ def get_share_data_items(session, share_uri, status): @staticmethod def other_approved_share_object_exists(session, environment_uri, dataset_uri): return ( - session.query(models.ShareObject) + session.query(ShareObject) .filter( and_( models.Environment.environmentUri == environment_uri, - models.ShareObject.status - == models.Enums.ShareObjectStatus.Approved.value, - models.ShareObject.datasetUri == dataset_uri, + ShareObject.status == ShareObjectStatus.Approved.value, + ShareObject.datasetUri == dataset_uri, ) ) .all() @@ -1321,60 +1323,60 @@ def other_approved_share_object_exists(session, environment_uri, dataset_uri): @staticmethod def get_share_items_states(session, share_uri, item_uris=None): query = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .join( - models.ShareObject, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri, + ShareObject, + ShareObjectItem.shareUri == ShareObject.shareUri, ) .filter( and_( - 
models.ShareObject.shareUri == share_uri, + ShareObject.shareUri == share_uri, ) ) ) if item_uris: - query = query.filter(models.ShareObjectItem.shareItemUri.in_(item_uris)) - return [item.status for item in query.distinct(models.ShareObjectItem.status)] + query = query.filter(ShareObjectItem.shareItemUri.in_(item_uris)) + return [item.status for item in query.distinct(ShareObjectItem.status)] @staticmethod def resolve_share_object_statistics(session, uri, **kwargs): share_item_shared_states = ShareItemSM.get_share_item_shared_states() tables = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == 'DatasetTable', + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemType == 'DatasetTable', ) ) .count() ) locations = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == 'DatasetStorageLocation', + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemType == 'DatasetStorageLocation', ) ) .count() ) shared_items = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_(share_item_shared_states), ) ) .count() ) revoked_items = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_([ShareItemStatus.Revoke_Succeeded.value]), + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_([ShareItemStatus.Revoke_Succeeded.value]), ) ) .count() @@ -1384,11 +1386,11 @@ def resolve_share_object_statistics(session, uri, **kwargs): ShareItemStatus.Revoke_Failed.value ] failed_items = ( - session.query(models.ShareObjectItem) + 
session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(failed_states), + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_(failed_states), ) ) .count() @@ -1397,11 +1399,11 @@ def resolve_share_object_statistics(session, uri, **kwargs): ShareItemStatus.PendingApproval.value ] pending_items = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(pending_states), + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_(pending_states), ) ) .count() diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index f2444d40d..ab70bc627 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -13,8 +13,9 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import paginate, exceptions, models -from dataall.db.api import Environment, ShareObject, ResourcePolicy +from dataall.db.api import Environment, ResourcePolicy from dataall.db.api.organization import Organization +from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.datasets import Dataset from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService @@ -124,8 +125,8 @@ def resolve_user_role(context: Context, source: Dataset, **kwargs): else: with context.engine.scoped_session() as session: share = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == source.datasetUri) + session.query(ShareObject) + .filter(ShareObject.datasetUri == source.datasetUri) .first() ) if share and ( diff --git 
a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index 7a43b6ac6..27e5c1ec4 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -1,9 +1,6 @@ import logging -from botocore.exceptions import ClientError - from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.models import Dataset diff --git a/backend/dataall/modules/datasets/services/dataset_alarm_service.py b/backend/dataall/modules/datasets/services/dataset_alarm_service.py index 9283f2265..2748877fb 100644 --- a/backend/dataall/modules/datasets/services/dataset_alarm_service.py +++ b/backend/dataall/modules/datasets/services/dataset_alarm_service.py @@ -2,6 +2,7 @@ from datetime import datetime from dataall.db import models +from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.utils.alarm_service import AlarmService @@ -14,7 +15,7 @@ class DatasetAlarmService(AlarmService): def trigger_table_sharing_failure_alarm( self, table: DatasetTable, - share: models.ShareObject, + share: ShareObject, target_environment: models.Environment, ): log.info('Triggering share failure alarm...') @@ -46,7 +47,7 @@ def trigger_table_sharing_failure_alarm( def trigger_revoke_table_sharing_failure_alarm( self, table: DatasetTable, - share: models.ShareObject, + share: ShareObject, target_environment: models.Environment, ): log.info('Triggering share failure alarm...') diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 240c52f0e..2493ddede 100644 --- 
a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -5,7 +5,9 @@ from dataall.core.context import get_context from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import Glossary -from dataall.db import models, api, paginate, exceptions +from dataall.db import paginate, exceptions +from dataall.modules.dataset_sharing.db.models import ShareObjectItem +from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, CREATE_DATASET_FOLDER, \ @@ -136,13 +138,13 @@ def delete_dataset_location( location = DatasetLocationService.get_location_by_uri( session, data['locationUri'] ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() share_item = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.itemUri == location.locationUri, - models.ShareObjectItem.status.in_(share_item_shared_states) + ShareObjectItem.itemUri == location.locationUri, + ShareObjectItem.status.in_(share_item_shared_states) ) ) .first() @@ -152,8 +154,8 @@ def delete_dataset_location( action=DELETE_DATASET_FOLDER, message='Revoke all folder shares before deletion', ) - session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.itemUri == location.locationUri, + session.query(ShareObjectItem).filter( + ShareObjectItem.itemUri == location.locationUri, ).delete() session.delete(location) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 
388a14746..faf973106 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -16,8 +16,10 @@ has_resource_perm, ) from dataall.db.api import Organization -from dataall.db import models, api, exceptions, paginate, permissions +from dataall.db import models, exceptions, paginate, permissions from dataall.db.models.Enums import Language, ConfidentialityClassification +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService @@ -213,16 +215,16 @@ def get_dataset_by_uri(session, dataset_uri) -> Dataset: @staticmethod def query_user_datasets(session, username, groups, filter) -> Query: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() query = ( session.query(Dataset) .outerjoin( - models.ShareObject, - models.ShareObject.datasetUri == Dataset.datasetUri, + ShareObject, + ShareObject.datasetUri == Dataset.datasetUri, ) .outerjoin( - models.ShareObjectItem, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri + ShareObjectItem, + ShareObjectItem.shareUri == ShareObject.shareUri ) .filter( or_( @@ -230,12 +232,12 @@ def query_user_datasets(session, username, groups, filter) -> Query: Dataset.SamlAdminGroupName.in_(groups), Dataset.stewards.in_(groups), and_( - models.ShareObject.principalId.in_(groups), - models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.principalId.in_(groups), + ShareObjectItem.status.in_(share_item_shared_states), ), and_( - models.ShareObject.owner == username, - 
models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.owner == username, + ShareObjectItem.status.in_(share_item_shared_states), ), ) ) @@ -331,8 +333,8 @@ def update_dataset(session, uri, data=None) -> Dataset: @staticmethod def transfer_stewardship_to_owners(session, dataset): dataset_shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset.datasetUri) + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset.datasetUri) .all() ) if dataset_shares: @@ -342,7 +344,7 @@ def transfer_stewardship_to_owners(session, dataset): group=dataset.SamlAdminGroupName, permissions=permissions.SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) return dataset @@ -380,8 +382,8 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): ) dataset_shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset.datasetUri) + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset.datasetUri) .all() ) if dataset_shares: @@ -391,7 +393,7 @@ def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): group=new_stewards, permissions=permissions.SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) ResourcePolicy.delete_resource_policy( session=session, @@ -459,35 +461,35 @@ def get_dataset_tables(session, dataset_uri): @staticmethod def query_dataset_shares(session, dataset_uri) -> Query: - return session.query(models.ShareObject).filter( + return session.query(ShareObject).filter( and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.deleted.is_(None), + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), ) ) @staticmethod def paginated_dataset_shares( session, username, groups, uri, data=None, check_perm=None - ) -> 
[models.ShareObject]: + ) -> [ShareObject]: query = DatasetService.query_dataset_shares(session, uri) return paginate( query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) ).to_dict() @staticmethod - def list_dataset_shares(session, dataset_uri) -> [models.ShareObject]: + def list_dataset_shares(session, dataset_uri) -> [ShareObject]: """return the dataset shares""" query = DatasetService.query_dataset_shares(session, dataset_uri) return query.all() @staticmethod - def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [models.ShareObject]: - query = session.query(models.ShareObject).filter( + def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: + query = session.query(ShareObject).filter( and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.deleted.is_(None), - models.ShareObject.existingSharedItems.is_(True), + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ShareObject.existingSharedItems.is_(True), ) ) return query.all() @@ -532,19 +534,19 @@ def delete_dataset( @staticmethod def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): share_objects = ( - session.query(models.ShareObject) + session.query(ShareObject) .filter( and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.existingSharedItems.is_(False), + ShareObject.datasetUri == dataset_uri, + ShareObject.existingSharedItems.is_(False), ) ) .all() ) for share in share_objects: ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.shareUri == share.shareUri) + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) .delete() ) session.delete(share) diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index ddfde9c5b..18d0533bd 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py 
+++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -4,8 +4,10 @@ from dataall.core.context import get_context from dataall.core.permission_checker import has_tenant_permission, has_resource_permission -from dataall.db import models, api, exceptions, paginate +from dataall.db import exceptions, paginate from dataall.db.api import Glossary, ResourcePolicy, Environment +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ UPDATE_DATASET_TABLE, DATASET_TABLE_READ from dataall.modules.datasets.services.dataset_service import DatasetService @@ -137,13 +139,13 @@ def delete_dataset_table( data: dict = None, ): table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() share_item = ( - session.query(models.ShareObjectItem) + session.query(ShareObjectItem) .filter( and_( - models.ShareObjectItem.itemUri == table.tableUri, - models.ShareObjectItem.status.in_(share_item_shared_states) + ShareObjectItem.itemUri == table.tableUri, + ShareObjectItem.status.in_(share_item_shared_states) ) ) .first() @@ -153,8 +155,8 @@ def delete_dataset_table( action=DELETE_DATASET_TABLE, message='Revoke all table shares before deletion', ) - session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.itemUri == table.tableUri, + session.query(ShareObjectItem).filter( + ShareObjectItem.itemUri == table.tableUri, ).delete() session.delete(table) Glossary.delete_glossary_terms_links( @@ -170,23 +172,22 @@ def query_dataset_tables_shared_with_env( This means looking at approved ShareObject items for the share object associating the dataset and environment """ - 
share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() + share_item_shared_states = ShareItemSM.get_share_item_shared_states() env_tables_shared = ( session.query(DatasetTable) # all tables .join( - models.ShareObjectItem, # found in ShareObjectItem - models.ShareObjectItem.itemUri == DatasetTable.tableUri, + ShareObjectItem, # found in ShareObjectItem + ShareObjectItem.itemUri == DatasetTable.tableUri, ) .join( - models.ShareObject, # jump to share object - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, # jump to share object + ShareObject.shareUri == ShareObjectItem.shareUri, ) .filter( and_( - models.ShareObject.datasetUri == dataset_uri, # for this dataset - models.ShareObject.environmentUri - == environment_uri, # for this environment - models.ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.datasetUri == dataset_uri, # for this dataset + ShareObject.environmentUri == environment_uri, # for this environment + ShareObjectItem.status.in_(share_item_shared_states), ) ) .all() diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index e04855d65..2656dec3f 100644 --- a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -10,6 +10,8 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() @@ -164,7 +166,7 @@ def group_prefixes_by_accountid(cls, accountid, prefix, account_prefixes): account_prefixes[accountid] = [prefix] return account_prefixes - def get_shared_tables(self, dataset) -> 
typing.List[models.ShareObjectItem]: + def get_shared_tables(self, dataset) -> typing.List[ShareObjectItem]: with self.engine.scoped_session() as session: tables = ( session.query( @@ -177,26 +179,25 @@ def get_shared_tables(self, dataset) -> typing.List[models.ShareObjectItem]: models.Environment.region.label('TargetRegion'), ) .join( - models.ShareObjectItem, + ShareObjectItem, and_( - models.ShareObjectItem.itemUri == DatasetTable.tableUri + ShareObjectItem.itemUri == DatasetTable.tableUri ), ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .join( models.Environment, models.Environment.environmentUri - == models.ShareObject.environmentUri, + == ShareObject.environmentUri, ) .filter( and_( DatasetTable.datasetUri == dataset.datasetUri, DatasetTable.deleted.is_(None), - models.ShareObjectItem.status - == models.Enums.ShareObjectStatus.Approved.value, + ShareObjectItem.status == ShareObjectStatus.Approved.value, ) ) ).all() @@ -213,27 +214,26 @@ def get_shared_folders(self, dataset) -> typing.List[DatasetStorageLocation]: models.Environment.region.label('region'), ) .join( - models.ShareObjectItem, + ShareObjectItem, and_( - models.ShareObjectItem.itemUri + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri ), ) .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, ) .join( models.Environment, models.Environment.environmentUri - == models.ShareObject.environmentUri, + == ShareObject.environmentUri, ) .filter( and_( DatasetStorageLocation.datasetUri == dataset.datasetUri, DatasetStorageLocation.deleted.is_(None), - models.ShareObjectItem.status - == models.Enums.ShareObjectStatus.Approved.value, + ShareObjectItem.status == ShareObjectStatus.Approved.value, ) ) ).all() diff --git 
a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 4bbe7c34b..bb639130e 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -12,8 +12,9 @@ from dataall.aws.handlers.sqs import SqsQueue from dataall.db import get_engine from dataall.db import models +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService -`from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table_service import DatasetTableService @@ -88,9 +89,9 @@ def publish_table_update_message(engine, message): log.info( f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' ) - share_items: [models.ShareObjectItem] = ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.itemUri == table.tableUri) + share_items: [ShareObjectItem] = ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.itemUri == table.tableUri) .all() ) log.info(f'Found shared items for table {share_items}') @@ -126,9 +127,9 @@ def publish_location_update_message(session, message): log.info( f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' ) - share_items: [models.ShareObjectItem] = ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.itemUri == location.locationUri) + share_items: [ShareObjectItem] = ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.itemUri == location.locationUri) .all() ) log.info(f'Found shared items for 
location {share_items}') @@ -316,12 +317,12 @@ def redshift_copy( @staticmethod def get_approved_share_object(session, item): - share_object: models.ShareObject = ( - session.query(models.ShareObject) + share_object: ShareObject = ( + session.query(ShareObject) .filter( and_( - models.ShareObject.shareUri == item.shareUri, - models.ShareObject.status == 'Approved', + ShareObject.shareUri == item.shareUri, + ShareObject.status == 'Approved', ) ) .first() diff --git a/backend/dataall/tasks/data_sharing/data_sharing_service.py b/backend/dataall/tasks/data_sharing/data_sharing_service.py index 40b79f1c1..4ec4687b6 100644 --- a/backend/dataall/tasks/data_sharing/data_sharing_service.py +++ b/backend/dataall/tasks/data_sharing/data_sharing_service.py @@ -5,10 +5,13 @@ from .share_processors.lf_process_same_account_share import ProcessLFSameAccountShare from .share_processors.s3_process_share import ProcessS3Share -from ...aws.handlers.ram import Ram -from ...aws.handlers.sts import SessionHelper -from ...db import api, models, Engine -from ...utils import Parameter +from dataall.aws.handlers.ram import Ram +from dataall.aws.handlers.sts import SessionHelper +from dataall.db import models, Engine +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus, ShareObjectStatus +from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectSM, ShareObjectService, ShareItemSM +from dataall.utils import Parameter log = logging.getLogger(__name__) @@ -44,16 +47,16 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: share, source_environment, target_environment, - ) = api.ShareObject.get_share_data(session, share_uri) + ) = ShareObjectService.get_share_data(session, share_uri) - Share_SM = api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + Share_SM = ShareObjectSM(share.status) + 
new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) Share_SM.update_state(session, share, new_share_state) ( shared_tables, shared_folders - ) = api.ShareObject.get_share_data_items(session, share_uri, models.ShareItemStatus.Share_Approved.value) + ) = ShareObjectService.get_share_data_items(session, share_uri, ShareItemStatus.Share_Approved.value) log.info(f'Granting permissions to folders: {shared_folders}') @@ -96,7 +99,7 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: approved_tables_succeed = processor.process_approved_shares() log.info(f'sharing tables succeeded = {approved_tables_succeed}') - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Finish.value) + new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) Share_SM.update_state(session, share, new_share_state) return approved_tables_succeed if approved_folders_succeed else False @@ -131,20 +134,20 @@ def revoke_share(cls, engine: Engine, share_uri: str): share, source_environment, target_environment, - ) = api.ShareObject.get_share_data(session, share_uri) + ) = ShareObjectService.get_share_data(session, share_uri) - Share_SM = api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + Share_SM = ShareObjectSM(share.status) + new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) Share_SM.update_state(session, share, new_share_state) - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) ( revoked_tables, revoked_folders - ) = api.ShareObject.get_share_data_items(session, share_uri, models.ShareItemStatus.Revoke_Approved.value) + ) = ShareObjectService.get_share_data_items(session, share_uri, ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.ShareObjectActions.Start.value) + new_state = 
revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state(session, share_uri, new_state) log.info(f'Revoking permissions to folders: {revoked_folders}') @@ -160,7 +163,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): env_group, ) log.info(f'revoking folders succeeded = {revoked_folders_succeed}') - existing_shared_items = api.ShareObject.check_existing_shared_items_of_type( + existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( session, share_uri, models.ShareableType.StorageLocation.value @@ -201,7 +204,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): revoked_tables_succeed = processor.process_revoked_shares() log.info(f'revoking tables succeeded = {revoked_tables_succeed}') - existing_shared_items = api.ShareObject.check_existing_shared_items_of_type( + existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( session, share_uri, models.ShareableType.Table.value @@ -212,11 +215,11 @@ def revoke_share(cls, engine: Engine, share_uri: str): clean_up_tables = processor.clean_up_share() log.info(f"Clean up LF successful = {clean_up_tables}") - existing_pending_items = api.ShareObject.check_pending_share_items(session, share_uri) + existing_pending_items = ShareObjectService.check_pending_share_items(session, share_uri) if existing_pending_items: - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.FinishPending.value) + new_share_state = Share_SM.run_transition(ShareObjectActions.FinishPending.value) else: - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Finish.value) + new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) Share_SM.update_state(session, share, new_share_state) return revoked_tables_succeed and revoked_folders_succeed @@ -254,12 +257,12 @@ def refresh_shares(cls, engine: Engine) -> bool: ------- true if refresh succeeds """ - share_object_refreshable_states = 
api.ShareObjectSM.get_share_object_refreshable_states() + share_object_refreshable_states = ShareObjectSM.get_share_object_refreshable_states() with engine.scoped_session() as session: environments = session.query(models.Environment).all() shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.status.in_(share_object_refreshable_states)) + session.query(ShareObject) + .filter(ShareObject.status.in_(share_object_refreshable_states)) .all() ) @@ -286,7 +289,7 @@ def refresh_shares(cls, engine: Engine) -> bool: log.info( f'Refreshing share {share.shareUri} with {share.status} status...' ) - if share.status in [models.ShareObjectStatus.Approved.value]: + if share.status in [ShareObjectStatus.Approved.value]: cls.approve_share(engine, share.shareUri) else: cls.revoke_share(engine, share.shareUri) diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index 192b13913..16f3cb689 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -12,8 +12,8 @@ from ....aws.handlers.ram import Ram from ....db import exceptions, models from dataall.modules.datasets.db.models import DatasetTable, Dataset -from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject logger = logging.getLogger(__name__) @@ -23,7 +23,7 @@ def __init__( self, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], source_environment: models.Environment, @@ -77,7 +77,7 @@ def build_shared_db_name(self) -> str: Parameters ---------- dataset : Dataset - share : models.ShareObject + share : ShareObject Returns ------- @@ -113,7 +113,7 @@ def 
build_share_data(self, table: DatasetTable) -> dict: return data def check_share_item_exists_on_glue_catalog( - self, share_item: models.ShareObjectItem, table: DatasetTable + self, share_item: ShareObjectItem, table: DatasetTable ) -> None: """ Checks if a table in the share request @@ -506,7 +506,7 @@ def delete_ram_resource_shares(self, resource_arn: str) -> [dict]: def handle_share_failure( self, table: DatasetTable, - share_item: models.ShareObjectItem, + share_item: ShareObjectItem, error: Exception, ) -> bool: """ @@ -536,7 +536,7 @@ def handle_share_failure( def handle_revoke_failure( self, table: DatasetTable, - share_item: models.ShareObjectItem, + share_item: ShareObjectItem, error: Exception, ) -> bool: """ diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 52f6581ea..211a87968 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -8,8 +8,9 @@ from ....aws.handlers.s3 import S3 from ....aws.handlers.kms import KMS from ....aws.handlers.iam import IAM +from ....modules.dataset_sharing.db.models import ShareObject +from ....modules.dataset_sharing.services.share_object import ShareObjectService -from ....utils.alarm_service import AlarmService from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset logger = logging.getLogger(__name__) @@ -22,7 +23,7 @@ def __init__( self, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, target_folder: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -37,7 +38,7 @@ def __init__( self.target_folder = target_folder self.source_environment = source_environment self.target_environment = target_environment - self.share_item = api.ShareObject.find_share_item_by_folder( + self.share_item = 
ShareObjectService.find_share_item_by_folder( session, share, target_folder, @@ -56,15 +57,15 @@ def __init__( @abc.abstractmethod def process_approved_shares(self, *kwargs) -> bool: - return NotImplementedError + raise NotImplementedError @abc.abstractmethod def process_revoked_shares(self, *kwargs) -> bool: - return NotImplementedError + raise NotImplementedError @abc.abstractmethod def clean_up_share(self, *kwargs): - return NotImplementedError + raise NotImplementedError @staticmethod def build_access_point_name(share): @@ -324,7 +325,7 @@ def delete_access_point_policy(self): @staticmethod def delete_access_point( - share: models.ShareObject, + share: ShareObject, dataset: Dataset, ): access_point_name = S3ShareManager.build_access_point_name(share) @@ -341,7 +342,7 @@ def delete_access_point( @staticmethod def delete_target_role_access_policy( - share: models.ShareObject, + share: ShareObject, dataset: Dataset, target_environment: models.Environment, ): @@ -376,7 +377,7 @@ def delete_target_role_access_policy( @staticmethod def delete_dataset_bucket_key_policy( - share: models.ShareObject, + share: ShareObject, dataset: Dataset, target_environment: models.Environment, ): diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py index 94eb786ec..eccd515b0 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py @@ -1,10 +1,12 @@ import logging - +from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions from ..share_managers import LFShareManager -from ....aws.handlers.ram import Ram -from ....db import models, api +from dataall.aws.handlers.ram import Ram +from dataall.db import models, api from dataall.modules.datasets.db.models import DatasetTable, 
Dataset +from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM log = logging.getLogger(__name__) @@ -14,7 +16,7 @@ def __init__( self, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], source_environment: models.Environment, @@ -72,7 +74,7 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: log.info(f"Sharing table {table.GlueTableName}...") - share_item = api.ShareObject.find_share_item_by_table( + share_item = ShareObjectService.find_share_item_by_table( self.session, self.share, table ) @@ -83,8 +85,8 @@ def process_approved_shares(self) -> bool: ) continue - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -105,12 +107,12 @@ def process_approved_shares(self) -> bool: self.create_resource_link(**data) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_share_failure(table=table, share_item=share_item, error=e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -138,12 +140,12 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() 
principals = self.get_share_principals() for table in self.revoked_tables: - share_item = api.ShareObject.find_share_item_by_table( + share_item = ShareObjectService.find_share_item_by_table( self.session, self.share, table ) - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -159,12 +161,12 @@ def process_revoked_shares(self) -> bool: self.delete_resource_link_table(table) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_revoke_failure(share_item=share_item, table=table, error=e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -182,7 +184,7 @@ def clean_up_share(self) -> bool: self.delete_shared_database() - if not api.ShareObject.other_approved_share_object_exists( + if not ShareObjectService.other_approved_share_object_exists( self.session, self.target_environment.environmentUri, self.dataset.datasetUri, diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py index c2afa4b23..bce4ad0d2 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py +++ 
b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py @@ -1,7 +1,10 @@ import logging +from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM from ..share_managers import LFShareManager -from dataall.db import models, api +from dataall.db import models from dataall.modules.datasets.db.models import DatasetTable, Dataset log = logging.getLogger(__name__) @@ -12,7 +15,7 @@ def __init__( self, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, shared_tables: [DatasetTable], revoked_tables: [DatasetTable], source_environment: models.Environment, @@ -67,7 +70,7 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: - share_item = api.ShareObject.find_share_item_by_table( + share_item = ShareObjectService.find_share_item_by_table( self.session, self.share, table ) @@ -77,8 +80,8 @@ def process_approved_shares(self) -> bool: f'and Dataset Table {table.GlueTableName} continuing loop...' 
) continue - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -88,12 +91,12 @@ def process_approved_shares(self) -> bool: data = self.build_share_data(table) self.create_resource_link(**data) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_share_failure(table, share_item, e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -118,7 +121,7 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = api.ShareObject.find_share_item_by_table( + share_item = ShareObjectService.find_share_item_by_table( self.session, self.share, table ) if not share_item: @@ -128,8 +131,8 @@ def process_revoked_shares(self) -> bool: ) continue - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -144,12 +147,12 @@ def process_revoked_shares(self) -> 
bool: self.delete_resource_link_table(table) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_revoke_failure(share_item, table, e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) success = False diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 013dda059..74525ef20 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -1,8 +1,11 @@ import logging -from ....db import models, api +from dataall.db import models from ..share_managers import S3ShareManager from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM log = logging.getLogger(__name__) @@ -12,7 +15,7 @@ def __init__( self, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, share_folder: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -36,7 +39,7 @@ def process_approved_shares( cls, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, share_folders: [DatasetStorageLocation], source_environment: models.Environment, target_environment: models.Environment, @@ -61,13 +64,13 
@@ def process_approved_shares( success = True for folder in share_folders: log.info(f'sharing folder: {folder}') - sharing_item = api.ShareObject.find_share_item_by_folder( + sharing_item = ShareObjectService.find_share_item_by_folder( session, share, folder, ) - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) shared_item_SM.update_state_single_item(session, sharing_item, new_state) sharing_folder = cls( @@ -87,12 +90,12 @@ def process_approved_shares( sharing_folder.manage_access_point_and_policy() sharing_folder.update_dataset_bucket_key_policy() - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) shared_item_SM.update_state_single_item(session, sharing_item, new_state) except Exception as e: sharing_folder.log_share_failure(e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(session, sharing_item, new_state) success = False @@ -103,7 +106,7 @@ def process_revoked_shares( cls, session, dataset: Dataset, - share: models.ShareObject, + share: ShareObject, revoke_folders: [DatasetStorageLocation], source_environment: models.Environment, target_environment: models.Environment, @@ -126,14 +129,14 @@ def process_revoked_shares( success = True for folder in revoke_folders: log.info(f'revoking access to folder: {folder}') - removing_item = api.ShareObject.find_share_item_by_folder( + removing_item = ShareObjectService.find_share_item_by_folder( session, share, folder, ) - revoked_item_SM = 
api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state_single_item(session, removing_item, new_state) removing_folder = cls( @@ -150,12 +153,12 @@ def process_revoked_shares( try: removing_folder.delete_access_point_policy() - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) revoked_item_SM.update_state_single_item(session, removing_item, new_state) except Exception as e: removing_folder.log_revoke_failure(e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(session, removing_item, new_state) success = False @@ -164,7 +167,7 @@ def process_revoked_shares( @staticmethod def clean_up_share( dataset: Dataset, - share: models.ShareObject, + share: ShareObject, target_environment: models.Environment ): """ diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 4447d1429..c80c922d0 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -12,7 +12,9 @@ from sqlalchemy.ext.declarative import declarative_base from dataall.db import api, utils, Resource from datetime import datetime -from dataall.db.models.Enums import ShareObjectStatus, ShareableType +from dataall.db.models.Enums import ShareableType +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from 
dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ @@ -114,7 +116,7 @@ def upgrade(): ) ).all() for shared_table in share_table_items: - share = api.ShareObject.get_share_by_uri(session, shared_table.shareUri) + share = ShareObjectService.get_share_by_uri(session, shared_table.shareUri) api.ResourcePolicy.attach_resource_policy( session=session, group=share.principalId, diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 0e74c0563..62075ddb0 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,4 +1,5 @@ import dataall.searchproxy.indexers +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from .client import * from dataall.db import models from dataall.api import constants @@ -458,9 +459,9 @@ def factory( env_group: models.EnvironmentGroup, owner: str, status: str - ) -> models.ShareObject: + ) -> ShareObject: with db.scoped_session() as session: - share = models.ShareObject( + share = ShareObject( datasetUri=dataset.datasetUri, environmentUri=environment.environmentUri, owner=owner, @@ -477,21 +478,21 @@ def factory( group=env_group.groupUri, permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=dataall.db.models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=dataset.stewards, permissions=dataall.db.permissions.SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - 
resource_type=dataall.db.models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) if dataset.SamlAdminGroupName != environment.SamlGroupName: dataall.db.api.ResourcePolicy.attach_resource_policy( @@ -499,7 +500,7 @@ def factory( group=environment.SamlGroupName, permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=dataall.db.models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) session.commit() return share @@ -510,12 +511,12 @@ def factory( @pytest.fixture(scope="module") def share_item(db): def factory( - share: models.ShareObject, + share: ShareObject, table: DatasetTable, status: str - ) -> models.ShareObjectItem: + ) -> ShareObjectItem: with db.scoped_session() as session: - share_item = models.ShareObjectItem( + share_item = ShareObjectItem( shareUri=share.shareUri, owner="alice", itemUri=table.tableUri, diff --git a/tests/api/test_share.py b/tests/api/test_share.py index c87e8255a..9ae7c3b62 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -3,6 +3,10 @@ import pytest import dataall +from dataall.api.constants import ShareObjectStatus, ShareItemStatus +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM, ShareObjectSM from dataall.modules.datasets.db.models import DatasetTable, Dataset @@ -139,13 +143,13 @@ def share1_draft( dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: +) -> ShareObject: share1 = share( dataset=dataset1, environment=env2, env_group=env2group, owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Draft.value + status=ShareObjectStatus.Draft.value ) yield share1 @@ -195,9 +199,9 @@ def share1_draft( 
@pytest.fixture(scope='function') def share1_item_pa( share_item: typing.Callable, - share1_draft: dataall.db.models.ShareObject, + share1_draft: ShareObject, table1: DatasetTable -) -> dataall.db.models.ShareObjectItem: +) -> ShareObjectItem: # Cleaned up with share1_draft yield share_item( share=share1_draft, @@ -216,13 +220,13 @@ def share2_submitted( dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: +) -> ShareObject: share2 = share( dataset=dataset1, environment=env2, env_group=env2group, owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Submitted.value + status=ShareObjectStatus.Submitted.value ) yield share2 # Cleanup share @@ -270,9 +274,9 @@ def share2_submitted( @pytest.fixture(scope='function') def share2_item_pa( share_item: typing.Callable, - share2_submitted: dataall.db.models.ShareObject, + share2_submitted: ShareObject, table1: DatasetTable -) -> dataall.db.models.ShareObjectItem: +) -> ShareObjectItem: # Cleaned up with share2 yield share_item( share=share2_submitted, @@ -291,13 +295,13 @@ def share3_processed( dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: +) -> ShareObject: share3 = share( dataset=dataset1, environment=env2, env_group=env2group, owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Processed.value + status=ShareObjectStatus.Processed.value ) yield share3 # Cleanup share @@ -345,14 +349,14 @@ def share3_processed( @pytest.fixture(scope='function') def share3_item_shared( share_item: typing.Callable, - share3_processed: dataall.db.models.ShareObject, + share3_processed: ShareObject, table1:DatasetTable -) -> dataall.db.models.ShareObjectItem: +) -> ShareObjectItem: # Cleaned up with share3 yield share_item( share=share3_processed, table=table1, - status=dataall.api.constants.ShareItemStatus.Share_Succeeded.value + 
status=ShareItemStatus.Share_Succeeded.value ) @@ -363,13 +367,13 @@ def share4_draft( dataset1: Dataset, env2: dataall.db.models.Environment, env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: +) -> ShareObject: yield share( dataset=dataset1, environment=env2, env_group=env2group, owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Draft.value + status=ShareObjectStatus.Draft.value ) @@ -810,7 +814,7 @@ def test_create_share_object_authorized(client, user2, group2, env2group, env2, ) # Then share object created with status Draft and user is 'Requester' assert create_share_object_response.data.createShareObject.shareUri - assert create_share_object_response.data.createShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert create_share_object_response.data.createShareObject.status == ShareObjectStatus.Draft.value assert create_share_object_response.data.createShareObject.userRoleForShareObject == 'Requesters' @@ -830,7 +834,7 @@ def test_create_share_object_with_item_authorized(client, user2, group2, env2gro # Then share object created with status Draft and user is 'Requester' assert create_share_object_response.data.createShareObject.shareUri - assert create_share_object_response.data.createShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert create_share_object_response.data.createShareObject.status == ShareObjectStatus.Draft.value assert create_share_object_response.data.createShareObject.userRoleForShareObject == 'Requesters' # And item has been added to the share request @@ -994,8 +998,7 @@ def test_add_share_item( # Then shared item was added to share object in status PendingApproval assert add_share_item_response.data.addSharedItem.shareUri == share1_draft.shareUri - assert add_share_item_response.data.addSharedItem.status == \ - dataall.api.constants.ShareItemStatus.PendingApproval.name + assert add_share_item_response.data.addSharedItem.status == 
ShareItemStatus.PendingApproval.name def test_remove_share_item( @@ -1011,11 +1014,11 @@ def test_remove_share_item( filter={"isShared": True}, ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share1_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When @@ -1052,11 +1055,11 @@ def test_submit_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share1_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When @@ -1069,8 +1072,7 @@ def test_submit_share_request( ) # Then share object status is changed to Submitted - assert submit_share_object_response.data.submitShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Submitted.name + assert submit_share_object_response.data.submitShareObject.status == ShareObjectStatus.Submitted.name assert submit_share_object_response.data.submitShareObject.userRoleForShareObject == 'Requesters' # and share item status stays in PendingApproval @@ -1083,7 +1085,7 @@ def test_submit_share_request( ) shareItem = 
get_share_object_response.data.getShareObject.get("items").nodes[0] status = shareItem['status'] - assert status == dataall.api.constants.ShareItemStatus.PendingApproval.name + assert status == ShareItemStatus.PendingApproval.name def test_approve_share_request( @@ -1100,10 +1102,10 @@ def test_approve_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Submitted.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Submitted.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share2_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When we approve the share object @@ -1118,8 +1120,7 @@ def test_approve_share_request( assert approve_share_object_response.data.approveShareObject.userRoleForShareObject == 'Approvers' # Then share object status is changed to Approved - assert approve_share_object_response.data.approveShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Approved.name + assert approve_share_object_response.data.approveShareObject.status == ShareObjectStatus.Approved.name # and share item status is changed to Share_Approved get_share_object_response = get_share_object( @@ -1131,7 +1132,7 @@ def test_approve_share_request( ) shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Approved.value + assert shareItem.status == ShareItemStatus.Share_Approved.value # When approved share object is processed and the shared items successfully shared _successfull_processing_for_share_object(db, share2_submitted) @@ -1145,11 +1146,11 @@ def 
test_approve_share_request( ) # Then share object status is changed to Processed - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value # And share item status is changed to Share_Succeeded shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value def test_reject_share_request( @@ -1166,10 +1167,10 @@ def test_reject_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Submitted.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Submitted.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share2_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When we reject the share object @@ -1181,8 +1182,7 @@ def test_reject_share_request( ) # Then share object status is changed to Rejected - assert reject_share_object_response.data.rejectShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Rejected.name + assert reject_share_object_response.data.rejectShareObject.status == ShareObjectStatus.Rejected.name # and share item status is changed to Share_Rejected get_share_object_response = get_share_object( @@ -1194,7 +1194,7 @@ def test_reject_share_request( ) shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Rejected.value + 
assert shareItem.status == ShareItemStatus.Share_Rejected.value def test_search_shared_items_in_environment( @@ -1210,7 +1210,7 @@ def test_search_shared_items_in_environment( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value list_datasets_published_in_environment_response = list_datasets_published_in_environment( client=client, @@ -1237,11 +1237,11 @@ def test_revoke_items_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share3_item_shared.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value revoked_items_uris = [node.shareItemUri for node in get_share_object_response.data.getShareObject.get('items').nodes] @@ -1255,7 +1255,7 @@ def test_revoke_items_share_request( revoked_items_uris=revoked_items_uris ) # Then share object changes to status Rejected - assert revoke_items_share_object_response.data.revokeItemsShareObject.status == dataall.api.constants.ShareObjectStatus.Revoked.value + assert revoke_items_share_object_response.data.revokeItemsShareObject.status == ShareObjectStatus.Revoked.value # And shared item changes to status Revoke_Approved get_share_object_response = get_share_object( @@ -1267,7 +1267,7 @@ def test_revoke_items_share_request( ) sharedItem = get_share_object_response.data.getShareObject.get('items').nodes[0] status = sharedItem['status'] - assert status == 
dataall.api.constants.ShareItemStatus.Revoke_Approved.value + assert status == ShareItemStatus.Revoke_Approved.value # Given the revoked share object is processed and the shared items # When approved share object is processed and the shared items successfully revoked (we re-use same function) @@ -1282,11 +1282,11 @@ def test_revoke_items_share_request( ) # Then share object status is changed to Processed - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value # And share item status is changed to Revoke_Succeeded shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Revoke_Succeeded.value + assert shareItem.status == ShareItemStatus.Revoke_Succeeded.value def test_delete_share_object( @@ -1302,7 +1302,7 @@ def test_delete_share_object( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value # When deleting the share object delete_share_object_response = delete_share_object( @@ -1329,11 +1329,11 @@ def test_delete_share_object_remaining_items_error( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share3_item_shared.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value assert 
get_share_object_response.data.getShareObject.get("items").count == 1 # When deleting the share object @@ -1350,16 +1350,16 @@ def test_delete_share_object_remaining_items_error( def _successfull_processing_for_share_object(db, share): with db.scoped_session() as session: print('Processing share with action ShareObjectActions.Start') - share = dataall.db.api.ShareObject.get_share_by_uri(session, share.shareUri) + share = ShareObjectService.get_share_by_uri(session, share.shareUri) - share_items_states = dataall.db.api.ShareObject.get_share_items_states(session, share.shareUri) + share_items_states = ShareObjectService.get_share_items_states(session, share.shareUri) - Share_SM = dataall.db.api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Start.value) + Share_SM = ShareObjectSM(share.status) + new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) for item_state in share_items_states: - Item_SM = dataall.db.api.ShareItemSM(item_state) - new_state = Item_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Start.value) + Item_SM = ShareItemSM(item_state) + new_state = Item_SM.run_transition(ShareObjectActions.Start.value) Item_SM.update_state(session, share.shareUri, new_state) Share_SM.update_state(session, share, new_share_state) @@ -1367,14 +1367,14 @@ def _successfull_processing_for_share_object(db, share): print('Processing share with action ShareObjectActions.Finish \ and ShareItemActions.Success') - share = dataall.db.api.ShareObject.get_share_by_uri(session, share.shareUri) - share_items_states = dataall.db.api.ShareObject.get_share_items_states(session, share.shareUri) + share = ShareObjectService.get_share_by_uri(session, share.shareUri) + share_items_states = ShareObjectService.get_share_items_states(session, share.shareUri) - new_share_state = Share_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Finish.value) + new_share_state = 
Share_SM.run_transition(ShareObjectActions.Finish.value) for item_state in share_items_states: - Item_SM = dataall.db.api.ShareItemSM(item_state) - new_state = Item_SM.run_transition(dataall.db.models.Enums.ShareItemActions.Success.value) + Item_SM = ShareItemSM(item_state) + new_state = Item_SM.run_transition(ShareItemActions.Success.value) Item_SM.update_state(session, share.shareUri, new_state) Share_SM.update_state(session, share, new_share_state) diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index 0ff919894..e8d1effe3 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -1,7 +1,9 @@ import pytest +from dataall.api.constants import ShareObjectStatus from dataall.db import models from dataall.api import constants +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -175,16 +177,16 @@ def factory( dataset: Dataset, environment: models.Environment, env_group: models.EnvironmentGroup - ) -> models.ShareObject: + ) -> ShareObject: with db.scoped_session() as session: - share = models.ShareObject( + share = ShareObject( datasetUri=dataset.datasetUri, environmentUri=environment.environmentUri, owner="bob", principalId=environment.SamlGroupName, principalType=constants.PrincipalType.Group.value, principalIAMRoleName=env_group.environmentIAMRoleName, - status=constants.ShareObjectStatus.Approved.value, + status=ShareObjectStatus.Approved.value, ) session.add(share) session.commit() @@ -196,11 +198,11 @@ def factory( @pytest.fixture(scope="module") def share_item_folder(db): def factory( - share: models.ShareObject, + share: ShareObject, location: DatasetStorageLocation, - ) -> models.ShareObjectItem: + ) -> ShareObjectItem: with db.scoped_session() as session: - share_item = models.ShareObjectItem( + share_item = ShareObjectItem( shareUri=share.shareUri, owner="alice", itemUri=location.locationUri, @@ -217,12 
+219,12 @@ def factory( @pytest.fixture(scope="module") def share_item_table(db): def factory( - share: models.ShareObject, + share: ShareObject, table: DatasetTable, status: str, - ) -> models.ShareObjectItem: + ) -> ShareObjectItem: with db.scoped_session() as session: - share_item = models.ShareObjectItem( + share_item = ShareObjectItem( shareUri=share.shareUri, owner="alice", itemUri=table.tableUri, diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index bbe2e21ca..e2ddde9da 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -10,12 +10,12 @@ from dataall.db import models from dataall.api import constants +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare -from dataall.utils.alarm_service import AlarmService SOURCE_ENV_ACCOUNT = "1" * 12 @@ -114,7 +114,7 @@ def table2(table: Callable, dataset1: Dataset) -> DatasetTable: @pytest.fixture(scope="module") def share_same_account( share: Callable, dataset1: Dataset, source_environment: models.Environment, - source_environment_group_requesters: models.EnvironmentGroup) -> models.ShareObject: + source_environment_group_requesters: models.EnvironmentGroup) -> ShareObject: yield share( dataset=dataset1, environment=source_environment, @@ -125,7 +125,7 @@ def share_same_account( @pytest.fixture(scope="module") def share_cross_account( share: Callable, dataset1: Dataset, target_environment: models.Environment, - target_environment_group: models.EnvironmentGroup) -> models.ShareObject: + target_environment_group: 
models.EnvironmentGroup) -> ShareObject: yield share( dataset=dataset1, environment=target_environment, @@ -134,8 +134,8 @@ def share_cross_account( @pytest.fixture(scope="module") -def share_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table1: DatasetTable) -> models.ShareObjectItem: +def share_item_same_account(share_item_table: Callable, share_same_account: ShareObject, + table1: DatasetTable) -> ShareObjectItem: yield share_item_table( share=share_same_account, table=table1, @@ -143,8 +143,8 @@ def share_item_same_account(share_item_table: Callable, share_same_account: mode ) @pytest.fixture(scope="module") -def revoke_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table2: DatasetTable) -> models.ShareObjectItem: +def revoke_item_same_account(share_item_table: Callable, share_same_account: ShareObject, + table2: DatasetTable) -> ShareObjectItem: yield share_item_table( share=share_same_account, table=table2, @@ -152,23 +152,25 @@ def revoke_item_same_account(share_item_table: Callable, share_same_account: mod ) @pytest.fixture(scope="module") -def share_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table1: DatasetTable) -> models.ShareObjectItem: +def share_item_cross_account(share_item_table: Callable, share_cross_account: ShareObject, + table1: DatasetTable) -> ShareObjectItem: yield share_item_table( share=share_cross_account, table=table1, status=constants.ShareItemStatus.Share_Approved.value ) + @pytest.fixture(scope="module") -def revoke_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table2: DatasetTable) -> models.ShareObjectItem: +def revoke_item_cross_account(share_item_table: Callable, share_cross_account: ShareObject, + table2: DatasetTable) -> ShareObjectItem: yield share_item_table( share=share_cross_account, table=table2, status=constants.ShareItemStatus.Revoke_Approved.value ) + 
@pytest.fixture(scope="module", autouse=True) def processor_cross_account(db, dataset1, share_cross_account, table1, table2, source_environment, target_environment, target_environment_group): @@ -185,6 +187,7 @@ def processor_cross_account(db, dataset1, share_cross_account, table1, table2, s ) yield processor + @pytest.fixture(scope="module", autouse=True) def processor_same_account(db, dataset1, share_same_account, table1, source_environment, source_environment_group_requesters): @@ -211,8 +214,8 @@ def test_build_shared_db_name( processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, dataset1: Dataset, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, ): # Given a dataset and its share, build db_share name # Then, it should return @@ -225,8 +228,8 @@ def test_get_share_principals( processor_cross_account: ProcessLFCrossAccountShare, source_environment: models.Environment, target_environment: models.Environment, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, ): # Given a dataset and its share, build db_share name # Then, it should return @@ -238,8 +241,8 @@ def test_create_shared_database( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -297,8 +300,8 @@ def test_check_share_item_exists_on_glue_catalog( processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, table1: DatasetTable, - share_item_same_account: models.ShareObjectItem, - 
share_item_cross_account: models.ShareObjectItem, + share_item_same_account: ShareObjectItem, + share_item_cross_account: ShareObjectItem, mocker, ): @@ -329,8 +332,8 @@ def test_build_share_data( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -377,8 +380,8 @@ def test_create_resource_link( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -460,8 +463,8 @@ def test_revoke_table_resource_link_access( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -508,8 +511,8 @@ def test_revoke_source_table_access( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -551,8 +554,8 @@ def test_delete_resource_link_table( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - 
share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -593,8 +596,8 @@ def test_delete_shared_database( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -622,8 +625,8 @@ def test_revoke_external_account_access_on_source_account( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, + share_same_account: ShareObject, + share_cross_account: ShareObject, source_environment: models.Environment, target_environment: models.Environment, dataset1: Dataset, @@ -650,8 +653,8 @@ def test_handle_share_failure( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - share_item_same_account: models.ShareObjectItem, - share_item_cross_account: models.ShareObjectItem, + share_item_same_account: ShareObjectItem, + share_item_cross_account: ShareObjectItem, table1: DatasetTable, mocker, ): @@ -680,8 +683,8 @@ def test_handle_revoke_failure( db, processor_same_account: ProcessLFSameAccountShare, processor_cross_account: ProcessLFCrossAccountShare, - revoke_item_same_account: models.ShareObjectItem, - revoke_item_cross_account: models.ShareObjectItem, + revoke_item_same_account: ShareObjectItem, + revoke_item_cross_account: ShareObjectItem, table1: DatasetTable, mocker, ): diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index 549ed8afb..b0dc0f722 
100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -4,9 +4,9 @@ from typing import Callable from dataall.db import models +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager -from dataall.utils.alarm_service import AlarmService from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset SOURCE_ENV_ACCOUNT = "111111111111" @@ -75,13 +75,13 @@ def location1(location: Callable, dataset1: Dataset) -> DatasetStorageLocation: @pytest.fixture(scope="module") def share1(share: Callable, dataset1: Dataset, target_environment: models.Environment, - target_environment_group: models.EnvironmentGroup) -> models.ShareObject: + target_environment_group: models.EnvironmentGroup) -> ShareObject: share1 = share(dataset1, target_environment, target_environment_group) yield share1 @pytest.fixture(scope="module") -def share_item_folder1(share_item_folder: Callable, share1: models.ShareObject, location1: DatasetStorageLocation): +def share_item_folder1(share_item_folder: Callable, share1: ShareObject, location1: DatasetStorageLocation): share_item_folder1 = share_item_folder(share1, location1) return share_item_folder1 @@ -169,7 +169,7 @@ def test_manage_bucket_policy_no_policy( target_environment_group, dataset1, db, - share1: models.ShareObject, + share1: ShareObject, share_item_folder1, location1, source_environment: models.Environment, @@ -233,7 +233,7 @@ def test_manage_bucket_policy_existing_policy( target_environment_group, dataset1, db, - share1: models.ShareObject, + share1: ShareObject, share_item_folder1, location1, source_environment: models.Environment, @@ -282,7 +282,7 @@ def test_grant_target_role_access_policy_existing_policy_bucket_not_included( target_environment_group, dataset1, db, - share1: models.ShareObject, + share1: ShareObject, share_item_folder1, location1, source_environment: 
models.Environment, @@ -335,7 +335,7 @@ def test_grant_target_role_access_policy_existing_policy_bucket_included( target_environment_group, dataset1, db, - share1: models.ShareObject, + share1: ShareObject, share_item_folder1, location1, source_environment: models.Environment, @@ -381,8 +381,8 @@ def test_grant_target_role_access_policy_test_no_policy( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -443,8 +443,8 @@ def test_update_dataset_bucket_key_policy_with_env_admin( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -560,8 +560,8 @@ def test_update_dataset_bucket_key_policy_without_env_admin( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -640,8 +640,8 @@ def test_manage_access_point_and_policy_1( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -731,8 +731,8 @@ def test_manage_access_point_and_policy_2( target_environment_group: models.EnvironmentGroup, 
dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -805,8 +805,8 @@ def test_manage_access_point_and_policy_3( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -876,8 +876,8 @@ def test_delete_access_point_policy_with_env_admin_one_prefix( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -948,8 +948,8 @@ def test_delete_access_point_policy_with_env_admin_multiple_prefix( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1015,8 +1015,8 @@ def test_dont_delete_access_point_with_policy( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1061,8 +1061,8 @@ def test_delete_access_point_without_policy( target_environment_group: 
models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1107,8 +1107,8 @@ def test_delete_target_role_access_policy_no_remaining_statement( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1172,8 +1172,8 @@ def test_delete_target_role_access_policy_with_remaining_statement( target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1258,8 +1258,8 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_additional_target target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, @@ -1349,8 +1349,8 @@ def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_tar target_environment_group: models.EnvironmentGroup, dataset1: Dataset, db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, + share1: ShareObject, + share_item_folder1: ShareObjectItem, location1: DatasetStorageLocation, source_environment: models.Environment, target_environment: models.Environment, diff 
--git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 11c255db2..b8e470a9d 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -2,7 +2,10 @@ import dataall from dataall.api.constants import OrganisationUserRole +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets.tasks.subscription_service import SubscriptionService @pytest.fixture(scope='module') @@ -110,17 +113,17 @@ def share( region=dataset.region, ) session.add(table) - share = dataall.db.models.ShareObject( + share = ShareObject( datasetUri=dataset.datasetUri, environmentUri=otherenv.environmentUri, owner='bob', principalId='group2', principalType=dataall.api.constants.PrincipalType.Environment.value, - status=dataall.api.constants.ShareObjectStatus.Approved.value, + status=ShareObjectStatus.Approved.value, ) session.add(share) session.commit() - share_item = dataall.db.models.ShareObjectItem( + share_item = ShareObjectItem( shareUri=share.shareUri, owner='alice', itemUri=table.tableUri, @@ -138,7 +141,7 @@ def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): 'dataall.modules.datasets.tasks.subscription_service.SubscriptionService.sns_call', return_value=True, ) - subscriber = dataall.modules.datasets.tasks.subscription_service.SubscriptionService() + subscriber = SubscriptionService() messages = [ { 'prefix': 's3://dataset/testtable/csv/', From da7c858536a5eefebbcc35662268c3396680bb1f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 12:01:55 +0200 Subject: [PATCH 123/346] Moved dataset models to dataset_base --- backend/dataall/aws/handlers/redshift.py | 2 +- backend/dataall/db/api/redshift_cluster.py | 4 ++-- backend/dataall/modules/dataset_sharing/api/resolvers.py | 2 +- 
.../dataset_sharing/services/dataset_share_service.py | 5 +---- .../dataset_sharing/services/share_notification_service.py | 2 +- .../dataall/modules/dataset_sharing/services/share_object.py | 2 +- backend/dataall/modules/datasets/__init__.py | 2 +- backend/dataall/modules/datasets/api/profiling/resolvers.py | 2 +- .../modules/datasets/api/storage_location/resolvers.py | 2 +- backend/dataall/modules/datasets/api/table/resolvers.py | 2 +- .../dataall/modules/datasets/api/table_column/resolvers.py | 2 +- backend/dataall/modules/datasets/aws/glue_dataset_client.py | 2 +- backend/dataall/modules/datasets/aws/glue_table_client.py | 2 +- backend/dataall/modules/datasets/aws/lf_table_client.py | 2 +- backend/dataall/modules/datasets/aws/s3_location_client.py | 2 +- backend/dataall/modules/datasets/cdk/dataset_policy.py | 2 +- backend/dataall/modules/datasets/cdk/dataset_stack.py | 3 +-- .../dataall/modules/datasets/handlers/glue_column_handler.py | 2 +- .../modules/datasets/handlers/glue_dataset_handler.py | 2 +- .../modules/datasets/handlers/glue_profiling_handler.py | 2 +- .../dataall/modules/datasets/handlers/glue_table_handler.py | 2 +- backend/dataall/modules/datasets/indexers/dataset_indexer.py | 2 +- .../dataall/modules/datasets/indexers/location_indexer.py | 2 +- backend/dataall/modules/datasets/indexers/table_indexer.py | 2 +- .../modules/datasets/services/dataset_alarm_service.py | 2 +- .../modules/datasets/services/dataset_group_resource.py | 4 ++-- .../modules/datasets/services/dataset_location_service.py | 4 ++-- .../modules/datasets/services/dataset_profiling_service.py | 2 +- backend/dataall/modules/datasets/services/dataset_service.py | 4 ++-- .../modules/datasets/services/dataset_table_service.py | 2 +- .../dataall/modules/datasets/tasks/bucket_policy_updater.py | 2 +- .../dataall/modules/datasets/tasks/subscription_service.py | 2 +- backend/dataall/modules/datasets/tasks/tables_syncer.py | 2 +- backend/dataall/modules/datasets_base/__init__.py | 0 
backend/dataall/modules/datasets_base/db/__init__.py | 0 .../{datasets => datasets_base}/db/dataset_repository.py | 2 +- .../dataall/modules/{datasets => datasets_base}/db/models.py | 0 backend/dataall/tasks/catalog_indexer.py | 2 +- .../tasks/data_sharing/share_managers/lf_share_manager.py | 2 +- .../tasks/data_sharing/share_managers/s3_share_manager.py | 4 ++-- .../share_processors/lf_process_cross_account_share.py | 4 ++-- .../share_processors/lf_process_same_account_share.py | 2 +- .../tasks/data_sharing/share_processors/s3_process_share.py | 2 +- backend/dataall/tasks/stacks_updater.py | 2 +- tests/api/conftest.py | 2 +- tests/api/test_dashboards.py | 2 +- tests/api/test_dataset.py | 3 +-- tests/api/test_dataset_location.py | 2 +- tests/api/test_dataset_profiling.py | 4 +--- tests/api/test_dataset_table.py | 2 +- tests/api/test_environment.py | 3 +-- tests/api/test_glossary.py | 2 +- tests/api/test_keyvaluetag.py | 4 +--- tests/api/test_redshift_cluster.py | 2 +- tests/api/test_share.py | 2 +- tests/api/test_vote.py | 2 +- tests/cdkproxy/conftest.py | 2 +- tests/db/test_permission.py | 2 +- tests/modules/datasets/test_dataset_feed.py | 2 +- tests/searchproxy/test_indexers.py | 4 +--- tests/tasks/conftest.py | 2 +- tests/tasks/test_catalog_indexer.py | 2 +- tests/tasks/test_lf_share_manager.py | 2 +- tests/tasks/test_policies.py | 2 +- tests/tasks/test_s3_share_manager.py | 2 +- tests/tasks/test_stacks_updater.py | 2 +- tests/tasks/test_subscriptions.py | 2 +- tests/tasks/test_tables_sync.py | 3 +-- 68 files changed, 71 insertions(+), 84 deletions(-) create mode 100644 backend/dataall/modules/datasets_base/__init__.py create mode 100644 backend/dataall/modules/datasets_base/db/__init__.py rename backend/dataall/modules/{datasets => datasets_base}/db/dataset_repository.py (92%) rename backend/dataall/modules/{datasets => datasets_base}/db/models.py (100%) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 
0ace9d4f3..1e3e994c6 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -11,7 +11,7 @@ from ...db import models # TODO should be migrated in the redshift module from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 6f57e38fc..33657ca9d 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -2,9 +2,9 @@ from sqlalchemy import and_, or_, literal -from .. import models, api, exceptions, paginate, permissions +from .. import models, exceptions, paginate, permissions from . import has_resource_perm, ResourcePolicy, Environment -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index ec634bc41..50877afcc 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -9,7 +9,7 @@ from dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset from 
dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index e2996d746..dbf0fe150 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py @@ -1,6 +1,3 @@ -import logging -import re - from sqlalchemy import or_, case, func from sqlalchemy.sql import and_ @@ -10,7 +7,7 @@ from dataall.db.paginator import paginate from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareItemSM -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset class DatasetShareService: diff --git a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py index 59c12fef5..4f1bb20df 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py @@ -1,7 +1,7 @@ from dataall.db import models from dataall.db.api import Notification from dataall.modules.dataset_sharing.db.models import ShareObject -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset class ShareNotificationService: diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index 0fa291cbb..2a97d6cb4 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ 
b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -13,7 +13,7 @@ from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 5fce6b4c4..874482057 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,7 +4,7 @@ from dataall.modules.dataset_sharing import SharingApiModuleInterface from dataall.core.group.services.group_resource_manager import GroupResourceManager -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index d156eee95..9fddc4505 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -9,7 +9,7 @@ from 
dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService -from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets_base.db.models import DatasetProfilingRun from dataall.modules.datasets.services.permissions import PROFILE_DATASET_TABLE log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 4b1ae1726..db9f8fde7 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -8,7 +8,7 @@ ) from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer -from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import UPDATE_DATASET_FOLDER diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 8e72bb8df..caa160851 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -11,7 +11,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from 
dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE from dataall.utils import json_utils diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 591459f87..0efeceaa6 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -6,7 +6,7 @@ from dataall.db import paginate, models from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py index 2f29abc41..f52b35f92 100644 --- a/backend/dataall/modules/datasets/aws/glue_dataset_client.py +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError from dataall.aws.handlers.sts import SessionHelper -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/aws/glue_table_client.py b/backend/dataall/modules/datasets/aws/glue_table_client.py index 2b0f1c5c4..6050b27d6 100644 --- a/backend/dataall/modules/datasets/aws/glue_table_client.py +++ b/backend/dataall/modules/datasets/aws/glue_table_client.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError -from dataall.modules.datasets.db.models import 
DatasetTable +from dataall.modules.datasets_base.db.models import DatasetTable log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/aws/lf_table_client.py b/backend/dataall/modules/datasets/aws/lf_table_client.py index 8caff5073..2a6eaef5a 100644 --- a/backend/dataall/modules/datasets/aws/lf_table_client.py +++ b/backend/dataall/modules/datasets/aws/lf_table_client.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError from dataall.aws.handlers.sts import SessionHelper -from dataall.modules.datasets.db.models import DatasetTable +from dataall.modules.datasets_base.db.models import DatasetTable log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/aws/s3_location_client.py b/backend/dataall/modules/datasets/aws/s3_location_client.py index 45385743d..ab127ad28 100644 --- a/backend/dataall/modules/datasets/aws/s3_location_client.py +++ b/backend/dataall/modules/datasets/aws/s3_location_client.py @@ -1,7 +1,7 @@ import logging from dataall.aws.handlers.sts import SessionHelper -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets_base.db.models import DatasetStorageLocation log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/cdk/dataset_policy.py b/backend/dataall/modules/datasets/cdk/dataset_policy.py index 64c0c53a4..05d3e0e89 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_policy.py @@ -2,7 +2,7 @@ from aws_cdk import aws_iam as iam from dataall.cdkproxy.stacks.policies.data_policy import DataPolicy -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 2e77dcfc8..73c2aedc5 100644 --- 
a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -1,6 +1,5 @@ import logging import os -import typing from aws_cdk import ( custom_resources as cr, @@ -27,7 +26,7 @@ from dataall.db.api import Environment from dataall.utils.cdk_nag_utils import CDKNagUtil from dataall.utils.runtime_stacks_tagging import TagsUtil -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index 07a1c41b5..0cf8d0b1b 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.modules.datasets.aws.glue_table_client import GlueTableClient from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable from dataall.modules.datasets.services.dataset_table_service import DatasetTableService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index 27e5c1ec4..277999720 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset 
from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index 5d2a4232a..c45e7011b 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -4,7 +4,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from dataall.modules.datasets.db.models import DatasetProfilingRun, Dataset +from dataall.modules.datasets_base.db.models import DatasetProfilingRun, Dataset from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index f648dc330..aabff1ae7 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.db import models -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table_service import DatasetTableService diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 33935de71..9f8191343 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -2,7 +2,7 @@ from dataall import db from dataall.db import 
models -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.base_indexer import BaseIndexer diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 0a4672164..9a6694d66 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -1,5 +1,5 @@ """Indexes DatasetStorageLocation in OpenSearch""" -from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.db import models from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index bde683c20..edbf262b5 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -2,7 +2,7 @@ from operator import and_ from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.base_indexer import BaseIndexer diff --git a/backend/dataall/modules/datasets/services/dataset_alarm_service.py b/backend/dataall/modules/datasets/services/dataset_alarm_service.py index 2748877fb..2a348ec85 100644 --- a/backend/dataall/modules/datasets/services/dataset_alarm_service.py +++ b/backend/dataall/modules/datasets/services/dataset_alarm_service.py @@ -3,7 +3,7 @@ from 
dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObject -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.utils.alarm_service import AlarmService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/services/dataset_group_resource.py b/backend/dataall/modules/datasets/services/dataset_group_resource.py index 8d58ac3ad..25ec77beb 100644 --- a/backend/dataall/modules/datasets/services/dataset_group_resource.py +++ b/backend/dataall/modules/datasets/services/dataset_group_resource.py @@ -1,5 +1,5 @@ -from dataall.core.group.services.group_resource_manager import GroupResource, GroupResourceManager -from dataall.modules.datasets.db.dataset_repository import DatasetRepository +from dataall.core.group.services.group_resource_manager import GroupResource +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository class DatasetGroupResource(GroupResource): diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 2493ddede..b86b86b34 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -8,8 +8,8 @@ from dataall.db import paginate, exceptions from dataall.modules.dataset_sharing.db.models import ShareObjectItem from dataall.modules.dataset_sharing.services.share_object import ShareItemSM -from dataall.modules.datasets.db.dataset_repository import DatasetRepository -from dataall.modules.datasets.db.models import DatasetStorageLocation +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import DatasetStorageLocation from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, 
LIST_DATASET_FOLDERS, CREATE_DATASET_FOLDER, \ DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index ce679ccd4..55a143ebf 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -2,7 +2,7 @@ from dataall.db import paginate, models from dataall.db.exceptions import ObjectNotFound -from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable, Dataset class DatasetProfilingService: diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index faf973106..e2dd52e4c 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -20,8 +20,8 @@ from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareItemSM -from dataall.modules.datasets.db.dataset_repository import DatasetRepository -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py 
b/backend/dataall/modules/datasets/services/dataset_table_service.py index 18d0533bd..42b40011a 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -12,7 +12,7 @@ UPDATE_DATASET_TABLE, DATASET_TABLE_READ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index 2656dec3f..cf40ddaa8 100644 --- a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -12,7 +12,7 @@ from dataall.db import models from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() root.setLevel(logging.INFO) diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index bb639130e..1a27ddcf0 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -19,7 +19,7 @@ from dataall.utils import json_utils from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService -from 
dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() root.setLevel(logging.INFO) diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index b56b57ddd..6be5054f2 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -9,7 +9,7 @@ from dataall.db import get_engine from dataall.db import models from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/modules/datasets_base/__init__.py b/backend/dataall/modules/datasets_base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datasets_base/db/__init__.py b/backend/dataall/modules/datasets_base/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datasets/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py similarity index 92% rename from backend/dataall/modules/datasets/db/dataset_repository.py rename to backend/dataall/modules/datasets_base/db/dataset_repository.py index 95aef6102..132905664 100644 --- a/backend/dataall/modules/datasets/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -1,7 +1,7 @@ from operator import and_ from dataall.db import exceptions -from dataall.modules.datasets.db.models 
import Dataset +from dataall.modules.datasets_base.db.models import Dataset class DatasetRepository: diff --git a/backend/dataall/modules/datasets/db/models.py b/backend/dataall/modules/datasets_base/db/models.py similarity index 100% rename from backend/dataall/modules/datasets/db/models.py rename to backend/dataall/modules/datasets_base/db/models.py diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 60d9df792..962eb66f2 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -2,7 +2,7 @@ import os import sys -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py index 16f3cb689..89f28c271 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py @@ -11,7 +11,7 @@ from ....aws.handlers.sts import SessionHelper from ....aws.handlers.ram import Ram from ....db import exceptions, models -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 211a87968..4b4824a51 100644 --- 
a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -3,7 +3,7 @@ import json import time -from ....db import models, api, utils +from ....db import models, utils from ....aws.handlers.sts import SessionHelper from ....aws.handlers.s3 import S3 from ....aws.handlers.kms import KMS @@ -11,7 +11,7 @@ from ....modules.dataset_sharing.db.models import ShareObject from ....modules.dataset_sharing.services.share_object import ShareObjectService -from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset logger = logging.getLogger(__name__) ACCESS_POINT_CREATION_TIME = 30 diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py index eccd515b0..7b225db92 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py @@ -3,8 +3,8 @@ from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions from ..share_managers import LFShareManager from dataall.aws.handlers.ram import Ram -from dataall.db import models, api -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.db import models +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py index bce4ad0d2..70392765e 100644 --- 
a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py @@ -5,7 +5,7 @@ from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM from ..share_managers import LFShareManager from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset log = logging.getLogger(__name__) diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 74525ef20..1d8b932e9 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -2,7 +2,7 @@ from dataall.db import models from ..share_managers import S3ShareManager -from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 9d76b7cfb..9a17deaf1 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -3,7 +3,7 @@ import sys import time -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from .. 
import db from ..db import models diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 62075ddb0..cdeadcd6f 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -3,7 +3,7 @@ from .client import * from dataall.db import models from dataall.api import constants -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_dashboards.py b/tests/api/test_dashboards.py index ca29f1587..f38ea2419 100644 --- a/tests/api/test_dashboards.py +++ b/tests/api/test_dashboards.py @@ -2,7 +2,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 8e2e57297..b6d5a4629 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -4,8 +4,7 @@ import pytest import dataall -from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index d5f1e8efe..6764ffa90 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index 917c7149b..345c9541e 100644 --- 
a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -1,8 +1,6 @@ -import typing import pytest -import dataall -from dataall.modules.datasets.db.models import DatasetProfilingRun, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 22078b018..3e3dc3ab3 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -4,7 +4,7 @@ import dataall from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index 7e4640f41..2500b595b 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -1,8 +1,7 @@ import pytest import dataall -from dataall.db import permissions -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.permissions import CREATE_DATASET diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 92685bd69..4802afb81 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import List from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset import pytest diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py index 2f4f04a8e..cf254a515 100644 --- a/tests/api/test_keyvaluetag.py +++ 
b/tests/api/test_keyvaluetag.py @@ -1,11 +1,9 @@ -from typing import List - import dataall from dataall.db import models import pytest from dataall.db import exceptions -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module') diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 548e9e02b..0058c2fe9 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -4,7 +4,7 @@ import pytest import dataall from dataall.api.constants import RedshiftClusterRole -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/tests/api/test_share.py b/tests/api/test_share.py index 9ae7c3b62..ada27ed57 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -7,7 +7,7 @@ from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM, ShareObjectSM -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset def random_table_name(): diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index 28800edd8..efe0a7d4e 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -1,7 +1,7 @@ import pytest from dataall.db import models -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module') diff --git a/tests/cdkproxy/conftest.py b/tests/cdkproxy/conftest.py index d1810bfef..fa0680594 100644 --- a/tests/cdkproxy/conftest.py +++ b/tests/cdkproxy/conftest.py @@ -1,7 +1,7 @@ import 
pytest from dataall.db import models, api -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 28524a77a..29f7c0edf 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -4,7 +4,7 @@ from dataall.api.constants import OrganisationUserRole from dataall.db import exceptions from dataall.db.models.Permission import PermissionType -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ DATASET_TABLE_READ diff --git a/tests/modules/datasets/test_dataset_feed.py b/tests/modules/datasets/test_dataset_feed.py index 06ffdc8ed..7546a173a 100644 --- a/tests/modules/datasets/test_dataset_feed.py +++ b/tests/modules/datasets/test_dataset_feed.py @@ -1,6 +1,6 @@ from dataall.api.Objects.Feed.registry import FeedRegistry -from dataall.modules.datasets.db.models import DatasetTableColumn +from dataall.modules.datasets_base.db.models import DatasetTableColumn def test_dataset_registered(): diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index 8bcf114d7..9458140cd 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -1,11 +1,9 @@ -import typing - import pytest import dataall from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, 
Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index e8d1effe3..175fc6934 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -4,7 +4,7 @@ from dataall.db import models from dataall.api import constants from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @pytest.fixture(scope="module") diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py index 1ffac0b6c..712f4a6f9 100644 --- a/tests/tasks/test_catalog_indexer.py +++ b/tests/tasks/test_catalog_indexer.py @@ -1,6 +1,6 @@ import pytest import dataall -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index e2ddde9da..9669216d2 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -11,7 +11,7 @@ from dataall.db import models from dataall.api import constants from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare diff --git a/tests/tasks/test_policies.py b/tests/tasks/test_policies.py index e84ed2d75..d0f369bcb 100644 --- a/tests/tasks/test_policies.py +++ b/tests/tasks/test_policies.py @@ -1,5 +1,5 @@ from 
dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.tasks.bucket_policy_updater import BucketPoliciesUpdater import pytest import dataall diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index b0dc0f722..eb8893586 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -7,7 +7,7 @@ from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager -from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset SOURCE_ENV_ACCOUNT = "111111111111" SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" diff --git a/tests/tasks/test_stacks_updater.py b/tests/tasks/test_stacks_updater.py index 5bd095a91..701d7c50d 100644 --- a/tests/tasks/test_stacks_updater.py +++ b/tests/tasks/test_stacks_updater.py @@ -1,7 +1,7 @@ import pytest import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module', autouse=True) diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index b8e470a9d..8195a8298 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -4,7 +4,7 @@ from dataall.api.constants import OrganisationUserRole from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset 
from dataall.modules.datasets.tasks.subscription_service import SubscriptionService diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 5c9937c72..3206d2f4e 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -3,8 +3,7 @@ import pytest import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.tasks.tables_syncer import sync_tables From c28ac675e68e1204b8423f7ea3cd6eca0f848390 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 12:10:21 +0200 Subject: [PATCH 124/346] Moved DATASET_TABLE_READ to datasets_base --- backend/dataall/api/Objects/Vote/resolvers.py | 2 +- .../modules/dataset_sharing/api/resolvers.py | 4 ++-- .../dataset_sharing/services/share_object.py | 14 +++++++------- .../modules/datasets/services/dataset_service.py | 3 ++- .../datasets/services/dataset_table_service.py | 3 ++- .../modules/datasets/services/permissions.py | 12 +----------- .../modules/datasets_base/services/__init__.py | 0 .../modules/datasets_base/services/permissions.py | 12 ++++++++++++ ...f9a5b215e_backfill_dataset_table_permissions.py | 2 +- tests/db/test_permission.py | 4 ++-- 10 files changed, 30 insertions(+), 26 deletions(-) create mode 100644 backend/dataall/modules/datasets_base/services/__init__.py create mode 100644 backend/dataall/modules/datasets_base/services/permissions.py diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index 2302ef3d2..c94d735eb 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -3,7 +3,7 @@ from dataall import db from dataall.api.context import Context from 
dataall.searchproxy.indexers import DashboardIndexer -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer _VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 50877afcc..6cbb39b95 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -9,8 +9,8 @@ from dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -35,7 +35,7 @@ def create_share_object( ): with context.engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, datasetUri) environment: models.Environment = db.api.Environment.get_environment_by_uri( session, input['environmentUri'] ) diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index 2a97d6cb4..821952f05 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -13,10 +13,10 @@ from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.datasets_base.db.dataset_repository import 
DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ logger = logging.getLogger(__name__) @@ -352,7 +352,7 @@ def create_share_object( itemType = data.get('itemType') dataset: Dataset = data.get( - 'dataset', DatasetService.get_dataset_by_uri(session, datasetUri) + 'dataset', DatasetRepository.get_dataset_by_uri(session, datasetUri) ) environment: models.Environment = data.get( 'environment', @@ -545,7 +545,7 @@ def submit_share_object( check_perm: bool = False, ) -> ShareObject: share = ShareObjectService.get_share_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObjectService.get_share_items_states(session, uri) valid_states = [ShareItemStatus.PendingApproval.value] @@ -583,7 +583,7 @@ def approve_share_object( check_perm: bool = False, ) -> ShareObject: share = ShareObjectService.get_share_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObjectService.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) @@ -630,7 +630,7 @@ def reject_share_object( ) -> ShareObject: share = ShareObjectService.get_share_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) share_items_states = ShareObjectService.get_share_items_states(session, uri) Share_SM 
= ShareObjectSM(share.status) @@ -665,7 +665,7 @@ def revoke_items_share_object( ) -> ShareObject: share = ShareObjectService.get_share_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, share.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) revoked_items_states = ShareObjectService.get_share_items_states(session, uri, data.get("revokedItemUris")) revoked_items = [ShareObjectService.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index e2dd52e4c..46d1b9d60 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -24,7 +24,8 @@ from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ - DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET + LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 42b40011a..62ba0dfbd 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -9,8 +9,9 @@ from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets.services.permissions import 
MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ - UPDATE_DATASET_TABLE, DATASET_TABLE_READ + UPDATE_DATASET_TABLE from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils import json_utils from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/permissions.py index be65f56e6..c17f43e9d 100644 --- a/backend/dataall/modules/datasets/services/permissions.py +++ b/backend/dataall/modules/datasets/services/permissions.py @@ -2,6 +2,7 @@ from dataall.db.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC, \ ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ MANAGE_DATASETS = 'MANAGE_DATASETS' @@ -74,17 +75,6 @@ DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) RESOURCES_ALL.extend(DATASET_ALL) -""" -DATASET TABLE PERMISSIONS -""" - -GET_DATASET_TABLE = 'GET_DATASET_TABLE' -PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' - -DATASET_TABLE_READ = [ - GET_DATASET_TABLE, - PREVIEW_DATASET_TABLE -] RESOURCES_ALL.extend(DATASET_TABLE_READ) diff --git a/backend/dataall/modules/datasets_base/services/__init__.py b/backend/dataall/modules/datasets_base/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datasets_base/services/permissions.py b/backend/dataall/modules/datasets_base/services/permissions.py new file mode 100644 index 000000000..5d44ec983 --- /dev/null +++ b/backend/dataall/modules/datasets_base/services/permissions.py @@ -0,0 +1,12 @@ + +""" +DATASET TABLE PERMISSIONS +""" + +GET_DATASET_TABLE = 'GET_DATASET_TABLE' +PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' + +DATASET_TABLE_READ = [ 
+ GET_DATASET_TABLE, + PREVIEW_DATASET_TABLE +] diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index c80c922d0..54b93659f 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -16,7 +16,7 @@ from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. revision = 'd05f9a5b215e' diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 29f7c0edf..eba00c47b 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,8 +6,8 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ - DATASET_TABLE_READ +from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ @pytest.fixture(scope='module') From a68117538faf6d127f4e0d22ea4ea267ae0371e1 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 12:44:09 +0200 Subject: [PATCH 125/346] Fixed all tests --- backend/dataall/modules/dataset_sharing/__init__.py | 7 ++++++- backend/dataall/modules/dataset_sharing/api/resolvers.py | 6 +++--- 
backend/dataall/modules/datasets/__init__.py | 3 ++- .../modules/datasets/api/storage_location/resolvers.py | 4 ++-- backend/dataall/modules/datasets/api/table/resolvers.py | 3 ++- backend/dataall/modules/datasets_base/__init__.py | 9 +++++++++ tests/api/conftest.py | 1 - tests/api/test_dataset_location.py | 7 +++---- tests/searchproxy/test_indexers.py | 8 ++++---- 9 files changed, 31 insertions(+), 17 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 793662cc7..7e2c7621d 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -1,6 +1,7 @@ import logging -from typing import List +from typing import List, Type +from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.loader import ModuleInterface, ImportMode @@ -12,6 +13,10 @@ class SharingApiModuleInterface(ModuleInterface): def is_supported(modes: List[ImportMode]) -> bool: return ImportMode.API in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + def __init__(self): from dataall.modules.dataset_sharing import api log.info("API of dataset sharing has been imported") diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 6cbb39b95..8f857f031 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -225,7 +225,7 @@ def resolve_user_role(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) if dataset and dataset.stewards in context.groups: return 
ShareObjectPermission.Approvers.value if ( @@ -253,7 +253,7 @@ def resolve_dataset(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - ds: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + ds: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) if ds: env: models.Environment = db.api.Environment.get_environment_by_uri(session, ds.environmentUri) return { @@ -294,7 +294,7 @@ def resolve_consumption_data(context: Context, source: ShareObject, **kwargs): if not source: return None with context.engine.scoped_session() as session: - ds: Dataset = DatasetService.get_dataset_by_uri(session, source.datasetUri) + ds: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) if ds: S3AccessPointName = utils.slugify( source.datasetUri + '-' + source.principalId, diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 874482057..737c3e2b2 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,6 +4,7 @@ from dataall.modules.dataset_sharing import SharingApiModuleInterface from dataall.core.group.services.group_resource_manager import GroupResourceManager +from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer @@ -24,7 +25,7 @@ def is_supported(modes): @staticmethod def depends_on() -> List[Type['ModuleInterface']]: - return [SharingApiModuleInterface] + return [SharingApiModuleInterface, DatasetBaseModuleInterface] def __init__(self): # these imports are placed inside the method because they are only related to GraphQL api. 
diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index db9f8fde7..c5b073a6a 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -6,7 +6,7 @@ Glossary, Environment, ) -from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService @@ -24,7 +24,7 @@ def create_storage_location( data=input, ) - S3DatasetLocationHandler.create_bucket_prefix(location) + S3LocationClient.create_bucket_prefix(location) DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) return location diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index caa160851..b7b0ae04b 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -13,7 +13,8 @@ from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE +from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from 
dataall.modules.datasets.services.dataset_table_service import DatasetTableService diff --git a/backend/dataall/modules/datasets_base/__init__.py b/backend/dataall/modules/datasets_base/__init__.py index e69de29bb..7906c0307 100644 --- a/backend/dataall/modules/datasets_base/__init__.py +++ b/backend/dataall/modules/datasets_base/__init__.py @@ -0,0 +1,9 @@ +from typing import List + +from dataall.modules.loader import ModuleInterface, ImportMode + + +class DatasetBaseModuleInterface(ModuleInterface): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return True diff --git a/tests/api/conftest.py b/tests/api/conftest.py index cdeadcd6f..671cc793c 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -34,7 +34,6 @@ def patch_check_dataset(module_mocker): def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch('dataall.searchproxy.upsert', return_value={}) module_mocker.patch( 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', return_value={} diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index 6764ffa90..191fab492 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -1,8 +1,10 @@ import typing +from unittest.mock import MagicMock import pytest import dataall +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets_base.db.models import Dataset @@ -67,10 +69,7 @@ def test_get_dataset(client, dataset1, env1, user, group): def test_create_location(client, dataset1, env1, user, group, patch_es, module_mocker): - module_mocker.patch( - 'dataall.modules.datasets.handlers.s3_location_handler.S3DatasetLocationHandler.create_bucket_prefix', - return_value=True - ) + module_mocker.patch.object(S3LocationClient, "create_bucket_prefix") response = client.query( 
""" mutation createDatasetStorageLocation($datasetUri:String!, $input:NewDatasetStorageLocationInput!){ diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index 9458140cd..006060e24 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -122,7 +122,7 @@ def test_es_request(): def test_upsert_dataset(db, dataset, env, mocker): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: dataset_indexed = DatasetIndexer.upsert( session, dataset_uri=dataset.datasetUri @@ -131,14 +131,14 @@ def test_upsert_dataset(db, dataset, env, mocker): def test_upsert_table(db, dataset, env, mocker, table): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: table_indexed = DatasetTableIndexer.upsert(session, table_uri=table.tableUri) assert table_indexed.uri == table.tableUri def test_upsert_folder(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: folder_indexed = DatasetLocationIndexer.upsert( session=session, folder_uri=folder.locationUri @@ -147,7 +147,7 @@ def test_upsert_folder(db, dataset, env, mocker, folder): def test_upsert_tables(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: tables = DatasetTableIndexer.upsert_all( session, dataset_uri=dataset.datasetUri From c2ac71c3a8dae3db5523ab4bad8e0c1cd99addba Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:02:35 +0200 Subject: [PATCH 
126/346] Fixed all tests --- backend/dataall/api/Objects/Vote/resolvers.py | 2 +- .../modules/datasets/api/storage_location/resolvers.py | 4 ++-- tests/api/conftest.py | 1 - tests/api/test_dataset.py | 1 - tests/api/test_dataset_location.py | 8 ++++---- tests/searchproxy/test_indexers.py | 8 ++++---- tests/tasks/test_tables_sync.py | 1 - 7 files changed, 11 insertions(+), 14 deletions(-) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index 2302ef3d2..c94d735eb 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -3,7 +3,7 @@ from dataall import db from dataall.api.context import Context from dataall.searchproxy.indexers import DashboardIndexer -from dataall.searchproxy.upsert import BaseIndexer +from dataall.searchproxy.base_indexer import BaseIndexer _VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 4b1ae1726..62feb570c 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -6,7 +6,7 @@ Glossary, Environment, ) -from dataall.modules.datasets.handlers.s3_location_handler import S3DatasetLocationHandler +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService @@ -24,7 +24,7 @@ def create_storage_location( data=input, ) - S3DatasetLocationHandler.create_bucket_prefix(location) + S3LocationClient(location).create_bucket_prefix() DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) return location 
diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 395bafe67..9a204e9e6 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -28,7 +28,6 @@ def patch_check_env(module_mocker): def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch('dataall.searchproxy.upsert', return_value={}) module_mocker.patch( 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', return_value={} diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index c6909c28b..368ea97fa 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -4,7 +4,6 @@ import pytest import dataall -from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index 50aafe9a3..ed700630c 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -1,8 +1,10 @@ import typing +from unittest.mock import MagicMock import pytest import dataall +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.db.models import Dataset @@ -71,10 +73,8 @@ def test_get_dataset(client, dataset1, env1, user, group): def test_create_location(client, dataset1, env1, user, group, patch_es, module_mocker): - module_mocker.patch( - 'dataall.modules.datasets.handlers.s3_location_handler.S3DatasetLocationHandler.create_bucket_prefix', - return_value=True - ) + mock_client = MagicMock() + module_mocker.patch("dataall.modules.datasets.api.storage_location.resolvers.S3LocationClient", mock_client) response = client.query( """ mutation createDatasetStorageLocation($datasetUri:String!, 
$input:NewDatasetStorageLocationInput!){ diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py index 8bcf114d7..83be2dc4c 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/searchproxy/test_indexers.py @@ -124,7 +124,7 @@ def test_es_request(): def test_upsert_dataset(db, dataset, env, mocker): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: dataset_indexed = DatasetIndexer.upsert( session, dataset_uri=dataset.datasetUri @@ -133,14 +133,14 @@ def test_upsert_dataset(db, dataset, env, mocker): def test_upsert_table(db, dataset, env, mocker, table): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: table_indexed = DatasetTableIndexer.upsert(session, table_uri=table.tableUri) assert table_indexed.uri == table.tableUri def test_upsert_folder(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: folder_indexed = DatasetLocationIndexer.upsert( session=session, folder_uri=folder.locationUri @@ -149,7 +149,7 @@ def test_upsert_folder(db, dataset, env, mocker, folder): def test_upsert_tables(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) with db.scoped_session() as session: tables = DatasetTableIndexer.upsert_all( session, dataset_uri=dataset.datasetUri diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 5c9937c72..f6ff700dd 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -3,7 +3,6 @@ import pytest 
import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.tasks.tables_syncer import sync_tables From 082fec9008e5efc79f760dc45496467142ef8a0f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:03:44 +0200 Subject: [PATCH 127/346] Review remarks --- backend/dataall/cdkproxy/stacks/policies/data_policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/cdkproxy/stacks/policies/data_policy.py b/backend/dataall/cdkproxy/stacks/policies/data_policy.py index a0791954d..49f7d59fb 100644 --- a/backend/dataall/cdkproxy/stacks/policies/data_policy.py +++ b/backend/dataall/cdkproxy/stacks/policies/data_policy.py @@ -103,7 +103,7 @@ def generate_data_access_policy(self, session) -> iam.Policy: return policy - def get_statements(self, session): + def get_statements(self, *args, **kwargs): statements = [ iam.PolicyStatement( actions=[ From dbf6197e20ecfa0d94ba753280cd5f30335debcd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:21:00 +0200 Subject: [PATCH 128/346] Moved code related to dataset sharing --- backend/dataall/aws/handlers/ecs.py | 2 +- .../services}/data_sharing_service.py | 6 +++--- .../services/dataset_alarm_service.py | 0 .../services}/share_managers/__init__.py | 0 .../services}/share_managers/lf_share_manager.py | 14 +++++++------- .../services}/share_managers/s3_share_manager.py | 14 +++++++------- .../services}/share_processors/__init__.py | 0 .../lf_process_cross_account_share.py | 0 .../lf_process_same_account_share.py | 0 .../services}/share_processors/s3_process_share.py | 0 .../modules/datasets/tasks/tables_syncer.py | 2 +- backend/dataall/tasks/share_manager.py | 4 ++-- backend/dataall/tasks/shares_refresh.py | 4 ++-- tests/tasks/test_lf_share_manager.py | 6 +++--- 
tests/tasks/test_s3_share_manager.py | 2 +- 15 files changed, 27 insertions(+), 27 deletions(-) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/data_sharing_service.py (96%) rename backend/dataall/modules/{datasets => dataset_sharing}/services/dataset_alarm_service.py (100%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_managers/__init__.py (100%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_managers/lf_share_manager.py (97%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_managers/s3_share_manager.py (98%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_processors/__init__.py (100%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_processors/lf_process_cross_account_share.py (100%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_processors/lf_process_same_account_share.py (100%) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/services}/share_processors/s3_process_share.py (100%) diff --git a/backend/dataall/aws/handlers/ecs.py b/backend/dataall/aws/handlers/ecs.py index 5d98f6ba3..528ebb520 100644 --- a/backend/dataall/aws/handlers/ecs.py +++ b/backend/dataall/aws/handlers/ecs.py @@ -9,7 +9,7 @@ from ... 
import db from ...db import models from ...utils import Parameter -from ...tasks.data_sharing.data_sharing_service import DataSharingService +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService log = logging.getLogger('aws:ecs') diff --git a/backend/dataall/tasks/data_sharing/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py similarity index 96% rename from backend/dataall/tasks/data_sharing/data_sharing_service.py rename to backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 4ec4687b6..6ffd124e4 100644 --- a/backend/dataall/tasks/data_sharing/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -1,9 +1,9 @@ import logging import os -from .share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare -from .share_processors.lf_process_same_account_share import ProcessLFSameAccountShare -from .share_processors.s3_process_share import ProcessS3Share +from dataall.modules.dataset_sharing.services.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare +from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare +from dataall.modules.dataset_sharing.services.share_processors.s3_process_share import ProcessS3Share from dataall.aws.handlers.ram import Ram from dataall.aws.handlers.sts import SessionHelper diff --git a/backend/dataall/modules/datasets/services/dataset_alarm_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/dataset_alarm_service.py rename to backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py diff --git a/backend/dataall/tasks/data_sharing/share_managers/__init__.py b/backend/dataall/modules/dataset_sharing/services/share_managers/__init__.py 
similarity index 100% rename from backend/dataall/tasks/data_sharing/share_managers/__init__.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/__init__.py diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py similarity index 97% rename from backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 89f28c271..2e3b830ff 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -5,14 +5,14 @@ from botocore.exceptions import ClientError -from ....aws.handlers.glue import Glue -from ....aws.handlers.lakeformation import LakeFormation -from ....aws.handlers.quicksight import Quicksight -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.ram import Ram -from ....db import exceptions, models +from dataall.aws.handlers.glue import Glue +from dataall.aws.handlers.lakeformation import LakeFormation +from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.sts import SessionHelper +from dataall.aws.handlers.ram import Ram +from dataall.db import exceptions, models from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject logger = logging.getLogger(__name__) diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py similarity index 98% rename from 
backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py index 4b4824a51..8a1b516eb 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py @@ -3,13 +3,13 @@ import json import time -from ....db import models, utils -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.s3 import S3 -from ....aws.handlers.kms import KMS -from ....aws.handlers.iam import IAM -from ....modules.dataset_sharing.db.models import ShareObject -from ....modules.dataset_sharing.services.share_object import ShareObjectService +from dataall.db import models, utils +from dataall.aws.handlers.sts import SessionHelper +from dataall.aws.handlers.s3 import S3 +from dataall.aws.handlers.kms import KMS +from dataall.aws.handlers.iam import IAM +from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset diff --git a/backend/dataall/tasks/data_sharing/share_processors/__init__.py b/backend/dataall/modules/dataset_sharing/services/share_processors/__init__.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_processors/__init__.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/__init__.py diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py diff --git 
a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 6be5054f2..a9d8bb47a 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -11,7 +11,7 @@ from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_table_service import DatasetTableService diff --git a/backend/dataall/tasks/share_manager.py b/backend/dataall/tasks/share_manager.py index 000f5d808..e00cb4441 100644 --- a/backend/dataall/tasks/share_manager.py +++ b/backend/dataall/tasks/share_manager.py @@ -2,8 +2,8 @@ import os import sys -from .data_sharing.data_sharing_service import 
DataSharingService -from ..db import get_engine +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService +from dataall.db import get_engine root = logging.getLogger() root.setLevel(logging.INFO) diff --git a/backend/dataall/tasks/shares_refresh.py b/backend/dataall/tasks/shares_refresh.py index d1957bc74..0d0b25d55 100644 --- a/backend/dataall/tasks/shares_refresh.py +++ b/backend/dataall/tasks/shares_refresh.py @@ -2,8 +2,8 @@ import os import sys -from .data_sharing.data_sharing_service import DataSharingService -from ..db import get_engine +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService +from dataall.db import get_engine root = logging.getLogger() root.setLevel(logging.INFO) diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 9669216d2..5cefd5a2c 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -12,10 +12,10 @@ from dataall.api import constants from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService -from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare -from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare +from dataall.modules.dataset_sharing.services.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare +from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare SOURCE_ENV_ACCOUNT = "1" * 12 diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py index 
eb8893586..6595b9ee8 100644 --- a/tests/tasks/test_s3_share_manager.py +++ b/tests/tasks/test_s3_share_manager.py @@ -6,7 +6,7 @@ from dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager +from dataall.modules.dataset_sharing.services.share_managers import S3ShareManager from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset SOURCE_ENV_ACCOUNT = "111111111111" From 26b2e8b77e24529892d56e23b78506283d4b55e5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:29:22 +0200 Subject: [PATCH 129/346] Moved task for sharing --- .../data_sharing => modules/dataset_sharing/tasks}/__init__.py | 0 .../dataset_sharing/tasks/share_manager_task.py} | 0 .../dataset_sharing/tasks/shares_refresh_task.py} | 0 deploy/stacks/container.py | 3 ++- 4 files changed, 2 insertions(+), 1 deletion(-) rename backend/dataall/{tasks/data_sharing => modules/dataset_sharing/tasks}/__init__.py (100%) rename backend/dataall/{tasks/share_manager.py => modules/dataset_sharing/tasks/share_manager_task.py} (100%) rename backend/dataall/{tasks/shares_refresh.py => modules/dataset_sharing/tasks/shares_refresh_task.py} (100%) diff --git a/backend/dataall/tasks/data_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/tasks/__init__.py similarity index 100% rename from backend/dataall/tasks/data_sharing/__init__.py rename to backend/dataall/modules/dataset_sharing/tasks/__init__.py diff --git a/backend/dataall/tasks/share_manager.py b/backend/dataall/modules/dataset_sharing/tasks/share_manager_task.py similarity index 100% rename from backend/dataall/tasks/share_manager.py rename to backend/dataall/modules/dataset_sharing/tasks/share_manager_task.py diff --git a/backend/dataall/tasks/shares_refresh.py b/backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py similarity index 100% rename from 
backend/dataall/tasks/shares_refresh.py rename to backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index 2eb850a65..2a835753e 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -221,6 +221,7 @@ def __init__( family=f'{resource_prefix}-{envname}-share-manager', ) + # TODO introduce the ability to change the deployment depending on config.json file share_management_container = share_management_task_definition.add_container( f'ShareManagementTaskContainer{envname}', container_name=f'container', @@ -228,7 +229,7 @@ def __init__( repository=ecr_repository, tag=cdkproxy_image_tag ), environment=self._create_env('DEBUG'), - command=['python3.8', '-m', 'dataall.tasks.share_manager'], + command=['python3.8', '-m', 'dataall.modules.dataset_sharing.tasks.share_manager_task'], logging=ecs.LogDriver.aws_logs( stream_prefix='task', log_group=self.create_log_group( From 2c40b9228728fbe39f820e5a5da420414c7caf01 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:31:48 +0200 Subject: [PATCH 130/346] Removed shares_refresh task since it's not used in the project --- .../services/data_sharing_service.py | 77 ------------------- .../dataset_sharing/services/share_object.py | 7 -- .../tasks/shares_refresh_task.py | 28 ------- 3 files changed, 112 deletions(-) delete mode 100644 backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 6ffd124e4..e0989ccf9 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -223,80 +223,3 @@ def revoke_share(cls, engine: Engine, share_uri: str): Share_SM.update_state(session, share, new_share_state) return revoked_tables_succeed 
and revoked_folders_succeed - - @classmethod - def clean_lfv1_ram_resources(cls, environment: models.Environment): - """ - Deletes LFV1 resource shares for an environment - Parameters - ---------- - environment : models.Environment - - Returns - ------- - None - """ - return Ram.delete_lakeformation_v1_resource_shares( - SessionHelper.remote_session(accountid=environment.AwsAccountId).client( - 'ram', region_name=environment.region - ) - ) - - @classmethod - def refresh_shares(cls, engine: Engine) -> bool: - """ - Refreshes the shares at scheduled frequency. - If a share is in 'Approve' state it triggers an approve ECS sharing task - If a share is in 'Revoked' state it triggers a revoke ECS sharing task - Also cleans up LFV1 ram resource shares if enabled on SSM - Parameters - ---------- - engine : db.engine - - Returns - ------- - true if refresh succeeds - """ - share_object_refreshable_states = ShareObjectSM.get_share_object_refreshable_states() - with engine.scoped_session() as session: - environments = session.query(models.Environment).all() - shares = ( - session.query(ShareObject) - .filter(ShareObject.status.in_(share_object_refreshable_states)) - .all() - ) - - # Feature toggle: default value is False - if ( - Parameter().get_parameter( - os.getenv('envname', 'local'), 'shares/cleanlfv1ram' - ) - == 'True' - ): - log.info('LFV1 Cleanup toggle is enabled') - for e in environments: - log.info( - f'Cleaning LFV1 ram resource for environment: {e.AwsAccountId}/{e.region}...' - ) - cls.clean_lfv1_ram_resources(e) - - if not shares: - log.info('No Approved nor Revoked shares found. Nothing to do...') - return True - - for share in shares: - try: - log.info( - f'Refreshing share {share.shareUri} with {share.status} status...' 
- ) - if share.status in [ShareObjectStatus.Approved.value]: - cls.approve_share(engine, share.shareUri) - else: - cls.revoke_share(engine, share.shareUri) - - except Exception as e: - log.error( - f'Failed refreshing share {share.shareUri} with {share.status}. ' - f'due to: {e}' - ) - return True diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index 821952f05..2d6ff314c 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -157,13 +157,6 @@ def update_state(self, session, share, new_state): self._state = new_state return True - @staticmethod - def get_share_object_refreshable_states(): - return [ - ShareObjectStatus.Approved.value, - ShareObjectStatus.Revoked.value - ] - class ShareItemSM: def __init__(self, state): diff --git a/backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py b/backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py deleted file mode 100644 index 0d0b25d55..000000000 --- a/backend/dataall/modules/dataset_sharing/tasks/shares_refresh_task.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging -import os -import sys - -from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService -from dataall.db import get_engine - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -if __name__ == '__main__': - - try: - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - - log.info('Starting refresh shares task...') - DataSharingService.refresh_shares(engine=ENGINE) - - log.info('Sharing task finished successfully') - - except Exception as e: - log.error(f'Sharing task failed due to: {e}') - raise e From 29a7f7e574d392e4427e0533216dbe0a7c4e90f8 Mon Sep 17 
00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 13:35:33 +0200 Subject: [PATCH 131/346] Fixed imports --- .../dataset_sharing/services/data_sharing_service.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index e0989ccf9..08ef1169d 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -1,17 +1,12 @@ import logging -import os from dataall.modules.dataset_sharing.services.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare from dataall.modules.dataset_sharing.services.share_processors.s3_process_share import ProcessS3Share -from dataall.aws.handlers.ram import Ram -from dataall.aws.handlers.sts import SessionHelper from dataall.db import models, Engine -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus, ShareObjectStatus -from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus from dataall.modules.dataset_sharing.services.share_object import ShareObjectSM, ShareObjectService, ShareItemSM -from dataall.utils import Parameter log = logging.getLogger(__name__) From da2bac50e7081d067f1c67fbd8ace779a7f493f3 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 5 May 2023 15:30:38 +0200 Subject: [PATCH 132/346] Moved services to repositories since it would be easier to extract business logic from it --- backend/dataall/aws/handlers/redshift.py | 4 +-- backend/dataall/db/api/redshift_cluster.py | 4 +-- .../modules/datasets/api/dataset/resolvers.py | 8 +++--- 
.../datasets/api/profiling/resolvers.py | 20 +++++++------- .../api/storage_location/resolvers.py | 20 +++++++------- .../modules/datasets/api/table/resolvers.py | 26 +++++++++---------- .../datasets/api/table_column/resolvers.py | 8 +++--- .../dataset_location_repository.py} | 8 +++--- .../dataset_profiling_repository.py} | 4 +-- .../dataset_table_repository.py} | 16 ++++++------ .../datasets/handlers/glue_column_handler.py | 4 +-- .../handlers/glue_profiling_handler.py | 8 +++--- .../datasets/handlers/glue_table_handler.py | 4 +-- .../datasets/handlers/s3_location_handler.py | 4 +-- .../datasets/indexers/dataset_indexer.py | 4 +-- .../datasets/services/dataset_service.py | 4 +-- .../datasets/tasks/subscription_service.py | 14 +++++----- .../modules/datasets/tasks/tables_syncer.py | 4 +-- tests/api/test_dataset_table.py | 4 +-- 19 files changed, 84 insertions(+), 84 deletions(-) rename backend/dataall/modules/datasets/{services/dataset_location_service.py => db/dataset_location_repository.py} (96%) rename backend/dataall/modules/datasets/{services/dataset_profiling_service.py => db/dataset_profiling_repository.py} (98%) rename backend/dataall/modules/datasets/{services/dataset_table_service.py => db/dataset_table_repository.py} (95%) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 1e3e994c6..5264366b3 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -10,7 +10,7 @@ from ... 
import db from ...db import models # TODO should be migrated in the redshift module -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService @@ -450,7 +450,7 @@ def copy_data(engine, task: models.Task): session, task.payload['datasetUri'] ) - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, task.payload['tableUri'] ) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 33657ca9d..f463fd3f5 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -499,8 +499,8 @@ def enable_copy_table( cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) # TODO this dirty hack should be removed in the redshift module or after pipeline migration (circular import) - from dataall.modules.datasets.services.dataset_table_service import DatasetTableService - table = DatasetTableService.get_dataset_table_by_uri( + from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository + table = DatasetTableRepository.get_dataset_table_by_uri( session, data['tableUri'] ) table = models.RedshiftClusterDatasetTable( diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index ab70bc627..9239fbb3b 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -18,7 +18,7 @@ from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.datasets import Dataset from dataall.modules.datasets.aws.glue_dataset_client import 
DatasetCrawler -from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer @@ -177,7 +177,7 @@ def list_locations(context, source: Dataset, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return DatasetLocationService.paginated_dataset_locations( + return DatasetLocationRepository.paginated_dataset_locations( session=session, username=context.username, groups=context.groups, @@ -249,7 +249,7 @@ def get_dataset_statistics(context: Context, source: Dataset, **kwargs): return None with context.engine.scoped_session() as session: count_tables = DatasetService.count_dataset_tables(session, source.datasetUri) - count_locations = DatasetLocationService.count_dataset_locations( + count_locations = DatasetLocationRepository.count_dataset_locations( session, source.datasetUri ) count_upvotes = db.api.Vote.count_upvotes( @@ -566,7 +566,7 @@ def delete_dataset( for uri in tables: DatasetIndexer.delete_doc(doc_id=uri) - folders = [f.locationUri for f in DatasetLocationService.get_dataset_folders(session, datasetUri)] + folders = [f.locationUri for f in DatasetLocationRepository.get_dataset_folders(session, datasetUri)] for uri in folders: DatasetIndexer.delete_doc(doc_id=uri) diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 9fddc4505..0f676c789 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -7,8 +7,8 @@ from dataall.db import api, models from dataall.db.api 
import ResourcePolicy from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository from dataall.modules.datasets_base.db.models import DatasetProfilingRun from dataall.modules.datasets.services.permissions import PROFILE_DATASET_TABLE @@ -36,7 +36,7 @@ def start_profiling_run(context: Context, source, input: dict = None): ) dataset = DatasetService.get_dataset_by_uri(session, input['datasetUri']) - run = DatasetProfilingService.start_profiling( + run = DatasetProfilingRepository.start_profiling( session=session, datasetUri=dataset.datasetUri, tableUri=input.get('tableUri'), @@ -74,7 +74,7 @@ def get_profiling_results(context: Context, source: DatasetProfilingRun): def update_profiling_run_results(context: Context, source, profilingRunUri, results): with context.engine.scoped_session() as session: - run = DatasetProfilingService.update_run( + run = DatasetProfilingRepository.update_run( session=session, profilingRunUri=profilingRunUri, results=results ) return run @@ -82,12 +82,12 @@ def update_profiling_run_results(context: Context, source, profilingRunUri, resu def list_profiling_runs(context: Context, source, datasetUri=None): with context.engine.scoped_session() as session: - return DatasetProfilingService.list_profiling_runs(session, datasetUri) + return DatasetProfilingRepository.list_profiling_runs(session, datasetUri) def get_profiling_run(context: Context, source, profilingRunUri=None): with context.engine.scoped_session() as session: - return DatasetProfilingService.get_profiling_run( + return DatasetProfilingRepository.get_profiling_run( session=session, 
profilingRunUri=profilingRunUri ) @@ -95,14 +95,14 @@ def get_profiling_run(context: Context, source, profilingRunUri=None): def get_last_table_profiling_run(context: Context, source, tableUri=None): with context.engine.scoped_session() as session: run: DatasetProfilingRun = ( - DatasetProfilingService.get_table_last_profiling_run( + DatasetProfilingRepository.get_table_last_profiling_run( session=session, tableUri=tableUri ) ) if run: if not run.results: - table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) + table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) environment = api.Environment.get_environment_by_uri( session, dataset.environmentUri @@ -116,7 +116,7 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): if not run.results: run_with_results = ( - DatasetProfilingService.get_table_last_profiling_run_with_results( + DatasetProfilingRepository.get_table_last_profiling_run_with_results( session=session, tableUri=tableUri ) ) @@ -147,6 +147,6 @@ def get_profiling_results_from_s3(environment, dataset, table, run): def list_table_profiling_runs(context: Context, source, tableUri=None): with context.engine.scoped_session() as session: - return DatasetProfilingService.list_table_profiling_runs( + return DatasetProfilingRepository.list_table_profiling_runs( session=session, tableUri=tableUri, filter={} ) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index 9f0a16f01..119e861c1 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -9,7 +9,7 @@ from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from 
dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset -from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.permissions import UPDATE_DATASET_FOLDER @@ -18,7 +18,7 @@ def create_storage_location( context, source, datasetUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetLocationService.create_dataset_location( + location = DatasetLocationRepository.create_dataset_location( session=session, uri=datasetUri, data=input, @@ -36,15 +36,15 @@ def list_dataset_locations(context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetLocationService.list_dataset_locations( + return DatasetLocationRepository.list_dataset_locations( session=session, uri=source.datasetUri, data=filter ) def get_storage_location(context, source, locationUri=None): with context.engine.scoped_session() as session: - location = DatasetLocationService.get_location_by_uri(session, locationUri) - return DatasetLocationService.get_dataset_location( + location = DatasetLocationRepository.get_location_by_uri(session, locationUri) + return DatasetLocationRepository.get_dataset_location( session=session, uri=location.datasetUri, data={'locationUri': location.locationUri}, @@ -55,10 +55,10 @@ def update_storage_location( context, source, locationUri: str = None, input: dict = None ): with context.engine.scoped_session() as session: - location = DatasetLocationService.get_location_by_uri(session, locationUri) + location = DatasetLocationRepository.get_location_by_uri(session, locationUri) input['location'] = location input['locationUri'] = location.locationUri - DatasetLocationService.update_dataset_location( + 
DatasetLocationRepository.update_dataset_location( session=session, uri=location.datasetUri, data=input, @@ -70,8 +70,8 @@ def update_storage_location( def remove_storage_location(context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetLocationService.get_location_by_uri(session, locationUri) - DatasetLocationService.delete_dataset_location( + location = DatasetLocationRepository.get_location_by_uri(session, locationUri) + DatasetLocationRepository.delete_dataset_location( session=session, uri=location.datasetUri, data={'locationUri': location.locationUri}, @@ -90,7 +90,7 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): def publish_location_update(context: Context, source, locationUri: str = None): with context.engine.scoped_session() as session: - location = DatasetLocationService.get_location_by_uri(session, locationUri) + location = DatasetLocationRepository.get_location_by_uri(session, locationUri) ResourcePolicy.check_user_resource_permission( session=session, username=context.username, diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index b7b0ae04b..5444cd0a0 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -17,14 +17,14 @@ from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository log = logging.getLogger(__name__) def create_table(context, source, datasetUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - table = DatasetTableService.create_dataset_table( + table = 
DatasetTableRepository.create_dataset_table( session=session, uri=datasetUri, data=input, @@ -39,7 +39,7 @@ def list_dataset_tables(context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetTableService.list_dataset_tables( + return DatasetTableRepository.list_dataset_tables( session=session, uri=source.datasetUri, data=filter, @@ -48,8 +48,8 @@ def list_dataset_tables(context, source, filter: dict = None): def get_table(context, source: Dataset, tableUri: str = None): with context.engine.scoped_session() as session: - table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) - return DatasetTableService.get_dataset_table( + table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) + return DatasetTableRepository.get_dataset_table( session=session, uri=table.datasetUri, data={ @@ -60,14 +60,14 @@ def get_table(context, source: Dataset, tableUri: str = None): def update_table(context, source, tableUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) + table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) input['table'] = table input['tableUri'] = table.tableUri - DatasetTableService.update_dataset_table( + DatasetTableRepository.update_dataset_table( session=session, uri=dataset.datasetUri, data=input, @@ -78,8 +78,8 @@ def update_table(context, source, tableUri: str = None, input: dict = None): def delete_table(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table = DatasetTableService.get_dataset_table_by_uri(session, tableUri) - DatasetTableService.delete_dataset_table( + table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) + DatasetTableRepository.delete_dataset_table( session=session, 
uri=table.datasetUri, data={ @@ -92,7 +92,7 @@ def delete_table(context, source, tableUri: str = None): def preview(context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, tableUri ) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) @@ -147,7 +147,7 @@ def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): if not source: return None with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, source.tableUri ) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') @@ -176,7 +176,7 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): def publish_table_update(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -225,7 +225,7 @@ def resolve_redshift_copy_location( def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): with context.engine.scoped_session() as session: - return DatasetTableService.get_dataset_tables_shared_with_env( + return DatasetTableRepository.get_dataset_tables_shared_with_env( session, envUri, datasetUri diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 0efeceaa6..251d407bd 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -5,7 +5,7 
@@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import paginate, models from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE @@ -22,7 +22,7 @@ def list_table_columns( filter = {} with context.engine.scoped_session() as session: if not source: - source = DatasetTableService.get_dataset_table_by_uri(session, tableUri) + source = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) q = ( session.query(DatasetTableColumn) .filter( @@ -47,7 +47,7 @@ def list_table_columns( def sync_table_columns(context: Context, source, tableUri: str = None): with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, tableUri ) ResourcePolicy.check_user_resource_permission( @@ -82,7 +82,7 @@ def update_table_column( ).get(columnUri) if not column: raise db.exceptions.ObjectNotFound('Column', columnUri) - table: DatasetTable = DatasetTableService.get_dataset_table_by_uri( + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, column.tableUri ) ResourcePolicy.check_user_resource_permission( diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py similarity index 96% rename from backend/dataall/modules/datasets/services/dataset_location_service.py rename to backend/dataall/modules/datasets/db/dataset_location_repository.py index b86b86b34..4fc65db98 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ 
b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) -class DatasetLocationService: +class DatasetLocationRepository: @staticmethod @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(CREATE_DATASET_FOLDER) @@ -98,7 +98,7 @@ def get_dataset_location( uri: str, data: dict = None, ) -> DatasetStorageLocation: - return DatasetLocationService.get_location_by_uri(session, data['locationUri']) + return DatasetLocationRepository.get_location_by_uri(session, data['locationUri']) @staticmethod @has_tenant_permission(MANAGE_DATASETS) @@ -111,7 +111,7 @@ def update_dataset_location( location = data.get( 'location', - DatasetLocationService.get_location_by_uri(session, data['locationUri']), + DatasetLocationRepository.get_location_by_uri(session, data['locationUri']), ) for k in data.keys(): @@ -135,7 +135,7 @@ def delete_dataset_location( uri: str, data: dict = None, ): - location = DatasetLocationService.get_location_by_uri( + location = DatasetLocationRepository.get_location_by_uri( session, data['locationUri'] ) share_item_shared_states = ShareItemSM.get_share_item_shared_states() diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py similarity index 98% rename from backend/dataall/modules/datasets/services/dataset_profiling_service.py rename to backend/dataall/modules/datasets/db/dataset_profiling_repository.py index 55a143ebf..a34ad0a5a 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -5,7 +5,7 @@ from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable, Dataset -class DatasetProfilingService: +class DatasetProfilingRepository: def __init__(self): pass @@ -56,7 +56,7 @@ def update_run( GlueJobRunState=None, results=None, ): - run = 
DatasetProfilingService.get_profiling_run( + run = DatasetProfilingRepository.get_profiling_run( session, profilingRunUri=profilingRunUri, GlueJobRunId=GlueJobRunId ) if GlueJobRunId: diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py similarity index 95% rename from backend/dataall/modules/datasets/services/dataset_table_service.py rename to backend/dataall/modules/datasets/db/dataset_table_repository.py index 62ba0dfbd..1b7a95867 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -18,7 +18,7 @@ logger = logging.getLogger(__name__) -class DatasetTableService: +class DatasetTableRepository: @staticmethod @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(CREATE_DATASET_TABLE) @@ -106,7 +106,7 @@ def get_dataset_table( uri: str, data: dict = None, ) -> DatasetTable: - return DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) + return DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']) @staticmethod @has_tenant_permission(MANAGE_DATASETS) @@ -118,7 +118,7 @@ def update_dataset_table( ): table = data.get( 'table', - DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']), + DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']), ) for k in [attr for attr in data.keys() if attr != 'term']: @@ -139,7 +139,7 @@ def delete_dataset_table( uri: str, data: dict = None, ): - table = DatasetTableService.get_dataset_table_by_uri(session, data['tableUri']) + table = DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']) share_item_shared_states = ShareItemSM.get_share_item_shared_states() share_item = ( session.query(ShareObjectItem) @@ -202,7 +202,7 @@ def get_dataset_tables_shared_with_env( ): return [ {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} - for t in 
DatasetTableService.query_dataset_tables_shared_with_env( + for t in DatasetTableRepository.query_dataset_tables_shared_with_env( session, environment_uri, dataset_uri ) ] @@ -227,7 +227,7 @@ def sync_existing_tables(session, datasetUri, glue_tables=None): existing_table_names = [e.GlueTableName for e in existing_tables] existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} - DatasetTableService.update_existing_tables_status(existing_tables, glue_tables) + DatasetTableRepository.update_existing_tables_status(existing_tables, glue_tables) logger.info( f'existing_tables={glue_tables}' ) @@ -276,7 +276,7 @@ def sync_existing_tables(session, datasetUri, glue_tables=None): table.get('Parameters', {}) ) - DatasetTableService.sync_table_columns(session, updated_table, table) + DatasetTableRepository.sync_table_columns(session, updated_table, table) return True @@ -292,7 +292,7 @@ def update_existing_tables_status(existing_tables, glue_tables): @staticmethod def sync_table_columns(session, dataset_table, glue_table): - DatasetTableService.delete_all_table_columns(session, dataset_table) + DatasetTableRepository.delete_all_table_columns(session, dataset_table) columns = [ {**item, **{'columnType': 'column'}} diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_column_handler.py index 0cf8d0b1b..b16ce4413 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_column_handler.py @@ -6,7 +6,7 @@ from dataall.modules.datasets.aws.glue_table_client import GlueTableClient from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository 
log = logging.getLogger(__name__) @@ -24,7 +24,7 @@ def get_table_columns(engine, task: models.Task): aws = SessionHelper.remote_session(dataset_table.AWSAccountId) glue_table = GlueTableClient(aws, dataset_table).get_table() - DatasetTableService.sync_table_columns( + DatasetTableRepository.sync_table_columns( session, dataset_table, glue_table['Table'] ) return True diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index c45e7011b..7177d426d 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -5,7 +5,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.modules.datasets_base.db.models import DatasetProfilingRun, Dataset -from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository log = logging.getLogger(__name__) @@ -18,7 +18,7 @@ class DatasetProfilingGlueHandler: def get_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( + DatasetProfilingRepository.get_profiling_run( session, profilingRunUri=task.targetUri ) ) @@ -42,7 +42,7 @@ def get_profiling_run(engine, task: models.Task): def start_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( + DatasetProfilingRepository.get_profiling_run( session, profilingRunUri=task.targetUri ) ) @@ -61,7 +61,7 @@ def start_profiling_run(engine, task: models.Task): ), } ) - DatasetProfilingService.update_run( + DatasetProfilingRepository.update_run( session, profilingRunUri=profiling.profilingRunUri, GlueJobRunId=run['JobRunId'], 
diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index aabff1ae7..7fc938828 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -5,7 +5,7 @@ from dataall.db import models from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository log = logging.getLogger(__name__) @@ -25,5 +25,5 @@ def list_tables(engine, task: models.Task): tables = Glue.list_glue_database_tables( account_id, dataset.GlueDatabaseName, region ) - DatasetTableService.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) + DatasetTableRepository.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) return tables diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_location_handler.py index 14864c191..68ba5f04b 100644 --- a/backend/dataall/modules/datasets/handlers/s3_location_handler.py +++ b/backend/dataall/modules/datasets/handlers/s3_location_handler.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets.aws.s3_location_client import S3LocationClient -from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository log = logging.getLogger(__name__) @@ -15,7 +15,7 @@ class S3DatasetLocationHandler: @Worker.handler(path='s3.prefix.create') def create_dataset_location(engine, task: models.Task): with engine.scoped_session() as session: - location = 
DatasetLocationService.get_location_by_uri( + location = DatasetLocationRepository.get_location_by_uri( session, task.targetUri ) S3LocationClient(location).create_bucket_prefix() diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 9f8191343..72915207d 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -3,7 +3,7 @@ from dataall import db from dataall.db import models from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.searchproxy.base_indexer import BaseIndexer @@ -46,7 +46,7 @@ def upsert(cls, session, dataset_uri: str): .first() ) count_tables = DatasetService.count_dataset_tables(session, dataset_uri) - count_folders = DatasetLocationService.count_dataset_locations(session, dataset_uri) + count_folders = DatasetLocationRepository.count_dataset_locations(session, dataset_uri) count_upvotes = db.api.Vote.count_upvotes( session, None, None, dataset_uri, {'targetType': 'dataset'} ) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 46d1b9d60..d92c672ca 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -22,7 +22,7 @@ from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_location_service import 
DatasetLocationService +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ LIST_ENVIRONMENT_DATASETS, CREATE_DATASET from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ @@ -514,7 +514,7 @@ def delete_dataset( DatasetService._delete_dataset_shares_with_no_shared_items(session, uri) DatasetService._delete_dataset_term_links(session, uri) DatasetService._delete_dataset_tables(session, dataset.datasetUri) - DatasetLocationService.delete_dataset_locations(session, dataset.datasetUri) + DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') Vote.delete_votes(session, dataset.datasetUri, 'dataset') session.delete(dataset) diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/subscription_service.py index 1a27ddcf0..e18875ab9 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/subscription_service.py @@ -13,12 +13,12 @@ from dataall.db import get_engine from dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from 
dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() @@ -70,7 +70,7 @@ def notify_consumers(engine, messages): @staticmethod def publish_table_update_message(engine, message): with engine.scoped_session() as session: - table: DatasetTable = DatasetTableService.get_table_by_s3_prefix( + table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), @@ -108,7 +108,7 @@ def publish_table_update_message(engine, message): @staticmethod def publish_location_update_message(session, message): location: DatasetStorageLocation = ( - DatasetLocationService.get_location_by_s3_prefix( + DatasetLocationRepository.get_location_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), @@ -141,14 +141,14 @@ def publish_location_update_message(session, message): @staticmethod def store_dataquality_results(session, message): - table: DatasetTable = DatasetTableService.get_table_by_s3_prefix( + table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( session, message.get('prefix'), message.get('accountid'), message.get('region'), ) - run = DatasetProfilingService.start_profiling( + run = DatasetProfilingRepository.start_profiling( session=session, datasetUri=table.datasetUri, GlueTableName=table.GlueTableName, diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index a9d8bb47a..ab219eb5b 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -13,7 +13,7 @@ from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from 
dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository root = logging.getLogger() root.setLevel(logging.INFO) @@ -64,7 +64,7 @@ def sync_tables(engine): f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' ) - DatasetTableService.sync_existing_tables( + DatasetTableRepository.sync_existing_tables( session, dataset.datasetUri, glue_tables=tables ) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 3e3dc3ab3..042ad880b 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset @@ -287,7 +287,7 @@ def test_sync_tables_and_columns(client, table, dataset1, db): }, ] - assert DatasetTableService.sync_existing_tables(session, dataset1.datasetUri, glue_tables) + assert DatasetTableRepository.sync_existing_tables(session, dataset1.datasetUri, glue_tables) new_table: DatasetTable = ( session.query(DatasetTable) .filter(DatasetTable.name == 'new_table') From 5c67ef4cbcf5d6ddfc21902de73465f03bdb109c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 10:54:00 +0200 Subject: [PATCH 133/346] Review remarks --- backend/dataall/api/Objects/Vote/resolvers.py | 1 + .../group/services/group_resource_manager.py | 7 ++++--- backend/dataall/db/api/share_object.py | 2 +- backend/dataall/modules/datasets/__init__.py | 6 +++--- .../modules/datasets/api/dataset/resolvers.py | 2 +- 
.../datasets/api/profiling/resolvers.py | 2 +- .../api/storage_location/resolvers.py | 2 +- .../modules/datasets/api/table/resolvers.py | 2 +- .../datasets/api/table_column/resolvers.py | 2 +- .../dataall/modules/datasets/cdk/__init__.py | 7 ++++--- ...taset_policy.py => dataset_data_policy.py} | 0 ...w_policy.py => dataset_databrew_policy.py} | 4 ++-- ...{glue_policy.py => dataset_glue_policy.py} | 4 ++-- ...icy.py => dataset_lakeformation_policy.py} | 4 ++-- .../modules/datasets/db/dataset_repository.py | 7 +++---- .../services/dataset_group_resource.py | 8 -------- .../services/dataset_location_service.py | 4 ++-- ...{permissions.py => dataset_permissions.py} | 0 .../datasets/services/dataset_service.py | 4 ++-- .../services/dataset_table_service.py | 2 +- backend/dataall/modules/notebooks/__init__.py | 2 +- .../modules/notebooks/api/resolvers.py | 2 +- .../dataall/modules/notebooks/cdk/policies.py | 2 +- .../modules/notebooks/services/__init__.py | 4 ++-- ...permissions.py => notebook_permissions.py} | 0 .../{services.py => notebook_service.py} | 20 +++++++++---------- ...fc49baecea4_add_enviromental_parameters.py | 4 ++-- ...215e_backfill_dataset_table_permissions.py | 2 +- tests/api/test_environment.py | 3 +-- tests/api/test_tenant.py | 2 +- tests/db/test_permission.py | 2 +- .../notebooks/test_sagemaker_notebook.py | 2 +- 32 files changed, 54 insertions(+), 61 deletions(-) rename backend/dataall/modules/datasets/cdk/{dataset_policy.py => dataset_data_policy.py} (100%) rename backend/dataall/modules/datasets/cdk/{databrew_policy.py => dataset_databrew_policy.py} (91%) rename backend/dataall/modules/datasets/cdk/{glue_policy.py => dataset_glue_policy.py} (97%) rename backend/dataall/modules/datasets/cdk/{lakeformation_policy.py => dataset_lakeformation_policy.py} (91%) delete mode 100644 backend/dataall/modules/datasets/services/dataset_group_resource.py rename backend/dataall/modules/datasets/services/{permissions.py => dataset_permissions.py} (100%) rename 
backend/dataall/modules/notebooks/services/{permissions.py => notebook_permissions.py} (100%) rename backend/dataall/modules/notebooks/services/{services.py => notebook_service.py} (92%) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index c94d735eb..b9a14e117 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -53,4 +53,5 @@ def get_vote(context: Context, source, targetUri: str = None, targetType: str = ) +# TODO should migrate after into the Dashboard module add_vote_type("dashboard", DashboardIndexer) diff --git a/backend/dataall/core/group/services/group_resource_manager.py b/backend/dataall/core/group/services/group_resource_manager.py index 905770735..01d858495 100644 --- a/backend/dataall/core/group/services/group_resource_manager.py +++ b/backend/dataall/core/group/services/group_resource_manager.py @@ -1,9 +1,10 @@ -from typing import Protocol, List +from abc import ABC +from typing import List -class GroupResource(Protocol): +class GroupResource(ABC): def count_resources(self, session, environment_uri, group_uri) -> int: - ... 
+ raise NotImplementedError() class GroupResourceManager: diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py index 4917664b9..bcd8b12b9 100644 --- a/backend/dataall/db/api/share_object.py +++ b/backend/dataall/db/api/share_object.py @@ -12,7 +12,7 @@ from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType from dataall.modules.datasets.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from ...modules.datasets.services.permissions import DATASET_TABLE_READ +from dataall.modules.datasets.services.dataset_permissions import DATASET_TABLE_READ logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 08b1dffd1..2ace4145b 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -3,12 +3,12 @@ from typing import List from dataall.core.group.services.group_resource_manager import GroupResourceManager +from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_group_resource import DatasetGroupResource -from dataall.modules.datasets.services.permissions import GET_DATASET, UPDATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import GET_DATASET, UPDATE_DATASET from dataall.modules.loader import ModuleInterface, ImportMode log = logging.getLogger(__name__) @@ -59,7 +59,7 @@ def __init__(self): 
TargetType("dataset", GET_DATASET, UPDATE_DATASET) - GroupResourceManager.register(DatasetGroupResource()) + GroupResourceManager.register(DatasetRepository()) log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 3cef6e21a..7b25886da 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -21,7 +21,7 @@ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.permissions import CREDENTIALS_DATASET, SYNC_DATASET, SUMMARY_DATASET, \ +from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, SUMMARY_DATASET, \ CRAWL_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index d156eee95..0dab9a579 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -10,7 +10,7 @@ from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.db.models import DatasetProfilingRun -from dataall.modules.datasets.services.permissions import PROFILE_DATASET_TABLE +from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py 
index 62feb570c..a3b35f5c0 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -11,7 +11,7 @@ from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import UPDATE_DATASET_FOLDER +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER def create_storage_location( diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 8e72bb8df..bed3aa32d 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -13,7 +13,7 @@ from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, PREVIEW_DATASET_TABLE from dataall.utils import json_utils from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_table_service import DatasetTableService diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 591459f87..b206ccce3 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -7,7 +7,7 @@ from dataall.db.api import ResourcePolicy from dataall.modules.datasets.services.dataset_table_service 
import DatasetTableService from dataall.modules.datasets.db.models import DatasetTableColumn, DatasetTable -from dataall.modules.datasets.services.permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE def list_table_columns( diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py index eaa9ad99f..613a75e04 100644 --- a/backend/dataall/modules/datasets/cdk/__init__.py +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -1,4 +1,5 @@ -from dataall.modules.datasets.cdk import dataset_stack, databrew_policy, glue_policy, lakeformation_policy, \ - dataset_policy +from dataall.modules.datasets.cdk import dataset_stack, dataset_databrew_policy, dataset_glue_policy, \ + dataset_lakeformation_policy, dataset_data_policy -__all__ = ["dataset_stack", "databrew_policy", "glue_policy", "lakeformation_policy", "dataset_policy"] +__all__ = ["dataset_stack", "dataset_databrew_policy", "dataset_glue_policy", "dataset_lakeformation_policy", + "dataset_data_policy"] diff --git a/backend/dataall/modules/datasets/cdk/dataset_policy.py b/backend/dataall/modules/datasets/cdk/dataset_data_policy.py similarity index 100% rename from backend/dataall/modules/datasets/cdk/dataset_policy.py rename to backend/dataall/modules/datasets/cdk/dataset_data_policy.py diff --git a/backend/dataall/modules/datasets/cdk/databrew_policy.py b/backend/dataall/modules/datasets/cdk/dataset_databrew_policy.py similarity index 91% rename from backend/dataall/modules/datasets/cdk/databrew_policy.py rename to backend/dataall/modules/datasets/cdk/dataset_databrew_policy.py index ed2ef0b32..896a1069a 100644 --- a/backend/dataall/modules/datasets/cdk/databrew_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_databrew_policy.py @@ -1,10 +1,10 @@ from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam -from 
dataall.modules.datasets.services.permissions import CREATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET -class DatabrewPolicy(ServicePolicy): +class DatasetDatabrewServicePolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if CREATE_DATASET not in group_permissions: return [] diff --git a/backend/dataall/modules/datasets/cdk/glue_policy.py b/backend/dataall/modules/datasets/cdk/dataset_glue_policy.py similarity index 97% rename from backend/dataall/modules/datasets/cdk/glue_policy.py rename to backend/dataall/modules/datasets/cdk/dataset_glue_policy.py index a98b215bd..de9783448 100644 --- a/backend/dataall/modules/datasets/cdk/glue_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_glue_policy.py @@ -1,10 +1,10 @@ from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from aws_cdk import aws_iam as iam -from dataall.modules.datasets.services.permissions import CREATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET -class GluePolicy(ServicePolicy): +class DatasetGlueServicePolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if CREATE_DATASET not in group_permissions: return [] diff --git a/backend/dataall/modules/datasets/cdk/lakeformation_policy.py b/backend/dataall/modules/datasets/cdk/dataset_lakeformation_policy.py similarity index 91% rename from backend/dataall/modules/datasets/cdk/lakeformation_policy.py rename to backend/dataall/modules/datasets/cdk/dataset_lakeformation_policy.py index bbeef17dc..ab4113e5c 100644 --- a/backend/dataall/modules/datasets/cdk/lakeformation_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_lakeformation_policy.py @@ -1,10 +1,10 @@ from aws_cdk import aws_iam as iam from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy -from dataall.modules.datasets.services.permissions import CREATE_DATASET +from 
dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET -class LakeFormationPolicy(ServicePolicy): +class DatasetLakeFormationServicePolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if CREATE_DATASET not in group_permissions: return [] diff --git a/backend/dataall/modules/datasets/db/dataset_repository.py b/backend/dataall/modules/datasets/db/dataset_repository.py index 95aef6102..8159147cc 100644 --- a/backend/dataall/modules/datasets/db/dataset_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_repository.py @@ -1,10 +1,11 @@ from operator import and_ +from dataall.core.group.services.group_resource_manager import GroupResource from dataall.db import exceptions from dataall.modules.datasets.db.models import Dataset -class DatasetRepository: +class DatasetRepository(GroupResource): """DAO layer for Datasets""" @staticmethod @@ -14,8 +15,7 @@ def get_dataset_by_uri(session, dataset_uri) -> Dataset: raise exceptions.ObjectNotFound('Dataset', dataset_uri) return dataset - @staticmethod - def count_group_datasets(session, environment_uri, group_uri) -> int: + def count_resources(self, session, environment_uri, group_uri) -> int: return ( session.query(Dataset) .filter( @@ -25,4 +25,3 @@ def count_group_datasets(session, environment_uri, group_uri) -> int: )) .count() ) - diff --git a/backend/dataall/modules/datasets/services/dataset_group_resource.py b/backend/dataall/modules/datasets/services/dataset_group_resource.py deleted file mode 100644 index 8d58ac3ad..000000000 --- a/backend/dataall/modules/datasets/services/dataset_group_resource.py +++ /dev/null @@ -1,8 +0,0 @@ -from dataall.core.group.services.group_resource_manager import GroupResource, GroupResourceManager -from dataall.modules.datasets.db.dataset_repository import DatasetRepository - - -class DatasetGroupResource(GroupResource): - def count_resources(self, session, environment_uri, group_uri) -> int: - return 
DatasetRepository.count_group_datasets(session, environment_uri, group_uri) - diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 240c52f0e..27ec62c9a 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -8,8 +8,8 @@ from dataall.db import models, api, paginate, exceptions from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, CREATE_DATASET_FOLDER, \ - DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, \ + CREATE_DATASET_FOLDER, DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/services/permissions.py b/backend/dataall/modules/datasets/services/dataset_permissions.py similarity index 100% rename from backend/dataall/modules/datasets/services/permissions.py rename to backend/dataall/modules/datasets/services/dataset_permissions.py diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 388a14746..2c7835327 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -21,8 +21,8 @@ from dataall.modules.datasets.db.dataset_repository import DatasetRepository from dataall.modules.datasets.db.models import DatasetTable, Dataset from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, 
DATASET_ALL, \ - DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, \ + DATASET_ALL, DATASET_TABLE_READ, LIST_ENVIRONMENT_DATASETS, CREATE_DATASET from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index ddfde9c5b..28c9a3b88 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -6,7 +6,7 @@ from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db import models, api, exceptions, paginate from dataall.db.api import Glossary, ResourcePolicy, Environment -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ UPDATE_DATASET_TABLE, DATASET_TABLE_READ from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.utils import json_utils diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index b63e0fa51..f2db29d86 100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -18,7 +18,7 @@ def is_supported(cls, modes): def __init__(self): import dataall.modules.notebooks.api - from dataall.modules.notebooks.services.permissions import GET_NOTEBOOK, UPDATE_NOTEBOOK + from dataall.modules.notebooks.services.notebook_permissions import GET_NOTEBOOK, UPDATE_NOTEBOOK TargetType("notebook", GET_NOTEBOOK, UPDATE_NOTEBOOK) log.info("API of sagemaker notebooks has been imported") diff --git 
a/backend/dataall/modules/notebooks/api/resolvers.py b/backend/dataall/modules/notebooks/api/resolvers.py index 3c0d70c67..04218bf4a 100644 --- a/backend/dataall/modules/notebooks/api/resolvers.py +++ b/backend/dataall/modules/notebooks/api/resolvers.py @@ -3,7 +3,7 @@ from dataall.api.context import Context from dataall.db import exceptions from dataall.api.Objects.Stack import stack_helper -from dataall.modules.notebooks.services.services import NotebookService, NotebookCreationRequest +from dataall.modules.notebooks.services.notebook_service import NotebookService, NotebookCreationRequest from dataall.modules.notebooks.db.models import SagemakerNotebook diff --git a/backend/dataall/modules/notebooks/cdk/policies.py b/backend/dataall/modules/notebooks/cdk/policies.py index c6e77f9b6..b462b6a0c 100644 --- a/backend/dataall/modules/notebooks/cdk/policies.py +++ b/backend/dataall/modules/notebooks/cdk/policies.py @@ -1,6 +1,6 @@ from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy -from dataall.modules.notebooks.services.permissions import CREATE_NOTEBOOK +from dataall.modules.notebooks.services.notebook_permissions import CREATE_NOTEBOOK from dataall.modules.common.sagemaker.cdk.statements import create_sagemaker_statements diff --git a/backend/dataall/modules/notebooks/services/__init__.py b/backend/dataall/modules/notebooks/services/__init__.py index a5df50095..5f13a14f1 100644 --- a/backend/dataall/modules/notebooks/services/__init__.py +++ b/backend/dataall/modules/notebooks/services/__init__.py @@ -2,6 +2,6 @@ Contains the code needed for service layer. 
The service layer is a layer where all business logic is aggregated """ -from dataall.modules.notebooks.services import services, permissions +from dataall.modules.notebooks.services import notebook_service, notebook_permissions -__all__ = ["services", "permissions"] +__all__ = ["notebook_service", "notebook_permissions"] diff --git a/backend/dataall/modules/notebooks/services/permissions.py b/backend/dataall/modules/notebooks/services/notebook_permissions.py similarity index 100% rename from backend/dataall/modules/notebooks/services/permissions.py rename to backend/dataall/modules/notebooks/services/notebook_permissions.py diff --git a/backend/dataall/modules/notebooks/services/services.py b/backend/dataall/modules/notebooks/services/notebook_service.py similarity index 92% rename from backend/dataall/modules/notebooks/services/services.py rename to backend/dataall/modules/notebooks/services/notebook_service.py index c2b9b9b27..041f2d044 100644 --- a/backend/dataall/modules/notebooks/services/services.py +++ b/backend/dataall/modules/notebooks/services/notebook_service.py @@ -24,8 +24,8 @@ ) from dataall.utils.slugify import slugify from dataall.modules.notebooks.db.models import SagemakerNotebook -from dataall.modules.notebooks.services import permissions -from dataall.modules.notebooks.services.permissions import MANAGE_NOTEBOOKS, CREATE_NOTEBOOK +from dataall.modules.notebooks.services.notebook_permissions import MANAGE_NOTEBOOKS, CREATE_NOTEBOOK, NOTEBOOK_ALL, \ + GET_NOTEBOOK, UPDATE_NOTEBOOK, DELETE_NOTEBOOK from dataall.core.permission_checker import has_resource_permission, has_tenant_permission, has_group_permission logger = logging.getLogger(__name__) @@ -119,7 +119,7 @@ def create_notebook(*, uri: str, admin_group: str, request: NotebookCreationRequ ResourcePolicy.attach_resource_policy( session=session, group=request.SamlAdminGroupName, - permissions=permissions.NOTEBOOK_ALL, + permissions=NOTEBOOK_ALL, resource_uri=notebook.notebookUri, 
resource_type=SagemakerNotebook.__name__, ) @@ -128,7 +128,7 @@ def create_notebook(*, uri: str, admin_group: str, request: NotebookCreationRequ ResourcePolicy.attach_resource_policy( session=session, group=env.SamlGroupName, - permissions=permissions.NOTEBOOK_ALL, + permissions=NOTEBOOK_ALL, resource_uri=notebook.notebookUri, resource_type=SagemakerNotebook.__name__, ) @@ -156,42 +156,42 @@ def list_user_notebooks(filter) -> dict: ) @staticmethod - @has_resource_permission(permissions.GET_NOTEBOOK) + @has_resource_permission(GET_NOTEBOOK) def get_notebook(*, uri) -> SagemakerNotebook: """Gets a notebook by uri""" with _session() as session: return NotebookService._get_notebook(session, uri) @staticmethod - @has_resource_permission(permissions.UPDATE_NOTEBOOK) + @has_resource_permission(UPDATE_NOTEBOOK) def start_notebook(*, uri): """Starts notebooks instance""" notebook = NotebookService.get_notebook(uri=uri) client(notebook).start_instance() @staticmethod - @has_resource_permission(permissions.UPDATE_NOTEBOOK) + @has_resource_permission(UPDATE_NOTEBOOK) def stop_notebook(*, uri: str) -> None: """Stop notebook instance""" notebook = NotebookService.get_notebook(uri=uri) client(notebook).stop_instance() @staticmethod - @has_resource_permission(permissions.GET_NOTEBOOK) + @has_resource_permission(GET_NOTEBOOK) def get_notebook_presigned_url(*, uri: str) -> str: """Creates and returns a presigned url for a notebook""" notebook = NotebookService.get_notebook(uri=uri) return client(notebook).presigned_url() @staticmethod - @has_resource_permission(permissions.GET_NOTEBOOK) + @has_resource_permission(GET_NOTEBOOK) def get_notebook_status(*, uri) -> str: """Retrieves notebook status""" notebook = NotebookService.get_notebook(uri=uri) return client(notebook).get_notebook_instance_status() @staticmethod - @has_resource_permission(permissions.DELETE_NOTEBOOK) + @has_resource_permission(DELETE_NOTEBOOK) def delete_notebook(*, uri: str, delete_from_aws: bool): """Deletes 
notebook from the database and if delete_from_aws is True from AWS as well""" with _session() as session: diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 349361b98..628ae38ab 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -15,8 +15,8 @@ from dataall.db.api.permission import Permission from dataall.db.models import TenantPolicy, TenantPolicyPermission, PermissionType, EnvironmentGroup from dataall.db.permissions import MANAGE_SGMSTUDIO_NOTEBOOKS -from dataall.modules.datasets.services.permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET -from dataall.modules.notebooks.services.permissions import MANAGE_NOTEBOOKS, LIST_ENVIRONMENT_NOTEBOOKS, CREATE_NOTEBOOK +from dataall.modules.datasets.services.dataset_permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.notebooks.services.notebook_permissions import MANAGE_NOTEBOOKS, LIST_ENVIRONMENT_NOTEBOOKS, CREATE_NOTEBOOK # revision identifiers, used by Alembic. revision = "5fc49baecea4" diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 4447d1429..c87301d9d 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -14,7 +14,7 @@ from datetime import datetime from dataall.db.models.Enums import ShareObjectStatus, ShareableType from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import DATASET_TABLE_READ +from dataall.modules.datasets.services.dataset_permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. 
revision = 'd05f9a5b215e' diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index a29cd1cae..88fe7e48e 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -1,9 +1,8 @@ import pytest import dataall -from dataall.db import permissions from dataall.modules.datasets.db.models import Dataset -from dataall.modules.datasets.services.permissions import CREATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_tenant.py b/tests/api/test_tenant.py index 8554c8de9..443110b8a 100644 --- a/tests/api/test_tenant.py +++ b/tests/api/test_tenant.py @@ -1,5 +1,5 @@ from dataall.db import permissions -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS def test_list_tenant_permissions(client, user, group, tenant): diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 28524a77a..f1e55c6b0 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -6,7 +6,7 @@ from dataall.db.models.Permission import PermissionType from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ DATASET_TABLE_READ diff --git a/tests/modules/notebooks/test_sagemaker_notebook.py b/tests/modules/notebooks/test_sagemaker_notebook.py index 8b2aa9792..ab47815fe 100644 --- a/tests/modules/notebooks/test_sagemaker_notebook.py +++ b/tests/modules/notebooks/test_sagemaker_notebook.py @@ -41,7 +41,7 @@ def test_sgm_notebook(sgm_notebook, group): @pytest.fixture(scope='module', 
autouse=True) def patch_aws(module_mocker): module_mocker.patch( - "dataall.modules.notebooks.services.services.client", + "dataall.modules.notebooks.services.notebook_service.client", return_value=MockSagemakerClient(), ) From 57f1e1f4811c550847b5ad045a89ae8ed8c9cd40 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 11:48:17 +0200 Subject: [PATCH 134/346] Review remarks --- ...ervice.py => dataset_subscription_task.py} | 22 +++++++++---------- deploy/stacks/container.py | 2 +- tests/tasks/test_subscriptions.py | 4 ++-- 3 files changed, 14 insertions(+), 14 deletions(-) rename backend/dataall/modules/datasets/tasks/{subscription_service.py => dataset_subscription_task.py} (93%) diff --git a/backend/dataall/modules/datasets/tasks/subscription_service.py b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py similarity index 93% rename from backend/dataall/modules/datasets/tasks/subscription_service.py rename to backend/dataall/modules/datasets/tasks/dataset_subscription_task.py index a261fad8a..dee3ee6b7 100644 --- a/backend/dataall/modules/datasets/tasks/subscription_service.py +++ b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py @@ -27,7 +27,7 @@ log = logging.getLogger(__name__) -class SubscriptionService: +class DatasetSubscriptionService: def __init__(self): pass @@ -60,9 +60,9 @@ def notify_consumers(engine, messages): for message in messages: - SubscriptionService.publish_table_update_message(engine, message) + DatasetSubscriptionService.publish_table_update_message(engine, message) - SubscriptionService.publish_location_update_message(session, message) + DatasetSubscriptionService.publish_location_update_message(session, message) return True @@ -95,7 +95,7 @@ def publish_table_update_message(engine, message): ) log.info(f'Found shared items for table {share_items}') - return SubscriptionService.publish_sns_message( + return DatasetSubscriptionService.publish_sns_message( engine, message, dataset, @@ 
-133,7 +133,7 @@ def publish_location_update_message(session, message): ) log.info(f'Found shared items for location {share_items}') - return SubscriptionService.publish_sns_message( + return DatasetSubscriptionService.publish_sns_message( session, message, dataset, share_items, location.S3Prefix ) @@ -164,9 +164,9 @@ def store_dataquality_results(session, message): if message.get('rows'): quality_results['table_nb_rows'] = message.get('rows') - SubscriptionService.set_columns_type(quality_results, message) + DatasetSubscriptionService.set_columns_type(quality_results, message) - data_types = SubscriptionService.set_data_types(message) + data_types = DatasetSubscriptionService.set_data_types(message) quality_results['dataTypes'] = data_types @@ -213,7 +213,7 @@ def publish_sns_message( with engine.scoped_session() as session: for item in share_items: - share_object = SubscriptionService.get_approved_share_object( + share_object = DatasetSubscriptionService.get_approved_share_object( session, item ) @@ -245,7 +245,7 @@ def publish_sns_message( f'Producer message before notifications: {message}' ) - SubscriptionService.redshift_copy( + DatasetSubscriptionService.redshift_copy( engine, message, dataset, environment, table ) @@ -256,7 +256,7 @@ def publish_sns_message( f'has updated the table shared with you {prefix}', } - response = SubscriptionService.sns_call( + response = DatasetSubscriptionService.sns_call( message, environment ) @@ -334,7 +334,7 @@ def get_approved_share_object(session, item): ENGINE = get_engine(envname=ENVNAME) Worker.queue = SqsQueue.send log.info('Polling datasets updates...') - service = SubscriptionService() + service = DatasetSubscriptionService() queues = service.get_queues(service.get_environments(ENGINE)) messages = poll_queues(queues) service.notify_consumers(ENGINE, messages) diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index ed5b929f8..94d79906d 100644 --- a/deploy/stacks/container.py +++ 
b/deploy/stacks/container.py @@ -182,7 +182,7 @@ def __init__( command=[ 'python3.8', '-m', - 'dataall.modules.datasets.tasks.subscription_service', + 'dataall.modules.datasets.tasks.dataset_subscription_task', ], container_id=f'container', ecr_repository=ecr_repository, diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 11c255db2..f02a218bb 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -135,10 +135,10 @@ def share( def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): mocker.patch( - 'dataall.modules.datasets.tasks.subscription_service.SubscriptionService.sns_call', + 'dataall.modules.datasets.tasks.dataset_subscription_task.DatasetSubscriptionService.sns_call', return_value=True, ) - subscriber = dataall.modules.datasets.tasks.subscription_service.SubscriptionService() + subscriber = dataall.modules.datasets.tasks.dataset_subscription_task.DatasetSubscriptionService() messages = [ { 'prefix': 's3://dataset/testtable/csv/', From 66510d68be5596739916cd80d2208b8a069f0bb8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 12:45:56 +0200 Subject: [PATCH 135/346] Removed common and added sagemaker base --- backend/dataall/cdkproxy/stacks/policies/mlstudio.py | 2 +- backend/dataall/modules/common/__init__.py | 1 - backend/dataall/modules/common/sagemaker/__init__.py | 1 - .../dataall/modules/common/sagemaker/cdk/__init__.py | 1 - backend/dataall/modules/notebooks/__init__.py | 8 +++++++- backend/dataall/modules/notebooks/cdk/policies.py | 2 +- backend/dataall/modules/sagemaker_base/__init__.py | 10 ++++++++++ backend/dataall/modules/sagemaker_base/cdk/__init__.py | 0 .../sagemaker => sagemaker_base}/cdk/statements.py | 0 9 files changed, 19 insertions(+), 6 deletions(-) delete mode 100644 backend/dataall/modules/common/__init__.py delete mode 100644 backend/dataall/modules/common/sagemaker/__init__.py delete mode 100644 
backend/dataall/modules/common/sagemaker/cdk/__init__.py create mode 100644 backend/dataall/modules/sagemaker_base/__init__.py create mode 100644 backend/dataall/modules/sagemaker_base/cdk/__init__.py rename backend/dataall/modules/{common/sagemaker => sagemaker_base}/cdk/statements.py (100%) diff --git a/backend/dataall/cdkproxy/stacks/policies/mlstudio.py b/backend/dataall/cdkproxy/stacks/policies/mlstudio.py index 05b44c903..b416de5c7 100644 --- a/backend/dataall/cdkproxy/stacks/policies/mlstudio.py +++ b/backend/dataall/cdkproxy/stacks/policies/mlstudio.py @@ -1,7 +1,7 @@ from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from dataall.db import permissions -from dataall.modules.common.sagemaker.cdk.statements import create_sagemaker_statements +from dataall.modules.sagemaker_base.cdk.statements import create_sagemaker_statements class SagemakerPolicy(ServicePolicy): diff --git a/backend/dataall/modules/common/__init__.py b/backend/dataall/modules/common/__init__.py deleted file mode 100644 index 984cce4a8..000000000 --- a/backend/dataall/modules/common/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Contains the common code that can be shared among modules""" diff --git a/backend/dataall/modules/common/sagemaker/__init__.py b/backend/dataall/modules/common/sagemaker/__init__.py deleted file mode 100644 index 959747d5d..000000000 --- a/backend/dataall/modules/common/sagemaker/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Common code for machine learning studio and notebooks""" diff --git a/backend/dataall/modules/common/sagemaker/cdk/__init__.py b/backend/dataall/modules/common/sagemaker/cdk/__init__.py deleted file mode 100644 index e2e75f02a..000000000 --- a/backend/dataall/modules/common/sagemaker/cdk/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Contains infrastructure code shared between ML studio and notebooks diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index cf53b455f..0241bd2be 
100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -1,9 +1,11 @@ """Contains the code related to SageMaker notebooks""" import logging +from typing import List, Type from dataall.db.api import TargetType from dataall.modules.loader import ImportMode, ModuleInterface from dataall.modules.notebooks.db.repositories import NotebookRepository +from dataall.modules.sagemaker_base import SagemakerCdkModuleInterface log = logging.getLogger(__name__) @@ -31,6 +33,10 @@ class NotebookCdkModuleInterface(ModuleInterface): def is_supported(modes): return ImportMode.CDK in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [SagemakerCdkModuleInterface] + def __init__(self): import dataall.modules.notebooks.cdk - log.info("API of sagemaker notebooks has been imported") \ No newline at end of file + log.info("API of sagemaker notebooks has been imported") diff --git a/backend/dataall/modules/notebooks/cdk/policies.py b/backend/dataall/modules/notebooks/cdk/policies.py index b462b6a0c..fffb3aa6c 100644 --- a/backend/dataall/modules/notebooks/cdk/policies.py +++ b/backend/dataall/modules/notebooks/cdk/policies.py @@ -1,7 +1,7 @@ from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy from dataall.modules.notebooks.services.notebook_permissions import CREATE_NOTEBOOK -from dataall.modules.common.sagemaker.cdk.statements import create_sagemaker_statements +from dataall.modules.sagemaker_base.cdk.statements import create_sagemaker_statements class SagemakerPolicy(ServicePolicy): diff --git a/backend/dataall/modules/sagemaker_base/__init__.py b/backend/dataall/modules/sagemaker_base/__init__.py new file mode 100644 index 000000000..20589ebf0 --- /dev/null +++ b/backend/dataall/modules/sagemaker_base/__init__.py @@ -0,0 +1,10 @@ +"""Common code for machine learning studio and notebooks""" +from typing import List + +from dataall.modules.loader import ModuleInterface, 
ImportMode + + +class SagemakerCdkModuleInterface(ModuleInterface): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.CDK in modes diff --git a/backend/dataall/modules/sagemaker_base/cdk/__init__.py b/backend/dataall/modules/sagemaker_base/cdk/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/common/sagemaker/cdk/statements.py b/backend/dataall/modules/sagemaker_base/cdk/statements.py similarity index 100% rename from backend/dataall/modules/common/sagemaker/cdk/statements.py rename to backend/dataall/modules/sagemaker_base/cdk/statements.py From a1af334e30667a336d7ce2809b5449602786ca09 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 13:07:43 +0200 Subject: [PATCH 136/346] Resolved merge conflict --- backend/dataall/modules/datasets/__init__.py | 2 +- tests/tasks/test_subscriptions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index e6ec2361a..0fee22f94 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -4,7 +4,7 @@ from dataall.modules.dataset_sharing import SharingApiModuleInterface from dataall.core.group.services.group_resource_manager import GroupResourceManager -from dataall.modules.datasets.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index eb1bd92d7..3e29f8182 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ 
-141,7 +141,7 @@ def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): 'dataall.modules.datasets.tasks.dataset_subscription_task.DatasetSubscriptionService.sns_call', return_value=True, ) - subscriber = SubscriptionService() + subscriber = DatasetSubscriptionService() messages = [ { 'prefix': 's3://dataset/testtable/csv/', From 44c653d8c12a1158a700f2cdb0ae13691f3b4bdd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 14:18:09 +0200 Subject: [PATCH 137/346] Moved dataset service to repository for now --- backend/dataall/aws/handlers/redshift.py | 2 +- backend/dataall/db/api/redshift_cluster.py | 2 +- backend/dataall/modules/datasets/api/dataset/resolvers.py | 2 +- backend/dataall/modules/datasets/api/profiling/resolvers.py | 2 +- .../modules/datasets/api/storage_location/resolvers.py | 2 +- backend/dataall/modules/datasets/api/table/resolvers.py | 2 +- backend/dataall/modules/datasets/cdk/dataset_data_policy.py | 2 +- .../modules/datasets/{services => db}/dataset_service.py | 0 .../dataall/modules/datasets/db/dataset_table_repository.py | 2 +- .../modules/datasets/handlers/glue_dataset_handler.py | 2 +- .../dataall/modules/datasets/handlers/glue_table_handler.py | 2 +- .../dataall/modules/datasets/handlers/sns_dataset_handler.py | 2 +- backend/dataall/modules/datasets/indexers/dataset_indexer.py | 2 +- backend/dataall/modules/datasets/tasks/tables_syncer.py | 2 +- backend/dataall/tasks/catalog_indexer.py | 2 +- backend/dataall/tasks/stacks_updater.py | 2 +- .../d05f9a5b215e_backfill_dataset_table_permissions.py | 2 +- tests/api/test_dataset.py | 2 +- tests/api/test_redshift_cluster.py | 2 +- tests/db/test_permission.py | 5 ++--- 20 files changed, 20 insertions(+), 21 deletions(-) rename backend/dataall/modules/datasets/{services => db}/dataset_service.py (100%) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 5264366b3..bb61c05a7 100644 --- 
a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -12,7 +12,7 @@ # TODO should be migrated in the redshift module from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService log = logging.getLogger(__name__) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index f463fd3f5..dece5d480 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -10,7 +10,7 @@ NamingConventionPattern, ) from dataall.utils.slugify import slugify -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.services.share_object import ShareItemSM diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 01dc1802e..a0ac4c55d 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -19,7 +19,7 @@ from dataall.modules.datasets import Dataset from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from 
dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, SUMMARY_DATASET, \ diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index a89a2dd29..c29f6100b 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -6,7 +6,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import api, models from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository from dataall.modules.datasets_base.db.models import DatasetProfilingRun diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index ff16fe1bc..db68277a2 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -10,7 +10,7 @@ from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 1c6d802f8..8e70ad271 100644 --- 
a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -12,7 +12,7 @@ from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE from dataall.utils import json_utils diff --git a/backend/dataall/modules/datasets/cdk/dataset_data_policy.py b/backend/dataall/modules/datasets/cdk/dataset_data_policy.py index 05d3e0e89..21be6ee5a 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_data_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_data_policy.py @@ -3,7 +3,7 @@ from dataall.cdkproxy.stacks.policies.data_policy import DataPolicy from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService class DatasetDataPolicy(DataPolicy): diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/db/dataset_service.py similarity index 100% rename from backend/dataall/modules/datasets/services/dataset_service.py rename to backend/dataall/modules/datasets/db/dataset_service.py diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 9c0a8ae3d..5db57a59a 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -10,7 +10,7 @@ from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from 
dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ UPDATE_DATASET_TABLE -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils import json_utils from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index 277999720..cf1be74d0 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -4,7 +4,7 @@ from dataall.db import models from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index 7fc938828..7cb3cbd38 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -4,7 +4,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository log = logging.getLogger(__name__) diff --git 
a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py index 006c4023d..e5a2deb27 100644 --- a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py @@ -7,7 +7,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall import db from dataall.db import models -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 72915207d..20c28bb1d 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -4,7 +4,7 @@ from dataall.db import models from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.searchproxy.base_indexer import BaseIndexer diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index ab219eb5b..98dfc3a00 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -12,7 +12,7 @@ from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService -from dataall.modules.datasets.services.dataset_service import DatasetService +from 
dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository root = logging.getLogger() diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 962eb66f2..377337410 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -5,7 +5,7 @@ from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.db import get_engine, models from dataall.searchproxy.indexers import DashboardIndexer from dataall.utils.alarm_service import AlarmService diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 9a17deaf1..28c5e0887 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -4,7 +4,7 @@ import time from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from .. 
import db from ..db import models from ..aws.handlers.ecs import Ecs diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 3d036f0e0..66a76761d 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -15,7 +15,7 @@ from dataall.db.models.Enums import ShareableType from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus from dataall.modules.dataset_sharing.services.share_object import ShareObjectService -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets_base.services.dataset_permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index b6d5a4629..4534f3b16 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -5,7 +5,7 @@ import dataall from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService @pytest.fixture(scope='module', autouse=True) diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 0058c2fe9..7ecc12264 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -5,7 +5,7 @@ import dataall from dataall.api.constants import RedshiftClusterRole from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_service import DatasetService @pytest.fixture(scope='module', autouse=True) diff --git 
a/tests/db/test_permission.py b/tests/db/test_permission.py index b3c5c11b2..aee931076 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -5,9 +5,8 @@ from dataall.db import exceptions from dataall.db.models.Permission import PermissionType from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE, \ - DATASET_TABLE_READ +from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ @pytest.fixture(scope='module') From 2ff57a2c83094bf394b16ac87a91ef14a5ce29f1 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 14:30:41 +0200 Subject: [PATCH 138/346] Moved listDataItemsSharedWithEnvGroup to sharing --- backend/dataall/api/Objects/Group/queries.py | 14 +------------- backend/dataall/api/Objects/Group/resolvers.py | 18 ------------------ .../modules/dataset_sharing/api/queries.py | 12 ++++++++++++ .../modules/dataset_sharing/api/resolvers.py | 18 ++++++++++++++++++ 4 files changed, 31 insertions(+), 31 deletions(-) diff --git a/backend/dataall/api/Objects/Group/queries.py b/backend/dataall/api/Objects/Group/queries.py index 62233b5c5..8642a00c8 100644 --- a/backend/dataall/api/Objects/Group/queries.py +++ b/backend/dataall/api/Objects/Group/queries.py @@ -1,5 +1,5 @@ from ... 
import gql -from .resolvers import get_group, list_data_items_shared_with_env_group, list_cognito_groups +from .resolvers import get_group, list_cognito_groups getGroup = gql.QueryField( name='getGroup', @@ -8,18 +8,6 @@ resolver=get_group, ) -listDataItemsSharedWithEnvGroup = gql.QueryField( - name='listDataItemsSharedWithEnvGroup', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), - ], - resolver=list_data_items_shared_with_env_group, - type=gql.Ref('EnvironmentPublishedItemSearchResults'), - test_scope='Dataset', -) - listCognitoGroups = gql.QueryField( name='listCognitoGroups', args=[ diff --git a/backend/dataall/api/Objects/Group/resolvers.py b/backend/dataall/api/Objects/Group/resolvers.py index eb7b04ce7..3ef75f1ed 100644 --- a/backend/dataall/api/Objects/Group/resolvers.py +++ b/backend/dataall/api/Objects/Group/resolvers.py @@ -4,7 +4,6 @@ from ....db import exceptions from ....db.models import Group from ....aws.handlers.cognito import Cognito -from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService log = logging.getLogger() @@ -43,23 +42,6 @@ def get_group(context, source, groupUri): return Group(groupUri=groupUri, name=groupUri, label=groupUri) -def list_data_items_shared_with_env_group( - context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return DatasetShareService.paginated_shared_with_environment_group_datasets( - session=session, - username=context.username, - groups=context.groups, - envUri=environmentUri, - groupUri=groupUri, - data=filter, - check_perm=True, - ) - - def list_cognito_groups(context, source, filter: dict = None): envname = os.getenv('envname', 'local') if envname in ['dkrcompose']: diff --git 
a/backend/dataall/modules/dataset_sharing/api/queries.py b/backend/dataall/modules/dataset_sharing/api/queries.py index 1033d4408..37565220b 100644 --- a/backend/dataall/modules/dataset_sharing/api/queries.py +++ b/backend/dataall/modules/dataset_sharing/api/queries.py @@ -21,3 +21,15 @@ type=gql.Ref('ShareSearchResult'), resolver=list_shares_in_my_inbox, ) + +listDataItemsSharedWithEnvGroup = gql.QueryField( + name='listDataItemsSharedWithEnvGroup', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), + ], + resolver=list_data_items_shared_with_env_group, + type=gql.Ref('EnvironmentPublishedItemSearchResults'), + test_scope='Dataset', +) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 8f857f031..096c7e5a8 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -8,6 +8,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -368,3 +369,20 @@ def list_shares_in_my_outbox(context: Context, source, filter: dict = None): data=filter, check_perm=None, ) + + +def list_data_items_shared_with_env_group( + context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None +): + if not filter: + filter = {} + with 
context.engine.scoped_session() as session: + return DatasetShareService.paginated_shared_with_environment_group_datasets( + session=session, + username=context.username, + groups=context.groups, + envUri=environmentUri, + groupUri=groupUri, + data=filter, + check_perm=True, + ) \ No newline at end of file From b0e30d80a68f36f272c210f63a7b22dd707f3e63 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 14:36:47 +0200 Subject: [PATCH 139/346] Moved queries related to sharing --- .../api/Objects/Environment/queries.py | 12 ------- .../api/Objects/Environment/resolvers.py | 17 ---------- .../dataall/api/Objects/Environment/schema.py | 33 ------------------- .../modules/dataset_sharing/api/queries.py | 11 +++++++ .../modules/dataset_sharing/api/resolvers.py | 18 +++++++++- .../modules/dataset_sharing/api/schema.py | 33 +++++++++++++++++++ 6 files changed, 61 insertions(+), 63 deletions(-) diff --git a/backend/dataall/api/Objects/Environment/queries.py b/backend/dataall/api/Objects/Environment/queries.py index 892b3d32b..18f266c3f 100644 --- a/backend/dataall/api/Objects/Environment/queries.py +++ b/backend/dataall/api/Objects/Environment/queries.py @@ -48,18 +48,6 @@ ) -searchEnvironmentDataItems = gql.QueryField( - name='searchEnvironmentDataItems', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), - ], - resolver=list_shared_with_environment_data_items, - type=gql.Ref('EnvironmentPublishedItemSearchResults'), - test_scope='Dataset', -) - - generateEnvironmentAccessToken = gql.QueryField( name='generateEnvironmentAccessToken', args=[ diff --git a/backend/dataall/api/Objects/Environment/resolvers.py b/backend/dataall/api/Objects/Environment/resolvers.py index b241f6dbb..af7976e62 100644 --- a/backend/dataall/api/Objects/Environment/resolvers.py +++ b/backend/dataall/api/Objects/Environment/resolvers.py @@ -21,8 +21,6 @@ 
NamingConventionPattern, ) -from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService - log = logging.getLogger() @@ -401,21 +399,6 @@ def list_environment_group_permissions( check_perm=True, ) -def list_shared_with_environment_data_items( - context: Context, source, environmentUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return DatasetShareService.paginated_shared_with_environment_datasets( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - def _get_environment_group_aws_session( session, username, groups, environment, groupUri=None diff --git a/backend/dataall/api/Objects/Environment/schema.py b/backend/dataall/api/Objects/Environment/schema.py index 1c1bae604..bb5749065 100644 --- a/backend/dataall/api/Objects/Environment/schema.py +++ b/backend/dataall/api/Objects/Environment/schema.py @@ -127,39 +127,6 @@ ) -EnvironmentPublishedItem = gql.ObjectType( - name='EnvironmentPublishedItem', - fields=[ - gql.Field(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='datasetName', type=gql.NonNullableType(gql.String)), - gql.Field(name='itemAccess', type=gql.NonNullableType(gql.String)), - gql.Field(name='itemType', type=gql.NonNullableType(gql.String)), - gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Field(name='environmentName', type=gql.NonNullableType(gql.String)), - gql.Field(name='organizationUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='organizationName', type=gql.NonNullableType(gql.String)), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', 
type=gql.String), - gql.Field(name='S3AccessPointName', type=gql.String), - ], -) - - -EnvironmentPublishedItemSearchResults = gql.ObjectType( - name='EnvironmentPublishedItemSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(EnvironmentPublishedItem)), - ], -) - ConsumptionRole = gql.ObjectType( name='ConsumptionRole', fields=[ diff --git a/backend/dataall/modules/dataset_sharing/api/queries.py b/backend/dataall/modules/dataset_sharing/api/queries.py index 37565220b..126508758 100644 --- a/backend/dataall/modules/dataset_sharing/api/queries.py +++ b/backend/dataall/modules/dataset_sharing/api/queries.py @@ -33,3 +33,14 @@ type=gql.Ref('EnvironmentPublishedItemSearchResults'), test_scope='Dataset', ) + +searchEnvironmentDataItems = gql.QueryField( + name='searchEnvironmentDataItems', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), + ], + resolver=list_shared_with_environment_data_items, + type=gql.Ref('EnvironmentPublishedItemSearchResults'), + test_scope='Dataset', +) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 096c7e5a8..a0e33909b 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -385,4 +385,20 @@ def list_data_items_shared_with_env_group( groupUri=groupUri, data=filter, check_perm=True, - ) \ No newline at end of file + ) + + +def list_shared_with_environment_data_items( + context: Context, source, environmentUri: str = None, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: 
+ return DatasetShareService.paginated_shared_with_environment_datasets( + session=session, + username=context.username, + groups=context.groups, + uri=environmentUri, + data=filter, + check_perm=True, + ) diff --git a/backend/dataall/modules/dataset_sharing/api/schema.py b/backend/dataall/modules/dataset_sharing/api/schema.py index c99382205..f8455d0b5 100644 --- a/backend/dataall/modules/dataset_sharing/api/schema.py +++ b/backend/dataall/modules/dataset_sharing/api/schema.py @@ -166,3 +166,36 @@ gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('ShareObject'))), ], ) + +EnvironmentPublishedItem = gql.ObjectType( + name='EnvironmentPublishedItem', + fields=[ + gql.Field(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='datasetName', type=gql.NonNullableType(gql.String)), + gql.Field(name='itemAccess', type=gql.NonNullableType(gql.String)), + gql.Field(name='itemType', type=gql.NonNullableType(gql.String)), + gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='principalId', type=gql.NonNullableType(gql.String)), + gql.Field(name='environmentName', type=gql.NonNullableType(gql.String)), + gql.Field(name='organizationUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='organizationName', type=gql.NonNullableType(gql.String)), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='S3AccessPointName', type=gql.String), + ], +) + + +EnvironmentPublishedItemSearchResults = gql.ObjectType( + name='EnvironmentPublishedItemSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + 
gql.Field(name='nodes', type=gql.ArrayType(EnvironmentPublishedItem)), + ], +) \ No newline at end of file From 1ceb50e204735f8bcb79646e97472891e0c0c618 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 16:57:22 +0200 Subject: [PATCH 140/346] Review remarks --- .../dataall/cdkproxy/stacks/environment.py | 6 +- .../cdkproxy/stacks/policies/data_policy.py | 4 +- .../datasets/aws/glue_profiler_client.py | 46 ++++++++++ .../modules/datasets/aws/lf_table_client.py | 3 - .../datasets/aws/sns_dataset_client.py | 39 +++++++++ .../dataall/modules/datasets/cdk/__init__.py | 4 +- ...et_data_policy.py => dataset_s3_policy.py} | 6 +- .../modules/datasets/handlers/__init__.py | 6 +- .../datasets/handlers/glue_dataset_handler.py | 2 +- .../handlers/glue_profiling_handler.py | 86 +++++-------------- .../datasets/handlers/glue_table_handler.py | 2 +- ..._handler.py => glue_table_sync_handler.py} | 6 +- ...andler.py => s3_folder_creator_handler.py} | 4 +- .../datasets/handlers/sns_dataset_handler.py | 32 ++----- 14 files changed, 132 insertions(+), 114 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/glue_profiler_client.py create mode 100644 backend/dataall/modules/datasets/aws/sns_dataset_client.py rename backend/dataall/modules/datasets/cdk/{dataset_data_policy.py => dataset_s3_policy.py} (83%) rename backend/dataall/modules/datasets/handlers/{glue_column_handler.py => glue_table_sync_handler.py} (91%) rename backend/dataall/modules/datasets/handlers/{s3_location_handler.py => s3_folder_creator_handler.py} (87%) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index 20646b7fb..2b863640d 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -28,7 +28,7 @@ from botocore.exceptions import ClientError from .manager import stack -from .policies.data_policy import DataPolicy +from .policies.data_policy import S3Policy from 
.policies.service_policy import ServicePolicy from ... import db from ...aws.handlers.quicksight import Quicksight @@ -587,7 +587,7 @@ def create_or_import_environment_default_role(self): ), ).generate_policies() - data_policy = DataPolicy( + data_policy = S3Policy( stack=self, tag_key='Team', tag_value=self._environment.SamlGroupName, @@ -656,7 +656,7 @@ def create_group_environment_role(self, group): ).generate_policies() with self.engine.scoped_session() as session: - data_policy = DataPolicy( + data_policy = S3Policy( stack=self, tag_key='Team', tag_value=group.groupUri, diff --git a/backend/dataall/cdkproxy/stacks/policies/data_policy.py b/backend/dataall/cdkproxy/stacks/policies/data_policy.py index 49f7d59fb..37c86e648 100644 --- a/backend/dataall/cdkproxy/stacks/policies/data_policy.py +++ b/backend/dataall/cdkproxy/stacks/policies/data_policy.py @@ -8,7 +8,7 @@ logger = logging.getLogger() -class DataPolicy: +class S3Policy: def __init__( self, stack, @@ -90,7 +90,7 @@ def generate_data_access_policy(self, session) -> iam.Policy: """ statements: List[iam.PolicyStatement] = self.get_statements(session) - for extension in DataPolicy.__subclasses__(): + for extension in S3Policy.__subclasses__(): statements.extend(extension.get_statements(self, session=session)) policy: iam.Policy = iam.Policy( diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py b/backend/dataall/modules/datasets/aws/glue_profiler_client.py new file mode 100644 index 000000000..6b678ec0b --- /dev/null +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -0,0 +1,46 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.modules.datasets import Dataset +from dataall.modules.datasets.db.models import DatasetProfilingRun + +log = logging.getLogger(__name__) + + +class GlueDatasetProfilerClient: + """Controls glue profiling jobs in AWS""" + + def __init__(self, dataset: Dataset): + 
session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) + self._client = session.client('glue', region_name=dataset.region) + self._name = dataset.GlueProfilingJobName + + def get_job_status(self, profiling: DatasetProfilingRun): + """Returns a status of a glue job""" + run_id = profiling.GlueJobRunId + try: + response = self._client.get_job_run(JobName=self._name, RunId=run_id) + return response['JobRun']['JobRunState'] + except ClientError as e: + log.error(f'Failed to get job run {run_id} due to: {e}') + raise e + + def run_job(self, profiling: DatasetProfilingRun): + """Run glue job. Returns id of the job""" + args = { + 'arguments': ( + {'--table': profiling.GlueTableName} + if profiling.GlueTableName + else {} + ), + } + try: + response = self._client.start_job_run( + JobName=self._name, Arguments=args + ) + + return response['JobRunId'] + except ClientError as e: + log.error(f'Failed to start profiling job {self._name} due to: {e}') + raise e diff --git a/backend/dataall/modules/datasets/aws/lf_table_client.py b/backend/dataall/modules/datasets/aws/lf_table_client.py index 8caff5073..c7b0825c9 100644 --- a/backend/dataall/modules/datasets/aws/lf_table_client.py +++ b/backend/dataall/modules/datasets/aws/lf_table_client.py @@ -20,9 +20,6 @@ def grant_pivot_role_all_table_permissions(self): """ Pivot role needs to have all permissions for tables managed inside dataall - :param aws_session: - :param table: - :return: """ table = self._table principal = SessionHelper.get_delegation_role_arn(table.AWSAccountId) diff --git a/backend/dataall/modules/datasets/aws/sns_dataset_client.py b/backend/dataall/modules/datasets/aws/sns_dataset_client.py new file mode 100644 index 000000000..f6e82694a --- /dev/null +++ b/backend/dataall/modules/datasets/aws/sns_dataset_client.py @@ -0,0 +1,39 @@ +import json +import logging + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.db.models import 
Environment +from dataall.modules.datasets import Dataset + +log = logging.getLogger(__name__) + + +class SnsDatasetClient: + + def __init__(self, environment: Environment, dataset: Dataset): + aws_session = SessionHelper.remote_session( + accountid=environment.AwsAccountId + ) + + self._client = aws_session.client('sns', region_name=environment.region) + self._topic = f'arn:aws:sns:{environment.region}:{environment.AwsAccountId}:{environment.subscriptionsProducersTopicName}' + self._dataset = dataset + + def publish_dataset_message(self, message: dict): + + try: + response = self._client.publish( + TopicArn=self._topic, + Message=json.dumps(message), + ) + return response + except ClientError as e: + log.error( + f'Failed to deliver dataset ' + f'{self._dataset.datasetUri}|{message} ' + f'update message for consumers ' + f'due to: {e} ' + ) + raise e diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py index 613a75e04..8710674cd 100644 --- a/backend/dataall/modules/datasets/cdk/__init__.py +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -1,5 +1,5 @@ from dataall.modules.datasets.cdk import dataset_stack, dataset_databrew_policy, dataset_glue_policy, \ - dataset_lakeformation_policy, dataset_data_policy + dataset_lakeformation_policy, dataset_s3_policy __all__ = ["dataset_stack", "dataset_databrew_policy", "dataset_glue_policy", "dataset_lakeformation_policy", - "dataset_data_policy"] + "dataset_s3_policy"] diff --git a/backend/dataall/modules/datasets/cdk/dataset_data_policy.py b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py similarity index 83% rename from backend/dataall/modules/datasets/cdk/dataset_data_policy.py rename to backend/dataall/modules/datasets/cdk/dataset_s3_policy.py index 64c0c53a4..158d8ac74 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_data_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py @@ -1,12 +1,12 @@ from typing import 
List from aws_cdk import aws_iam as iam -from dataall.cdkproxy.stacks.policies.data_policy import DataPolicy +from dataall.cdkproxy.stacks.policies.data_policy import S3Policy from dataall.modules.datasets.db.models import Dataset from dataall.modules.datasets.services.dataset_service import DatasetService -class DatasetDataPolicy(DataPolicy): +class DatasetS3Policy(S3Policy): def get_statements(self, session): datasets = DatasetService.list_group_datasets( @@ -14,7 +14,7 @@ def get_statements(self, session): environment_id=self.environment.environmentUri, group_uri=self.team.groupUri, ) - return DatasetDataPolicy._generate_dataset_statements(datasets) + return DatasetS3Policy._generate_dataset_statements(datasets) @staticmethod def _generate_dataset_statements(datasets: List[Dataset]): diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 382f052a9..91d4aeff6 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -3,10 +3,10 @@ processing in a separate lambda function """ from dataall.modules.datasets.handlers import ( - glue_column_handler, + glue_table_sync_handler, glue_table_handler, glue_profiling_handler, - s3_location_handler + s3_folder_creator_handler ) -__all__ = ["glue_column_handler", "glue_table_handler", "glue_profiling_handler", "s3_location_handler"] +__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "s3_folder_creator_handler"] diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index 7a43b6ac6..7e8596531 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -12,7 +12,7 @@ log = logging.getLogger(__name__) -class GlueDatasetHandler: +class DatasetCrawlerHandler: 
@staticmethod @Worker.handler(path='glue.crawler.start') diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index 5d2a4232a..85af6b108 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -4,6 +4,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.db import models +from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient from dataall.modules.datasets.db.models import DatasetProfilingRun, Dataset from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService @@ -17,23 +18,10 @@ class DatasetProfilingGlueHandler: @Worker.handler('glue.job.profiling_run_status') def get_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: - profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( - session, profilingRunUri=task.targetUri - ) - ) - dataset: Dataset = session.query(Dataset).get( - profiling.datasetUri - ) - glue_run = DatasetProfilingGlueHandler.get_job_run( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'run_id': profiling.GlueJobRunId, - } - ) - profiling.status = glue_run['JobRun']['JobRunState'] + dataset, profiling = DatasetProfilingGlueHandler._get_job_data(session, task) + status = GlueDatasetProfilerClient(dataset).get_job_status(profiling) + + profiling.status = status session.commit() return profiling.status @@ -41,59 +29,25 @@ def get_profiling_run(engine, task: models.Task): @Worker.handler('glue.job.start_profiling_run') def start_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: - profiling: DatasetProfilingRun = ( - DatasetProfilingService.get_profiling_run( - session, 
profilingRunUri=task.targetUri - ) - ) - dataset: Dataset = session.query(Dataset).get( - profiling.datasetUri - ) - run = DatasetProfilingGlueHandler.run_job( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'arguments': ( - {'--table': profiling.GlueTableName} - if profiling.GlueTableName - else {} - ), - } - ) + dataset, profiling = DatasetProfilingGlueHandler._get_job_data(session, task) + run_id = GlueDatasetProfilerClient(dataset).run_job(profiling) + DatasetProfilingService.update_run( session, profilingRunUri=profiling.profilingRunUri, - GlueJobRunId=run['JobRunId'], + GlueJobRunId=run_id, ) - return run + return run_id - # TODO move to client once dataset is migrated @staticmethod - def get_job_run(**data): - accountid = data['accountid'] - name = data['name'] - run_id = data['run_id'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.get_job_run(JobName=name, RunId=run_id) - return response - except ClientError as e: - log.error(f'Failed to get job run {run_id} due to: {e}') - raise e - - @staticmethod - def run_job(**data): - accountid = data['accountid'] - name = data['name'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.start_job_run( - JobName=name, Arguments=data.get('arguments', {}) + def _get_job_data(session, task): + profiling: DatasetProfilingRun = ( + DatasetProfilingService.get_profiling_run( + session, profilingRunUri=task.targetUri ) - return response - except ClientError as e: - log.error(f'Failed to start profiling job {name} due to: {e}') - raise e + ) + dataset: Dataset = session.query(Dataset).get( + profiling.datasetUri + ) + + return dataset, profiling diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py 
b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index f648dc330..b38311ca6 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -10,7 +10,7 @@ log = logging.getLogger(__name__) -class DatasetColumnGlueHandler: +class DatasetTableSyncHandler: """A handler for dataset table""" @staticmethod diff --git a/backend/dataall/modules/datasets/handlers/glue_column_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py similarity index 91% rename from backend/dataall/modules/datasets/handlers/glue_column_handler.py rename to backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py index 07a1c41b5..76b04d75b 100644 --- a/backend/dataall/modules/datasets/handlers/glue_column_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py @@ -38,8 +38,10 @@ def update_table_columns(engine, task: models.Task): aws_session = SessionHelper.remote_session(table.AWSAccountId) - LakeFormationTableClient(table, aws_session).grant_pivot_role_all_table_permissions() - glue_client = GlueTableClient(aws_session, table) + lf_client = LakeFormationTableClient(table=table, aws_session=aws_session) + lf_client.grant_pivot_role_all_table_permissions() + + glue_client = GlueTableClient(aws_session=aws_session, table=table) original_table = glue_client.get_table() updated_table = { k: v diff --git a/backend/dataall/modules/datasets/handlers/s3_location_handler.py b/backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py similarity index 87% rename from backend/dataall/modules/datasets/handlers/s3_location_handler.py rename to backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py index 14864c191..b2435f6d7 100644 --- a/backend/dataall/modules/datasets/handlers/s3_location_handler.py +++ b/backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py @@ -8,8 +8,8 @@ log = 
logging.getLogger(__name__) -class S3DatasetLocationHandler: - """Handles async requests related to s3 for dataset storage location""" +class S3FolderCreatorHandler: + """Handles async requests related to s3 for dataset folders""" @staticmethod @Worker.handler(path='s3.prefix.create') diff --git a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py index 006c4023d..25986fe7d 100644 --- a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py @@ -4,9 +4,9 @@ from botocore.exceptions import ClientError from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper -from dataall import db from dataall.db import models +from dataall.db.api import Environment +from dataall.modules.datasets.aws.sns_dataset_client import SnsDatasetClient from dataall.modules.datasets.services.dataset_service import DatasetService logger = logging.getLogger(__name__) @@ -21,33 +21,13 @@ def __init__(self): def publish_update(engine, task: models.Task): with engine.scoped_session() as session: dataset = DatasetService.get_dataset_by_uri(session, task.targetUri) - environment = db.api.Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - aws_session = SessionHelper.remote_session( - accountid=environment.AwsAccountId - ) - sns = aws_session.client('sns', region_name=environment.region) + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + message = { 'prefix': task.payload['s3Prefix'], 'accountid': environment.AwsAccountId, 'region': environment.region, 'bucket_name': dataset.S3BucketName, } - try: - logger.info( - f'Sending dataset {dataset.datasetUri}|{message} update message for consumers' - ) - response = sns.publish( - 
TopicArn=f'arn:aws:sns:{environment.region}:{environment.AwsAccountId}:{environment.subscriptionsProducersTopicName}', - Message=json.dumps(message), - ) - return response - except ClientError as e: - logger.error( - f'Failed to deliver dataset ' - f'{dataset.datasetUri}|{message} ' - f'update message for consumers ' - f'due to: {e} ' - ) - raise e + + SnsDatasetClient(environment, dataset).publish_dataset_message(message) From 0a5170e5c3ea9992ec65628a7e2349ec0d8c44cd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 17:01:44 +0200 Subject: [PATCH 141/346] Moved dataset constants --- backend/dataall/api/constants.py | 10 ---------- backend/dataall/db/models/Enums.py | 10 ---------- .../modules/datasets/api/dataset/resolvers.py | 4 +--- .../dataall/modules/datasets/api/dataset/schema.py | 13 ++++++++++++- 4 files changed, 13 insertions(+), 24 deletions(-) diff --git a/backend/dataall/api/constants.py b/backend/dataall/api/constants.py index ad712b4a4..d3c2a74e8 100644 --- a/backend/dataall/api/constants.py +++ b/backend/dataall/api/constants.py @@ -80,16 +80,6 @@ class DataPipelineRole(GraphQLEnumMapper): NoPermission = '000' -class DatasetRole(GraphQLEnumMapper): - # Permissions on a dataset - BusinessOwner = '999' - DataSteward = '998' - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - class GlossaryRole(GraphQLEnumMapper): # Permissions on a glossary Admin = '900' diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py index 8e981242b..e31718fbb 100644 --- a/backend/dataall/db/models/Enums.py +++ b/backend/dataall/db/models/Enums.py @@ -49,16 +49,6 @@ class DataPipelineRole(Enum): NoPermission = '000' -class DatasetRole(Enum): - # Permissions on a dataset - BusinessOwner = '999' - DataSteward = '998' - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - class RedshiftClusterRole(Enum): Creator = '950' Admin = '900' diff --git 
a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 7b25886da..b62397415 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -6,9 +6,6 @@ from dataall.api.Objects.Stack import stack_helper from dataall import db -from dataall.api.constants import ( - DatasetRole, -) from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper @@ -16,6 +13,7 @@ from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization from dataall.modules.datasets import Dataset +from dataall.modules.datasets.api.dataset.schema import DatasetRole from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/modules/datasets/api/dataset/schema.py b/backend/dataall/modules/datasets/api/dataset/schema.py index 4786df52a..90f4a2baa 100644 --- a/backend/dataall/modules/datasets/api/dataset/schema.py +++ b/backend/dataall/modules/datasets/api/dataset/schema.py @@ -13,7 +13,18 @@ resolve_redshift_copy_enabled, get_dataset_stack ) -from dataall.api.constants import DatasetRole, EnvironmentPermission +from dataall.api.constants import EnvironmentPermission, GraphQLEnumMapper + + +class DatasetRole(GraphQLEnumMapper): + # Permissions on a dataset + BusinessOwner = '999' + DataSteward = '998' + Creator = '950' + Admin = '900' + Shared = '300' + NoPermission = '000' + DatasetStatistics = gql.ObjectType( name='DatasetStatistics', From 19377d6bdad42348338b686289bf71f956ab4eef Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 8 May 2023 17:09:17 +0200 Subject: [PATCH 142/346] Returned 
deleted methods and added triggering of alarms --- .../services/dataset_alarm_service.py | 58 ++++++++++++++++++- .../share_managers/s3_share_manager.py | 22 +++++-- .../share_processors/s3_process_share.py | 4 +- 3 files changed, 76 insertions(+), 8 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_alarm_service.py b/backend/dataall/modules/datasets/services/dataset_alarm_service.py index 9283f2265..7dfeaae57 100644 --- a/backend/dataall/modules/datasets/services/dataset_alarm_service.py +++ b/backend/dataall/modules/datasets/services/dataset_alarm_service.py @@ -2,7 +2,7 @@ from datetime import datetime from dataall.db import models -from dataall.modules.datasets.db.models import DatasetTable, Dataset +from dataall.modules.datasets.db.models import DatasetTable, Dataset, DatasetStorageLocation from dataall.utils.alarm_service import AlarmService log = logging.getLogger(__name__) @@ -93,4 +93,60 @@ def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): """ return self.publish_message_to_alarms_topic(subject, message) + def trigger_folder_sharing_failure_alarm( + self, + folder: DatasetStorageLocation, + share: models.ShareObject, + target_environment: models.Environment, + ): + log.info('Triggering share failure alarm...') + subject = ( + f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. 
+Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: S3 Folder sharing failure + - Timestamp: {datetime.now()} + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {folder.AWSAccountId} + - Region: {folder.region} + - S3 Bucket: {folder.S3BucketName} + - S3 Folder: {folder.S3Prefix} + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} +""" + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_revoke_folder_sharing_failure_alarm( + self, + folder: DatasetStorageLocation, + share: models.ShareObject, + target_environment: models.Environment, + ): + log.info('Triggering share failure alarm...') + subject = ( + f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Revoke Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. 
+Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: S3 Folder sharing Revoke failure + - Timestamp: {datetime.now()} + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {folder.AWSAccountId} + - Region: {folder.region} + - S3 Bucket: {folder.S3BucketName} + - S3 Folder: {folder.S3Prefix} + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} +""" + return self.publish_message_to_alarms_topic(subject, message) + diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py index 52f6581ea..c557482c9 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py @@ -8,8 +8,8 @@ from ....aws.handlers.s3 import S3 from ....aws.handlers.kms import KMS from ....aws.handlers.iam import IAM +from ....modules.datasets.services.dataset_alarm_service import DatasetAlarmService -from ....utils.alarm_service import AlarmService from dataall.modules.datasets.db.models import DatasetStorageLocation, Dataset logger = logging.getLogger(__name__) @@ -398,9 +398,12 @@ def delete_dataset_bucket_key_policy( json.dumps(policy) ) - def log_share_failure(self, error: Exception) -> None: + def handle_share_failure(self, error: Exception) -> None: """ - Writes a log if the failure happened while sharing + Handles share failure by raising an alarm to alarmsTopic + Returns + ------- + True if alarm published successfully """ logger.error( f'Failed to share folder {self.s3_prefix} ' @@ -408,10 +411,16 @@ def log_share_failure(self, error: Exception) -> None: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) + DatasetAlarmService().trigger_folder_sharing_failure_alarm( + self.target_folder, self.share, self.target_environment + ) - def 
log_revoke_failure(self, error: Exception) -> None: + def handle_revoke_failure(self, error: Exception) -> None: """ - Writes a log if the failure happened while revoking share + Handles share failure by raising an alarm to alarmsTopic + Returns + ------- + True if alarm published successfully """ logger.error( f'Failed to revoke S3 permissions to folder {self.s3_prefix} ' @@ -419,3 +428,6 @@ def log_revoke_failure(self, error: Exception) -> None: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) + DatasetAlarmService().trigger_revoke_folder_sharing_failure_alarm( + self.target_folder, self.share, self.target_environment + ) diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py index 013dda059..13175c2d1 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py @@ -91,7 +91,7 @@ def process_approved_shares( shared_item_SM.update_state_single_item(session, sharing_item, new_state) except Exception as e: - sharing_folder.log_share_failure(e) + sharing_folder.handle_share_failure(e) new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(session, sharing_item, new_state) success = False @@ -154,7 +154,7 @@ def process_revoked_shares( revoked_item_SM.update_state_single_item(session, removing_item, new_state) except Exception as e: - removing_folder.log_revoke_failure(e) + removing_folder.handle_revoke_failure(e) new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(session, removing_item, new_state) success = False From ae0c61189c9f1efb0de51ed2eea8f6b20aa2bc19 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 11:34:40 +0200 
Subject: [PATCH 143/346] Resolved cyclic import --- .../dataall/modules/datasets/api/dataset/enums.py | 11 +++++++++++ .../modules/datasets/api/dataset/resolvers.py | 2 +- .../dataall/modules/datasets/api/dataset/schema.py | 14 ++------------ 3 files changed, 14 insertions(+), 13 deletions(-) create mode 100644 backend/dataall/modules/datasets/api/dataset/enums.py diff --git a/backend/dataall/modules/datasets/api/dataset/enums.py b/backend/dataall/modules/datasets/api/dataset/enums.py new file mode 100644 index 000000000..decf8df06 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/enums.py @@ -0,0 +1,11 @@ +from dataall.api.constants import GraphQLEnumMapper + + +class DatasetRole(GraphQLEnumMapper): + # Permissions on a dataset + BusinessOwner = '999' + DataSteward = '998' + Creator = '950' + Admin = '900' + Shared = '300' + NoPermission = '000' diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index b62397415..d3c5863a2 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -13,7 +13,7 @@ from dataall.db.api import Environment, ShareObject, ResourcePolicy from dataall.db.api.organization import Organization from dataall.modules.datasets import Dataset -from dataall.modules.datasets.api.dataset.schema import DatasetRole +from dataall.modules.datasets.api.dataset.enums import DatasetRole from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets.services.dataset_service import DatasetService diff --git a/backend/dataall/modules/datasets/api/dataset/schema.py b/backend/dataall/modules/datasets/api/dataset/schema.py index 90f4a2baa..a329ebf58 100644 --- a/backend/dataall/modules/datasets/api/dataset/schema.py +++ 
b/backend/dataall/modules/datasets/api/dataset/schema.py @@ -1,4 +1,5 @@ from dataall.api import gql +from dataall.modules.datasets.api.dataset.enums import DatasetRole from dataall.modules.datasets.api.dataset.resolvers import ( get_dataset_environment, get_dataset_organization, @@ -13,18 +14,7 @@ resolve_redshift_copy_enabled, get_dataset_stack ) -from dataall.api.constants import EnvironmentPermission, GraphQLEnumMapper - - -class DatasetRole(GraphQLEnumMapper): - # Permissions on a dataset - BusinessOwner = '999' - DataSteward = '998' - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - +from dataall.api.constants import EnvironmentPermission DatasetStatistics = gql.ObjectType( name='DatasetStatistics', From a2e27a8c60c633ac8e3d061915fddc8daaa82512 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 11:36:01 +0200 Subject: [PATCH 144/346] Moved glue script to dataset module and introduced extension for environment stack --- .gitignore | 2 - .../dataall/cdkproxy/stacks/environment.py | 50 +++++++++---------- .../assets/glueprofilingjob/glue_script.py | 0 .../cdk/dataset_glue_profiler_extension.py | 42 ++++++++++++++++ tests/modules/datasets/test_glue_profiler.py | 8 +++ 5 files changed, 74 insertions(+), 28 deletions(-) rename backend/dataall/{cdkproxy => modules/datasets/cdk}/assets/glueprofilingjob/glue_script.py (100%) create mode 100644 backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py create mode 100644 tests/modules/datasets/test_glue_profiler.py diff --git a/.gitignore b/.gitignore index aab8ca3d0..9261ed4bd 100644 --- a/.gitignore +++ b/.gitignore @@ -70,5 +70,3 @@ npm-debug.log* yarn-debug.log* yarn-error.log* .idea -/backend/dataall/cdkproxy/assets/gluedataqualityjob/datahubdq.zip -/backend/dataall/cdkproxy/assets/glueprofilingjob/datahubdq.zip diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index 2b863640d..b591fbea5 100644 --- 
a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -1,12 +1,12 @@ import logging import os import pathlib -import shutil +from abc import abstractmethod +from typing import List from aws_cdk import ( custom_resources as cr, aws_s3 as s3, - aws_s3_deployment, aws_iam as iam, aws_lambda as _lambda, aws_lambda_destinations as lambda_destination, @@ -44,9 +44,25 @@ logger = logging.getLogger(__name__) +class EnvironmentStackExtension: + @staticmethod + @abstractmethod + def extent(setup: 'EnvironmentSetup'): + raise NotImplementedError + + @stack(stack='environment') class EnvironmentSetup(Stack): module_name = __file__ + _EXTENSIONS: List[EnvironmentStackExtension] = [] + + @staticmethod + def register(extension: EnvironmentStackExtension): + EnvironmentSetup._EXTENSIONS.append(extension) + + @property + def environment(self) -> models.Environment: + return self._environment def get_engine(self): envname = os.environ.get('envname', 'local') @@ -173,6 +189,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): versioned=True, enforce_ssl=True, ) + self.default_environment_bucket = default_environment_bucket default_environment_bucket.add_to_resource_policy( iam.PolicyStatement( sid='RedshiftLogging', @@ -220,22 +237,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): enabled=True, ) - profiling_assetspath = self.zip_code( - os.path.realpath( - os.path.abspath( - os.path.join(__file__, '..', '..', 'assets', 'glueprofilingjob') - ) - ) - ) - - aws_s3_deployment.BucketDeployment( - self, - f'{self._environment.resourcePrefix}GlueProflingJobDeployment', - sources=[aws_s3_deployment.Source.asset(profiling_assetspath)], - destination_bucket=default_environment_bucket, - destination_key_prefix='profiling/code', - ) - default_role = self.create_or_import_environment_default_role() roles_sagemaker_dependency_group.add(default_role) @@ -250,6 +251,8 @@ def __init__(self, scope, id, target_uri: 
str = None, **kwargs): f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', ) + self.pivot_role = pivot_role + # Lakeformation default settings entry_point = str( pathlib.PosixPath( @@ -558,6 +561,9 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): parameter_name=f'/dataall/{self._environment.environmentUri}/sagemaker/sagemakerstudio/domain_id', ) + for extension in EnvironmentSetup._EXTENSIONS: + extension.extent(self) + TagsUtil.add_tags(stack=self, model=models.Environment, target_type="environment") CDKNagUtil.check_rules(self) @@ -767,14 +773,6 @@ def create_topic(self, construct_id, central_account, environment): ) return topic - @staticmethod - def zip_code(assetspath, s3_key='profiler'): - logger.info('Zipping code') - shutil.make_archive( - base_name=f'{assetspath}/{s3_key}', format='zip', root_dir=f'{assetspath}' - ) - return assetspath - def set_dlq(self, queue_name) -> sqs.Queue: queue_key = kms.Key( self, diff --git a/backend/dataall/cdkproxy/assets/glueprofilingjob/glue_script.py b/backend/dataall/modules/datasets/cdk/assets/glueprofilingjob/glue_script.py similarity index 100% rename from backend/dataall/cdkproxy/assets/glueprofilingjob/glue_script.py rename to backend/dataall/modules/datasets/cdk/assets/glueprofilingjob/glue_script.py diff --git a/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py new file mode 100644 index 000000000..5311c7460 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py @@ -0,0 +1,42 @@ +import os +import logging +import shutil +from aws_cdk import aws_s3_deployment + +from dataall.cdkproxy.stacks import EnvironmentSetup +from dataall.cdkproxy.stacks.environment import EnvironmentStackExtension + +log = logging.getLogger(__name__) + + +class DatasetGlueProfilerExtension(EnvironmentStackExtension): + """Extends an environment stack for glue profiler 
""" + + @staticmethod + def extent(setup: EnvironmentSetup): + asset_path = DatasetGlueProfilerExtension.get_path_to_asset() + profiling_assetspath = DatasetGlueProfilerExtension.zip_code(asset_path) + + aws_s3_deployment.BucketDeployment( + setup, + f'{setup.environment.resourcePrefix}GlueProflingJobDeployment', + sources=[aws_s3_deployment.Source.asset(profiling_assetspath)], + destination_bucket=setup.default_environment_bucket, + destination_key_prefix='profiling/code', + ) + + @staticmethod + def get_path_to_asset(): + return os.path.realpath( + os.path.abspath( + os.path.join(__file__, '..', 'assets', 'glueprofilingjob') + ) + ) + + @staticmethod + def zip_code(assets_path, s3_key='profiler'): + log.info('Zipping code') + shutil.make_archive( + base_name=f'{assets_path}/{s3_key}', format='zip', root_dir=f'{assets_path}' + ) + return assets_path diff --git a/tests/modules/datasets/test_glue_profiler.py b/tests/modules/datasets/test_glue_profiler.py new file mode 100644 index 000000000..7e45695c2 --- /dev/null +++ b/tests/modules/datasets/test_glue_profiler.py @@ -0,0 +1,8 @@ +from dataall.modules.datasets.cdk.dataset_glue_profiler_extension import DatasetGlueProfilerExtension +from pathlib import Path + + +def test_glue_profiler_exist(): + path = DatasetGlueProfilerExtension.get_path_to_asset() + assert Path(path).exists() + From 732f726d43886fe4707accb531357fc58c463ae7 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 11:50:50 +0200 Subject: [PATCH 145/346] Renamed the method --- backend/dataall/modules/datasets/handlers/glue_table_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index b38311ca6..ce6b7b66a 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -15,7 +15,7 @@ class 
DatasetTableSyncHandler: @staticmethod @Worker.handler(path='glue.dataset.database.tables') - def list_tables(engine, task: models.Task): + def sync_existing_tables(engine, task: models.Task): with engine.scoped_session() as session: dataset: Dataset = DatasetService.get_dataset_by_uri( session, task.targetUri From 90d6429a155c07248dd219a2d60b4812ce670894 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 12:10:27 +0200 Subject: [PATCH 146/346] Fixed test and added registration of glue extension --- backend/dataall/cdkproxy/stacks/environment.py | 7 +++---- backend/dataall/modules/datasets/__init__.py | 5 +++++ .../datasets/cdk/dataset_glue_profiler_extension.py | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index b591fbea5..7c1050fe7 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -2,7 +2,7 @@ import os import pathlib from abc import abstractmethod -from typing import List +from typing import List, Type from aws_cdk import ( custom_resources as cr, @@ -54,13 +54,12 @@ def extent(setup: 'EnvironmentSetup'): @stack(stack='environment') class EnvironmentSetup(Stack): module_name = __file__ - _EXTENSIONS: List[EnvironmentStackExtension] = [] + _EXTENSIONS: List[Type[EnvironmentStackExtension]] = [] @staticmethod - def register(extension: EnvironmentStackExtension): + def register(extension: Type[EnvironmentStackExtension]): EnvironmentSetup._EXTENSIONS.append(extension) - @property def environment(self) -> models.Environment: return self._environment diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 2ace4145b..d653d358e 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -85,4 +85,9 @@ def is_supported(cls, modes: List[ImportMode]): def 
__init__(self): import dataall.modules.datasets.cdk + from dataall.cdkproxy.stacks.environment import EnvironmentSetup + from dataall.modules.datasets.cdk.dataset_glue_profiler_extension import DatasetGlueProfilerExtension + + EnvironmentSetup.register(DatasetGlueProfilerExtension) + log.info("Dataset stacks have been imported") diff --git a/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py index 5311c7460..d4986380d 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py +++ b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py @@ -19,7 +19,7 @@ def extent(setup: EnvironmentSetup): aws_s3_deployment.BucketDeployment( setup, - f'{setup.environment.resourcePrefix}GlueProflingJobDeployment', + f'{setup.environment().resourcePrefix}GlueProflingJobDeployment', sources=[aws_s3_deployment.Source.asset(profiling_assetspath)], destination_bucket=setup.default_environment_bucket, destination_key_prefix='profiling/code', From 23b5eb8c2b4000d212bd22c801d42cdf91531bb6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 14:12:43 +0200 Subject: [PATCH 147/346] Merge conflict --- backend/dataall/cdkproxy/stacks/environment.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py index 807e331bb..e4fb58b44 100644 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ b/backend/dataall/cdkproxy/stacks/environment.py @@ -242,8 +242,6 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', ) - self.pivot_role = pivot_role - # Lakeformation default settings custom resource # Set PivotRole as Lake Formation data lake admin entry_point = str( From 99fb5066f159a9736a62c02de80615a937bb670d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: 
Tue, 9 May 2023 15:21:04 +0200 Subject: [PATCH 148/346] Fix merge conflict --- .../dataset_sharing/services/dataset_alarm_service.py | 4 ++-- .../services/share_managers/s3_share_manager.py | 2 +- backend/dataall/modules/datasets/aws/glue_profiler_client.py | 2 +- .../modules/datasets/handlers/glue_profiling_handler.py | 5 ++--- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py index 0c30d267b..c1a11c289 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py @@ -97,7 +97,7 @@ def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): def trigger_folder_sharing_failure_alarm( self, folder: DatasetStorageLocation, - share: models.ShareObject, + share: ShareObject, target_environment: models.Environment, ): log.info('Triggering share failure alarm...') @@ -125,7 +125,7 @@ def trigger_folder_sharing_failure_alarm( def trigger_revoke_folder_sharing_failure_alarm( self, folder: DatasetStorageLocation, - share: models.ShareObject, + share: ShareObject, target_environment: models.Environment, ): log.info('Triggering share failure alarm...') diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py index d0248c1c8..004ed44f7 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py @@ -9,8 +9,8 @@ from dataall.aws.handlers.kms import KMS from dataall.aws.handlers.iam import IAM from dataall.modules.dataset_sharing.db.models import ShareObject +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService 
from dataall.modules.dataset_sharing.services.share_object import ShareObjectService -from dataall.modules.datasets.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py b/backend/dataall/modules/datasets/aws/glue_profiler_client.py index 6b678ec0b..523dc2c20 100644 --- a/backend/dataall/modules/datasets/aws/glue_profiler_client.py +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -3,7 +3,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.modules.datasets import Dataset -from dataall.modules.datasets.db.models import DatasetProfilingRun +from dataall.modules.datasets_base.db.models import DatasetProfilingRun log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index 0d604a7c7..a804dc729 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -1,11 +1,10 @@ import logging from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient from dataall.modules.datasets_base.db.models import DatasetProfilingRun, Dataset -from dataall.modules.datasets.services.dataset_profiling_repository import DatasetProfilingRepository +from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository log = logging.getLogger(__name__) @@ -31,7 +30,7 @@ def start_profiling_run(engine, task: models.Task): dataset, profiling = DatasetProfilingGlueHandler._get_job_data(session, task) run_id = GlueDatasetProfilerClient(dataset).run_job(profiling) - 
DatasetProfilingService.update_run( + DatasetProfilingRepository.update_run( session, profilingRunUri=profiling.profilingRunUri, GlueJobRunId=run_id, From b3838205a30303eed82ea33fc70459d062bfaff2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 15:37:51 +0200 Subject: [PATCH 149/346] Extracted EcsShareHandler --- backend/dataall/aws/handlers/ecs.py | 60 --------------- backend/dataall/modules/__init__.py | 10 +-- .../modules/dataset_sharing/__init__.py | 12 +++ .../dataset_sharing/handlers/__init__.py | 0 .../handlers/ecs_share_handler.py | 74 +++++++++++++++++++ backend/dataall/modules/datasets/__init__.py | 13 +++- 6 files changed, 102 insertions(+), 67 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/handlers/__init__.py create mode 100644 backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py diff --git a/backend/dataall/aws/handlers/ecs.py b/backend/dataall/aws/handlers/ecs.py index 528ebb520..922a22a89 100644 --- a/backend/dataall/aws/handlers/ecs.py +++ b/backend/dataall/aws/handlers/ecs.py @@ -9,7 +9,6 @@ from ... 
import db from ...db import models from ...utils import Parameter -from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService log = logging.getLogger('aws:ecs') @@ -18,65 +17,6 @@ class Ecs: def __init__(self): pass - @staticmethod - @Worker.handler(path='ecs.share.approve') - def approve_share(engine, task: models.Task): - envname = os.environ.get('envname', 'local') - if envname in ['local', 'dkrcompose']: - return DataSharingService.approve_share(engine, task.targetUri) - else: - return Ecs.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='approve_share' - ) - - @staticmethod - @Worker.handler(path='ecs.share.revoke') - def revoke_share(engine, task: models.Task): - envname = os.environ.get('envname', 'local') - if envname in ['local', 'dkrcompose']: - return DataSharingService.revoke_share(engine, task.targetUri) - else: - return Ecs.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='revoke_share' - ) - - @staticmethod - def run_share_management_ecs_task(envname, share_uri, handler): - share_task_definition = Parameter().get_parameter( - env=envname, path='ecs/task_def_arn/share_management' - ) - container_name = Parameter().get_parameter( - env=envname, path='ecs/container/share_management' - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') - security_groups = Parameter().get_parameter( - env=envname, path='ecs/security_groups' - ) - - try: - Ecs.run_ecs_task( - cluster_name=cluster_name, - task_definition=share_task_definition, - container_name=container_name, - security_groups=security_groups, - subnets=subnets, - environment=[ - {'name': 'shareUri', 'value': share_uri}, - {'name': 'config_location', 'value': '/config.json'}, - {'name': 'envname', 'value': envname}, - {'name': 'handler', 'value': handler}, - { - 'name': 'AWS_REGION', - 'value': 
os.getenv('AWS_REGION', 'eu-west-1'), - }, - ], - ) - return True - except ClientError as e: - log.error(e) - raise e - @staticmethod @Worker.handler(path='ecs.cdkproxy.deploy') def deploy_stack(engine, task: models.Task): diff --git a/backend/dataall/modules/__init__.py b/backend/dataall/modules/__init__.py index dce3a711e..7e1d5c42a 100644 --- a/backend/dataall/modules/__init__.py +++ b/backend/dataall/modules/__init__.py @@ -7,7 +7,7 @@ 3) Define your module in config.json. The loader will use it to import your module Remember that there should not be any references from outside to modules. -The rule is simple: modules can import the core/common code, but not the other way around +The rule is simple: modules can import the core code, but not the other way around Otherwise your modules will be imported automatically. You can add logging about the importing the module in __init__.py to track unintentionally imports @@ -19,15 +19,13 @@ register itself automatically if there is decorator @stack upon the class see StackManagerFactory and @stack - for more information on stacks -tasks - contains code for short-running tasks that will be delegated to lambda +handlers - contains code for long-running tasks that will be delegated to lambda These task will automatically register themselves when there is @Worker.handler upon the static! method. see WorkerHandler - for more information on short-living tasks Another example of auto import is service policies. If your module has a service policy -it will be automatically imported if it inherited from ServicePolicy +it will be automatically imported if it inherited from ServicePolicy or S3Policy -Manual import: -Permissions. Make sure you have added all permission to the core permissions -Permission resolvers in TargetType. 
see it for reference +Any manual import should be done in __init__ file of the module in ModuleInterface """ diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 7e2c7621d..80ab16edc 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -20,3 +20,15 @@ def depends_on() -> List[Type['ModuleInterface']]: def __init__(self): from dataall.modules.dataset_sharing import api log.info("API of dataset sharing has been imported") + + +class SharingAsyncHandlersModuleInterface(ModuleInterface): + """Implements ModuleInterface for dataset async lambda""" + + @staticmethod + def is_supported(modes: List[ImportMode]): + return ImportMode.HANDLERS in modes + + def __init__(self): + import dataall.modules.dataset_sharing.handlers + log.info("Sharing handlers have been imported") diff --git a/backend/dataall/modules/dataset_sharing/handlers/__init__.py b/backend/dataall/modules/dataset_sharing/handlers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py new file mode 100644 index 000000000..e7b600222 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py @@ -0,0 +1,74 @@ + +import logging +import os + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.ecs import Ecs +from dataall.db import models +from dataall.utils import Parameter +from dataall.aws.handlers.service_handlers import Worker +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService + +log = logging.getLogger(__name__) + + +class EcsShareHandler: + @staticmethod + @Worker.handler(path='ecs.share.approve') + def approve_share(engine, task: models.Task): + envname = os.environ.get('envname', 'local') + if envname in ['local', 
'dkrcompose']: + return DataSharingService.approve_share(engine, task.targetUri) + else: + return EcsShareHandler.run_share_management_ecs_task( + envname=envname, share_uri=task.targetUri, handler='approve_share' + ) + + @staticmethod + @Worker.handler(path='ecs.share.revoke') + def revoke_share(engine, task: models.Task): + envname = os.environ.get('envname', 'local') + if envname in ['local', 'dkrcompose']: + return DataSharingService.revoke_share(engine, task.targetUri) + else: + return EcsShareHandler.run_share_management_ecs_task( + envname=envname, share_uri=task.targetUri, handler='revoke_share' + ) + + @staticmethod + def run_share_management_ecs_task(envname, share_uri, handler): + share_task_definition = Parameter().get_parameter( + env=envname, path='ecs/task_def_arn/share_management' + ) + container_name = Parameter().get_parameter( + env=envname, path='ecs/container/share_management' + ) + cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') + subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') + security_groups = Parameter().get_parameter( + env=envname, path='ecs/security_groups' + ) + + try: + Ecs.run_ecs_task( + cluster_name=cluster_name, + task_definition=share_task_definition, + container_name=container_name, + security_groups=security_groups, + subnets=subnets, + environment=[ + {'name': 'shareUri', 'value': share_uri}, + {'name': 'config_location', 'value': '/config.json'}, + {'name': 'envname', 'value': envname}, + {'name': 'handler', 'value': handler}, + { + 'name': 'AWS_REGION', + 'value': os.getenv('AWS_REGION', 'eu-west-1'), + }, + ], + ) + return True + except ClientError as e: + log.error(e) + raise e diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index aba27e198..e923124a7 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,7 +2,6 @@ import logging from typing import 
List, Type -from dataall.modules.dataset_sharing import SharingApiModuleInterface from dataall.core.group.services.group_resource_manager import GroupResourceManager from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface @@ -25,6 +24,8 @@ def is_supported(modes): @staticmethod def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.dataset_sharing import SharingApiModuleInterface + return [SharingApiModuleInterface, DatasetBaseModuleInterface] def __init__(self): @@ -82,6 +83,12 @@ def __init__(self): import dataall.modules.datasets.handlers log.info("Dataset handlers have been imported") + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.dataset_sharing import SharingAsyncHandlersModuleInterface + + return [SharingAsyncHandlersModuleInterface, DatasetBaseModuleInterface] + class DatasetCdkModuleInterface(ModuleInterface): """Loads dataset cdk stacks """ @@ -98,3 +105,7 @@ def __init__(self): EnvironmentSetup.register(DatasetGlueProfilerExtension) log.info("Dataset stacks have been imported") + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] From efbdd269ffdb919e68fda417f7414be7b17484a5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 16:04:36 +0200 Subject: [PATCH 150/346] Reduced code duplication --- backend/dataall/aws/handlers/ecs.py | 140 +++++++++--------- .../handlers/ecs_share_handler.py | 55 ++----- 2 files changed, 82 insertions(+), 113 deletions(-) diff --git a/backend/dataall/aws/handlers/ecs.py b/backend/dataall/aws/handlers/ecs.py index 922a22a89..b5958e4e6 100644 --- a/backend/dataall/aws/handlers/ecs.py +++ b/backend/dataall/aws/handlers/ecs.py @@ -39,87 +39,83 @@ def deploy_stack(engine, task: models.Task): @staticmethod def run_cdkproxy_task(stack_uri): - envname = os.environ.get('envname', 'local') - 
cdkproxy_task_definition = Parameter().get_parameter( - env=envname, path='ecs/task_def_arn/cdkproxy' + task_arn = Ecs.run_ecs_task( + task_definition_param='ecs/task_def_arn/cdkproxy', + container_name_param='ecs/container/cdkproxy', + context=[{'name': 'stackUri', 'value': stack_uri}], + started_by=f'awsworker-{stack_uri}', ) - container_name = Parameter().get_parameter( - env=envname, path='ecs/container/cdkproxy' - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') - security_groups = Parameter().get_parameter( - env=envname, path='ecs/security_groups' - ) - try: - task_arn = Ecs.run_ecs_task( - cluster_name=cluster_name, - task_definition=cdkproxy_task_definition, - container_name=container_name, - security_groups=security_groups, - subnets=subnets, - environment=[ - {'name': 'stackUri', 'value': stack_uri}, - {'name': 'config_location', 'value': '/config.json'}, - {'name': 'envname', 'value': envname}, - { - 'name': 'AWS_REGION', - 'value': os.getenv('AWS_REGION', 'eu-west-1'), - }, - ], - started_by=f'awsworker-{stack_uri}', - ) - log.info(f'ECS Task {task_arn} running') - return task_arn - except ClientError as e: - log.error(e) - raise e + log.info(f'ECS Task {task_arn} running') + return task_arn @staticmethod def run_ecs_task( - cluster_name, - task_definition, - container_name, - security_groups, - subnets, - environment, + task_definition_param, + container_name_param, + context, started_by='awsworker', ): - response = boto3.client('ecs').run_task( - cluster=cluster_name, - taskDefinition=task_definition, - count=1, - launchType='FARGATE', - networkConfiguration={ - 'awsvpcConfiguration': { - 'subnets': subnets.split(','), - 'securityGroups': security_groups.split(','), - } - }, - overrides={ - 'containerOverrides': [ - { - 'name': container_name, - 'environment': environment, + try: + envname = os.environ.get('envname', 'local') + 
cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') + subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') + security_groups = Parameter().get_parameter( + env=envname, path='ecs/security_groups' + ) + + task_definition = Parameter().get_parameter( + env=envname, path=task_definition_param + ) + container_name = Parameter().get_parameter( + env=envname, path=container_name_param + ) + + response = boto3.client('ecs').run_task( + cluster=cluster_name, + taskDefinition=task_definition, + count=1, + launchType='FARGATE', + networkConfiguration={ + 'awsvpcConfiguration': { + 'subnets': subnets.split(','), + 'securityGroups': security_groups.split(','), } - ] - }, - startedBy=started_by, - ) - if response['failures']: - raise Exception( - ', '.join( - [ - 'fail to run task {0} reason: {1}'.format( - failure['arn'], failure['reason'] - ) - for failure in response['failures'] + }, + overrides={ + 'containerOverrides': [ + { + 'name': container_name, + 'environment': [ + {'name': 'config_location', 'value': '/config.json'}, + {'name': 'envname', 'value': envname}, + { + 'name': 'AWS_REGION', + 'value': os.getenv('AWS_REGION', 'eu-west-1'), + }, + *context + ], + } ] - ) + }, + startedBy=started_by, ) - task_arn = response.get('tasks', [{'taskArn': None}])[0]['taskArn'] - log.info(f'Task started {task_arn}..') - return task_arn + if response['failures']: + raise Exception( + ', '.join( + [ + 'fail to run task {0} reason: {1}'.format( + failure['arn'], failure['reason'] + ) + for failure in response['failures'] + ] + ) + ) + task_arn = response.get('tasks', [{'taskArn': None}])[0]['taskArn'] + log.info(f'Task started {task_arn}..') + return task_arn + except ClientError as e: + log.error(e) + raise e @staticmethod def is_task_running(cluster_name, started_by): diff --git a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py index 
e7b600222..2707a0609 100644 --- a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py +++ b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py @@ -17,58 +17,31 @@ class EcsShareHandler: @staticmethod @Worker.handler(path='ecs.share.approve') def approve_share(engine, task: models.Task): - envname = os.environ.get('envname', 'local') - if envname in ['local', 'dkrcompose']: - return DataSharingService.approve_share(engine, task.targetUri) - else: - return EcsShareHandler.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='approve_share' - ) + return EcsShareHandler._manage_share(engine, task, DataSharingService.approve_share, 'approve_share') @staticmethod @Worker.handler(path='ecs.share.revoke') def revoke_share(engine, task: models.Task): + return EcsShareHandler._manage_share(engine, task, DataSharingService.revoke_share, 'revoke_share') + + @staticmethod + def _manage_share(engine, task: models.Task, local_handler, ecs_handler: str): envname = os.environ.get('envname', 'local') if envname in ['local', 'dkrcompose']: - return DataSharingService.revoke_share(engine, task.targetUri) + return local_handler(engine, task.targetUri) else: - return EcsShareHandler.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='revoke_share' + return EcsShareHandler._run_share_management_ecs_task( + share_uri=task.targetUri, handler=ecs_handler ) @staticmethod - def run_share_management_ecs_task(envname, share_uri, handler): - share_task_definition = Parameter().get_parameter( - env=envname, path='ecs/task_def_arn/share_management' - ) - container_name = Parameter().get_parameter( - env=envname, path='ecs/container/share_management' - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') - security_groups = Parameter().get_parameter( - env=envname, path='ecs/security_groups' 
- ) - - try: - Ecs.run_ecs_task( - cluster_name=cluster_name, - task_definition=share_task_definition, - container_name=container_name, - security_groups=security_groups, - subnets=subnets, - environment=[ + def _run_share_management_ecs_task(share_uri, handler): + return Ecs.run_ecs_task( + task_definition_param='ecs/task_def_arn/share_management', + container_name_param='ecs/container/share_management', + context=[ {'name': 'shareUri', 'value': share_uri}, - {'name': 'config_location', 'value': '/config.json'}, - {'name': 'envname', 'value': envname}, {'name': 'handler', 'value': handler}, - { - 'name': 'AWS_REGION', - 'value': os.getenv('AWS_REGION', 'eu-west-1'), - }, ], ) - return True - except ClientError as e: - log.error(e) - raise e + From efe9154fb286d8e65c3137c4813806d37d9ca3e5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 9 May 2023 16:24:00 +0200 Subject: [PATCH 151/346] Moved GraphQL constants --- backend/dataall/api/constants.py | 57 ------------------- .../modules/dataset_sharing/api/enums.py | 14 +++++ .../dataset_sharing/api/input_types.py | 2 +- .../modules/dataset_sharing/api/mutations.py | 1 + .../modules/dataset_sharing/api/queries.py | 1 + .../modules/dataset_sharing/api/resolvers.py | 2 +- .../modules/dataset_sharing/api/schema.py | 1 + 7 files changed, 19 insertions(+), 59 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/api/enums.py diff --git a/backend/dataall/api/constants.py b/backend/dataall/api/constants.py index d3c2a74e8..f63fb16c6 100644 --- a/backend/dataall/api/constants.py +++ b/backend/dataall/api/constants.py @@ -119,12 +119,6 @@ class SortDirection(GraphQLEnumMapper): desc = 'desc' -class ShareableType(GraphQLEnumMapper): - Table = 'DatasetTable' - StorageLocation = 'DatasetStorageLocation' - View = 'View' - - class PrincipalType(GraphQLEnumMapper): Any = 'Any' Organization = 'Organization' @@ -136,57 +130,6 @@ class PrincipalType(GraphQLEnumMapper): ConsumptionRole = 'ConsumptionRole' 
-class ShareObjectPermission(GraphQLEnumMapper): - Approvers = '999' - Requesters = '800' - DatasetAdmins = '700' - NoPermission = '000' - - -class ShareObjectStatus(GraphQLEnumMapper): - Deleted = 'Deleted' - Approved = 'Approved' - Rejected = 'Rejected' - Revoked = 'Revoked' - Draft = 'Draft' - Submitted = 'Submitted' - Revoke_In_Progress = 'Revoke_In_Progress' - Share_In_Progress = 'Share_In_Progress' - Processed = 'Processed' - - -class ShareItemStatus(GraphQLEnumMapper): - Deleted = 'Deleted' - PendingApproval = 'PendingApproval' - Share_Approved = 'Share_Approved' - Share_Rejected = 'Share_Rejected' - Share_In_Progress = 'Share_In_Progress' - Share_Succeeded = 'Share_Succeeded' - Share_Failed = 'Share_Failed' - Revoke_Approved = 'Revoke_Approved' - Revoke_In_Progress = 'Revoke_In_Progress' - Revoke_Failed = 'Revoke_Failed' - Revoke_Succeeded = 'Revoke_Succeeded' - - -class ShareObjectActions(GraphQLEnumMapper): - Submit = 'Submit' - Approve = 'Approve' - Reject = 'Reject' - RevokeItems = 'RevokeItems' - Start = 'Start' - Finish = 'Finish' - FinishPending = 'FinishPending' - Delete = 'Delete' - - -class ShareItemActions(GraphQLEnumMapper): - AddItem = 'AddItem' - RemoveItem = 'RemoveItem' - Failure = 'Failure' - Success = 'Success' - - class ConfidentialityClassification(GraphQLEnumMapper): Unclassified = 'Unclassified' Official = 'Official' diff --git a/backend/dataall/modules/dataset_sharing/api/enums.py b/backend/dataall/modules/dataset_sharing/api/enums.py new file mode 100644 index 000000000..e08c83ea6 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/enums.py @@ -0,0 +1,14 @@ +from dataall.api.constants import GraphQLEnumMapper + + +class ShareableType(GraphQLEnumMapper): + Table = 'DatasetTable' + StorageLocation = 'DatasetStorageLocation' + View = 'View' + + +class ShareObjectPermission(GraphQLEnumMapper): + Approvers = '999' + Requesters = '800' + DatasetAdmins = '700' + NoPermission = '000' diff --git 
a/backend/dataall/modules/dataset_sharing/api/input_types.py b/backend/dataall/modules/dataset_sharing/api/input_types.py index fba467b67..a631736bf 100644 --- a/backend/dataall/modules/dataset_sharing/api/input_types.py +++ b/backend/dataall/modules/dataset_sharing/api/input_types.py @@ -1,5 +1,5 @@ from dataall.api.constants import * - +from dataall.modules.dataset_sharing.api.enums import ShareableType NewShareObjectInput = gql.InputType( name='NewShareObjectInput', diff --git a/backend/dataall/modules/dataset_sharing/api/mutations.py b/backend/dataall/modules/dataset_sharing/api/mutations.py index 472ba2764..b7e6cac43 100644 --- a/backend/dataall/modules/dataset_sharing/api/mutations.py +++ b/backend/dataall/modules/dataset_sharing/api/mutations.py @@ -1,3 +1,4 @@ +from dataall.api import gql from dataall.modules.dataset_sharing.api.resolvers import * createShareObject = gql.MutationField( diff --git a/backend/dataall/modules/dataset_sharing/api/queries.py b/backend/dataall/modules/dataset_sharing/api/queries.py index 126508758..0ebe44c64 100644 --- a/backend/dataall/modules/dataset_sharing/api/queries.py +++ b/backend/dataall/modules/dataset_sharing/api/queries.py @@ -1,3 +1,4 @@ +from dataall.api import gql from dataall.modules.dataset_sharing.api.resolvers import * getShareObject = gql.QueryField( diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index a0e33909b..6d5234e2e 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -3,10 +3,10 @@ from dataall import db from dataall import utils from dataall.api.Objects.Principal.resolvers import get_principal -from dataall.api.constants import * from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.modules.dataset_sharing.api.enums import ShareObjectPermission from 
dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService from dataall.modules.dataset_sharing.services.share_object import ShareObjectService diff --git a/backend/dataall/modules/dataset_sharing/api/schema.py b/backend/dataall/modules/dataset_sharing/api/schema.py index f8455d0b5..5146e65c2 100644 --- a/backend/dataall/modules/dataset_sharing/api/schema.py +++ b/backend/dataall/modules/dataset_sharing/api/schema.py @@ -1,3 +1,4 @@ +from dataall.modules.dataset_sharing.api.enums import ShareableType from dataall.modules.dataset_sharing.api.resolvers import * from dataall.api.Objects.Environment.resolvers import resolve_environment From 13a2fc082694600a0dacaa7e88d0d61ec950d753 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Wed, 10 May 2023 10:15:11 +0200 Subject: [PATCH 152/346] =?UTF-8?q?Modify=20docker-compose=20yaml=20to=20r?= =?UTF-8?q?ead=20region=20and=20default=20region=20from=20env=E2=80=A6=20(?= =?UTF-8?q?#446)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Feature or Bugfix - Bugfix - Refactoring ### Detail Added `AWS_REGION` to the environment variables of the Docker containers for local development. Set both`AWS_DEFAULT_REGION` and `AWS_REGION` to their values set on the terminal where `docker-compose up` is run. If these values are not set, `eu-west-1` is used as default Another PR with better instructions to the github pages documentation (deploy locally) will follow. ### Relates By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- docker-compose.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 0668ffcd8..bd41b1925 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -16,7 +16,8 @@ services: - db environment: envname: 'dkrcompose' - AWS_DEFAULT_REGION: "eu-west-1" + AWS_REGION: "${AWS_REGION:-eu-west-1}" + AWS_DEFAULT_REGION: "${AWS_DEFAULT_REGION:-eu-west-1}" volumes: - ./backend:/code - $HOME/.aws/credentials:/root/.aws/credentials:ro @@ -36,7 +37,8 @@ services: - 5000:5000 environment: envname: 'dkrcompose' - AWS_DEFAULT_REGION: "eu-west-1" + AWS_REGION: "${AWS_REGION:-eu-west-1}" + AWS_DEFAULT_REGION: "${AWS_DEFAULT_REGION:-eu-west-1}" volumes: - ./backend:/code - $HOME/.aws/credentials:/root/.aws/credentials:ro From d9ace5d00f5239626a69287350b78d04fd467a7e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 10:27:11 +0200 Subject: [PATCH 153/346] Moved GraphQL and db constants --- backend/dataall/db/models/Enums.py | 6 --- .../modules/dataset_sharing/api/enums.py | 44 +++++++++++++++++++ .../modules/dataset_sharing/api/schema.py | 1 + .../modules/dataset_sharing/db/Enums.py | 6 +++ .../services/data_sharing_service.py | 8 ++-- .../services/dataset_share_service.py | 3 +- .../dataset_sharing/services/share_object.py | 4 +- .../04d92886fabe_add_consumption_roles.py | 3 +- ...215e_backfill_dataset_table_permissions.py | 7 ++- tests/api/conftest.py | 3 +- tests/api/test_share.py | 19 ++++---- tests/tasks/conftest.py | 8 ++-- tests/tasks/test_lf_share_manager.py | 10 ++--- tests/tasks/test_subscriptions.py | 6 +-- 14 files changed, 88 insertions(+), 40 deletions(-) diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py index c283d9381..7d90746f5 100644 --- a/backend/dataall/db/models/Enums.py +++ b/backend/dataall/db/models/Enums.py @@ -82,12 +82,6 @@ class SortDirection(Enum): desc = 'desc' -class ShareableType(Enum): - Table = 'DatasetTable' - 
StorageLocation = 'DatasetStorageLocation' - View = 'View' - - class PrincipalType(Enum): Any = 'Any' Organization = 'Organization' diff --git a/backend/dataall/modules/dataset_sharing/api/enums.py b/backend/dataall/modules/dataset_sharing/api/enums.py index e08c83ea6..139bba0a4 100644 --- a/backend/dataall/modules/dataset_sharing/api/enums.py +++ b/backend/dataall/modules/dataset_sharing/api/enums.py @@ -12,3 +12,47 @@ class ShareObjectPermission(GraphQLEnumMapper): Requesters = '800' DatasetAdmins = '700' NoPermission = '000' + + +class ShareObjectStatus(GraphQLEnumMapper): + Deleted = 'Deleted' + Approved = 'Approved' + Rejected = 'Rejected' + Revoked = 'Revoked' + Draft = 'Draft' + Submitted = 'Submitted' + Revoke_In_Progress = 'Revoke_In_Progress' + Share_In_Progress = 'Share_In_Progress' + Processed = 'Processed' + + +class ShareItemStatus(GraphQLEnumMapper): + Deleted = 'Deleted' + PendingApproval = 'PendingApproval' + Share_Approved = 'Share_Approved' + Share_Rejected = 'Share_Rejected' + Share_In_Progress = 'Share_In_Progress' + Share_Succeeded = 'Share_Succeeded' + Share_Failed = 'Share_Failed' + Revoke_Approved = 'Revoke_Approved' + Revoke_In_Progress = 'Revoke_In_Progress' + Revoke_Failed = 'Revoke_Failed' + Revoke_Succeeded = 'Revoke_Succeeded' + + +class ShareObjectActions(GraphQLEnumMapper): + Submit = 'Submit' + Approve = 'Approve' + Reject = 'Reject' + RevokeItems = 'RevokeItems' + Start = 'Start' + Finish = 'Finish' + FinishPending = 'FinishPending' + Delete = 'Delete' + + +class ShareItemActions(GraphQLEnumMapper): + AddItem = 'AddItem' + RemoveItem = 'RemoveItem' + Failure = 'Failure' + Success = 'Success' diff --git a/backend/dataall/modules/dataset_sharing/api/schema.py b/backend/dataall/modules/dataset_sharing/api/schema.py index 5146e65c2..529a55cbb 100644 --- a/backend/dataall/modules/dataset_sharing/api/schema.py +++ b/backend/dataall/modules/dataset_sharing/api/schema.py @@ -1,3 +1,4 @@ +from dataall.api import gql from 
dataall.modules.dataset_sharing.api.enums import ShareableType from dataall.modules.dataset_sharing.api.resolvers import * from dataall.api.Objects.Environment.resolvers import resolve_environment diff --git a/backend/dataall/modules/dataset_sharing/db/Enums.py b/backend/dataall/modules/dataset_sharing/db/Enums.py index e76485bd2..dfab06799 100644 --- a/backend/dataall/modules/dataset_sharing/db/Enums.py +++ b/backend/dataall/modules/dataset_sharing/db/Enums.py @@ -50,3 +50,9 @@ class ShareItemActions(Enum): RemoveItem = 'RemoveItem' Failure = 'Failure' Success = 'Success' + + +class ShareableType(Enum): + Table = 'DatasetTable' + StorageLocation = 'DatasetStorageLocation' + View = 'View' diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 08ef1169d..560d3c03e 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -4,8 +4,8 @@ from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare from dataall.modules.dataset_sharing.services.share_processors.s3_process_share import ProcessS3Share -from dataall.db import models, Engine -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus +from dataall.db import Engine +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.services.share_object import ShareObjectSM, ShareObjectService, ShareItemSM log = logging.getLogger(__name__) @@ -161,7 +161,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( session, share_uri, - models.ShareableType.StorageLocation.value + ShareableType.StorageLocation.value ) log.info(f'Still remaining S3 
resources shared = {existing_shared_items}') if not existing_shared_items and revoked_folders: @@ -202,7 +202,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( session, share_uri, - models.ShareableType.Table.value + ShareableType.Table.value ) log.info(f'Still remaining LF resources shared = {existing_shared_items}') if not existing_shared_items and revoked_tables: diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index dbf0fe150..afb275a7a 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py @@ -1,10 +1,11 @@ from sqlalchemy import or_, case, func from sqlalchemy.sql import and_ -from dataall.api.constants import ShareableType, PrincipalType +from dataall.api.constants import PrincipalType from dataall.db import models, permissions from dataall.db.api import has_resource_perm from dataall.db.paginator import paginate +from dataall.modules.dataset_sharing.db.Enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index 2d6ff314c..cf79e12e4 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -9,9 +9,9 @@ ) from dataall.db import api, utils from dataall.db import models, exceptions, permissions, paginate -from dataall.db.models.Enums import ShareableType, PrincipalType +from 
dataall.db.models.Enums import PrincipalType from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ - ShareItemStatus + ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset diff --git a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py index 2cda5e189..ee323d9eb 100644 --- a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py +++ b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py @@ -12,9 +12,10 @@ from sqlalchemy.ext.declarative import declarative_base from dataall.db import api, models, permissions, utils -from dataall.db.models.Enums import ShareObjectStatus, ShareableType, PrincipalType from datetime import datetime +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus + # revision identifiers, used by Alembic. 
revision = '04d92886fabe' down_revision = 'd922057f0d91' diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 66a76761d..59ea230fd 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -12,11 +12,10 @@ from sqlalchemy.ext.declarative import declarative_base from dataall.db import api, utils, Resource from datetime import datetime -from dataall.db.models.Enums import ShareableType -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareableType, ShareItemStatus from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets_base.services.dataset_permissions import DATASET_TABLE_READ +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. 
revision = 'd05f9a5b215e' @@ -110,7 +109,7 @@ def upgrade(): share_table_items: [ShareObjectItem] = session.query(ShareObjectItem).filter( ( and_( - ShareObjectItem.status == ShareObjectStatus.Share_Succeeded.value, + ShareObjectItem.status == ShareItemStatus.Share_Succeeded.value, ShareObjectItem.itemType == ShareableType.Table.value ) ) diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 671cc793c..707a7b008 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,4 +1,5 @@ import dataall.searchproxy.indexers +from dataall.modules.dataset_sharing.db.Enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from .client import * from dataall.db import models @@ -519,7 +520,7 @@ def factory( shareUri=share.shareUri, owner="alice", itemUri=table.tableUri, - itemType=constants.ShareableType.Table.value, + itemType=ShareableType.Table.value, itemName=table.name, status=status, ) diff --git a/tests/api/test_share.py b/tests/api/test_share.py index ada27ed57..2643ce0fb 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -3,8 +3,10 @@ import pytest import dataall -from dataall.api.constants import ShareObjectStatus, ShareItemStatus -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemActions +from dataall.api.constants import PrincipalType +from dataall.modules.dataset_sharing.api.enums import ShareableType +from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemActions, ShareObjectStatus, \ + ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM, ShareObjectSM from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @@ -206,7 +208,7 @@ def share1_item_pa( yield share_item( share=share1_draft, table=table1, - 
status=dataall.api.constants.ShareItemStatus.PendingApproval.value + status=ShareItemStatus.PendingApproval.value ) @@ -281,7 +283,7 @@ def share2_item_pa( yield share_item( share=share2_submitted, table=table1, - status=dataall.api.constants.ShareItemStatus.PendingApproval.value + status=ShareItemStatus.PendingApproval.value ) @@ -409,13 +411,13 @@ def create_share_object(client, userName, group, groupUri, environmentUri, datas username=userName, groups=[group.name], datasetUri=datasetUri, - itemType=dataall.api.constants.ShareableType.Table.value if itemUri else None, + itemType=ShareableType.Table.value if itemUri else None, itemUri=itemUri, input={ 'environmentUri': environmentUri, 'groupUri': groupUri, 'principalId': groupUri, - 'principalType': dataall.api.constants.PrincipalType.Group.value, + 'principalType': PrincipalType.Group.value, }, ) @@ -847,8 +849,7 @@ def test_create_share_object_with_item_authorized(client, user2, group2, env2gro ) assert get_share_object_response.data.getShareObject.get('items').nodes[0].itemUri == table1.tableUri - assert get_share_object_response.data.getShareObject.get('items').nodes[ - 0].itemType == dataall.api.constants.ShareableType.Table.name + assert get_share_object_response.data.getShareObject.get('items').nodes[0].itemType == ShareableType.Table.name def test_get_share_object(client, share1_draft, user, group): @@ -865,7 +866,7 @@ def test_get_share_object(client, share1_draft, user, group): # Then we get the info about the share assert get_share_object_response.data.getShareObject.shareUri == share1_draft.shareUri assert get_share_object_response.data.getShareObject.get( - 'principal').principalType == dataall.api.constants.PrincipalType.Group.name + 'principal').principalType == PrincipalType.Group.name assert get_share_object_response.data.getShareObject.get('principal').principalIAMRoleName assert get_share_object_response.data.getShareObject.get('principal').SamlGroupName assert 
get_share_object_response.data.getShareObject.get('principal').region diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index 175fc6934..cfa59f774 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -1,8 +1,8 @@ import pytest -from dataall.api.constants import ShareObjectStatus from dataall.db import models from dataall.api import constants +from dataall.modules.dataset_sharing.db.Enums import ShareableType, ShareItemStatus, ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -206,9 +206,9 @@ def factory( shareUri=share.shareUri, owner="alice", itemUri=location.locationUri, - itemType=constants.ShareableType.StorageLocation.value, + itemType=ShareableType.StorageLocation.value, itemName=location.name, - status=constants.ShareItemStatus.Share_Approved.value, + status=ShareItemStatus.Share_Approved.value, ) session.add(share_item) session.commit() @@ -228,7 +228,7 @@ def factory( shareUri=share.shareUri, owner="alice", itemUri=table.tableUri, - itemType=constants.ShareableType.Table.value, + itemType=ShareableType.Table.value, itemName=table.name, status=status, ) diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 5cefd5a2c..4c4e0f6b8 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -9,7 +9,7 @@ from typing import Callable from dataall.db import models -from dataall.api import constants +from dataall.modules.dataset_sharing.api.enums import ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService @@ -139,7 +139,7 @@ def share_item_same_account(share_item_table: Callable, share_same_account: 
Shar yield share_item_table( share=share_same_account, table=table1, - status=constants.ShareItemStatus.Share_Approved.value + status=ShareItemStatus.Share_Approved.value ) @pytest.fixture(scope="module") @@ -148,7 +148,7 @@ def revoke_item_same_account(share_item_table: Callable, share_same_account: Sha yield share_item_table( share=share_same_account, table=table2, - status=constants.ShareItemStatus.Revoke_Approved.value + status=ShareItemStatus.Revoke_Approved.value ) @pytest.fixture(scope="module") @@ -157,7 +157,7 @@ def share_item_cross_account(share_item_table: Callable, share_cross_account: Sh yield share_item_table( share=share_cross_account, table=table1, - status=constants.ShareItemStatus.Share_Approved.value + status=ShareItemStatus.Share_Approved.value ) @@ -167,7 +167,7 @@ def revoke_item_cross_account(share_item_table: Callable, share_cross_account: S yield share_item_table( share=share_cross_account, table=table2, - status=constants.ShareItemStatus.Revoke_Approved.value + status=ShareItemStatus.Revoke_Approved.value ) diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index 3e29f8182..bad162b08 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -2,7 +2,7 @@ import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.tasks.dataset_subscription_task import DatasetSubscriptionService @@ -127,11 +127,11 @@ def share( shareUri=share.shareUri, owner='alice', itemUri=table.tableUri, - itemType=dataall.api.constants.ShareableType.Table.value, + itemType=ShareableType.Table.value, itemName=table.GlueTableName, 
GlueDatabaseName=table.GlueDatabaseName, GlueTableName=table.GlueTableName, - status=dataall.api.constants.ShareItemStatus.Share_Approved.value, + status=ShareItemStatus.Share_Approved.value, ) session.add(share_item) From 5654763709a96785994e28797921ff100a736cda Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 10:27:43 +0200 Subject: [PATCH 154/346] Renamed the file --- .../dataall/modules/dataset_sharing/db/{Enums.py => enums.py} | 0 backend/dataall/modules/dataset_sharing/db/models.py | 2 +- .../modules/dataset_sharing/services/data_sharing_service.py | 2 +- .../modules/dataset_sharing/services/dataset_share_service.py | 2 +- .../dataall/modules/dataset_sharing/services/share_object.py | 2 +- .../services/share_processors/lf_process_cross_account_share.py | 2 +- .../services/share_processors/lf_process_same_account_share.py | 2 +- .../services/share_processors/s3_process_share.py | 2 +- backend/dataall/modules/datasets/tasks/bucket_policy_updater.py | 2 +- .../migrations/versions/04d92886fabe_add_consumption_roles.py | 2 +- .../versions/d05f9a5b215e_backfill_dataset_table_permissions.py | 2 +- tests/api/conftest.py | 2 +- tests/api/test_share.py | 2 +- tests/tasks/conftest.py | 2 +- tests/tasks/test_subscriptions.py | 2 +- 15 files changed, 14 insertions(+), 14 deletions(-) rename backend/dataall/modules/dataset_sharing/db/{Enums.py => enums.py} (100%) diff --git a/backend/dataall/modules/dataset_sharing/db/Enums.py b/backend/dataall/modules/dataset_sharing/db/enums.py similarity index 100% rename from backend/dataall/modules/dataset_sharing/db/Enums.py rename to backend/dataall/modules/dataset_sharing/db/enums.py diff --git a/backend/dataall/modules/dataset_sharing/db/models.py b/backend/dataall/modules/dataset_sharing/db/models.py index 43436c290..85883b00c 100644 --- a/backend/dataall/modules/dataset_sharing/db/models.py +++ b/backend/dataall/modules/dataset_sharing/db/models.py @@ -5,7 +5,7 @@ from sqlalchemy.orm import 
query_expression from dataall.db import Base, utils -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareItemStatus +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus def in_one_month(): diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 560d3c03e..1f5fb2b92 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -5,7 +5,7 @@ from dataall.modules.dataset_sharing.services.share_processors.s3_process_share import ProcessS3Share from dataall.db import Engine -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemStatus, ShareableType +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.services.share_object import ShareObjectSM, ShareObjectService, ShareItemSM log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index afb275a7a..c86468941 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py @@ -5,7 +5,7 @@ from dataall.db import models, permissions from dataall.db.api import has_resource_perm from dataall.db.paginator import paginate -from dataall.modules.dataset_sharing.db.Enums import ShareableType +from dataall.modules.dataset_sharing.db.enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareItemSM from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, 
Dataset diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/services/share_object.py index cf79e12e4..3c982e6fe 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object.py @@ -10,7 +10,7 @@ from dataall.db import api, utils from dataall.db import models, exceptions, permissions, paginate from dataall.db.models.Enums import PrincipalType -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py index 7b225db92..0f6a56938 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py @@ -1,6 +1,6 @@ import logging -from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from ..share_managers import LFShareManager from dataall.aws.handlers.ram import Ram from dataall.db import models diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py index 70392765e..0a78057fa 100644 --- 
a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py @@ -1,6 +1,6 @@ import logging -from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM from ..share_managers import LFShareManager diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py index f955de7be..cb8e284dd 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py @@ -3,7 +3,7 @@ from dataall.db import models from ..share_managers import S3ShareManager from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset -from dataall.modules.dataset_sharing.db.Enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index cf40ddaa8..bfffe873c 100644 --- a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -10,7 +10,7 @@ from dataall.aws.handlers.sts import 
SessionHelper from dataall.db import get_engine from dataall.db import models -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset diff --git a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py index ee323d9eb..af764861e 100644 --- a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py +++ b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py @@ -14,7 +14,7 @@ from dataall.db import api, models, permissions, utils from datetime import datetime -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus # revision identifiers, used by Alembic. 
revision = '04d92886fabe' diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index 59ea230fd..bf8ea96b3 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -12,7 +12,7 @@ from sqlalchemy.ext.declarative import declarative_base from dataall.db import api, utils, Resource from datetime import datetime -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareableType, ShareItemStatus +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareableType, ShareItemStatus from dataall.modules.dataset_sharing.services.share_object import ShareObjectService from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 707a7b008..a0ba4ff0e 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,5 +1,5 @@ import dataall.searchproxy.indexers -from dataall.modules.dataset_sharing.db.Enums import ShareableType +from dataall.modules.dataset_sharing.db.enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from .client import * from dataall.db import models diff --git a/tests/api/test_share.py b/tests/api/test_share.py index 2643ce0fb..055208996 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -5,7 +5,7 @@ import dataall from dataall.api.constants import PrincipalType from dataall.modules.dataset_sharing.api.enums import ShareableType -from dataall.modules.dataset_sharing.db.Enums import ShareObjectActions, ShareItemActions, ShareObjectStatus, \ +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemActions, ShareObjectStatus, \ 
ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM, ShareObjectSM diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py index cfa59f774..4fbbd49e3 100644 --- a/tests/tasks/conftest.py +++ b/tests/tasks/conftest.py @@ -2,7 +2,7 @@ from dataall.db import models from dataall.api import constants -from dataall.modules.dataset_sharing.db.Enums import ShareableType, ShareItemStatus, ShareObjectStatus +from dataall.modules.dataset_sharing.db.enums import ShareableType, ShareItemStatus, ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py index bad162b08..b27df8462 100644 --- a/tests/tasks/test_subscriptions.py +++ b/tests/tasks/test_subscriptions.py @@ -2,7 +2,7 @@ import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.dataset_sharing.db.Enums import ShareObjectStatus, ShareItemStatus, ShareableType +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.tasks.dataset_subscription_task import DatasetSubscriptionService From 235ec826331687f8f7ddc5e4068fb7c112175700 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 10:32:05 +0200 Subject: [PATCH 155/346] Moved to share_object_repository.py --- backend/dataall/db/api/redshift_cluster.py | 2 +- .../modules/dataset_sharing/api/resolvers.py | 38 ++++++------- .../share_object_repository.py} | 54 +++++++++---------- .../services/data_sharing_service.py | 16 
+++--- .../services/dataset_share_service.py | 2 +- .../share_managers/s3_share_manager.py | 4 +- .../lf_process_cross_account_share.py | 8 +-- .../lf_process_same_account_share.py | 6 +-- .../share_processors/s3_process_share.py | 6 +-- .../db/dataset_location_repository.py | 4 +- .../modules/datasets/db/dataset_service.py | 2 +- .../datasets/db/dataset_table_repository.py | 2 +- ...215e_backfill_dataset_table_permissions.py | 4 +- tests/api/test_share.py | 10 ++-- 14 files changed, 79 insertions(+), 79 deletions(-) rename backend/dataall/modules/dataset_sharing/{services/share_object.py => db/share_object_repository.py} (96%) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index dece5d480..bd092ff45 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -12,7 +12,7 @@ from dataall.utils.slugify import slugify from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.modules.dataset_sharing.services.share_object import ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 6d5234e2e..c21432437 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -9,7 +9,7 @@ from dataall.modules.dataset_sharing.api.enums import ShareObjectPermission from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService +from dataall.modules.dataset_sharing.db.share_object_repository import 
ShareObjectRepository from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -45,7 +45,7 @@ def create_share_object( input['itemUri'] = itemUri input['itemType'] = itemType input['datasetUri'] = datasetUri - return ShareObjectService.create_share_object( + return ShareObjectRepository.create_share_object( session=session, username=context.username, groups=context.groups, @@ -57,7 +57,7 @@ def create_share_object( def submit_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return ShareObjectService.submit_share_object( + return ShareObjectRepository.submit_share_object( session=session, username=context.username, groups=context.groups, @@ -69,7 +69,7 @@ def submit_share_object(context: Context, source, shareUri: str = None): def approve_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - share = ShareObjectService.approve_share_object( + share = ShareObjectRepository.approve_share_object( session=session, username=context.username, groups=context.groups, @@ -92,7 +92,7 @@ def approve_share_object(context: Context, source, shareUri: str = None): def reject_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return ShareObjectService.reject_share_object( + return ShareObjectRepository.reject_share_object( session=session, username=context.username, groups=context.groups, @@ -104,7 +104,7 @@ def reject_share_object(context: Context, source, shareUri: str = None): def revoke_items_share_object(context: Context, source, input): with context.engine.scoped_session() as session: - share = ShareObjectService.revoke_items_share_object( + share = ShareObjectRepository.revoke_items_share_object( session=session, username=context.username, groups=context.groups, @@ 
-127,11 +127,11 @@ def revoke_items_share_object(context: Context, source, input): def delete_share_object(context: Context, source, shareUri: str = None): with context.engine.scoped_session() as session: - share = ShareObjectService.get_share_by_uri(session, shareUri) + share = ShareObjectRepository.get_share_by_uri(session, shareUri) if not share: raise db.exceptions.ObjectNotFound('ShareObject', shareUri) - ShareObjectService.delete_share_object( + ShareObjectRepository.delete_share_object( session=session, username=context.username, groups=context.groups, @@ -144,7 +144,7 @@ def delete_share_object(context: Context, source, shareUri: str = None): def add_shared_item(context, source, shareUri: str = None, input: dict = None): with context.engine.scoped_session() as session: - share_item = ShareObjectService.add_share_object_item( + share_item = ShareObjectRepository.add_share_object_item( session=session, username=context.username, groups=context.groups, @@ -162,8 +162,8 @@ def remove_shared_item(context, source, shareItemUri: str = None): ) if not share_item: raise db.exceptions.ObjectNotFound('ShareObjectItem', shareItemUri) - share = ShareObjectService.get_share_by_uri(session, share_item.shareUri) - ShareObjectService.remove_share_object_item( + share = ShareObjectRepository.get_share_by_uri(session, share_item.shareUri) + ShareObjectRepository.remove_share_object_item( session=session, username=context.username, groups=context.groups, @@ -186,7 +186,7 @@ def list_shared_items( if not filter: filter = {} with context.engine.scoped_session() as session: - return ShareObjectService.list_shared_items( + return ShareObjectRepository.list_shared_items( session=session, username=context.username, groups=context.groups, @@ -200,7 +200,7 @@ def resolve_shared_item(context, source: ShareObjectItem, **kwargs): if not source: return None with context.engine.scoped_session() as session: - return ShareObjectService.get_share_item( + return 
ShareObjectRepository.get_share_item( session=session, username=context.username, groups=context.groups, @@ -212,7 +212,7 @@ def resolve_shared_item(context, source: ShareObjectItem, **kwargs): def get_share_object(context, source, shareUri: str = None): with context.engine.scoped_session() as session: - return ShareObjectService.get_share_object( + return ShareObjectRepository.get_share_object( session=session, username=context.username, groups=context.groups, @@ -311,7 +311,7 @@ def resolve_share_object_statistics(context: Context, source: ShareObject, **kwa if not source: return None with context.engine.scoped_session() as session: - return ShareObjectService.resolve_share_object_statistics( + return ShareObjectRepository.resolve_share_object_statistics( session, source.shareUri ) @@ -320,7 +320,7 @@ def resolve_existing_shared_items(context: Context, source: ShareObject, **kwarg if not source: return None with context.engine.scoped_session() as session: - return ShareObjectService.check_existing_shared_items( + return ShareObjectRepository.check_existing_shared_items( session, source.shareUri ) @@ -333,7 +333,7 @@ def list_shareable_objects( if not filter: filter = {'page': 1, 'pageSize': 5} with context.engine.scoped_session() as session: - return ShareObjectService.list_shareable_items( + return ShareObjectRepository.list_shareable_items( session=session, username=context.username, groups=context.groups, @@ -347,7 +347,7 @@ def list_shares_in_my_inbox(context: Context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return ShareObjectService.list_user_received_share_requests( + return ShareObjectRepository.list_user_received_share_requests( session=session, username=context.username, groups=context.groups, @@ -361,7 +361,7 @@ def list_shares_in_my_outbox(context: Context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return 
ShareObjectService.list_user_sent_share_requests( + return ShareObjectRepository.list_user_sent_share_requests( session=session, username=context.username, groups=context.groups, diff --git a/backend/dataall/modules/dataset_sharing/services/share_object.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py similarity index 96% rename from backend/dataall/modules/dataset_sharing/services/share_object.py rename to backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 3c982e6fe..6f88548ee 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -149,7 +149,7 @@ def run_transition(self, transition): def update_state(self, session, share, new_state): logger.info(f"Updating share object {share.shareUri} in DB from {self._state} to state {new_state}") - ShareObjectService.update_share_object_status( + ShareObjectRepository.update_share_object_status( session=session, shareUri=share.shareUri, status=new_state @@ -272,14 +272,14 @@ def update_state(self, session, share_uri, new_state): if share_uri and (new_state != self._state): if new_state == ShareItemStatus.Deleted.value: logger.info(f"Deleting share items in DB in {self._state} state") - ShareObjectService.delete_share_item_status_batch( + ShareObjectRepository.delete_share_item_status_batch( session=session, share_uri=share_uri, status=self._state ) else: logger.info(f"Updating share items in DB from {self._state} to state {new_state}") - ShareObjectService.update_share_item_status_batch( + ShareObjectRepository.update_share_item_status_batch( session=session, share_uri=share_uri, old_status=self._state, @@ -292,7 +292,7 @@ def update_state(self, session, share_uri, new_state): def update_state_single_item(self, session, share_item, new_state): logger.info(f"Updating share item in DB {share_item.shareItemUri} status to {new_state}") - 
ShareObjectService.update_share_item_status( + ShareObjectRepository.update_share_item_status( session=session, uri=share_item.shareItemUri, status=new_state @@ -318,7 +318,7 @@ def get_share_item_revokable_states(): ] -class ShareObjectService: +class ShareObjectRepository: @staticmethod @has_resource_perm(permissions.CREATE_SHARE_OBJECT) def create_share_object( @@ -381,7 +381,7 @@ def create_share_object( message=f'Team: {groupUri} is managing the dataset {dataset.name}', ) - ShareObjectService.validate_group_membership( + ShareObjectRepository.validate_group_membership( session=session, username=username, groups=groups, @@ -537,9 +537,9 @@ def submit_share_object( data: dict = None, check_perm: bool = False, ) -> ShareObject: - share = ShareObjectService.get_share_by_uri(session, uri) + share = ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectService.get_share_items_states(session, uri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) valid_states = [ShareItemStatus.PendingApproval.value] valid_share_items_states = [x for x in valid_states if x in share_items_states] @@ -575,9 +575,9 @@ def approve_share_object( data: dict = None, check_perm: bool = False, ) -> ShareObject: - share = ShareObjectService.get_share_by_uri(session, uri) + share = ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectService.get_share_items_states(session, uri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Approve.value) @@ -622,9 +622,9 @@ def reject_share_object( check_perm: bool = False, ) -> ShareObject: - share = ShareObjectService.get_share_by_uri(session, uri) + share = 
ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectService.get_share_items_states(session, uri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Reject.value) @@ -657,10 +657,10 @@ def revoke_items_share_object( check_perm: bool = False, ) -> ShareObject: - share = ShareObjectService.get_share_by_uri(session, uri) + share = ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - revoked_items_states = ShareObjectService.get_share_items_states(session, uri, data.get("revokedItemUris")) - revoked_items = [ShareObjectService.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] + revoked_items_states = ShareObjectRepository.get_share_items_states(session, uri, data.get("revokedItemUris")) + revoked_items = [ShareObjectRepository.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] if revoked_items_states == []: raise exceptions.ShareItemsFound( @@ -718,7 +718,7 @@ def get_share_item( ): share_item: ShareObjectItem = data.get( 'share_item', - ShareObjectService.get_share_item_by_uri(session, data['shareItemUri']), + ShareObjectRepository.get_share_item_by_uri(session, data['shareItemUri']), ) if share_item.itemType == ShareableType.Table.value: return session.query(DatasetTable).get(share_item.itemUri) @@ -832,11 +832,11 @@ def remove_share_object_item( share_item: ShareObjectItem = data.get( 'share_item', - ShareObjectService.get_share_item_by_uri(session, data['shareItemUri']), + ShareObjectRepository.get_share_item_by_uri(session, data['shareItemUri']), ) share: ShareObject = data.get( 'share', - ShareObjectService.get_share_by_uri(session, uri), + ShareObjectRepository.get_share_by_uri(session, uri), ) Item_SM 
= ShareItemSM(share_item.status) @@ -848,8 +848,8 @@ def remove_share_object_item( @staticmethod @has_resource_perm(permissions.DELETE_SHARE_OBJECT) def delete_share_object(session, username, groups, uri, data=None, check_perm=None): - share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) - share_items_states = ShareObjectService.get_share_items_states(session, uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in share_items_states] Share_SM = ShareObjectSM(share.status) @@ -872,7 +872,7 @@ def delete_share_object(session, username, groups, uri, data=None, check_perm=No @staticmethod def check_existing_shared_items(session, uri): - share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) share_item_shared_states = ShareItemSM.get_share_item_shared_states() shared_items = session.query(ShareObjectItem).filter( and_( @@ -886,7 +886,7 @@ def check_existing_shared_items(session, uri): @staticmethod def check_existing_shared_items_of_type(session, uri, item_type): - share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) share_item_shared_states = ShareItemSM.get_share_item_shared_states() shared_items = session.query(ShareObjectItem).filter( and_( @@ -901,7 +901,7 @@ def check_existing_shared_items_of_type(session, uri, item_type): @staticmethod def check_pending_share_items(session, uri): - share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) shared_items = session.query(ShareObjectItem).filter( and_( ShareObjectItem.shareUri == share.shareUri, @@ -925,7 +925,7 @@ def 
get_share_item_by_uri(session, uri): @staticmethod @has_resource_perm(permissions.LIST_SHARED_ITEMS) def list_shared_items(session, username, groups, uri, data=None, check_perm=None): - share: ShareObject = ShareObjectService.get_share_by_uri(session, uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) query = session.query(ShareObjectItem).filter( ShareObjectItem.shareUri == share.shareUri, ) @@ -939,7 +939,7 @@ def list_shareable_items( ): share: ShareObject = data.get( - 'share', ShareObjectService.get_share_by_uri(session, uri) + 'share', ShareObjectRepository.get_share_by_uri(session, uri) ) share_item_revokable_states = ShareItemSM.get_share_item_revokable_states() datasetUri = share.datasetUri @@ -1087,7 +1087,7 @@ def update_share_object_status( status: str, ) -> ShareObject: - share = ShareObjectService.get_share_by_uri(session, shareUri) + share = ShareObjectRepository.get_share_by_uri(session, shareUri) share.status = status session.commit() return share @@ -1099,7 +1099,7 @@ def update_share_item_status( status: str, ) -> ShareObjectItem: - share_item = ShareObjectService.get_share_item_by_uri(session, uri) + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) share_item.status = status session.commit() return share_item diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 1f5fb2b92..53ab11c98 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -6,7 +6,7 @@ from dataall.db import Engine from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemStatus, ShareableType -from dataall.modules.dataset_sharing.services.share_object import ShareObjectSM, ShareObjectService, ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import 
ShareObjectSM, ShareObjectRepository, ShareItemSM log = logging.getLogger(__name__) @@ -42,7 +42,7 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: share, source_environment, target_environment, - ) = ShareObjectService.get_share_data(session, share_uri) + ) = ShareObjectRepository.get_share_data(session, share_uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) @@ -51,7 +51,7 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: ( shared_tables, shared_folders - ) = ShareObjectService.get_share_data_items(session, share_uri, ShareItemStatus.Share_Approved.value) + ) = ShareObjectRepository.get_share_data_items(session, share_uri, ShareItemStatus.Share_Approved.value) log.info(f'Granting permissions to folders: {shared_folders}') @@ -129,7 +129,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): share, source_environment, target_environment, - ) = ShareObjectService.get_share_data(session, share_uri) + ) = ShareObjectRepository.get_share_data(session, share_uri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) @@ -140,7 +140,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): ( revoked_tables, revoked_folders - ) = ShareObjectService.get_share_data_items(session, share_uri, ShareItemStatus.Revoke_Approved.value) + ) = ShareObjectRepository.get_share_data_items(session, share_uri, ShareItemStatus.Revoke_Approved.value) new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state(session, share_uri, new_state) @@ -158,7 +158,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): env_group, ) log.info(f'revoking folders succeeded = {revoked_folders_succeed}') - existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( + existing_shared_items = ShareObjectRepository.check_existing_shared_items_of_type( session, share_uri, 
ShareableType.StorageLocation.value @@ -199,7 +199,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): revoked_tables_succeed = processor.process_revoked_shares() log.info(f'revoking tables succeeded = {revoked_tables_succeed}') - existing_shared_items = ShareObjectService.check_existing_shared_items_of_type( + existing_shared_items = ShareObjectRepository.check_existing_shared_items_of_type( session, share_uri, ShareableType.Table.value @@ -210,7 +210,7 @@ def revoke_share(cls, engine: Engine, share_uri: str): clean_up_tables = processor.clean_up_share() log.info(f"Clean up LF successful = {clean_up_tables}") - existing_pending_items = ShareObjectService.check_pending_share_items(session, share_uri) + existing_pending_items = ShareObjectRepository.check_pending_share_items(session, share_uri) if existing_pending_items: new_share_state = Share_SM.run_transition(ShareObjectActions.FinishPending.value) else: diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index c86468941..f40437d89 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py @@ -7,7 +7,7 @@ from dataall.db.paginator import paginate from dataall.modules.dataset_sharing.db.enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py index 004ed44f7..9662a3529 100644 --- 
a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py @@ -10,7 +10,7 @@ from dataall.aws.handlers.iam import IAM from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset @@ -39,7 +39,7 @@ def __init__( self.target_folder = target_folder self.source_environment = source_environment self.target_environment = target_environment - self.share_item = ShareObjectService.find_share_item_by_folder( + self.share_item = ShareObjectRepository.find_share_item_by_folder( session, share, target_folder, diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py index 0f6a56938..6e96fa0b5 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py @@ -6,7 +6,7 @@ from dataall.db import models from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.dataset_sharing.db.models import ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareItemSM log = logging.getLogger(__name__) @@ -74,7 +74,7 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: log.info(f"Sharing table 
{table.GlueTableName}...") - share_item = ShareObjectService.find_share_item_by_table( + share_item = ShareObjectRepository.find_share_item_by_table( self.session, self.share, table ) @@ -140,7 +140,7 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = ShareObjectService.find_share_item_by_table( + share_item = ShareObjectRepository.find_share_item_by_table( self.session, self.share, table ) @@ -184,7 +184,7 @@ def clean_up_share(self) -> bool: self.delete_shared_database() - if not ShareObjectService.other_approved_share_object_exists( + if not ShareObjectRepository.other_approved_share_object_exists( self.session, self.target_environment.environmentUri, self.dataset.datasetUri, diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py index 0a78057fa..b6e11482d 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py @@ -2,7 +2,7 @@ from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareItemSM from ..share_managers import LFShareManager from dataall.db import models from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @@ -70,7 +70,7 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: - share_item = ShareObjectService.find_share_item_by_table( + share_item = 
ShareObjectRepository.find_share_item_by_table( self.session, self.share, table ) @@ -121,7 +121,7 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = ShareObjectService.find_share_item_by_table( + share_item = ShareObjectRepository.find_share_item_by_table( self.session, self.share, table ) if not share_item: diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py index cb8e284dd..7b79f4013 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py @@ -5,7 +5,7 @@ from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareItemSM log = logging.getLogger(__name__) @@ -64,7 +64,7 @@ def process_approved_shares( success = True for folder in share_folders: log.info(f'sharing folder: {folder}') - sharing_item = ShareObjectService.find_share_item_by_folder( + sharing_item = ShareObjectRepository.find_share_item_by_folder( session, share, folder, @@ -129,7 +129,7 @@ def process_revoked_shares( success = True for folder in revoke_folders: log.info(f'revoking access to folder: {folder}') - removing_item = ShareObjectService.find_share_item_by_folder( + removing_item = ShareObjectRepository.find_share_item_by_folder( session, share, folder, diff --git 
a/backend/dataall/modules/datasets/db/dataset_location_repository.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py index d2809e28e..ffb0617b4 100644 --- a/backend/dataall/modules/datasets/db/dataset_location_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -7,10 +7,10 @@ from dataall.db.api import Glossary from dataall.db import paginate, exceptions from dataall.modules.dataset_sharing.db.models import ShareObjectItem -from dataall.modules.dataset_sharing.services.share_object import ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, CREATE_DATASET_FOLDER, \ +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, \ DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER, CREATE_DATASET_FOLDER logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/db/dataset_service.py b/backend/dataall/modules/datasets/db/dataset_service.py index 0fe28693f..e4988d853 100644 --- a/backend/dataall/modules/datasets/db/dataset_service.py +++ b/backend/dataall/modules/datasets/db/dataset_service.py @@ -19,7 +19,7 @@ from dataall.db import models, exceptions, paginate, permissions from dataall.db.models.Enums import Language, ConfidentialityClassification from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from 
dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 5db57a59a..cdf473d03 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -7,7 +7,7 @@ from dataall.db import exceptions, paginate from dataall.db.api import Glossary, ResourcePolicy, Environment from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.services.share_object import ShareItemSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ UPDATE_DATASET_TABLE from dataall.modules.datasets.db.dataset_service import DatasetService diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index bf8ea96b3..c696f0896 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -13,7 +13,7 @@ from dataall.db import api, utils, Resource from datetime import datetime from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareableType, ShareItemStatus -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ @@ -115,7 +115,7 @@ def upgrade(): ) ).all() for shared_table in share_table_items: - share = 
ShareObjectService.get_share_by_uri(session, shared_table.shareUri) + share = ShareObjectRepository.get_share_by_uri(session, shared_table.shareUri) api.ResourcePolicy.attach_resource_policy( session=session, group=share.principalId, diff --git a/tests/api/test_share.py b/tests/api/test_share.py index 055208996..e1f6983c0 100644 --- a/tests/api/test_share.py +++ b/tests/api/test_share.py @@ -8,7 +8,7 @@ from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemActions, ShareObjectStatus, \ ShareItemStatus from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.modules.dataset_sharing.services.share_object import ShareObjectService, ShareItemSM, ShareObjectSM +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareItemSM, ShareObjectSM from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @@ -1351,9 +1351,9 @@ def test_delete_share_object_remaining_items_error( def _successfull_processing_for_share_object(db, share): with db.scoped_session() as session: print('Processing share with action ShareObjectActions.Start') - share = ShareObjectService.get_share_by_uri(session, share.shareUri) + share = ShareObjectRepository.get_share_by_uri(session, share.shareUri) - share_items_states = ShareObjectService.get_share_items_states(session, share.shareUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, share.shareUri) Share_SM = ShareObjectSM(share.status) new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) @@ -1368,8 +1368,8 @@ def _successfull_processing_for_share_object(db, share): print('Processing share with action ShareObjectActions.Finish \ and ShareItemActions.Success') - share = ShareObjectService.get_share_by_uri(session, share.shareUri) - share_items_states = ShareObjectService.get_share_items_states(session, share.shareUri) + share = ShareObjectRepository.get_share_by_uri(session, 
share.shareUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, share.shareUri) new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) From c598dcb218f3c5794640d74b19009e3377d8525e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 11:17:21 +0200 Subject: [PATCH 156/346] Introduced DatasetColumnService --- .../datasets/api/table_column/resolvers.py | 82 +++---------------- .../datasets/db/dataset_column_repository.py | 42 ++++++++++ .../services/dataset_column_service.py | 56 +++++++++++++ 3 files changed, 109 insertions(+), 71 deletions(-) create mode 100644 backend/dataall/modules/datasets/db/dataset_column_repository.py create mode 100644 backend/dataall/modules/datasets/services/dataset_column_service.py diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 0d5c05a69..a11a6539d 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -1,13 +1,7 @@ -from sqlalchemy import or_ - -from dataall import db from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker from dataall.db import paginate, models -from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.services.dataset_column_service import DatasetColumnService from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable -from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE def list_table_columns( @@ -20,47 +14,13 @@ def list_table_columns( tableUri = source.tableUri if not filter: filter = {} - with context.engine.scoped_session() as session: - if not source: - source = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) - q = ( - 
session.query(DatasetTableColumn) - .filter( - DatasetTableColumn.tableUri == tableUri, - DatasetTableColumn.deleted.is_(None), - ) - .order_by(DatasetTableColumn.columnType.asc()) - ) - term = filter.get('term') - if term: - q = q.filter( - or_( - DatasetTableColumn.label.ilike('%' + term + '%'), - DatasetTableColumn.description.ilike('%' + term + '%'), - ) - ).order_by(DatasetTableColumn.columnType.asc()) - - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 65) - ).to_dict() + DatasetColumnService.paginate_active_columns_for_table(tableUri, **filter) def sync_table_columns(context: Context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - session, tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=UPDATE_DATASET_TABLE, - ) - task = models.Task(action='glue.table.columns', targetUri=table.tableUri) - session.add(task) - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return list_table_columns(context, source=table, tableUri=tableUri) + if tableUri is None: + return None + return DatasetColumnService.sync_table_columns(tableUri) def resolve_terms(context, source: DatasetTableColumn, **kwargs): @@ -76,31 +36,11 @@ def resolve_terms(context, source: DatasetTableColumn, **kwargs): def update_table_column( context: Context, source, columnUri: str = None, input: dict = None ): - with context.engine.scoped_session() as session: - column: DatasetTableColumn = session.query( - DatasetTableColumn - ).get(columnUri) - if not column: - raise db.exceptions.ObjectNotFound('Column', columnUri) - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - session, column.tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - 
username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=UPDATE_DATASET_TABLE, - ) - column.description = input.get('description', 'No description provided') - session.add(column) - session.commit() + if columnUri is None: + return None - task = models.Task( - action='glue.table.update_column', targetUri=column.columnUri - ) - session.add(task) - session.commit() + if input is None: + input = {} - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - return column + description = input.get('description', 'No description provided') + return DatasetColumnService.update_table_column_description(columnUri, description) diff --git a/backend/dataall/modules/datasets/db/dataset_column_repository.py b/backend/dataall/modules/datasets/db/dataset_column_repository.py new file mode 100644 index 000000000..9caf3fa0e --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_column_repository.py @@ -0,0 +1,42 @@ +from operator import or_ + +from dataall.db import paginate +from dataall.db.exceptions import ObjectNotFound +from dataall.modules.datasets_base.db.models import DatasetTableColumn + + +class DatasetColumnRepository: + @staticmethod + def get_column(session, column_uri) -> DatasetTableColumn: + column = session.query(DatasetTableColumn).get(column_uri) + if not column: + raise ObjectNotFound('Column', column_uri) + return column + + @staticmethod + def save_and_commit(session, column: DatasetTableColumn): + session.add(column) + session.commit() + + @staticmethod + def paginate_active_columns_for_table(session, table_uri: str, term, page, page_size): + q = ( + session.query(DatasetTableColumn) + .filter( + DatasetTableColumn.tableUri == table_uri, + DatasetTableColumn.deleted.is_(None), + ) + .order_by(DatasetTableColumn.columnType.asc()) + ) + + if term: + q = q.filter( + or_( + DatasetTableColumn.label.ilike('%' + term + '%'), + DatasetTableColumn.description.ilike('%' + term + '%'), + ) + 
).order_by(DatasetTableColumn.columnType.asc()) + + return paginate(q, page=page, page_size=page_size).to_dict() + + diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py new file mode 100644 index 000000000..c2d308846 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -0,0 +1,56 @@ +from dataall.aws.handlers.service_handlers import Worker +from dataall.core.context import get_context +from dataall.db import models +from dataall.db.api import ResourcePolicy +from dataall.modules.datasets.db.dataset_column_repository import DatasetColumnRepository +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets_base.db.models import DatasetTable, DatasetTableColumn + + +class DatasetColumnService: + + @staticmethod + def paginate_active_columns_for_table(table_uri: str, page=1, pageSize=65, term=None): + # TODO THERE WAS NO PERMISSION CHECK!!! 
+ with get_context().db_engine.scoped_session() as session: + DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, term, page, pageSize) + + @staticmethod + def sync_table_columns(table_uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + DatasetColumnService._check_resource_permission(session, table_uri, UPDATE_DATASET_TABLE) + task = models.Task(action='glue.table.columns', targetUri=table_uri) + session.add(task) + Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + return DatasetColumnService.paginate_active_columns_for_table(table_uri) + + @staticmethod + def update_table_column_description(column_uri: str, description) -> DatasetTableColumn: + with get_context().db_engine.scoped_session() as session: + column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) + DatasetColumnService._check_resource_permission(session, column.tableUri, UPDATE_DATASET_TABLE) + + column.description = description + + task = models.Task( + action='glue.table.update_column', targetUri=column.columnUri + ) + session.add(task) + session.commit() + + Worker.queue(engine=get_context().db_engine, task_ids=[task.taskUri]) + return column + + @staticmethod + def _check_resource_permission(session, table_uri: str, permission): + context = get_context() + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=table.datasetUri, + permission_name=permission, + ) From 2331d17b8868e8720a3b7e93ec5dd70304a5b954 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 17:27:55 +0200 Subject: [PATCH 157/346] Created DatasetTableService --- .../modules/datasets/api/table/resolvers.py | 167 ++------------- .../services/dataset_table_service.py | 191 ++++++++++++++++++ 2 files changed, 206 
insertions(+), 152 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/dataset_table_service.py diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 8e70ad271..89404a75b 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -1,36 +1,17 @@ -import json import logging -from botocore.exceptions import ClientError -from pyathena import connect - from dataall import db from dataall.modules.datasets.api.dataset.resolvers import get_dataset from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper -from dataall.db import models -from dataall.db.api import ResourcePolicy, Glossary +from dataall.db.api import Glossary +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE -from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE -from dataall.utils import json_utils -from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository log = logging.getLogger(__name__) def create_table(context, source, datasetUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - table = DatasetTableRepository.create_dataset_table( - session=session, - uri=datasetUri, - data=input, - ) - DatasetTableIndexer.upsert(session, table_uri=table.tableUri) - return table + return DatasetTableService.create_table(dataset_uri=datasetUri, table_data=input) def list_dataset_tables(context, source, filter: dict 
= None): @@ -38,119 +19,33 @@ def list_dataset_tables(context, source, filter: dict = None): return None if not filter: filter = {} - with context.engine.scoped_session() as session: - return DatasetTableRepository.list_dataset_tables( - session=session, - uri=source.datasetUri, - data=filter, - ) + return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, filter=filter) def get_table(context, source: Dataset, tableUri: str = None): - with context.engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) - return DatasetTableRepository.get_dataset_table( - session=session, - uri=table.datasetUri, - data={ - 'tableUri': tableUri, - }, - ) + return DatasetTableService.update_table(table_uri=tableUri) def update_table(context, source, tableUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) - - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) - - input['table'] = table - input['tableUri'] = table.tableUri - - DatasetTableRepository.update_dataset_table( - session=session, - uri=dataset.datasetUri, - data=input, - ) - DatasetTableIndexer.upsert(session, table_uri=table.tableUri) - return table + return DatasetTableService.update_table(table_uri=tableUri, input=input) def delete_table(context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) - DatasetTableRepository.delete_dataset_table( - session=session, - uri=table.datasetUri, - data={ - 'tableUri': tableUri, - }, - ) - DatasetTableIndexer.delete_doc(doc_id=tableUri) - return True + if not tableUri: + return False + return DatasetTableService.delete_table(table_uri=tableUri) def preview(context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: DatasetTable = 
DatasetTableRepository.get_dataset_table_by_uri( - session, tableUri - ) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) - if ( - dataset.confidentiality - != models.ConfidentialityClassification.Unclassified.value - ): - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=PREVIEW_DATASET_TABLE, - ) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - env_workgroup = {} - boto3_session = SessionHelper.remote_session(accountid=table.AWSAccountId) - creds = boto3_session.get_credentials() - try: - env_workgroup = boto3_session.client( - 'athena', region_name=env.region - ).get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) - except ClientError as e: - log.info( - f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' - f'due to: {e}' - ) - - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), - s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset.datasetUri}/{table.tableUri}', - region_name=table.region, - ) - cursor = connection.cursor() - - SQL = f'select * from "{table.GlueDatabaseName}"."{table.GlueTableName}" limit 50' # nosec - cursor.execute(SQL) - fields = [] - for f in cursor.description: - fields.append(json.dumps({'name': f[0]})) - rows = [] - for row in cursor: - rows.append(json.dumps(json_utils.to_json(list(row)))) - - return {'rows': rows, 'fields': fields} + if not tableUri: + return None + return DatasetTableService.preview(table_uri=tableUri) def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - 
session, source.tableUri - ) - return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') + return DatasetTableService.get_glue_table_properties(source.tableUri) def resolve_dataset(context, source: DatasetTable, **kwargs): @@ -175,34 +70,7 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): def publish_table_update(context: Context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - session, tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=UPDATE_DATASET_TABLE, - ) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." 
- ) - - task = models.Task( - targetUri=table.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': table.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return True + return DatasetTableService.publish_table_update(table_uri=tableUri) def resolve_redshift_copy_schema(context, source: DatasetTable, clusterUri: str): @@ -224,9 +92,4 @@ def resolve_redshift_copy_location( def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): - with context.engine.scoped_session() as session: - return DatasetTableRepository.get_dataset_tables_shared_with_env( - session, - envUri, - datasetUri - ) + return DatasetTableService.list_shared_tables_by_env_dataset(datasetUri, envUri) diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py new file mode 100644 index 000000000..f3585422c --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -0,0 +1,191 @@ +import json +import logging + +from botocore.exceptions import ClientError +from pyathena import connect + +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.core.context import get_context +from dataall.db import models +from dataall.db.api import ResourcePolicy, Environment +from dataall.modules.datasets import DatasetTableIndexer +from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets_base.db.models import Dataset, DatasetTable +from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE +from dataall.utils import json_utils + +log = 
logging.getLogger(__name__) + + +class DatasetTableService: + + @staticmethod + def create_table(dataset_uri: str, table_data: dict): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.create_dataset_table( + session=session, + uri=dataset_uri, + data=table_data, + ) + DatasetTableIndexer.upsert(session, table_uri=table.tableUri) + return table + + @staticmethod + def list_dataset_tables(dataset_uri: str, filter: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetTableRepository.list_dataset_tables( + session=session, + uri=dataset_uri, + data=filter, + ) + + @staticmethod + def get_table(table_uri: str): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return DatasetTableRepository.get_dataset_table( + session=session, + uri=table.datasetUri, + data={ + 'tableUri': table_uri, + }, + ) + + @staticmethod + def update_table(table_uri: str, input: dict = None): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + + input['table'] = table + input['tableUri'] = table.tableUri + + DatasetTableRepository.update_dataset_table( + session=session, + uri=dataset.datasetUri, + data=input, + ) + DatasetTableIndexer.upsert(session, table_uri=table.tableUri) + return table + + @staticmethod + def delete_table(table_uri: str): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + DatasetTableRepository.delete_dataset_table( + session=session, + uri=table.datasetUri, + data={ + 'tableUri': table_uri, + }, + ) + DatasetTableIndexer.delete_doc(doc_id=table_uri) + return True + + @staticmethod + def preview(table_uri: str): + context = get_context() + with 
context.db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( + session, table_uri + ) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + if ( + dataset.confidentiality + != models.ConfidentialityClassification.Unclassified.value + ): + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=table.tableUri, + permission_name=PREVIEW_DATASET_TABLE, + ) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + env_workgroup = {} + boto3_session = SessionHelper.remote_session(accountid=table.AWSAccountId) + creds = boto3_session.get_credentials() + try: + env_workgroup = boto3_session.client( + 'athena', region_name=env.region + ).get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) + except ClientError as e: + log.info( + f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' + f'due to: {e}' + ) + + connection = connect( + aws_access_key_id=creds.access_key, + aws_secret_access_key=creds.secret_key, + aws_session_token=creds.token, + work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), + s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset.datasetUri}/{table.tableUri}', + region_name=table.region, + ) + cursor = connection.cursor() + + sql = f'select * from "{table.GlueDatabaseName}"."{table.GlueTableName}" limit 50' # nosec + cursor.execute(sql) + fields = [] + for f in cursor.description: + fields.append(json.dumps({'name': f[0]})) + rows = [] + for row in cursor: + rows.append(json.dumps(json_utils.to_json(list(row)))) + + return {'rows': rows, 'fields': fields} + + @staticmethod + def get_glue_table_properties(table_uri: str): + with get_context().db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( + session, table_uri + ) + return 
json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') + + @staticmethod + def publish_table_update(table_uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( + session, table_uri + ) + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=table.datasetUri, + permission_name=UPDATE_DATASET_TABLE, + ) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: + raise Exception( + 'Subscriptions are disabled. ' + "First enable subscriptions for this dataset's environment then retry." + ) + + task = models.Task( + targetUri=table.datasetUri, + action='sns.dataset.publish_update', + payload={'s3Prefix': table.S3Prefix}, + ) + session.add(task) + + Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + return True + + @staticmethod + def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): + with get_context().db_engine.scoped_session() as session: + return DatasetTableRepository.get_dataset_tables_shared_with_env( + session, + dataset_uri, + env_uri + ) + + From 95557ba68d662411e4e8bab1f19e3d3a34ea2f6b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 10 May 2023 17:37:37 +0200 Subject: [PATCH 158/346] Extracted AthenaTableClient --- .../datasets/aws/athena_table_client.py | 58 +++++++++++++++++++ .../services/dataset_table_service.py | 39 +------------ 2 files changed, 60 insertions(+), 37 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/athena_table_client.py diff --git a/backend/dataall/modules/datasets/aws/athena_table_client.py b/backend/dataall/modules/datasets/aws/athena_table_client.py new file mode 100644 index 
000000000..d3132df3f --- /dev/null +++ b/backend/dataall/modules/datasets/aws/athena_table_client.py @@ -0,0 +1,58 @@ +import json +import logging +from pyathena import connect + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.db.models import Environment +from dataall.modules.datasets_base.db.models import DatasetTable +from dataall.utils import json_utils + + +log = logging.getLogger(__name__) + + +class AthenaTableClient: + + def __init__(self, env: Environment, table: DatasetTable): + session = SessionHelper.remote_session(accountid=table.AWSAccountId) + self._client = session.client('athena', region_name=env.region) + self._creds = session.get_credentials() + self._env = env + self._table = table + + def get_table(self, dataset_uri): + env = self._env + table = self._table + creds = self._creds + + env_workgroup = {} + try: + env_workgroup = self._client.get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) + except ClientError as e: + log.info( + f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' + f'due to: {e}' + ) + + connection = connect( + aws_access_key_id=creds.access_key, + aws_secret_access_key=creds.secret_key, + aws_session_token=creds.token, + work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), + s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset_uri}/{table.tableUri}', + region_name=table.region, + ) + cursor = connection.cursor() + + sql = f'select * from "{table.GlueDatabaseName}"."{table.GlueTableName}" limit 50' # nosec + cursor.execute(sql) + fields = [] + for f in cursor.description: + fields.append(json.dumps({'name': f[0]})) + rows = [] + for row in cursor: + rows.append(json.dumps(json_utils.to_json(list(row)))) + + return {'rows': rows, 'fields': fields} diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py 
index f3585422c..fc16fc564 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -1,15 +1,12 @@ -import json import logging -from botocore.exceptions import ClientError -from pyathena import connect from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context from dataall.db import models from dataall.db.api import ResourcePolicy, Environment from dataall.modules.datasets import DatasetTableIndexer +from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE @@ -105,39 +102,7 @@ def preview(table_uri: str): permission_name=PREVIEW_DATASET_TABLE, ) env = Environment.get_environment_by_uri(session, dataset.environmentUri) - env_workgroup = {} - boto3_session = SessionHelper.remote_session(accountid=table.AWSAccountId) - creds = boto3_session.get_credentials() - try: - env_workgroup = boto3_session.client( - 'athena', region_name=env.region - ).get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) - except ClientError as e: - log.info( - f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' - f'due to: {e}' - ) - - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), - s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset.datasetUri}/{table.tableUri}', - region_name=table.region, - ) - cursor = connection.cursor() - - sql = f'select * from "{table.GlueDatabaseName}"."{table.GlueTableName}" limit 50' # nosec - 
cursor.execute(sql) - fields = [] - for f in cursor.description: - fields.append(json.dumps({'name': f[0]})) - rows = [] - for row in cursor: - rows.append(json.dumps(json_utils.to_json(list(row)))) - - return {'rows': rows, 'fields': fields} + return AthenaTableClient(env, table).get_table(dataset_uri=dataset.datasetUri) @staticmethod def get_glue_table_properties(table_uri: str): From 445e25a91a91b6477b27373a663dd3d1d14eb37d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 10:46:30 +0200 Subject: [PATCH 159/346] Split logic between service and repo for DatasetTable --- backend/dataall/core/permission_checker.py | 13 +- .../db/share_object_repository.py | 19 +++ .../modules/datasets/api/table/resolvers.py | 8 +- .../datasets/db/dataset_table_repository.py | 141 ++-------------- .../services/dataset_table_service.py | 151 +++++++++++------- 5 files changed, 136 insertions(+), 196 deletions(-) diff --git a/backend/dataall/core/permission_checker.py b/backend/dataall/core/permission_checker.py index 784665996..1fccd8fb4 100644 --- a/backend/dataall/core/permission_checker.py +++ b/backend/dataall/core/permission_checker.py @@ -3,7 +3,7 @@ and interact with resources or do some actions in the app """ import contextlib -from typing import Protocol +from typing import Protocol, Callable from dataall.core.context import RequestContext, get_context from dataall.db.api import TenantPolicy, ResourcePolicy, Environment @@ -11,7 +11,7 @@ class Identifiable(Protocol): """Protocol to identify resources for checking permissions""" - def get_uri(self) -> str: + def get_resource_uri(self) -> str: ... 
@@ -56,9 +56,8 @@ def no_decorated(f): static_func = False try: - func.__func__ - static_func = True fn = func.__func__ + static_func = True except AttributeError: fn = func @@ -66,7 +65,7 @@ def no_decorated(f): return fn, staticmethod if static_func else no_decorated -def has_resource_permission(permission: str, resource_name: str = None): +def has_resource_permission(permission: str, resource_name: str = None, parent_resource: Callable = None): """ Decorator that check if a user has access to the resource. The method or function decorated with this decorator must have a URI of accessing resource @@ -80,11 +79,13 @@ def decorated(*args, **kwargs): uri: str if resource_name: resource: Identifiable = kwargs[resource_name] - uri = resource.get_uri() + uri = resource.get_resource_uri() else: uri = kwargs["uri"] with get_context().db_engine.scoped_session() as session: + if parent_resource: + uri = parent_resource(session, uri) _check_resource_permission(session, uri, permission) return fn(*args, **kwargs) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 6f88548ee..a2c83ac10 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1402,3 +1402,22 @@ def resolve_share_object_statistics(session, uri, **kwargs): .count() ) return {'tables': tables, 'locations': locations, 'sharedItems': shared_items, 'revokedItems': revoked_items, 'failedItems': failed_items, 'pendingItems': pending_items} + + @staticmethod + def has_shared_items(session, item_uri: str) -> int: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.itemUri == item_uri, + ShareObjectItem.status.in_(share_item_shared_states) + ) + ) + .count() + ) + + @staticmethod + def delete_shares(session, 
item_uri: str): + session.query(ShareObjectItem).filter(ShareObjectItem.itemUri == item_uri).delete() + diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 89404a75b..d400f0b74 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -19,11 +19,11 @@ def list_dataset_tables(context, source, filter: dict = None): return None if not filter: filter = {} - return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, filter=filter) + return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, **filter) def get_table(context, source: Dataset, tableUri: str = None): - return DatasetTableService.update_table(table_uri=tableUri) + return DatasetTableService.get_table(table_uri=tableUri) def update_table(context, source, tableUri: str = None, input: dict = None): @@ -33,7 +33,7 @@ def update_table(context, source, tableUri: str = None, input: dict = None): def delete_table(context, source, tableUri: str = None): if not tableUri: return False - return DatasetTableService.delete_table(table_uri=tableUri) + return DatasetTableService.delete_table(uri=tableUri) def preview(context, source, tableUri: str = None): @@ -70,7 +70,7 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): def publish_table_update(context: Context, source, tableUri: str = None): - return DatasetTableService.publish_table_update(table_uri=tableUri) + return DatasetTableService.publish_table_update(uri=tableUri) def resolve_redshift_copy_schema(context, source: DatasetTable, clusterUri: str): diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index cdf473d03..bfcf7ef18 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ 
b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -2,15 +2,10 @@ from sqlalchemy.sql import and_ -from dataall.core.context import get_context -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db import exceptions, paginate from dataall.db.api import Glossary, ResourcePolicy, Environment from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, CREATE_DATASET_TABLE, DELETE_DATASET_TABLE, \ - UPDATE_DATASET_TABLE -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils import json_utils from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset @@ -19,34 +14,28 @@ class DatasetTableRepository: + @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(CREATE_DATASET_TABLE) - def create_dataset_table( - session, - uri: str, - data: dict = None, - ) -> DatasetTable: - dataset = DatasetService.get_dataset_by_uri(session, uri) - exists = ( + def save(session, table: DatasetTable): + session.add(table) + + @staticmethod + def exists(session, dataset_uri, glue_table_name): + return ( session.query(DatasetTable) .filter( and_( - DatasetTable.datasetUri == uri, - DatasetTable.GlueTableName == data['name'], + DatasetTable.datasetUri == dataset_uri, + DatasetTable.GlueTableName == glue_table_name, ) ) .count() ) - if exists: - raise exceptions.ResourceAlreadyExists( - action='Create Table', - message=f'table: {data["name"]} already exist on dataset {uri}', - ) - + @staticmethod + def create_dataset_table(session, dataset: Dataset, data: dict = None) -> DatasetTable: table = DatasetTable( - datasetUri=uri, + datasetUri=dataset.datasetUri, 
label=data['name'], name=data['name'], description=data.get('description', 'No description provided'), @@ -61,108 +50,23 @@ def create_dataset_table( region=dataset.region, ) session.add(table) - if data.get('terms') is not None: - Glossary.set_glossary_terms_links( - session, get_context().username, table.tableUri, 'DatasetTable', data.get('terms', []) - ) session.commit() - - # ADD DATASET TABLE PERMISSIONS - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - permission_group = set([dataset.SamlAdminGroupName, environment.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) - for group in permission_group: - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=DATASET_TABLE_READ, - resource_uri=table.tableUri, - resource_type=DatasetTable.__name__, - ) return table @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - def list_dataset_tables( - session, - uri: str, - data: dict = None, - ) -> dict: + def paginate_dataset_tables(session, dataset_uri, term, page, page_size) -> dict: query = ( session.query(DatasetTable) - .filter(DatasetTable.datasetUri == uri) + .filter(DatasetTable.datasetUri == dataset_uri) .order_by(DatasetTable.created.desc()) ) - if data.get('term'): - term = data.get('term') + if term: query = query.filter(DatasetTable.label.ilike('%' + term + '%')) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - def get_dataset_table( - session, - uri: str, - data: dict = None, - ) -> DatasetTable: - return DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']) - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(UPDATE_DATASET_TABLE) - def update_dataset_table( - session, - uri: str, - data: dict = None, - ): - table = data.get( - 'table', - 
DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']), - ) - - for k in [attr for attr in data.keys() if attr != 'term']: - setattr(table, k, data.get(k)) - - if data.get('terms') is not None: - Glossary.set_glossary_terms_links( - session, get_context().username, table.tableUri, 'DatasetTable', data.get('terms', []) - ) - - return table + return paginate(query, page=page, page_size=page_size).to_dict() @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(DELETE_DATASET_TABLE) - def delete_dataset_table( - session, - uri: str, - data: dict = None, - ): - table = DatasetTableRepository.get_dataset_table_by_uri(session, data['tableUri']) - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - share_item = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.itemUri == table.tableUri, - ShareObjectItem.status.in_(share_item_shared_states) - ) - ) - .first() - ) - if share_item: - raise exceptions.ResourceShared( - action=DELETE_DATASET_TABLE, - message='Revoke all table shares before deletion', - ) - session.query(ShareObjectItem).filter( - ShareObjectItem.itemUri == table.tableUri, - ).delete() + def delete_dataset_table(session, table: DatasetTable): session.delete(table) - Glossary.delete_glossary_terms_links( - session, target_uri=table.tableUri, target_type='DatasetTable' - ) return True @staticmethod @@ -196,17 +100,6 @@ def query_dataset_tables_shared_with_env( return env_tables_shared - @staticmethod - def get_dataset_tables_shared_with_env( - session, environment_uri: str, dataset_uri: str - ): - return [ - {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} - for t in DatasetTableRepository.query_dataset_tables_shared_with_env( - session, environment_uri, dataset_uri - ) - ] - @staticmethod def get_dataset_table_by_uri(session, table_uri): table: DatasetTable = session.query(DatasetTable).get(table_uri) diff --git 
a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index fc16fc564..70c9394d7 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -3,83 +3,118 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission, has_tenant_permission from dataall.db import models -from dataall.db.api import ResourcePolicy, Environment +from dataall.db.api import ResourcePolicy, Environment, Glossary +from dataall.db.exceptions import ResourceShared, ResourceAlreadyExists +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets import DatasetTableIndexer from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository -from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE -from dataall.modules.datasets_base.db.models import Dataset, DatasetTable -from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ + DELETE_DATASET_TABLE, CREATE_DATASET_TABLE +from dataall.modules.datasets_base.db.models import DatasetTable +from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ from dataall.utils import json_utils log = logging.getLogger(__name__) class DatasetTableService: + @staticmethod + def _get_dataset_uri(session, table_uri): + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return table.datasetUri @staticmethod - 
def create_table(dataset_uri: str, table_data: dict): + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET_TABLE) + def create_table(uri: str, table_data: dict): with get_context().db_engine.scoped_session() as session: - table = DatasetTableRepository.create_dataset_table( - session=session, - uri=dataset_uri, - data=table_data, - ) + dataset = DatasetService.get_dataset_by_uri(session, uri) + glue_table = table_data['name'] + exists = DatasetTableRepository.exists(session, dataset_uri=uri, glue_table_name=glue_table) + + if exists: + raise ResourceAlreadyExists( + action='Create Table', + message=f'table: {glue_table} already exist on dataset {uri}', + ) + + table = DatasetTableRepository.create_dataset_table(session, dataset, table_data) + + if 'terms' in table_data: + Glossary.set_glossary_terms_links( + session, get_context().username, table.tableUri, 'DatasetTable', table_data['terms'] + ) + + # ADD DATASET TABLE PERMISSIONS + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + permission_group = {dataset.SamlAdminGroupName, environment.SamlGroupName, + dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName} + for group in permission_group: + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=DATASET_TABLE_READ, + resource_uri=table.tableUri, + resource_type=DatasetTable.__name__, + ) DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @staticmethod - def list_dataset_tables(dataset_uri: str, filter: dict): + @has_tenant_permission(MANAGE_DATASETS) + def list_dataset_tables(dataset_uri: str, term=None, page=1, pageSize=10): with get_context().db_engine.scoped_session() as session: - return DatasetTableRepository.list_dataset_tables( - session=session, - uri=dataset_uri, - data=filter, - ) + return DatasetTableRepository.paginate_dataset_tables(session, dataset_uri, term, page, pageSize) @staticmethod + 
@has_tenant_permission(MANAGE_DATASETS) def get_table(table_uri: str): with get_context().db_engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) - return DatasetTableRepository.get_dataset_table( - session=session, - uri=table.datasetUri, - data={ - 'tableUri': table_uri, - }, - ) + return DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) @staticmethod - def update_table(table_uri: str, input: dict = None): + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def update_table(table_uri: str, table_data: dict = None): with get_context().db_engine.scoped_session() as session: table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) - input['table'] = table - input['tableUri'] = table.tableUri + for k in [attr for attr in table_data.keys() if attr != 'terms']: + setattr(table, k, table_data.get(k)) + + DatasetTableRepository.save(session, table) + if 'terms' in table_data: + Glossary.set_glossary_terms_links( + session, get_context().username, table.tableUri, 'DatasetTable', table_data['terms'] + ) - DatasetTableRepository.update_dataset_table( - session=session, - uri=dataset.datasetUri, - data=input, - ) DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @staticmethod - def delete_table(table_uri: str): + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def delete_table(uri: str): with get_context().db_engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) - DatasetTableRepository.delete_dataset_table( - session=session, - uri=table.datasetUri, - data={ - 'tableUri': table_uri, - }, + table = DatasetTableRepository.get_dataset_table_by_uri(session, uri) + has_share = 
ShareObjectRepository.has_shared_items(session, table.tableUri) + if has_share: + raise ResourceShared( + action=DELETE_DATASET_TABLE, + message='Revoke all table shares before deletion', + ) + + ShareObjectRepository.delete_shares(session, table.tableUri) + DatasetTableRepository.delete_dataset_table(session, table) + + Glossary.delete_glossary_terms_links( + session, target_uri=table.tableUri, target_type='DatasetTable' ) - DatasetTableIndexer.delete_doc(doc_id=table_uri) + DatasetTableIndexer.delete_doc(doc_id=uri) return True @staticmethod @@ -106,26 +141,17 @@ def preview(table_uri: str): @staticmethod def get_glue_table_properties(table_uri: str): + # TODO THERE WAS NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - session, table_uri - ) + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') @staticmethod - def publish_table_update(table_uri: str): + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def publish_table_update(uri: str): context = get_context() with context.db_engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( - session, table_uri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=UPDATE_DATASET_TABLE, - ) + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, uri) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: @@ -146,11 +172,12 @@ def publish_table_update(table_uri: str): @staticmethod def 
list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): + # TODO THERE WAS NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: - return DatasetTableRepository.get_dataset_tables_shared_with_env( - session, - dataset_uri, - env_uri - ) - + return [ + {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} + for t in DatasetTableRepository.query_dataset_tables_shared_with_env( + session, env_uri, dataset_uri + ) + ] From 22c970b32a94e90eeb910248db18dd8cfcdec65b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 12:25:33 +0200 Subject: [PATCH 160/346] Split logic between service and repo for DatasetTable --- .../datasets/db/dataset_table_repository.py | 103 ++++++------------ .../datasets/handlers/glue_table_handler.py | 4 +- .../services/dataset_table_service.py | 63 ++++++++--- .../modules/datasets/tasks/tables_syncer.py | 4 +- tests/api/test_dataset_table.py | 4 +- 5 files changed, 88 insertions(+), 90 deletions(-) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index bfcf7ef18..b6fddf491 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -3,12 +3,10 @@ from sqlalchemy.sql import and_ from dataall.db import exceptions, paginate -from dataall.db.api import Glossary, ResourcePolicy, Environment from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ -from dataall.utils import json_utils from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset +from dataall.utils import json_utils logger = logging.getLogger(__name__) @@ -53,6 +51,28 @@ def create_dataset_table(session, dataset: Dataset, 
data: dict = None) -> Datase session.commit() return table + @staticmethod + def create_synced_table(session, dataset: Dataset, table: dict): + updated_table = DatasetTable( + datasetUri=dataset.datasetUri, + label=table['Name'], + name=table['Name'], + region=dataset.region, + owner=dataset.owner, + GlueDatabaseName=dataset.GlueDatabaseName, + AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.S3BucketName, + S3Prefix=table.get('StorageDescriptor', {}).get('Location'), + GlueTableName=table['Name'], + LastGlueTableStatus='InSync', + GlueTableProperties=json_utils.to_json( + table.get('Parameters', {}) + ), + ) + session.add(updated_table) + session.commit() + return table + @staticmethod def paginate_dataset_tables(session, dataset_uri, term, page, page_size) -> dict: query = ( @@ -107,72 +127,6 @@ def get_dataset_table_by_uri(session, table_uri): raise exceptions.ObjectNotFound('DatasetTable', table_uri) return table - @staticmethod - def sync_existing_tables(session, datasetUri, glue_tables=None): - - dataset: Dataset = session.query(Dataset).get(datasetUri) - if dataset: - existing_tables = ( - session.query(DatasetTable) - .filter(DatasetTable.datasetUri == datasetUri) - .all() - ) - existing_table_names = [e.GlueTableName for e in existing_tables] - existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} - - DatasetTableRepository.update_existing_tables_status(existing_tables, glue_tables) - logger.info( - f'existing_tables={glue_tables}' - ) - for table in glue_tables: - if table['Name'] not in existing_table_names: - logger.info( - f'Storing new table: {table} for dataset db {dataset.GlueDatabaseName}' - ) - updated_table = DatasetTable( - datasetUri=dataset.datasetUri, - label=table['Name'], - name=table['Name'], - region=dataset.region, - owner=dataset.owner, - GlueDatabaseName=dataset.GlueDatabaseName, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, - S3Prefix=table.get('StorageDescriptor', 
{}).get('Location'), - GlueTableName=table['Name'], - LastGlueTableStatus='InSync', - GlueTableProperties=json_utils.to_json( - table.get('Parameters', {}) - ), - ) - session.add(updated_table) - session.commit() - # ADD DATASET TABLE PERMISSIONS - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - permission_group = set([dataset.SamlAdminGroupName, env.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) - for group in permission_group: - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=DATASET_TABLE_READ, - resource_uri=updated_table.tableUri, - resource_type=DatasetTable.__name__, - ) - else: - logger.info( - f'Updating table: {table} for dataset db {dataset.GlueDatabaseName}' - ) - updated_table: DatasetTable = ( - existing_dataset_tables_map.get(table['Name']) - ) - updated_table.GlueTableProperties = json_utils.to_json( - table.get('Parameters', {}) - ) - - DatasetTableRepository.sync_table_columns(session, updated_table, table) - - return True - @staticmethod def update_existing_tables_status(existing_tables, glue_tables): for existing_table in existing_tables: @@ -220,8 +174,7 @@ def sync_table_columns(session, dataset_table, glue_table): def delete_all_table_columns(session, dataset_table): session.query(DatasetTableColumn).filter( and_( - DatasetTableColumn.GlueDatabaseName - == dataset_table.GlueDatabaseName, + DatasetTableColumn.GlueDatabaseName == dataset_table.GlueDatabaseName, DatasetTableColumn.GlueTableName == dataset_table.GlueTableName, ) ).delete() @@ -247,3 +200,11 @@ def get_table_by_s3_prefix(session, s3_prefix, accountid, region): f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' ) return table + + @staticmethod + def find_dataset_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .all() + ) diff --git 
a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index daf9003c0..d822a435a 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -3,9 +3,9 @@ from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository log = logging.getLogger(__name__) @@ -25,5 +25,5 @@ def sync_existing_tables(engine, task: models.Task): tables = Glue.list_glue_database_tables( account_id, dataset.GlueDatabaseName, region ) - DatasetTableRepository.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) + DatasetTableService.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) return tables diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 70c9394d7..8c40b6255 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -14,7 +14,7 @@ from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ DELETE_DATASET_TABLE, CREATE_DATASET_TABLE -from dataall.modules.datasets_base.db.models import DatasetTable +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ from 
dataall.utils import json_utils @@ -49,18 +49,7 @@ def create_table(uri: str, table_data: dict): session, get_context().username, table.tableUri, 'DatasetTable', table_data['terms'] ) - # ADD DATASET TABLE PERMISSIONS - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - permission_group = {dataset.SamlAdminGroupName, environment.SamlGroupName, - dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName} - for group in permission_group: - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=DATASET_TABLE_READ, - resource_uri=table.tableUri, - resource_type=DatasetTable.__name__, - ) + DatasetTableService._attach_dataset_table_permission(session, dataset, table.tableUri) DatasetTableIndexer.upsert(session, table_uri=table.tableUri) return table @@ -181,3 +170,51 @@ def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): ) ] + @staticmethod + def sync_existing_tables(session, dataset_uri, glue_tables=None): + dataset: Dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) + if dataset: + existing_tables = DatasetTableRepository.find_dataset_tables(session, dataset_uri) + existing_table_names = [e.GlueTableName for e in existing_tables] + existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} + + DatasetTableRepository.update_existing_tables_status(existing_tables, glue_tables) + log.info( + f'existing_tables={glue_tables}' + ) + for table in glue_tables: + if table['Name'] not in existing_table_names: + log.info( + f'Storing new table: {table} for dataset db {dataset.GlueDatabaseName}' + ) + updated_table = DatasetTableRepository.create_synced_table(session, dataset, table) + DatasetTableService._attach_dataset_table_permission(session, dataset, updated_table.tableUri) + else: + log.info( + f'Updating table: {table} for dataset db {dataset.GlueDatabaseName}' + ) + updated_table: DatasetTable = ( + 
existing_dataset_tables_map.get(table['Name']) + ) + updated_table.GlueTableProperties = json_utils.to_json( + table.get('Parameters', {}) + ) + + DatasetTableRepository.sync_table_columns(session, updated_table, table) + + return True + + @staticmethod + def _attach_dataset_table_permission(session, dataset: Dataset, table_uri): + # ADD DATASET TABLE PERMISSIONS + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + permission_group = {dataset.SamlAdminGroupName, env.SamlGroupName, + dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName} + for group in permission_group: + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=DATASET_TABLE_READ, + resource_uri=table_uri, + resource_type=DatasetTable.__name__, + ) diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 98dfc3a00..3f6e223db 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -9,11 +9,11 @@ from dataall.db import get_engine from dataall.db import models from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository root = logging.getLogger() root.setLevel(logging.INFO) @@ -64,7 +64,7 @@ def sync_tables(engine): f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' ) - DatasetTableRepository.sync_existing_tables( + 
DatasetTableService.sync_existing_tables( session, dataset.datasetUri, glue_tables=tables ) diff --git a/tests/api/test_dataset_table.py b/tests/api/test_dataset_table.py index 042ad880b..3e3dc3ab3 100644 --- a/tests/api/test_dataset_table.py +++ b/tests/api/test_dataset_table.py @@ -3,7 +3,7 @@ import pytest import dataall -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset @@ -287,7 +287,7 @@ def test_sync_tables_and_columns(client, table, dataset1, db): }, ] - assert DatasetTableRepository.sync_existing_tables(session, dataset1.datasetUri, glue_tables) + assert DatasetTableService.sync_existing_tables(session, dataset1.datasetUri, glue_tables) new_table: DatasetTable = ( session.query(DatasetTable) .filter(DatasetTable.name == 'new_table') From f186a30fc7e82bf34691dc17992f4ec011247c38 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 12:31:27 +0200 Subject: [PATCH 161/346] Moved ConfidentialityClassification --- backend/dataall/api/constants.py | 6 ------ backend/dataall/db/models/Enums.py | 6 ------ backend/dataall/modules/datasets/api/dataset/__init__.py | 3 ++- backend/dataall/modules/datasets/api/dataset/enums.py | 7 +++++++ backend/dataall/modules/datasets/db/dataset_service.py | 3 ++- backend/dataall/modules/datasets/db/enums.py | 7 +++++++ .../modules/datasets/services/dataset_table_service.py | 4 ++-- 7 files changed, 20 insertions(+), 16 deletions(-) create mode 100644 backend/dataall/modules/datasets/db/enums.py diff --git a/backend/dataall/api/constants.py b/backend/dataall/api/constants.py index f63fb16c6..0be2d4ae2 100644 --- a/backend/dataall/api/constants.py +++ b/backend/dataall/api/constants.py @@ -130,12 +130,6 @@ class PrincipalType(GraphQLEnumMapper): ConsumptionRole = 'ConsumptionRole' -class 
ConfidentialityClassification(GraphQLEnumMapper): - Unclassified = 'Unclassified' - Official = 'Official' - Secret = 'Secret' - - class Language(GraphQLEnumMapper): English = 'English' French = 'French' diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py index 7d90746f5..ebce865a2 100644 --- a/backend/dataall/db/models/Enums.py +++ b/backend/dataall/db/models/Enums.py @@ -93,12 +93,6 @@ class PrincipalType(Enum): ConsumptionRole = 'ConsumptionRole' -class ConfidentialityClassification(Enum): - Unclassified = 'Unclassified' - Official = 'Official' - Secret = 'Secret' - - class Language(Enum): English = 'English' French = 'French' diff --git a/backend/dataall/modules/datasets/api/dataset/__init__.py b/backend/dataall/modules/datasets/api/dataset/__init__.py index d286e1a7d..807cc8a05 100644 --- a/backend/dataall/modules/datasets/api/dataset/__init__.py +++ b/backend/dataall/modules/datasets/api/dataset/__init__.py @@ -4,6 +4,7 @@ queries, resolvers, schema, + enums ) -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] +__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations', 'enums'] diff --git a/backend/dataall/modules/datasets/api/dataset/enums.py b/backend/dataall/modules/datasets/api/dataset/enums.py index decf8df06..7169bb5c5 100644 --- a/backend/dataall/modules/datasets/api/dataset/enums.py +++ b/backend/dataall/modules/datasets/api/dataset/enums.py @@ -9,3 +9,10 @@ class DatasetRole(GraphQLEnumMapper): Admin = '900' Shared = '300' NoPermission = '000' + + +class ConfidentialityClassification(GraphQLEnumMapper): + Unclassified = 'Unclassified' + Official = 'Official' + Secret = 'Secret' + diff --git a/backend/dataall/modules/datasets/db/dataset_service.py b/backend/dataall/modules/datasets/db/dataset_service.py index e4988d853..b93bbe8f4 100644 --- a/backend/dataall/modules/datasets/db/dataset_service.py +++ b/backend/dataall/modules/datasets/db/dataset_service.py @@ -17,9 +17,10 @@ ) from 
dataall.db.api import Organization from dataall.db import models, exceptions, paginate, permissions -from dataall.db.models.Enums import Language, ConfidentialityClassification +from dataall.db.models.Enums import Language from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM +from dataall.modules.datasets.db.enums import ConfidentialityClassification from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository diff --git a/backend/dataall/modules/datasets/db/enums.py b/backend/dataall/modules/datasets/db/enums.py new file mode 100644 index 000000000..5ef5e8170 --- /dev/null +++ b/backend/dataall/modules/datasets/db/enums.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class ConfidentialityClassification(Enum): + Unclassified = 'Unclassified' + Official = 'Official' + Secret = 'Secret' diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 8c40b6255..f2e3fbd22 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -12,6 +12,7 @@ from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ DELETE_DATASET_TABLE, CREATE_DATASET_TABLE from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @@ -115,8 +116,7 
@@ def preview(table_uri: str): ) dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) if ( - dataset.confidentiality - != models.ConfidentialityClassification.Unclassified.value + dataset.confidentiality != ConfidentialityClassification.Unclassified.value ): ResourcePolicy.check_user_resource_permission( session=session, From d76005a9731d509645f15ed503a1fd88592762f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 12:48:37 +0200 Subject: [PATCH 162/346] Extracted DatasetLocationService --- .../api/storage_location/resolvers.py | 88 ++------------- .../db/dataset_location_repository.py | 22 +--- .../services/dataset_location_service.py | 104 ++++++++++++++++++ 3 files changed, 113 insertions(+), 101 deletions(-) create mode 100644 backend/dataall/modules/datasets/services/dataset_location_service.py diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index db68277a2..e73ade13a 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -1,33 +1,13 @@ from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker -from dataall.db import models -from dataall.db.api import ( - ResourcePolicy, - Glossary, - Environment, -) -from dataall.modules.datasets.aws.s3_location_client import S3LocationClient -from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.db.api import Glossary +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset -from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.db.dataset_service import DatasetService -from 
dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER def create_storage_location( context, source, datasetUri: str = None, input: dict = None ): - with context.engine.scoped_session() as session: - location = DatasetLocationRepository.create_dataset_location( - session=session, - uri=datasetUri, - data=input, - ) - - S3LocationClient(location).create_bucket_prefix() - - DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) - return location + return DatasetLocationService.create_storage_location(uri=datasetUri, data=input) def list_dataset_locations(context, source, filter: dict = None): @@ -35,49 +15,21 @@ def list_dataset_locations(context, source, filter: dict = None): return None if not filter: filter = {} - with context.engine.scoped_session() as session: - return DatasetLocationRepository.list_dataset_locations( - session=session, uri=source.datasetUri, data=filter - ) + return DatasetLocationService.list_dataset_locations(uri=source.datasetUri, filter=filter) def get_storage_location(context, source, locationUri=None): - with context.engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri(session, locationUri) - return DatasetLocationRepository.get_dataset_location( - session=session, - uri=location.datasetUri, - data={'locationUri': location.locationUri}, - ) + return DatasetLocationService.get_storage_location(uri=locationUri) def update_storage_location( context, source, locationUri: str = None, input: dict = None ): - with context.engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri(session, locationUri) - input['location'] = location - input['locationUri'] = location.locationUri - DatasetLocationRepository.update_dataset_location( - session=session, - uri=location.datasetUri, - data=input, - ) - DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) - - return location + return 
DatasetLocationService.update_storage_location(uri=locationUri, data=input) def remove_storage_location(context, source, locationUri: str = None): - with context.engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri(session, locationUri) - DatasetLocationRepository.delete_dataset_location( - session=session, - uri=location.datasetUri, - data={'locationUri': location.locationUri}, - ) - DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) - return True + return DatasetLocationService.remove_storage_location(uri=locationUri) def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): @@ -89,31 +41,7 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): def publish_location_update(context: Context, source, locationUri: str = None): - with context.engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri(session, locationUri) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=location.datasetUri, - permission_name=UPDATE_DATASET_FOLDER, - ) - dataset = DatasetService.get_dataset_by_uri(session, location.datasetUri) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." 
- ) - task = models.Task( - targetUri=location.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': location.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return True + return DatasetLocationService.publish_location_update(uri=locationUri) def resolve_glossary_terms( diff --git a/backend/dataall/modules/datasets/db/dataset_location_repository.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py index ffb0617b4..73517dc78 100644 --- a/backend/dataall/modules/datasets/db/dataset_location_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -3,23 +3,19 @@ from sqlalchemy import and_, or_ from dataall.core.context import get_context -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import Glossary from dataall.db import paginate, exceptions from dataall.modules.dataset_sharing.db.models import ShareObjectItem from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, LIST_DATASET_FOLDERS, \ - DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER, CREATE_DATASET_FOLDER +from dataall.modules.datasets.services.dataset_permissions import DELETE_DATASET_FOLDER logger = logging.getLogger(__name__) class DatasetLocationRepository: @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(CREATE_DATASET_FOLDER) def create_dataset_location( session, uri: str, @@ -69,8 +65,6 @@ def create_dataset_location( return location @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(LIST_DATASET_FOLDERS) def list_dataset_locations( session, uri: str, @@ -91,18 +85,6 @@ 
def list_dataset_locations( ).to_dict() @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(LIST_DATASET_FOLDERS) - def get_dataset_location( - session, - uri: str, - data: dict = None, - ) -> DatasetStorageLocation: - return DatasetLocationRepository.get_location_by_uri(session, data['locationUri']) - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(UPDATE_DATASET_FOLDER) def update_dataset_location( session, uri: str, @@ -128,8 +110,6 @@ def update_dataset_location( return location @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(DELETE_DATASET_FOLDER) def delete_dataset_location( session, uri: str, diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py new file mode 100644 index 000000000..2ce5e170e --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -0,0 +1,104 @@ +from dataall.aws.handlers.service_handlers import Worker +from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission, has_tenant_permission +from dataall.db.api import Environment +from dataall.db.models import Task +from dataall.modules.datasets import DatasetLocationIndexer +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER, MANAGE_DATASETS, \ + CREATE_DATASET_FOLDER, LIST_DATASET_FOLDERS, DELETE_DATASET_FOLDER + + +class DatasetLocationService: + @staticmethod + def _get_dataset_uri(session, uri): + location = DatasetLocationRepository.get_location_by_uri(session, uri) + return location.datasetUri + + @staticmethod + 
@has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET_FOLDER) + def create_storage_location(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + location = DatasetLocationRepository.create_dataset_location( + session=session, + uri=uri, + data=data, + ) + + S3LocationClient(location).create_bucket_prefix() + + DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) + return location + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS) + def list_dataset_locations(uri: str, filter: dict = None): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.list_dataset_locations( + session=session, uri=uri, data=filter + ) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS, parent_resource=_get_dataset_uri) + def get_storage_location(uri): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.get_location_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_FOLDER, parent_resource=_get_dataset_uri) + def update_storage_location(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + location = DatasetLocationRepository.get_location_by_uri(session, uri) + data['location'] = location + data['locationUri'] = location.locationUri + DatasetLocationRepository.update_dataset_location( + session=session, + uri=location.datasetUri, + data=data, + ) + DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) + + return location + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_FOLDER, parent_resource=_get_dataset_uri) + def remove_storage_location(uri: str = None): + with get_context().db_engine.scoped_session() as session: + location = 
DatasetLocationRepository.get_location_by_uri(session, uri) + DatasetLocationRepository.delete_dataset_location( + session=session, + uri=location.datasetUri, + data={'locationUri': location.locationUri}, + ) + DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) + return True + + @staticmethod + @has_resource_permission(UPDATE_DATASET_FOLDER, parent_resource=_get_dataset_uri) + def publish_location_update(uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + location = DatasetLocationRepository.get_location_by_uri(session, uri) + dataset = DatasetService.get_dataset_by_uri(session, location.datasetUri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: + raise Exception( + 'Subscriptions are disabled. ' + "First enable subscriptions for this dataset's environment then retry." + ) + task = Task( + targetUri=location.datasetUri, + action='sns.dataset.publish_update', + payload={'s3Prefix': location.S3Prefix}, + ) + session.add(task) + + Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + return True From e01e4e984aae0ea8662837333d372a836830da18 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 13:08:20 +0200 Subject: [PATCH 163/346] Extracted DatasetLocationService --- .../modules/datasets/api/dataset/resolvers.py | 2 - .../db/dataset_location_repository.py | 108 +++--------------- .../datasets/db/dataset_table_repository.py | 3 +- .../services/dataset_location_service.py | 63 +++++++--- .../services/dataset_table_service.py | 2 +- 5 files changed, 63 insertions(+), 115 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 6f1c6fc15..dd90eb360 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -177,8 +177,6 @@ def list_locations(context, source: Dataset, filter: dict = None): with context.engine.scoped_session() as session: return DatasetLocationRepository.paginated_dataset_locations( session=session, - username=context.username, - groups=context.groups, uri=source.datasetUri, data=filter, ) diff --git a/backend/dataall/modules/datasets/db/dataset_location_repository.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py index 73517dc78..a58527c75 100644 --- a/backend/dataall/modules/datasets/db/dataset_location_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -8,37 +8,33 @@ from dataall.modules.dataset_sharing.db.models import ShareObjectItem from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import DatasetStorageLocation +from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset from dataall.modules.datasets.services.dataset_permissions import DELETE_DATASET_FOLDER logger = logging.getLogger(__name__) class DatasetLocationRepository: + @staticmethod - def create_dataset_location( - session, - uri: str, - data: dict = None - ) -> DatasetStorageLocation: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - exists = ( + def exists(session, dataset_uri: str, prefix: str): + return ( session.query(DatasetStorageLocation) .filter( and_( - DatasetStorageLocation.datasetUri == dataset.datasetUri, - DatasetStorageLocation.S3Prefix == data['prefix'], + DatasetStorageLocation.datasetUri == dataset_uri, + DatasetStorageLocation.S3Prefix == prefix, ) ) .count() ) - if exists: - raise exceptions.ResourceAlreadyExists( - action='Create Folder', - message=f'Folder: {data["prefix"]} already exist on dataset {uri}', - ) - + @staticmethod + def 
create_dataset_location( + session, + dataset: Dataset, + data: dict = None + ) -> DatasetStorageLocation: location = DatasetStorageLocation( datasetUri=dataset.datasetUri, label=data.get('label'), @@ -52,16 +48,6 @@ def create_dataset_location( ) session.add(location) session.commit() - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - get_context().username, - location.locationUri, - 'DatasetStorageLocation', - data.get('terms', []), - ) - return location @staticmethod @@ -85,66 +71,8 @@ def list_dataset_locations( ).to_dict() @staticmethod - def update_dataset_location( - session, - uri: str, - data: dict = None, - ) -> DatasetStorageLocation: - - location = data.get( - 'location', - DatasetLocationRepository.get_location_by_uri(session, data['locationUri']), - ) - - for k in data.keys(): - setattr(location, k, data.get(k)) - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - get_context().username, - location.locationUri, - 'DatasetStorageLocation', - data.get('terms', []), - ) - return location - - @staticmethod - def delete_dataset_location( - session, - uri: str, - data: dict = None, - ): - location = DatasetLocationRepository.get_location_by_uri( - session, data['locationUri'] - ) - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - share_item = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.itemUri == location.locationUri, - ShareObjectItem.status.in_(share_item_shared_states) - ) - ) - .first() - ) - if share_item: - raise exceptions.ResourceShared( - action=DELETE_DATASET_FOLDER, - message='Revoke all folder shares before deletion', - ) - session.query(ShareObjectItem).filter( - ShareObjectItem.itemUri == location.locationUri, - ).delete() - + def delete(session, location): session.delete(location) - Glossary.delete_glossary_terms_links( - session, - target_uri=location.locationUri, - target_type='DatasetStorageLocation', - ) - return True @staticmethod 
def get_location_by_uri(session, location_uri) -> DatasetStorageLocation: @@ -186,11 +114,7 @@ def count_dataset_locations(session, dataset_uri): def delete_dataset_locations(session, dataset_uri) -> bool: locations = ( session.query(DatasetStorageLocation) - .filter( - and_( - DatasetStorageLocation.datasetUri == dataset_uri, - ) - ) + .filter(DatasetStorageLocation.datasetUri == dataset_uri ) .all() ) for location in locations: @@ -207,9 +131,7 @@ def get_dataset_folders(session, dataset_uri): ) @staticmethod - def paginated_dataset_locations( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: + def paginated_dataset_locations(session, uri, data=None) -> dict: query = session.query(DatasetStorageLocation).filter( DatasetStorageLocation.datasetUri == uri ) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index b6fddf491..73d7fd2ad 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -85,9 +85,8 @@ def paginate_dataset_tables(session, dataset_uri, term, page, page_size) -> dict return paginate(query, page=page, page_size=page_size).to_dict() @staticmethod - def delete_dataset_table(session, table: DatasetTable): + def delete(session, table: DatasetTable): session.delete(table) - return True @staticmethod def query_dataset_tables_shared_with_env( diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 2ce5e170e..bf277987c 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -1,14 +1,17 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.core.context import get_context from dataall.core.permission_checker import 
has_resource_permission, has_tenant_permission -from dataall.db.api import Environment +from dataall.db.api import Environment, Glossary +from dataall.db.exceptions import ResourceShared, ResourceAlreadyExists from dataall.db.models import Task +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets import DatasetLocationIndexer from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER, MANAGE_DATASETS, \ CREATE_DATASET_FOLDER, LIST_DATASET_FOLDERS, DELETE_DATASET_FOLDER +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository class DatasetLocationService: @@ -22,11 +25,19 @@ def _get_dataset_uri(session, uri): @has_resource_permission(CREATE_DATASET_FOLDER) def create_storage_location(uri: str, data: dict): with get_context().db_engine.scoped_session() as session: - location = DatasetLocationRepository.create_dataset_location( - session=session, - uri=uri, - data=data, - ) + exists = DatasetLocationRepository.exists(session, uri, data['prefix']) + + if exists: + raise ResourceAlreadyExists( + action='Create Folder', + message=f'Folder: {data["prefix"]} already exist on dataset {uri}', + ) + + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + location = DatasetLocationRepository.create_dataset_location(session, dataset, data) + + if 'terms' in data.keys(): + DatasetLocationService._create_glossary_links(session, location, data['terms']) S3LocationClient(location).create_bucket_prefix() @@ -55,13 +66,12 @@ def get_storage_location(uri): def update_storage_location(uri: str, data: dict): with get_context().db_engine.scoped_session() as session: location = 
DatasetLocationRepository.get_location_by_uri(session, uri) - data['location'] = location - data['locationUri'] = location.locationUri - DatasetLocationRepository.update_dataset_location( - session=session, - uri=location.datasetUri, - data=data, - ) + for k in data.keys(): + setattr(location, k, data.get(k)) + + if 'terms' in data.keys(): + DatasetLocationService._create_glossary_links(session, location, data['terms']) + DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) return location @@ -72,10 +82,19 @@ def update_storage_location(uri: str, data: dict): def remove_storage_location(uri: str = None): with get_context().db_engine.scoped_session() as session: location = DatasetLocationRepository.get_location_by_uri(session, uri) - DatasetLocationRepository.delete_dataset_location( - session=session, - uri=location.datasetUri, - data={'locationUri': location.locationUri}, + has_shares = ShareObjectRepository.has_shared_items(session, location.locationUri) + if has_shares: + raise ResourceShared( + action=DELETE_DATASET_FOLDER, + message='Revoke all folder shares before deletion', + ) + + ShareObjectRepository.delete_shares(session, location.locationUri) + DatasetLocationRepository.delete(session, location) + Glossary.delete_glossary_terms_links( + session, + target_uri=location.locationUri, + target_type='DatasetStorageLocation', ) DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) return True @@ -102,3 +121,13 @@ def publish_location_update(uri: str): Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) return True + + @staticmethod + def _create_glossary_links(session, location, terms): + Glossary.set_glossary_terms_links( + session, + get_context().username, + location.locationUri, + 'DatasetStorageLocation', + terms + ) diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index f2e3fbd22..28c6755d3 
100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -99,7 +99,7 @@ def delete_table(uri: str): ) ShareObjectRepository.delete_shares(session, table.tableUri) - DatasetTableRepository.delete_dataset_table(session, table) + DatasetTableRepository.delete(session, table) Glossary.delete_glossary_terms_links( session, target_uri=table.tableUri, target_type='DatasetTable' From db1df34ea8a6881502db49b7be60ba1f16df969f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 13:36:37 +0200 Subject: [PATCH 164/346] Extracted DatasetProfilingService --- .../datasets/api/profiling/resolvers.py | 109 ++---------------- .../datasets/aws/s3_profiler_client.py | 30 +++++ .../db/dataset_profiling_repository.py | 2 +- .../services/dataset_profiling_service.py | 102 ++++++++++++++++ 4 files changed, 145 insertions(+), 98 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/s3_profiler_client.py create mode 100644 backend/dataall/modules/datasets/services/dataset_profiling_service.py diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index c29f6100b..911293f62 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -9,6 +9,7 @@ from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets_base.db.models import DatasetProfilingRun from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE @@ -25,43 +26,17 @@ def resolve_dataset(context, 
source: DatasetProfilingRun): def start_profiling_run(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=input['datasetUri'], - permission_name=PROFILE_DATASET_TABLE, - ) - dataset = DatasetService.get_dataset_by_uri(session, input['datasetUri']) - - run = DatasetProfilingRepository.start_profiling( - session=session, - datasetUri=dataset.datasetUri, - tableUri=input.get('tableUri'), - GlueTableName=input.get('GlueTableName'), - ) - - task = models.Task( - targetUri=run.profilingRunUri, action='glue.job.start_profiling_run' - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri]) - - return run + return DatasetProfilingService.start_profiling_run( + uri=input['datasetUri'], + table_uri=input['table_uri'], + glue_table_name=input['GlueTableName'] + ) def get_profiling_run_status(context: Context, source: DatasetProfilingRun): if not source: return None - with context.engine.scoped_session() as session: - task = models.Task( - targetUri=source.profilingRunUri, action='glue.job.profiling_run_status' - ) - session.add(task) - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) + DatasetProfilingService.queue_profiling_run(source.profilingRunUri) return source.status @@ -73,80 +48,20 @@ def get_profiling_results(context: Context, source: DatasetProfilingRun): def update_profiling_run_results(context: Context, source, profilingRunUri, results): - with context.engine.scoped_session() as session: - run = DatasetProfilingRepository.update_run( - session=session, profilingRunUri=profilingRunUri, results=results - ) - return run + return DatasetProfilingService.update_profiling_run_results(profilingRunUri, results) def list_profiling_runs(context: Context, source, datasetUri=None): - with context.engine.scoped_session() as session: - return 
DatasetProfilingRepository.list_profiling_runs(session, datasetUri) + return DatasetProfilingService.list_profiling_runs(datasetUri) def get_profiling_run(context: Context, source, profilingRunUri=None): - with context.engine.scoped_session() as session: - return DatasetProfilingRepository.get_profiling_run( - session=session, profilingRunUri=profilingRunUri - ) + return DatasetProfilingService.get_profiling_run(profilingRunUri) def get_last_table_profiling_run(context: Context, source, tableUri=None): - with context.engine.scoped_session() as session: - run: DatasetProfilingRun = ( - DatasetProfilingRepository.get_table_last_profiling_run( - session=session, tableUri=tableUri - ) - ) - - if run: - if not run.results: - table = DatasetTableRepository.get_dataset_table_by_uri(session, tableUri) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) - environment = api.Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - content = get_profiling_results_from_s3( - environment, dataset, table, run - ) - if content: - results = json.loads(content) - run.results = results - - if not run.results: - run_with_results = ( - DatasetProfilingRepository.get_table_last_profiling_run_with_results( - session=session, tableUri=tableUri - ) - ) - if run_with_results: - run = run_with_results - - return run - - -def get_profiling_results_from_s3(environment, dataset, table, run): - s3 = SessionHelper.remote_session(environment.AwsAccountId).client( - 's3', region_name=environment.region - ) - try: - key = f'profiling/results/{dataset.datasetUri}/{table.GlueTableName}/{run.GlueJobRunId}/results.json' - s3.head_object(Bucket=environment.EnvironmentDefaultBucketName, Key=key) - response = s3.get_object( - Bucket=environment.EnvironmentDefaultBucketName, Key=key - ) - content = str(response['Body'].read().decode('utf-8')) - return content - except Exception as e: - log.error( - f'Failed to retrieve S3 results for table profiling job ' - 
f'{table.GlueTableName}//{run.GlueJobRunId} due to {e}' - ) + return DatasetProfilingService.get_last_table_profiling_run(tableUri) def list_table_profiling_runs(context: Context, source, tableUri=None): - with context.engine.scoped_session() as session: - return DatasetProfilingRepository.list_table_profiling_runs( - session=session, tableUri=tableUri, filter={} - ) + return DatasetProfilingService.list_profiling_runs(tableUri) diff --git a/backend/dataall/modules/datasets/aws/s3_profiler_client.py b/backend/dataall/modules/datasets/aws/s3_profiler_client.py new file mode 100644 index 000000000..cb1928b18 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_profiler_client.py @@ -0,0 +1,30 @@ +import logging + +from dataall.aws.handlers.sts import SessionHelper +from dataall.db.models import Environment + +log = logging.getLogger(__name__) + + +class S3ProfilerClient: + def __init__(self, env: Environment): + self._client = SessionHelper.remote_session(env.AwsAccountId).client( + 's3', region_name=env.region + ) + self._env = env + + def get_profiling_results_from_s3(self, dataset, table, run): + s3 = self._client + try: + key = f'profiling/results/{dataset.datasetUri}/{table.GlueTableName}/{run.GlueJobRunId}/results.json' + s3.head_object(Bucket=self._env.EnvironmentDefaultBucketName, Key=key) + response = s3.get_object( + Bucket=self._env.EnvironmentDefaultBucketName, Key=key + ) + content = str(response['Body'].read().decode('utf-8')) + return content + except Exception as e: + log.error( + f'Failed to retrieve S3 results for table profiling job ' + f'{table.GlueTableName}//{run.GlueJobRunId} due to {e}' + ) diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py index a34ad0a5a..a709e08c8 100644 --- a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -139,7 +139,7 @@ 
def get_table_last_profiling_run(session, tableUri): ) @staticmethod - def get_table_last_profiling_run_with_results(session, tableUri): + def get_table_last_profiling_run_with_results(session, table_uri): return ( session.query(DatasetProfilingRun) .join( diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py new file mode 100644 index 000000000..7160c7dc5 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -0,0 +1,102 @@ +import json + +from dataall.aws.handlers.service_handlers import Worker +from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission +from dataall.db.api import Environment, ResourcePolicy +from dataall.db.models import Task +from dataall.modules.datasets.aws.s3_profiler_client import S3ProfilerClient +from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository +from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE +from dataall.modules.datasets_base.db.models import DatasetProfilingRun + + +class DatasetProfilingService: + @staticmethod + @has_resource_permission(PROFILE_DATASET_TABLE) + def start_profiling_run(uri, table_uri, glue_table_name): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetService.get_dataset_by_uri(session, uri) + run = DatasetProfilingRepository.start_profiling( + session=session, + datasetUri=dataset.datasetUri, + tableUri=table_uri, + GlueTableName=glue_table_name, + ) + + task = Task( + targetUri=run.profilingRunUri, action='glue.job.start_profiling_run' + ) + session.add(task) + + Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) + + 
return run + + @staticmethod + def queue_profiling_run(run_uri): + context = get_context() + with context.db_engine.scoped_session() as session: + task = Task( + targetUri=run_uri, action='glue.job.profiling_run_status' + ) + session.add(task) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) + + @staticmethod + def update_profiling_run_results(run_uri, results): + with get_context().db_engine.scoped_session() as session: + run = DatasetProfilingRepository.update_run( + session=session, profilingRunUri=run_uri, results=results + ) + return run + + @staticmethod + def list_profiling_runs(dataset_uri): + with get_context().db_engine.scoped_session() as session: + return DatasetProfilingRepository.list_profiling_runs(session, dataset_uri) + + @staticmethod + def get_profiling_run(run_uri): + with get_context().db_engine.scoped_session() as session: + return DatasetProfilingRepository.get_profiling_run( + session=session, profilingRunUri=run_uri + ) + + @staticmethod + def get_last_table_profiling_run(table_uri: str): + with get_context().db_engine.scoped_session() as session: + run: DatasetProfilingRun = ( + DatasetProfilingRepository.get_table_last_profiling_run( + session=session, tableUri=table_uri + ) + ) + + if run: + if not run.results: + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + content = S3ProfilerClient(environment).get_profiling_results_from_s3(dataset, table, run) + if content: + results = json.loads(content) + run.results = results + + if not run.results: + run_with_results = ( + DatasetProfilingRepository.get_table_last_profiling_run_with_results(session, table_uri) + ) + if run_with_results: + run = run_with_results + + return run + + @staticmethod + def list_table_profiling_runs(table_uri: str): + with get_context().db_engine.scoped_session() as 
session: + return DatasetProfilingRepository.list_table_profiling_runs( + session=session, tableUri=table_uri, filter={} + ) From c831f18422c99e588bbcaa29c36d43154a5e1b14 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 13:56:04 +0200 Subject: [PATCH 165/346] Extracted DatasetProfilingService --- .../datasets/api/profiling/resolvers.py | 2 +- .../db/dataset_profiling_repository.py | 70 +++++++------------ .../handlers/glue_profiling_handler.py | 4 +- .../services/dataset_profiling_service.py | 40 +++++++---- 4 files changed, 56 insertions(+), 60 deletions(-) diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 911293f62..d59d1628f 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -64,4 +64,4 @@ def get_last_table_profiling_run(context: Context, source, tableUri=None): def list_table_profiling_runs(context: Context, source, tableUri=None): - return DatasetProfilingService.list_profiling_runs(tableUri) + return DatasetProfilingService.list_table_profiling_runs(tableUri) diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py index a709e08c8..eb8a2b3ca 100644 --- a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -10,36 +10,16 @@ def __init__(self): pass @staticmethod - def start_profiling( - session, datasetUri, tableUri=None, GlueTableName=None, GlueJobRunId=None - ): - dataset: Dataset = session.query(Dataset).get(datasetUri) - if not dataset: - raise ObjectNotFound('Dataset', datasetUri) - - if tableUri and not GlueTableName: - table: DatasetTable = session.query(DatasetTable).get( - tableUri - ) - if not table: - raise ObjectNotFound('DatasetTable', tableUri) - GlueTableName 
= table.GlueTableName - - environment: models.Environment = session.query(models.Environment).get( - dataset.environmentUri - ) - if not environment: - raise ObjectNotFound('Environment', dataset.environmentUri) - + def save_profiling(session, dataset, env, glue_table_name): run = DatasetProfilingRun( datasetUri=dataset.datasetUri, status='RUNNING', - AwsAccountId=environment.AwsAccountId, + AwsAccountId=env.AwsAccountId, GlueJobName=dataset.GlueProfilingJobName or 'Unknown', GlueTriggerSchedule=dataset.GlueProfilingTriggerSchedule, GlueTriggerName=dataset.GlueProfilingTriggerName, - GlueTableName=GlueTableName, - GlueJobRunId=GlueJobRunId, + GlueTableName=glue_table_name, + GlueJobRunId=None, owner=dataset.owner, label=dataset.GlueProfilingJobName or 'Unknown', ) @@ -51,18 +31,18 @@ def start_profiling( @staticmethod def update_run( session, - profilingRunUri=None, - GlueJobRunId=None, - GlueJobRunState=None, + run_uri=None, + glue_job_run_id=None, + glue_job_state=None, results=None, ): run = DatasetProfilingRepository.get_profiling_run( - session, profilingRunUri=profilingRunUri, GlueJobRunId=GlueJobRunId + session, profilingRunUri=run_uri, GlueJobRunId=glue_job_run_id ) - if GlueJobRunId: - run.GlueJobRunId = GlueJobRunId - if GlueJobRunState: - run.status = GlueJobRunState + if glue_job_run_id: + run.GlueJobRunId = glue_job_run_id + if glue_job_state: + run.status = glue_job_state if results: run.results = results session.commit() @@ -86,12 +66,12 @@ def get_profiling_run( return run @staticmethod - def list_profiling_runs(session, datasetUri, filter: dict = None): - if not filter: - filter = {} + def list_profiling_runs(session, dataset_uri): + # TODO filter is always default + filter = {} q = ( session.query(DatasetProfilingRun) - .filter(DatasetProfilingRun.datasetUri == datasetUri) + .filter(DatasetProfilingRun.datasetUri == dataset_uri) .order_by(DatasetProfilingRun.created.desc()) ) return paginate( @@ -99,9 +79,9 @@ def list_profiling_runs(session, 
datasetUri, filter: dict = None): ).to_dict() @staticmethod - def list_table_profiling_runs(session, tableUri, filter): - if not filter: - filter = {} + def list_table_profiling_runs(session, table_uri): + # TODO filter is always default + filter = {} q = ( session.query(DatasetProfilingRun) .join( @@ -110,26 +90,26 @@ def list_table_profiling_runs(session, tableUri, filter): ) .filter( and_( - DatasetTable.tableUri == tableUri, - DatasetTable.GlueTableName - == DatasetProfilingRun.GlueTableName, + DatasetTable.tableUri == table_uri, + DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName, ) ) .order_by(DatasetProfilingRun.created.desc()) + .all() ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) ).to_dict() @staticmethod - def get_table_last_profiling_run(session, tableUri): + def get_table_last_profiling_run(session, table_uri): return ( session.query(DatasetProfilingRun) .join( DatasetTable, DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) - .filter(DatasetTable.tableUri == tableUri) + .filter(DatasetTable.tableUri == table_uri) .filter( DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName @@ -146,7 +126,7 @@ def get_table_last_profiling_run_with_results(session, table_uri): DatasetTable, DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, ) - .filter(DatasetTable.tableUri == tableUri) + .filter(DatasetTable.tableUri == table_uri) .filter( DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index a804dc729..81d37d36d 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -32,8 +32,8 @@ def start_profiling_run(engine, task: models.Task): DatasetProfilingRepository.update_run( session, - 
profilingRunUri=profiling.profilingRunUri, - GlueJobRunId=run_id, + run_uri=profiling.profilingRunUri, + glue_job_run_id=run_id, ) return run_id diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 7160c7dc5..9f3428406 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -4,13 +4,14 @@ from dataall.core.context import get_context from dataall.core.permission_checker import has_resource_permission from dataall.db.api import Environment, ResourcePolicy +from dataall.db.exceptions import ObjectNotFound from dataall.db.models import Task from dataall.modules.datasets.aws.s3_profiler_client import S3ProfilerClient from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE -from dataall.modules.datasets_base.db.models import DatasetProfilingRun +from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable class DatasetProfilingService: @@ -20,11 +21,24 @@ def start_profiling_run(uri, table_uri, glue_table_name): context = get_context() with context.db_engine.scoped_session() as session: dataset = DatasetService.get_dataset_by_uri(session, uri) - run = DatasetProfilingRepository.start_profiling( + if not dataset: + raise ObjectNotFound('Dataset', uri) + + if table_uri and not glue_table_name: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + if not table: + raise ObjectNotFound('DatasetTable', table_uri) + glue_table_name = table.GlueTableName + + environment: Environment = 
Environment.get_environment_by_uri(session, dataset.environmentUri) + if not environment: + raise ObjectNotFound('Environment', dataset.environmentUri) + + run = DatasetProfilingRepository.save_profiling( session=session, - datasetUri=dataset.datasetUri, - tableUri=table_uri, - GlueTableName=glue_table_name, + dataset=dataset, + env=environment, + glue_table_name=glue_table_name, ) task = Task( @@ -38,6 +52,7 @@ def start_profiling_run(uri, table_uri, glue_table_name): @staticmethod def queue_profiling_run(run_uri): + # TODO NO PERMISSION CHECK context = get_context() with context.db_engine.scoped_session() as session: task = Task( @@ -48,19 +63,22 @@ def queue_profiling_run(run_uri): @staticmethod def update_profiling_run_results(run_uri, results): + # TODO NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: run = DatasetProfilingRepository.update_run( - session=session, profilingRunUri=run_uri, results=results + session=session, run_uri=run_uri, results=results ) return run @staticmethod def list_profiling_runs(dataset_uri): + # TODO NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: return DatasetProfilingRepository.list_profiling_runs(session, dataset_uri) @staticmethod def get_profiling_run(run_uri): + # TODO NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: return DatasetProfilingRepository.get_profiling_run( session=session, profilingRunUri=run_uri @@ -68,11 +86,10 @@ def get_profiling_run(run_uri): @staticmethod def get_last_table_profiling_run(table_uri: str): + # TODO NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: run: DatasetProfilingRun = ( - DatasetProfilingRepository.get_table_last_profiling_run( - session=session, tableUri=table_uri - ) + DatasetProfilingRepository.get_table_last_profiling_run(session, table_uri) ) if run: @@ -96,7 +113,6 @@ def get_last_table_profiling_run(table_uri: str): @staticmethod def 
list_table_profiling_runs(table_uri: str): + # TODO NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: - return DatasetProfilingRepository.list_table_profiling_runs( - session=session, tableUri=table_uri, filter={} - ) + return DatasetProfilingRepository.list_table_profiling_runs(session, table_uri) From 77efeb381f042037549ca3ad4d9db384d65bf8fa Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 14:09:05 +0200 Subject: [PATCH 166/346] Put all code in dataset_base because of name collision --- .../modules/datasets/db/dataset_service.py | 704 ------------------ .../datasets/services/dataset_service.py | 5 + .../datasets_base/db/dataset_repository.py | 700 ++++++++++++++++- 3 files changed, 702 insertions(+), 707 deletions(-) delete mode 100644 backend/dataall/modules/datasets/db/dataset_service.py create mode 100644 backend/dataall/modules/datasets/services/dataset_service.py diff --git a/backend/dataall/modules/datasets/db/dataset_service.py b/backend/dataall/modules/datasets/db/dataset_service.py deleted file mode 100644 index b93bbe8f4..000000000 --- a/backend/dataall/modules/datasets/db/dataset_service.py +++ /dev/null @@ -1,704 +0,0 @@ -import logging -from datetime import datetime - -from sqlalchemy import and_, or_ -from sqlalchemy.orm import Query - -from dataall.core.context import get_context -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission -from dataall.db.api import ( - Environment, - ResourcePolicy, - KeyValueTag, - Vote, - Stack, - has_tenant_perm, - has_resource_perm, -) -from dataall.db.api import Organization -from dataall.db import models, exceptions, paginate, permissions -from dataall.db.models.Enums import Language -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.datasets.db.enums import ConfidentialityClassification 
-from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ - LIST_ENVIRONMENT_DATASETS, CREATE_DATASET -from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ -from dataall.utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) - -logger = logging.getLogger(__name__) - - -class DatasetService: - @staticmethod - @has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(CREATE_DATASET) - def create_dataset( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> Dataset: - if not uri: - raise exceptions.RequiredParameter('environmentUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - if len(data['label']) > 52: - raise exceptions.InvalidInput( - 'Dataset name', data['label'], 'less than 52 characters' - ) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlAdminGroupName'], - permission_name=CREATE_DATASET, - ) - - environment = Environment.get_environment_by_uri(session, uri) - - organization = Organization.get_organization_by_uri( - session, environment.organizationUri - ) - - dataset = Dataset( - label=data.get('label'), - owner=username, - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - AwsAccountId=environment.AwsAccountId, - SamlAdminGroupName=data['SamlAdminGroupName'], - region=environment.region, - S3BucketName='undefined', 
- GlueDatabaseName='undefined', - IAMDatasetAdminRoleArn='undefined', - IAMDatasetAdminUserArn='undefined', - KmsAlias='undefined', - environmentUri=environment.environmentUri, - organizationUri=environment.organizationUri, - language=data.get('language', Language.English.value), - confidentiality=data.get( - 'confidentiality', ConfidentialityClassification.Unclassified.value - ), - topics=data.get('topics', []), - businessOwnerEmail=data.get('businessOwnerEmail'), - businessOwnerDelegationEmails=data.get('businessOwnerDelegationEmails', []), - stewards=data.get('stewards') - if data.get('stewards') - else data['SamlAdminGroupName'], - ) - session.add(dataset) - session.commit() - - DatasetService._set_dataset_aws_resources(dataset, data, environment) - - activity = models.Activity( - action='dataset:create', - label='dataset:create', - owner=username, - summary=f'{username} created dataset {dataset.name} in {environment.name} on organization {organization.name}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - if environment.SamlGroupName != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - return dataset - - @staticmethod - def _set_dataset_aws_resources(dataset: Dataset, data, environment): - - bucket_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - 
pattern=NamingConventionPattern.S3, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - dataset.S3BucketName = data.get('bucketName') or bucket_name - - glue_db_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.GLUE, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - dataset.GlueDatabaseName = data.get('glueDatabaseName') or glue_db_name - - kms_alias = bucket_name - dataset.KmsAlias = data.get('KmsKeyId') or kms_alias - - iam_role_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.IAM, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - iam_role_arn = f'arn:aws:iam::{dataset.AwsAccountId}:role/{iam_role_name}' - if data.get('adminRoleName'): - dataset.IAMDatasetAdminRoleArn = ( - f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" - ) - dataset.IAMDatasetAdminUserArn = ( - f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" - ) - else: - dataset.IAMDatasetAdminRoleArn = iam_role_arn - dataset.IAMDatasetAdminUserArn = iam_role_arn - - dataset.GlueCrawlerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-crawler' - dataset.GlueProfilingJobName = f'{dataset.S3BucketName}-{dataset.datasetUri}-profiler' - dataset.GlueProfilingTriggerSchedule = None - dataset.GlueProfilingTriggerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-trigger' - dataset.GlueDataQualityJobName = f'{dataset.S3BucketName}-{dataset.datasetUri}-dataquality' - dataset.GlueDataQualitySchedule = None - dataset.GlueDataQualityTriggerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-dqtrigger' - return dataset - - @staticmethod - def create_dataset_stack(session, dataset: Dataset) -> models.Stack: - return Stack.create_stack( - session=session, - environment_uri=dataset.environmentUri, - target_uri=dataset.datasetUri, - 
target_label=dataset.label, - target_type='dataset', - payload={ - 'bucket_name': dataset.S3BucketName, - 'database_name': dataset.GlueDatabaseName, - 'role_name': dataset.S3BucketName, - 'user_name': dataset.S3BucketName, - }, - ) - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - def get_dataset(session, uri: str) -> Dataset: - return DatasetService.get_dataset_by_uri(session, uri) - - @staticmethod - def get_dataset_by_uri(session, dataset_uri) -> Dataset: - return DatasetRepository.get_dataset_by_uri(session, dataset_uri) - - @staticmethod - def query_user_datasets(session, username, groups, filter) -> Query: - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - query = ( - session.query(Dataset) - .outerjoin( - ShareObject, - ShareObject.datasetUri == Dataset.datasetUri, - ) - .outerjoin( - ShareObjectItem, - ShareObjectItem.shareUri == ShareObject.shareUri - ) - .filter( - or_( - Dataset.owner == username, - Dataset.SamlAdminGroupName.in_(groups), - Dataset.stewards.in_(groups), - and_( - ShareObject.principalId.in_(groups), - ShareObjectItem.status.in_(share_item_shared_states), - ), - and_( - ShareObject.owner == username, - ShareObjectItem.status.in_(share_item_shared_states), - ), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - Dataset.description.ilike(filter.get('term') + '%%'), - Dataset.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def paginated_user_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=DatasetService.query_user_datasets(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def paginated_dataset_tables( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - query = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == uri, - 
DatasetTable.LastGlueTableStatus != 'Deleted', - ) - ) - .order_by(DatasetTable.created.desc()) - ) - if data and data.get('term'): - query = query.filter( - or_( - *[ - DatasetTable.name.ilike('%' + data.get('term') + '%'), - DatasetTable.GlueTableName.ilike( - '%' + data.get('term') + '%' - ), - ] - ) - ) - return paginate( - query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) - ).to_dict() - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(UPDATE_DATASET) - def update_dataset(session, uri, data=None) -> Dataset: - username = get_context().username - dataset: Dataset = DatasetService.get_dataset_by_uri(session, uri) - if data and isinstance(data, dict): - for k in data.keys(): - if k != 'stewards': - setattr(dataset, k, data.get(k)) - if data.get('stewards') and data.get('stewards') != dataset.stewards: - if data.get('stewards') != dataset.SamlAdminGroupName: - DatasetService.transfer_stewardship_to_new_stewards( - session, dataset, data['stewards'] - ) - dataset.stewards = data['stewards'] - else: - DatasetService.transfer_stewardship_to_owners(session, dataset) - dataset.stewards = dataset.SamlAdminGroupName - - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - DatasetService.update_dataset_glossary_terms(session, username, uri, data) - activity = models.Activity( - action='dataset:update', - label='dataset:update', - owner=username, - summary=f'{username} updated dataset {dataset.name}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - session.commit() - return dataset - - @staticmethod - def transfer_stewardship_to_owners(session, dataset): - dataset_shares = ( - session.query(ShareObject) - .filter(ShareObject.datasetUri == dataset.datasetUri) - .all() - ) - if dataset_shares: - for share in dataset_shares: - 
ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - return dataset - - @staticmethod - def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=dataset.datasetUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - - dataset_tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, dataset.datasetUri)] - for tableUri in dataset_tables: - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=tableUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=DATASET_TABLE_READ, - resource_uri=tableUri, - resource_type=DatasetTable.__name__, - ) - - dataset_shares = ( - session.query(ShareObject) - .filter(ShareObject.datasetUri == dataset.datasetUri) - .all() - ) - if dataset_shares: - for share in dataset_shares: - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=share.shareUri, - ) - return dataset - - @staticmethod - def update_dataset_glossary_terms(session, username, uri, data): - if data.get('terms'): - input_terms = data.get('terms', []) - current_links = session.query(models.TermLink).filter( - models.TermLink.targetUri == uri - ) - 
for current_link in current_links: - if current_link not in input_terms: - session.delete(current_link) - for nodeUri in input_terms: - term = session.query(models.GlossaryNode).get(nodeUri) - if term: - link = ( - session.query(models.TermLink) - .filter( - models.TermLink.targetUri == uri, - models.TermLink.nodeUri == nodeUri, - ) - .first() - ) - if not link: - new_link = models.TermLink( - targetUri=uri, - nodeUri=nodeUri, - targetType='Dataset', - owner=username, - approvedByOwner=True, - ) - session.add(new_link) - - @staticmethod - def update_bucket_status(session, dataset_uri): - """ - helper method to update the dataset bucket status - """ - dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) - dataset.bucketCreated = True - return dataset - - @staticmethod - def update_glue_database_status(session, dataset_uri): - """ - helper method to update the dataset db status - """ - dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) - dataset.glueDatabaseCreated = True - - @staticmethod - def get_dataset_tables(session, dataset_uri): - """return the dataset tables""" - return ( - session.query(DatasetTable) - .filter(DatasetTable.datasetUri == dataset_uri) - .all() - ) - - @staticmethod - def query_dataset_shares(session, dataset_uri) -> Query: - return session.query(ShareObject).filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.deleted.is_(None), - ) - ) - - @staticmethod - def paginated_dataset_shares( - session, username, groups, uri, data=None, check_perm=None - ) -> [ShareObject]: - query = DatasetService.query_dataset_shares(session, uri) - return paginate( - query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) - ).to_dict() - - @staticmethod - def list_dataset_shares(session, dataset_uri) -> [ShareObject]: - """return the dataset shares""" - query = DatasetService.query_dataset_shares(session, dataset_uri) - return query.all() - - @staticmethod - def 
list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: - query = session.query(ShareObject).filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.deleted.is_(None), - ShareObject.existingSharedItems.is_(True), - ) - ) - return query.all() - - @staticmethod - def list_dataset_redshift_clusters( - session, dataset_uri - ) -> [models.RedshiftClusterDataset]: - """return the dataset clusters""" - return ( - session.query(models.RedshiftClusterDataset) - .filter(models.RedshiftClusterDataset.datasetUri == dataset_uri) - .all() - ) - - @staticmethod - def delete_dataset( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - dataset = DatasetService.get_dataset_by_uri(session, uri) - DatasetService._delete_dataset_shares_with_no_shared_items(session, uri) - DatasetService._delete_dataset_term_links(session, uri) - DatasetService._delete_dataset_tables(session, dataset.datasetUri) - DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) - KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') - Vote.delete_votes(session, dataset.datasetUri, 'dataset') - session.delete(dataset) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.SamlAdminGroupName - ) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.SamlAdminGroupName != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=env.SamlGroupName - ) - if dataset.stewards: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.stewards - ) - return True - - @staticmethod - def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): - share_objects = ( - session.query(ShareObject) - .filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.existingSharedItems.is_(False), - ) - ) - .all() - ) - for share in share_objects: - ( - 
session.query(ShareObjectItem) - .filter(ShareObjectItem.shareUri == share.shareUri) - .delete() - ) - session.delete(share) - - @staticmethod - def _delete_dataset_term_links(session, uri): - tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, uri)] - for tableUri in tables: - term_links = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.targetUri == tableUri, - models.TermLink.targetType == 'DatasetTable', - ) - ) - .all() - ) - for link in term_links: - session.delete(link) - session.commit() - term_links = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.targetUri == uri, - models.TermLink.targetType == 'Dataset', - ) - ) - .all() - ) - for link in term_links: - session.delete(link) - - @staticmethod - def _delete_dataset_tables(session, dataset_uri) -> bool: - tables = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == dataset_uri, - ) - ) - .all() - ) - for table in tables: - table.deleted = datetime.now() - return tables - - @staticmethod - def list_all_datasets(session) -> [Dataset]: - return session.query(Dataset).all() - - @staticmethod - def list_all_active_datasets(session) -> [Dataset]: - return ( - session.query(Dataset).filter(Dataset.deleted.is_(None)).all() - ) - - @staticmethod - def get_dataset_by_bucket_name(session, bucket) -> [Dataset]: - return ( - session.query(Dataset) - .filter(Dataset.S3BucketName == bucket) - .first() - ) - - @staticmethod - def count_dataset_tables(session, dataset_uri): - return ( - session.query(DatasetTable) - .filter(DatasetTable.datasetUri == dataset_uri) - .count() - ) - - @staticmethod - def query_environment_group_datasets(session, envUri, groupUri, filter) -> Query: - query = session.query(Dataset).filter( - and_( - Dataset.environmentUri == envUri, - Dataset.SamlAdminGroupName == groupUri, - Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( 
- Dataset.label.ilike('%' + term + '%'), - Dataset.description.ilike('%' + term + '%'), - Dataset.tags.contains(f'{{{term}}}'), - Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - def query_environment_datasets(session, uri, filter) -> Query: - query = session.query(Dataset).filter( - and_( - Dataset.environmentUri == uri, - Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - Dataset.label.ilike('%' + term + '%'), - Dataset.description.ilike('%' + term + '%'), - Dataset.tags.contains(f'{{{term}}}'), - Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_permission(LIST_ENVIRONMENT_DATASETS) - def paginated_environment_datasets( - session, uri, data=None, - ) -> dict: - return paginate( - query=DatasetService.query_environment_datasets( - session, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def paginated_environment_group_datasets( - session, envUri, groupUri, data=None - ) -> dict: - return paginate( - query=DatasetService.query_environment_group_datasets( - session, envUri, groupUri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def list_group_datasets(session, environment_id, group_uri): - return ( - session.query(Dataset) - .filter( - and_( - Dataset.environmentUri == environment_id, - Dataset.SamlAdminGroupName == group_uri, - ) - ) - .all() - ) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py new file mode 100644 index 000000000..e1c025e2d --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -0,0 +1,5 @@ + + + +class DatasetService: + pass \ No newline at end of file diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py 
b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 4f15ad259..8b8369854 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -1,8 +1,38 @@ -from operator import and_ +import logging +from datetime import datetime +from sqlalchemy import and_, or_ +from sqlalchemy.orm import Query + +from dataall.core.context import get_context +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission +from dataall.db.api import ( + Environment, + ResourcePolicy, + KeyValueTag, + Vote, + Stack, + has_tenant_perm, + has_resource_perm, +) +from dataall.db.api import Organization +from dataall.db import models, exceptions, paginate, permissions +from dataall.db.models.Enums import Language +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM +from dataall.modules.datasets.db.enums import ConfidentialityClassification from dataall.core.group.services.group_resource_manager import GroupResource -from dataall.db import exceptions -from dataall.modules.datasets_base.db.models import Dataset +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ + LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ +from dataall.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) + +logger = logging.getLogger(__name__) class DatasetRepository(GroupResource): @@ -25,3 +55,667 @@ def count_resources(self, session, environment_uri, group_uri) -> int: )) .count() ) + + @staticmethod + 
@has_tenant_perm(MANAGE_DATASETS) + @has_resource_perm(CREATE_DATASET) + def create_dataset( + session, + username: str, + groups: [str], + uri: str, + data: dict = None, + check_perm: bool = False, + ) -> Dataset: + if not uri: + raise exceptions.RequiredParameter('environmentUri') + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('SamlAdminGroupName'): + raise exceptions.RequiredParameter('group') + if not data.get('label'): + raise exceptions.RequiredParameter('label') + if len(data['label']) > 52: + raise exceptions.InvalidInput( + 'Dataset name', data['label'], 'less than 52 characters' + ) + + Environment.check_group_environment_permission( + session=session, + username=username, + groups=groups, + uri=uri, + group=data['SamlAdminGroupName'], + permission_name=CREATE_DATASET, + ) + + environment = Environment.get_environment_by_uri(session, uri) + + organization = Organization.get_organization_by_uri( + session, environment.organizationUri + ) + + dataset = Dataset( + label=data.get('label'), + owner=username, + description=data.get('description', 'No description provided'), + tags=data.get('tags', []), + AwsAccountId=environment.AwsAccountId, + SamlAdminGroupName=data['SamlAdminGroupName'], + region=environment.region, + S3BucketName='undefined', + GlueDatabaseName='undefined', + IAMDatasetAdminRoleArn='undefined', + IAMDatasetAdminUserArn='undefined', + KmsAlias='undefined', + environmentUri=environment.environmentUri, + organizationUri=environment.organizationUri, + language=data.get('language', Language.English.value), + confidentiality=data.get( + 'confidentiality', ConfidentialityClassification.Unclassified.value + ), + topics=data.get('topics', []), + businessOwnerEmail=data.get('businessOwnerEmail'), + businessOwnerDelegationEmails=data.get('businessOwnerDelegationEmails', []), + stewards=data.get('stewards') + if data.get('stewards') + else data['SamlAdminGroupName'], + ) + session.add(dataset) + session.commit() + + 
DatasetRepository._set_dataset_aws_resources(dataset, data, environment) + + activity = models.Activity( + action='dataset:create', + label='dataset:create', + owner=username, + summary=f'{username} created dataset {dataset.name} in {environment.name} on organization {organization.name}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlAdminGroupName'], + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if environment.SamlGroupName != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + return dataset + + @staticmethod + def _set_dataset_aws_resources(dataset: Dataset, data, environment): + + bucket_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.S3, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + dataset.S3BucketName = data.get('bucketName') or bucket_name + + glue_db_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.GLUE, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + dataset.GlueDatabaseName = data.get('glueDatabaseName') or glue_db_name + + kms_alias = bucket_name + dataset.KmsAlias = data.get('KmsKeyId') or kms_alias + + iam_role_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.IAM, 
+ resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + iam_role_arn = f'arn:aws:iam::{dataset.AwsAccountId}:role/{iam_role_name}' + if data.get('adminRoleName'): + dataset.IAMDatasetAdminRoleArn = ( + f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" + ) + dataset.IAMDatasetAdminUserArn = ( + f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" + ) + else: + dataset.IAMDatasetAdminRoleArn = iam_role_arn + dataset.IAMDatasetAdminUserArn = iam_role_arn + + dataset.GlueCrawlerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-crawler' + dataset.GlueProfilingJobName = f'{dataset.S3BucketName}-{dataset.datasetUri}-profiler' + dataset.GlueProfilingTriggerSchedule = None + dataset.GlueProfilingTriggerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-trigger' + dataset.GlueDataQualityJobName = f'{dataset.S3BucketName}-{dataset.datasetUri}-dataquality' + dataset.GlueDataQualitySchedule = None + dataset.GlueDataQualityTriggerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-dqtrigger' + return dataset + + @staticmethod + def create_dataset_stack(session, dataset: Dataset) -> models.Stack: + return Stack.create_stack( + session=session, + environment_uri=dataset.environmentUri, + target_uri=dataset.datasetUri, + target_label=dataset.label, + target_type='dataset', + payload={ + 'bucket_name': dataset.S3BucketName, + 'database_name': dataset.GlueDatabaseName, + 'role_name': dataset.S3BucketName, + 'user_name': dataset.S3BucketName, + }, + ) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + def get_dataset(session, uri: str) -> Dataset: + return DatasetRepository.get_dataset_by_uri(session, uri) + + @staticmethod + def query_user_datasets(session, username, groups, filter) -> Query: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + query = ( + session.query(Dataset) + .outerjoin( + ShareObject, + ShareObject.datasetUri == Dataset.datasetUri, + ) + .outerjoin( + ShareObjectItem, + 
ShareObjectItem.shareUri == ShareObject.shareUri + ) + .filter( + or_( + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), + Dataset.stewards.in_(groups), + and_( + ShareObject.principalId.in_(groups), + ShareObjectItem.status.in_(share_item_shared_states), + ), + and_( + ShareObject.owner == username, + ShareObjectItem.status.in_(share_item_shared_states), + ), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + Dataset.description.ilike(filter.get('term') + '%%'), + Dataset.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + @staticmethod + def paginated_user_datasets( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + return paginate( + query=DatasetRepository.query_user_datasets(session, username, groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def paginated_dataset_tables( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + query = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == uri, + DatasetTable.LastGlueTableStatus != 'Deleted', + ) + ) + .order_by(DatasetTable.created.desc()) + ) + if data and data.get('term'): + query = query.filter( + or_( + *[ + DatasetTable.name.ilike('%' + data.get('term') + '%'), + DatasetTable.GlueTableName.ilike( + '%' + data.get('term') + '%' + ), + ] + ) + ) + return paginate( + query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) + ).to_dict() + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET) + def update_dataset(session, uri, data=None) -> Dataset: + username = get_context().username + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if data and isinstance(data, dict): + for k in data.keys(): + if k != 'stewards': + setattr(dataset, k, data.get(k)) + if data.get('stewards') and data.get('stewards') != dataset.stewards: + if 
data.get('stewards') != dataset.SamlAdminGroupName: + DatasetRepository.transfer_stewardship_to_new_stewards( + session, dataset, data['stewards'] + ) + dataset.stewards = data['stewards'] + else: + DatasetRepository.transfer_stewardship_to_owners(session, dataset) + dataset.stewards = dataset.SamlAdminGroupName + + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + DatasetRepository.update_dataset_glossary_terms(session, username, uri, data) + activity = models.Activity( + action='dataset:update', + label='dataset:update', + owner=username, + summary=f'{username} updated dataset {dataset.name}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + session.commit() + return dataset + + @staticmethod + def transfer_stewardship_to_owners(session, dataset): + dataset_shares = ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset.datasetUri) + .all() + ) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=permissions.SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + return dataset + + @staticmethod + def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=dataset.datasetUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + + dataset_tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset.datasetUri)] + for tableUri in 
dataset_tables: + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=tableUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=DATASET_TABLE_READ, + resource_uri=tableUri, + resource_type=DatasetTable.__name__, + ) + + dataset_shares = ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset.datasetUri) + .all() + ) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=permissions.SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=share.shareUri, + ) + return dataset + + @staticmethod + def update_dataset_glossary_terms(session, username, uri, data): + if data.get('terms'): + input_terms = data.get('terms', []) + current_links = session.query(models.TermLink).filter( + models.TermLink.targetUri == uri + ) + for current_link in current_links: + if current_link not in input_terms: + session.delete(current_link) + for nodeUri in input_terms: + term = session.query(models.GlossaryNode).get(nodeUri) + if term: + link = ( + session.query(models.TermLink) + .filter( + models.TermLink.targetUri == uri, + models.TermLink.nodeUri == nodeUri, + ) + .first() + ) + if not link: + new_link = models.TermLink( + targetUri=uri, + nodeUri=nodeUri, + targetType='Dataset', + owner=username, + approvedByOwner=True, + ) + session.add(new_link) + + @staticmethod + def update_bucket_status(session, dataset_uri): + """ + helper method to update the dataset bucket status + """ + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + dataset.bucketCreated = True + return dataset + + @staticmethod + def update_glue_database_status(session, dataset_uri): + """ + helper method to 
update the dataset db status + """ + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + dataset.glueDatabaseCreated = True + + @staticmethod + def get_dataset_tables(session, dataset_uri): + """return the dataset tables""" + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def query_dataset_shares(session, dataset_uri) -> Query: + return session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ) + ) + + @staticmethod + def paginated_dataset_shares( + session, username, groups, uri, data=None, check_perm=None + ) -> [ShareObject]: + query = DatasetRepository.query_dataset_shares(session, uri) + return paginate( + query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) + ).to_dict() + + @staticmethod + def list_dataset_shares(session, dataset_uri) -> [ShareObject]: + """return the dataset shares""" + query = DatasetRepository.query_dataset_shares(session, dataset_uri) + return query.all() + + @staticmethod + def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: + query = session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ShareObject.existingSharedItems.is_(True), + ) + ) + return query.all() + + @staticmethod + def list_dataset_redshift_clusters( + session, dataset_uri + ) -> [models.RedshiftClusterDataset]: + """return the dataset clusters""" + return ( + session.query(models.RedshiftClusterDataset) + .filter(models.RedshiftClusterDataset.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def delete_dataset( + session, username, groups, uri, data=None, check_perm=None + ) -> bool: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + DatasetRepository._delete_dataset_shares_with_no_shared_items(session, uri) + DatasetRepository._delete_dataset_term_links(session, uri) + 
DatasetRepository._delete_dataset_tables(session, dataset.datasetUri) + DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) + KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') + Vote.delete_votes(session, dataset.datasetUri, 'dataset') + session.delete(dataset) + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.SamlAdminGroupName + ) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if dataset.SamlAdminGroupName != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=env.SamlGroupName + ) + if dataset.stewards: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.stewards + ) + return True + + @staticmethod + def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): + share_objects = ( + session.query(ShareObject) + .filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.existingSharedItems.is_(False), + ) + ) + .all() + ) + for share in share_objects: + ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) + .delete() + ) + session.delete(share) + + @staticmethod + def _delete_dataset_term_links(session, uri): + tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, uri)] + for tableUri in tables: + term_links = ( + session.query(models.TermLink) + .filter( + and_( + models.TermLink.targetUri == tableUri, + models.TermLink.targetType == 'DatasetTable', + ) + ) + .all() + ) + for link in term_links: + session.delete(link) + session.commit() + term_links = ( + session.query(models.TermLink) + .filter( + and_( + models.TermLink.targetUri == uri, + models.TermLink.targetType == 'Dataset', + ) + ) + .all() + ) + for link in term_links: + session.delete(link) + + @staticmethod + def _delete_dataset_tables(session, dataset_uri) -> bool: + tables = ( + session.query(DatasetTable) + 
.filter( + and_( + DatasetTable.datasetUri == dataset_uri, + ) + ) + .all() + ) + for table in tables: + table.deleted = datetime.now() + return tables + + @staticmethod + def list_all_datasets(session) -> [Dataset]: + return session.query(Dataset).all() + + @staticmethod + def list_all_active_datasets(session) -> [Dataset]: + return ( + session.query(Dataset).filter(Dataset.deleted.is_(None)).all() + ) + + @staticmethod + def get_dataset_by_bucket_name(session, bucket) -> [Dataset]: + return ( + session.query(Dataset) + .filter(Dataset.S3BucketName == bucket) + .first() + ) + + @staticmethod + def count_dataset_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .count() + ) + + @staticmethod + def query_environment_group_datasets(session, envUri, groupUri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == envUri, + Dataset.SamlAdminGroupName == groupUri, + Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + def query_environment_datasets(session, uri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == uri, + Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def paginated_environment_datasets( + session, uri, data=None, + ) -> dict: + return paginate( + 
query=DatasetRepository.query_environment_datasets( + session, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def paginated_environment_group_datasets( + session, envUri, groupUri, data=None + ) -> dict: + return paginate( + query=DatasetRepository.query_environment_group_datasets( + session, envUri, groupUri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def list_group_datasets(session, environment_id, group_uri): + return ( + session.query(Dataset) + .filter( + and_( + Dataset.environmentUri == environment_id, + Dataset.SamlAdminGroupName == group_uri, + ) + ) + .all() + ) + From 0e0689406f1211a33be44f11a5866f3b95f25b22 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 15:27:46 +0200 Subject: [PATCH 167/346] Extracted methods into DatasetService --- .../modules/datasets/api/dataset/resolvers.py | 504 +----------------- .../datasets/services/dataset_service.py | 503 ++++++++++++++++- 2 files changed, 526 insertions(+), 481 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index dd90eb360..8fb021c98 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -1,65 +1,21 @@ -import json import logging -from botocore.config import Config -from botocore.exceptions import ClientError - from dataall.api.Objects.Stack import stack_helper from dataall import db from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper from dataall.db import paginate, exceptions, models -from dataall.db.api import Environment, ResourcePolicy +from dataall.db.api import Environment from dataall.db.api.organization import Organization from dataall.modules.dataset_sharing.db.models 
import ShareObject from dataall.modules.datasets import Dataset from dataall.modules.datasets.api.dataset.enums import DatasetRole -from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler -from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer -from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, SUMMARY_DATASET, \ - CRAWL_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET -from dataall.aws.handlers.quicksight import Quicksight +from dataall.modules.datasets.services.dataset_service import DatasetService log = logging.getLogger(__name__) -def check_dataset_account(environment): - if environment.dashboardsEnabled: - quicksight_subscription = Quicksight.check_quicksight_enterprise_subscription(AwsAccountId=environment.AwsAccountId) - if quicksight_subscription: - group = Quicksight.create_quicksight_group(AwsAccountId=environment.AwsAccountId) - return True if group else False - return True - - def create_dataset(context: Context, source, input=None): - with context.engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, input.get('environmentUri')) - check_dataset_account(environment=environment) - - dataset = DatasetService.create_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=input.get('environmentUri'), - data=input, - check_perm=True, - ) - DatasetService.create_dataset_stack(session, dataset) - - DatasetIndexer.upsert( - session=session, dataset_uri=dataset.datasetUri - ) - - _deploy_dataset_stack(dataset) - - dataset.userRoleForDataset = DatasetRole.Creator.value - - return dataset + return DatasetService.create_dataset(env_uri=input['environmentUri'], 
data=input) def import_dataset(context: Context, source, input=None): @@ -72,43 +28,11 @@ def import_dataset(context: Context, source, input=None): if not input.get('SamlAdminGroupName'): raise exceptions.RequiredParameter('group') - with context.engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, input.get('environmentUri')) - check_dataset_account(environment=environment) - - dataset = DatasetService.create_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=input.get('environmentUri'), - data=input, - check_perm=True, - ) - dataset.imported = True - dataset.importedS3Bucket = True if input['bucketName'] else False - dataset.importedGlueDatabase = True if input.get('glueDatabaseName') else False - dataset.importedKmsKey = True if input.get('KmsKeyId') else False - dataset.importedAdminRole = True if input.get('adminRoleName') else False - - DatasetService.create_dataset_stack(session, dataset) - - DatasetIndexer.upsert( - session=session, dataset_uri=dataset.datasetUri - ) - - _deploy_dataset_stack(dataset) - - dataset.userRoleForDataset = DatasetRole.Creator.value - - return dataset + return DatasetService.import_dataset(data=input) def get_dataset(context, source, datasetUri=None): - with context.engine.scoped_session() as session: - dataset = DatasetService.get_dataset(session, uri=datasetUri) - if dataset.SamlAdminGroupName in context.groups: - dataset.userRoleForDataset = DatasetRole.Admin.value - return dataset + return DatasetService.get_dataset(uri=datasetUri) def resolve_user_role(context: Context, source: Dataset, **kwargs): @@ -137,36 +61,13 @@ def resolve_user_role(context: Context, source: Dataset, **kwargs): def get_file_upload_presigned_url( context, source, datasetUri: str = None, input: dict = None ): - with context.engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - - s3_client = 
SessionHelper.remote_session(dataset.AwsAccountId).client( - 's3', - region_name=dataset.region, - config=Config(signature_version='s3v4', s3={'addressing_style': 'virtual'}), - ) - try: - s3_client.get_bucket_acl( - Bucket=dataset.S3BucketName, ExpectedBucketOwner=dataset.AwsAccountId - ) - response = s3_client.generate_presigned_post( - Bucket=dataset.S3BucketName, - Key=input.get('prefix', 'uploads') + '/' + input.get('fileName'), - ExpiresIn=15 * 60, - ) - - return json.dumps(response) - except ClientError as e: - raise e + return DatasetService.get_file_upload_presigned_url(uri=datasetUri, data=input) def list_datasets(context: Context, source, filter: dict = None): if not filter: filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return DatasetService.paginated_user_datasets( - session, context.username, context.groups, uri=None, data=filter - ) + return DatasetService.list_datasets(filter) def list_locations(context, source: Dataset, filter: dict = None): @@ -174,12 +75,7 @@ def list_locations(context, source: Dataset, filter: dict = None): return None if not filter: filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return DatasetLocationRepository.paginated_dataset_locations( - session=session, - uri=source.datasetUri, - data=filter, - ) + return DatasetService.list_locations(source.datasetUri, filter) def list_tables(context, source: Dataset, filter: dict = None): @@ -187,14 +83,7 @@ def list_tables(context, source: Dataset, filter: dict = None): return None if not filter: filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return DatasetService.paginated_dataset_tables( - session=session, - username=context.username, - groups=context.groups, - uri=source.datasetUri, - data=filter, - ) + return DatasetService.list_tables(source.datasetUri, filter) def get_dataset_organization(context, source: Dataset, **kwargs): @@ -224,173 +113,29 @@ def 
get_dataset_stewards_group(context, source: Dataset, **kwargs): def update_dataset(context, source, datasetUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - check_dataset_account(environment=environment) - updated_dataset = DatasetService.update_dataset( - session=session, - uri=datasetUri, - data=input, - ) - DatasetIndexer.upsert(session, dataset_uri=datasetUri) - - _deploy_dataset_stack(updated_dataset) - - return updated_dataset + return DatasetService.update_dataset(uri=datasetUri) def get_dataset_statistics(context: Context, source: Dataset, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - count_tables = DatasetService.count_dataset_tables(session, source.datasetUri) - count_locations = DatasetLocationRepository.count_dataset_locations( - session, source.datasetUri - ) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, source.datasetUri, {'targetType': 'dataset'} - ) - return { - 'tables': count_tables or 0, - 'locations': count_locations or 0, - 'upvotes': count_upvotes or 0, - } + return DatasetService.get_dataset_statistics(source) def get_dataset_etl_credentials(context: Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=CREDENTIALS_DATASET, - ) - task = models.Task(targetUri=datasetUri, action='iam.dataset.user.credentials') - session.add(task) - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - return json.dumps(response['response']) + return DatasetService.get_dataset_etl_credentials(uri=datasetUri) def get_dataset_assume_role_url(context: 
Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=CREDENTIALS_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - if dataset.SamlAdminGroupName not in context.groups: - share = ShareObject.get_share_by_dataset_attributes( - session=session, - dataset_uri=datasetUri, - dataset_owner=context.username - ) - shared_environment = Environment.get_environment_by_uri( - session=session, - uri=share.environmentUri - ) - env_group = Environment.get_environment_group( - session=session, - group_uri=share.principalId, - environment_uri=share.environmentUri - ) - role_arn = env_group.environmentIAMRoleArn - account_id = shared_environment.AwsAccountId - else: - role_arn = dataset.IAMDatasetAdminRoleArn - account_id = dataset.AwsAccountId - - pivot_session = SessionHelper.remote_session(account_id) - aws_session = SessionHelper.get_session( - base_session=pivot_session, role_arn=role_arn - ) - url = SessionHelper.get_console_access_url( - aws_session, - region=dataset.region, - bucket=dataset.S3BucketName, - ) - return url + return DatasetService.get_dataset_assume_role_url(uri=datasetUri) def sync_tables(context: Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=SYNC_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - - task = models.Task( - action='glue.dataset.database.tables', - targetUri=dataset.datasetUri, - ) - session.add(task) - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - with context.engine.scoped_session() as session: - DatasetTableIndexer.upsert_all( - 
session=session, dataset_uri=dataset.datasetUri - ) - DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) - return DatasetService.paginated_dataset_tables( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data={'page': 1, 'pageSize': 10}, - check_perm=None, - ) + return DatasetService.sync_tables(uri=datasetUri) def start_crawler(context: Context, source, datasetUri: str, input: dict = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=CRAWL_DATASET, - ) - - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - - location = ( - f's3://{dataset.S3BucketName}/{input.get("prefix")}' - if input.get('prefix') - else f's3://{dataset.S3BucketName}' - ) - - crawler = DatasetCrawler(dataset).get_crawler() - if not crawler: - raise exceptions.AWSResourceNotFound( - action=CRAWL_DATASET, - message=f'Crawler {dataset.GlueCrawlerName} can not be found', - ) - - task = models.Task( - targetUri=datasetUri, - action='glue.crawler.start', - payload={'location': location}, - ) - session.add(task) - session.commit() - - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - - return { - 'Name': dataset.GlueCrawlerName, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, - 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'), - } + return DatasetService.start_crawler(uri=datasetUri, data=input) def list_dataset_share_objects(context, source, filter: dict = None): @@ -398,100 +143,21 @@ def list_dataset_share_objects(context, source, filter: dict = None): return None if not filter: filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return DatasetService.paginated_dataset_shares( - session=session, - username=context.username, - groups=context.groups, - 
uri=source.datasetUri, - data=filter, - check_perm=True, - ) + return DatasetService.list_dataset_share_objects(source, filter) def generate_dataset_access_token(context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=CREDENTIALS_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - aws_session = SessionHelper.get_session( - base_session=pivot_session, role_arn=dataset.IAMDatasetAdminRoleArn - ) - c = aws_session.get_credentials() - credentials = { - 'AccessKey': c.access_key, - 'SessionKey': c.secret_key, - 'sessionToken': c.token, - } - - return json.dumps(credentials) + return DatasetService.generate_dataset_access_token(uri=datasetUri) def get_dataset_summary(context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - try: - s3.head_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - ) - response = s3.get_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - ) - content = str(response['Body'].read().decode('utf-8')) - return content - except Exception as e: - raise e + return DatasetService.get_dataset_summary(uri=datasetUri) def save_dataset_summary( context: Context, source, datasetUri: str = None, 
content: str = None ): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=SUMMARY_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - s3.put_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - Body=content, - ) - return True + return DatasetService.save_dataset_summary(uri=datasetUri, content=content) def get_dataset_stack(context: Context, source: Dataset, **kwargs): @@ -504,88 +170,13 @@ def get_dataset_stack(context: Context, source: Dataset, **kwargs): def get_crawler(context, source, datasetUri: str = None, name: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=CRAWL_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - - aws_session = SessionHelper.remote_session(dataset.AwsAccountId) - client = aws_session.client('glue', region_name=dataset.region) - - response = client.get_crawler(Name=name) - return { - 'Name': name, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, - 'status': response['Crawler'].get('LastCrawl', {}).get('Status', 'N/A'), - } + return DatasetService.get_crawler(uri=datasetUri, name=name) def delete_dataset( context: Context, source, datasetUri: str = None, deleteFromAWS: bool = False ): - with 
context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=DELETE_DATASET, - ) - dataset: Dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - env: models.Environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - shares = DatasetService.list_dataset_shares_with_existing_shared_items(session, datasetUri) - if shares: - raise exceptions.UnauthorizedOperation( - action=DELETE_DATASET, - message=f'Dataset {dataset.name} is shared with other teams. ' - 'Revoke all dataset shares before deletion.', - ) - redshift_datasets = DatasetService.list_dataset_redshift_clusters( - session, datasetUri - ) - if redshift_datasets: - raise exceptions.UnauthorizedOperation( - action=DELETE_DATASET, - message='Dataset is used by Redshift clusters. ' - 'Remove clusters associations first.', - ) - - tables = [t.tableUri for t in DatasetService.get_dataset_tables(session, datasetUri)] - for uri in tables: - DatasetIndexer.delete_doc(doc_id=uri) - - folders = [f.locationUri for f in DatasetLocationRepository.get_dataset_folders(session, datasetUri)] - for uri in folders: - DatasetIndexer.delete_doc(doc_id=uri) - - DatasetIndexer.delete_doc(doc_id=datasetUri) - - DatasetService.delete_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data=None, - check_perm=True, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - target_uri=datasetUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - ) - stack_helper.deploy_stack(dataset.environmentUri) - return True + return DatasetService.delete_dataset(uri=datasetUri, delete_from_aws=deleteFromAWS) def get_dataset_glossary_terms(context: Context, source: Dataset, **kwargs): @@ -606,34 +197,7 @@ def get_dataset_glossary_terms(context: Context, source: Dataset, 
**kwargs): def publish_dataset_update( context: Context, source, datasetUri: str = None, s3Prefix: str = None ): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=SUBSCRIPTIONS_DATASET, - ) - dataset = DatasetService.get_dataset_by_uri(session, datasetUri) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." - ) - - task = models.Task( - targetUri=datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': s3Prefix}, - ) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - log.info(f'Dataset update publish response: {response}') - return True + return DatasetService.publish_dataset_update(uri=datasetUri, s3_prefix=s3Prefix) def resolve_redshift_copy_enabled(context, source: Dataset, clusterUri: str): @@ -645,26 +209,12 @@ def resolve_redshift_copy_enabled(context, source: Dataset, clusterUri: str): ).datasetCopyEnabled -def _deploy_dataset_stack(dataset: Dataset): - """ - Each dataset stack deployment triggers environment stack update - to rebuild teams IAM roles data access policies - """ - stack_helper.deploy_stack(dataset.datasetUri) - stack_helper.deploy_stack(dataset.environmentUri) - - def list_datasets_created_in_environment( context: Context, source, environmentUri: str = None, filter: dict = None ): if not filter: filter = {} - with context.engine.scoped_session() as session: - return DatasetService.paginated_environment_datasets( - session=session, - uri=environmentUri, - data=filter, - ) + return DatasetService.list_datasets_created_in_environment(environmentUri, filter) 
def list_datasets_owned_by_env_group( @@ -672,10 +222,4 @@ def list_datasets_owned_by_env_group( ): if not filter: filter = {} - with context.engine.scoped_session() as session: - return DatasetService.paginated_environment_group_datasets( - session=session, - envUri=environmentUri, - groupUri=groupUri, - data=filter, - ) \ No newline at end of file + return DatasetService.list_datasets_owned_by_env_group(environmentUri, groupUri, filter) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index e1c025e2d..ede241f95 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -1,5 +1,506 @@ +import json +import logging +from dataall.api.Objects.Stack import stack_helper +from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper +from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission +from dataall.db.api import Vote +from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation +from dataall.db.models import Environment, Task +from dataall.modules.dataset_sharing.api.schema import ShareObject +from dataall.modules.datasets import DatasetIndexer, DatasetTableIndexer +from dataall.modules.datasets.api.dataset.enums import DatasetRole +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ + SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET +from dataall.modules.datasets_base.db.dataset_repository import 
DatasetRepository +from dataall.modules.datasets_base.db.models import Dataset + +log = logging.getLogger(__name__) class DatasetService: - pass \ No newline at end of file + + @staticmethod + def check_dataset_account(environment): + if environment.dashboardsEnabled: + quicksight_subscription = Quicksight.check_quicksight_enterprise_subscription( + AwsAccountId=environment.AwsAccountId) + if quicksight_subscription: + group = Quicksight.create_quicksight_group(AwsAccountId=environment.AwsAccountId) + return True if group else False + return True + + @staticmethod + def create_dataset(env_uri, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + environment = Environment.get_environment_by_uri(session, env_uri) + DatasetService.check_dataset_account(environment=environment) + + dataset = DatasetRepository.create_dataset( + session=session, + username=context.username, + groups=context.groups, + uri=env_uri, + data=data, + check_perm=True, + ) + + DatasetRepository.create_dataset_stack(session, dataset) + + DatasetIndexer.upsert( + session=session, dataset_uri=dataset.datasetUri + ) + + DatasetService._deploy_dataset_stack(dataset) + + dataset.userRoleForDataset = DatasetRole.Creator.value + + return dataset + + @staticmethod + def import_dataset(data): + context = get_context() + with context.db_engine.scoped_session() as session: + environment = Environment.get_environment_by_uri(session, data.get('environmentUri')) + DatasetService.check_dataset_account(environment=environment) + + dataset = DatasetRepository.create_dataset( + session=session, + username=context.username, + groups=context.groups, + uri=data.get('environmentUri'), + data=data, + check_perm=True, + ) + dataset.imported = True + dataset.importedS3Bucket = True if data['bucketName'] else False + dataset.importedGlueDatabase = True if data['glueDatabaseName'] else False + dataset.importedKmsKey = True if data['KmsKeyId'] else False + dataset.importedAdminRole 
= True if data['adminRoleName'] else False + + DatasetRepository.create_dataset_stack(session, dataset) + + DatasetIndexer.upsert( + session=session, dataset_uri=dataset.datasetUri + ) + + DatasetService._deploy_dataset_stack(dataset) + + dataset.userRoleForDataset = DatasetRole.Creator.value + + return dataset + + @staticmethod + def get_dataset(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset(session, uri=uri) + if dataset.SamlAdminGroupName in context.groups: + dataset.userRoleForDataset = DatasetRole.Admin.value + return dataset + + @staticmethod + def get_file_upload_presigned_url(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + return S3DatasetClient(dataset).get_file_upload_presigned_url(data) + + @staticmethod + def list_datasets(data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.paginated_user_datasets( + session, context.username, context.groups, uri=None, data=data + ) + + @staticmethod + def list_locations(dataset_uri, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.paginated_dataset_locations( + session=session, + uri=dataset_uri, + data=data, + ) + + @staticmethod + def list_tables(dataset_uri, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.paginated_dataset_tables( + session=session, + username=context.username, + groups=context.groups, + uri=dataset_uri, + data=data, + ) + + @staticmethod + def update_dataset(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + 
DatasetService.check_dataset_account(environment=environment) + updated_dataset = DatasetRepository.update_dataset( + session=session, + uri=uri, + data=data, + ) + DatasetIndexer.upsert(session, dataset_uri=uri) + + DatasetService._deploy_dataset_stack(updated_dataset) + + return updated_dataset + + @staticmethod + def get_dataset_statistics(dataset: Dataset): + with get_context().db_engine.scoped_session() as session: + count_tables = DatasetRepository.count_dataset_tables(session, dataset.datasetUri) + count_locations = DatasetLocationRepository.count_dataset_locations( + session, dataset.datasetUri + ) + count_upvotes = Vote.count_upvotes( + session, None, None, dataset.datasetUri, {'targetType': 'dataset'} + ) + return { + 'tables': count_tables or 0, + 'locations': count_locations or 0, + 'upvotes': count_upvotes or 0, + } + + @staticmethod + @has_resource_permission(CREDENTIALS_DATASET) + def get_dataset_etl_credentials(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + task = Task(targetUri=uri, action='iam.dataset.user.credentials') + session.add(task) + response = Worker.process( + engine=context.db_engine, task_ids=[task.taskUri], save_response=False + )[0] + return json.dumps(response['response']) + + @staticmethod + @has_resource_permission(CREDENTIALS_DATASET) + def get_dataset_assume_role_url(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if dataset.SamlAdminGroupName not in context.groups: + share = ShareObject.get_share_by_dataset_attributes( + session=session, + dataset_uri=uri, + dataset_owner=context.username + ) + shared_environment = Environment.get_environment_by_uri( + session=session, + uri=share.environmentUri + ) + env_group = Environment.get_environment_group( + session=session, + group_uri=share.principalId, + environment_uri=share.environmentUri + ) + role_arn = env_group.environmentIAMRoleArn + 
account_id = shared_environment.AwsAccountId + else: + role_arn = dataset.IAMDatasetAdminRoleArn + account_id = dataset.AwsAccountId + + pivot_session = SessionHelper.remote_session(account_id) + aws_session = SessionHelper.get_session( + base_session=pivot_session, role_arn=role_arn + ) + url = SessionHelper.get_console_access_url( + aws_session, + region=dataset.region, + bucket=dataset.S3BucketName, + ) + return url + + @staticmethod + @has_resource_permission(SYNC_DATASET) + def sync_tables(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + task = Task( + action='glue.dataset.database.tables', + targetUri=dataset.datasetUri, + ) + session.add(task) + Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + with context.db_engine.scoped_session() as session: + DatasetTableIndexer.upsert_all( + session=session, dataset_uri=dataset.datasetUri + ) + DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) + return DatasetRepository.paginated_dataset_tables( + session=session, + username=context.username, + groups=context.groups, + uri=uri, + data={'page': 1, 'pageSize': 10}, + check_perm=None, + ) + + @staticmethod + @has_resource_permission(CRAWL_DATASET) + def start_crawler(uri: str, data: dict = None): + engine = get_context().db_engine + with engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + location = ( + f's3://{dataset.S3BucketName}/{data.get("prefix")}' + if data.get('prefix') + else f's3://{dataset.S3BucketName}' + ) + + crawler = DatasetCrawler(dataset).get_crawler() + if not crawler: + raise AWSResourceNotFound( + action=CRAWL_DATASET, + message=f'Crawler {dataset.GlueCrawlerName} can not be found', + ) + + task = Task( + targetUri=uri, + action='glue.crawler.start', + payload={'location': location}, + ) + session.add(task) + 
session.commit() + + Worker.queue(engine=engine, task_ids=[task.taskUri]) + + return { + 'Name': dataset.GlueCrawlerName, + 'AwsAccountId': dataset.AwsAccountId, + 'region': dataset.region, + 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'), + } + + @staticmethod + def list_dataset_share_objects(dataset: Dataset, data: dict = None): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.paginated_dataset_shares( + session=session, + username=context.username, + groups=context.groups, + uri=dataset.datasetUri, + data=data, + check_perm=True, + ) + + @staticmethod + @has_resource_permission(CREDENTIALS_DATASET) + def generate_dataset_access_token(uri): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) + aws_session = SessionHelper.get_session( + base_session=pivot_session, role_arn=dataset.IAMDatasetAdminRoleArn + ) + c = aws_session.get_credentials() + credentials = { + 'AccessKey': c.access_key, + 'SessionKey': c.secret_key, + 'sessionToken': c.token, + } + + return json.dumps(credentials) + + @staticmethod + def get_dataset_summary(uri: str): + # TODO THERE WAS NO PERMISSION CHECK!!! 
+ with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = Environment.get_environment_by_uri( + session, dataset.environmentUri + ) + + pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) + env_admin_session = SessionHelper.get_session( + base_session=pivot_session, + role_arn=environment.EnvironmentDefaultIAMRoleArn, + ) + s3 = env_admin_session.client('s3', region_name=dataset.region) + + try: + s3.head_object( + Bucket=environment.EnvironmentDefaultBucketName, + Key=f'summary/{uri}/summary.md', + ) + response = s3.get_object( + Bucket=environment.EnvironmentDefaultBucketName, + Key=f'summary/{uri}/summary.md', + ) + content = str(response['Body'].read().decode('utf-8')) + return content + except Exception as e: + raise e + + @staticmethod + @has_resource_permission(SUMMARY_DATASET) + def save_dataset_summary(uri: str, content: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = Environment.get_environment_by_uri( + session, dataset.environmentUri + ) + + pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) + env_admin_session = SessionHelper.get_session( + base_session=pivot_session, + role_arn=environment.EnvironmentDefaultIAMRoleArn, + ) + s3 = env_admin_session.client('s3', region_name=dataset.region) + + s3.put_object( + Bucket=environment.EnvironmentDefaultBucketName, + Key=f'summary/{uri}/summary.md', + Body=content, + ) + return True + + @staticmethod + def get_dataset_stack(dataset: Dataset): + return stack_helper.get_stack_with_cfn_resources( + targetUri=dataset.datasetUri, + environmentUri=dataset.environmentUri, + ) + + @staticmethod + @has_resource_permission(CRAWL_DATASET) + def get_crawler(uri: str, name: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = 
DatasetRepository.get_dataset_by_uri(session, uri) + + response = DatasetCrawler(dataset).get_crawler(crawler_name=name) + return { + 'Name': name, + 'AwsAccountId': dataset.AwsAccountId, + 'region': dataset.region, + 'status': response.get('LastCrawl', {}).get('Status', 'N/A'), + } + + @staticmethod + @has_resource_permission(DELETE_DATASET) + def delete_dataset(uri: str, delete_from_aws: bool = False): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + env: Environment = Environment.get_environment_by_uri( + session, dataset.environmentUri + ) + shares = DatasetRepository.list_dataset_shares_with_existing_shared_items(session, uri) + if shares: + raise UnauthorizedOperation( + action=DELETE_DATASET, + message=f'Dataset {dataset.name} is shared with other teams. ' + 'Revoke all dataset shares before deletion.', + ) + redshift_datasets = DatasetRepository.list_dataset_redshift_clusters( + session, uri + ) + if redshift_datasets: + raise UnauthorizedOperation( + action=DELETE_DATASET, + message='Dataset is used by Redshift clusters. 
' + 'Remove clusters associations first.', + ) + + tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, uri)] + for uri in tables: + DatasetIndexer.delete_doc(doc_id=uri) + + folders = [f.locationUri for f in DatasetLocationRepository.get_dataset_folders(session, uri)] + for uri in folders: + DatasetIndexer.delete_doc(doc_id=uri) + + DatasetIndexer.delete_doc(doc_id=uri) + + DatasetService.delete_dataset( + session=session, + username=context.username, + groups=context.groups, + uri=uri, + data=None, + check_perm=True, + ) + + if delete_from_aws: + stack_helper.delete_stack( + target_uri=uri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region, + ) + stack_helper.deploy_stack(dataset.environmentUri) + return True + + @staticmethod + @has_resource_permission(SUBSCRIPTIONS_DATASET) + def publish_dataset_update(uri: str, s3_prefix: str): + engine = get_context().db_engine + with engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: + raise Exception( + 'Subscriptions are disabled. ' + "First enable subscriptions for this dataset's environment then retry." 
+ ) + + task = Task( + targetUri=uri, + action='sns.dataset.publish_update', + payload={'s3Prefix': s3_prefix}, + ) + session.add(task) + + response = Worker.process( + engine=engine, task_ids=[task.taskUri], save_response=False + )[0] + log.info(f'Dataset update publish response: {response}') + return True + + @staticmethod + def _deploy_dataset_stack(dataset: Dataset): + """ + Each dataset stack deployment triggers environment stack update + to rebuild teams IAM roles data access policies + """ + stack_helper.deploy_stack(dataset.datasetUri) + stack_helper.deploy_stack(dataset.environmentUri) + + @staticmethod + def list_datasets_created_in_environment(env_uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetRepository.paginated_environment_datasets( + session=session, + uri=env_uri, + data=data, + ) + + @staticmethod + def list_datasets_owned_by_env_group(env_uri: str, group_uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetRepository.paginated_environment_group_datasets( + session=session, + envUri=env_uri, + groupUri=group_uri, + data=data, + ) From 7ba235d136ca7ba476cea74f656aa0bbfd8be25e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 15:38:52 +0200 Subject: [PATCH 168/346] Introduced S3DatasetClient and small fixes --- .../modules/datasets/api/dataset/resolvers.py | 4 +- .../datasets/aws/glue_dataset_client.py | 5 ++- .../modules/datasets/aws/s3_dataset_client.py | 34 ++++++++++++++ .../datasets/handlers/glue_dataset_handler.py | 4 +- .../datasets/handlers/sns_dataset_handler.py | 8 +--- .../datasets/services/dataset_service.py | 44 +++++++++---------- .../services/dataset_table_service.py | 10 ++--- 7 files changed, 70 insertions(+), 39 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/s3_dataset_client.py diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py 
b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 8fb021c98..2453ebe7c 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -28,7 +28,7 @@ def import_dataset(context: Context, source, input=None): if not input.get('SamlAdminGroupName'): raise exceptions.RequiredParameter('group') - return DatasetService.import_dataset(data=input) + return DatasetService.import_dataset(uri=input['environmentUri'], data=input) def get_dataset(context, source, datasetUri=None): @@ -214,7 +214,7 @@ def list_datasets_created_in_environment( ): if not filter: filter = {} - return DatasetService.list_datasets_created_in_environment(environmentUri, filter) + return DatasetService.list_datasets_created_in_environment(uri=environmentUri, data=filter) def list_datasets_owned_by_env_group( diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py index f52b35f92..310acd2b6 100644 --- a/backend/dataall/modules/datasets/aws/glue_dataset_client.py +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -14,9 +14,10 @@ def __init__(self, dataset: Dataset): self._client = session.client('glue', region_name=region) self._dataset = dataset - def get_crawler(self): + def get_crawler(self, crawler_name=None): crawler = None - crawler_name = self._dataset.GlueCrawlerName + if not crawler_name: + crawler_name = self._dataset.GlueCrawlerName try: crawler = self._client.get_crawler(Name=crawler_name) diff --git a/backend/dataall/modules/datasets/aws/s3_dataset_client.py b/backend/dataall/modules/datasets/aws/s3_dataset_client.py new file mode 100644 index 000000000..d6a92ab39 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_dataset_client.py @@ -0,0 +1,34 @@ +import json + +from botocore.config import Config +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper 
+from dataall.modules.datasets_base.db.models import Dataset + + +class S3DatasetClient: + + def __init__(self, dataset: Dataset): + self._client = SessionHelper.remote_session(dataset.AwsAccountId).client( + 's3', + region_name=dataset.region, + config=Config(signature_version='s3v4', s3={'addressing_style': 'virtual'}), + ) + self._dataset = dataset + + def get_file_upload_presigned_url(self, data): + dataset = self._dataset + try: + self._client.get_bucket_acl( + Bucket=dataset.S3BucketName, ExpectedBucketOwner=dataset.AwsAccountId + ) + response = self._client.generate_presigned_post( + Bucket=dataset.S3BucketName, + Key=data.get('prefix', 'uploads') + '/' + data.get('fileName'), + ExpiresIn=15 * 60, + ) + + return json.dumps(response) + except ClientError as e: + raise e diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py index d066f59f4..44192a3d1 100644 --- a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -3,8 +3,8 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -15,7 +15,7 @@ class DatasetCrawlerHandler: @Worker.handler(path='glue.crawler.start') def start_crawler(engine, task: models.Task): with engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetRepository.get_dataset_by_uri( session, task.targetUri ) location = task.payload.get('location') diff --git a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py 
b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py index 14e11cc98..f2c3b8dd5 100644 --- a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py +++ b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py @@ -1,14 +1,10 @@ -import json import logging -from botocore.exceptions import ClientError - from dataall.aws.handlers.service_handlers import Worker from dataall.db import models -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.db.api import Environment from dataall.modules.datasets.aws.sns_dataset_client import SnsDatasetClient -from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository logger = logging.getLogger(__name__) @@ -21,7 +17,7 @@ def __init__(self): @Worker.handler(path='sns.dataset.publish_update') def publish_update(engine, task: models.Task): with engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, task.targetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, task.targetUri) environment = Environment.get_environment_by_uri(session, dataset.environmentUri) message = { diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index ede241f95..6b5e99bf1 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -6,7 +6,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context -from dataall.core.permission_checker import has_resource_permission +from dataall.core.permission_checker import has_resource_permission, has_tenant_permission from dataall.db.api import Vote from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation from dataall.db.models import 
Environment, Task @@ -17,7 +17,8 @@ from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ - SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET + SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ + CREATE_DATASET from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset @@ -37,17 +38,19 @@ def check_dataset_account(environment): return True @staticmethod - def create_dataset(env_uri, data: dict): + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET) + def create_dataset(uri, data: dict): context = get_context() with context.db_engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, env_uri) + environment = Environment.get_environment_by_uri(session, uri) DatasetService.check_dataset_account(environment=environment) dataset = DatasetRepository.create_dataset( session=session, username=context.username, groups=context.groups, - uri=env_uri, + uri=uri, data=data, check_perm=True, ) @@ -65,17 +68,19 @@ def create_dataset(env_uri, data: dict): return dataset @staticmethod - def import_dataset(data): + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET) + def import_dataset(uri, data): context = get_context() with context.db_engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, data.get('environmentUri')) + environment = Environment.get_environment_by_uri(session, uri) DatasetService.check_dataset_account(environment=environment) dataset = DatasetRepository.create_dataset( session=session, username=context.username, groups=context.groups, - 
uri=data.get('environmentUri'), + uri=uri, data=data, check_perm=True, ) @@ -98,6 +103,7 @@ def import_dataset(data): return dataset @staticmethod + @has_tenant_permission(MANAGE_DATASETS) def get_dataset(uri): context = get_context() with context.db_engine.scoped_session() as session: @@ -117,7 +123,7 @@ def list_datasets(data: dict): context = get_context() with context.db_engine.scoped_session() as session: return DatasetRepository.paginated_user_datasets( - session, context.username, context.groups, uri=None, data=data + session, context.username, context.groups, data=data ) @staticmethod @@ -135,13 +141,13 @@ def list_tables(dataset_uri, data: dict): with context.db_engine.scoped_session() as session: return DatasetRepository.paginated_dataset_tables( session=session, - username=context.username, - groups=context.groups, uri=dataset_uri, data=data, ) @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET) def update_dataset(uri: str, data: dict): with get_context().db_engine.scoped_session() as session: dataset = DatasetRepository.get_dataset_by_uri(session, uri) @@ -244,11 +250,8 @@ def sync_tables(uri): DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) return DatasetRepository.paginated_dataset_tables( session=session, - username=context.username, - groups=context.groups, uri=uri, data={'page': 1, 'pageSize': 10}, - check_perm=None, ) @staticmethod @@ -290,15 +293,11 @@ def start_crawler(uri: str, data: dict = None): @staticmethod def list_dataset_share_objects(dataset: Dataset, data: dict = None): - context = get_context() - with context.db_engine.scoped_session() as session: + with get_context().db_engine.scoped_session() as session: return DatasetRepository.paginated_dataset_shares( session=session, - username=context.username, - groups=context.groups, uri=dataset.datasetUri, - data=data, - check_perm=True, + data=data ) @staticmethod @@ -487,11 +486,12 @@ def 
_deploy_dataset_stack(dataset: Dataset): stack_helper.deploy_stack(dataset.environmentUri) @staticmethod - def list_datasets_created_in_environment(env_uri: str, data: dict): + @has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def list_datasets_created_in_environment(uri: str, data: dict): with get_context().db_engine.scoped_session() as session: return DatasetRepository.paginated_environment_datasets( session=session, - uri=env_uri, + uri=uri, data=data, ) diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 28c6755d3..29401278e 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -10,11 +10,11 @@ from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets import DatasetTableIndexer from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ DELETE_DATASET_TABLE, CREATE_DATASET_TABLE +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ from dataall.utils import json_utils @@ -33,7 +33,7 @@ def _get_dataset_uri(session, table_uri): @has_resource_permission(CREATE_DATASET_TABLE) def create_table(uri: str, table_data: dict): with get_context().db_engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, uri) 
+ dataset = DatasetRepository.get_dataset_by_uri(session, uri) glue_table = table_data['name'] exists = DatasetTableRepository.exists(session, dataset_uri=uri, glue_table_name=glue_table) @@ -114,7 +114,7 @@ def preview(table_uri: str): table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( session, table_uri ) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) if ( dataset.confidentiality != ConfidentialityClassification.Unclassified.value ): @@ -141,7 +141,7 @@ def publish_table_update(uri: str): context = get_context() with context.db_engine.scoped_session() as session: table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: raise Exception( @@ -172,7 +172,7 @@ def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): @staticmethod def sync_existing_tables(session, dataset_uri, glue_tables=None): - dataset: Dataset = DatasetService.get_dataset_by_uri(session, dataset_uri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) if dataset: existing_tables = DatasetTableRepository.find_dataset_tables(session, dataset_uri) existing_table_names = [e.GlueTableName for e in existing_tables] From 5e65e41be20243fa2e9a41715afddc86fbd00810 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 16:10:56 +0200 Subject: [PATCH 169/346] Extracting logic from repos --- .../modules/datasets/cdk/dataset_s3_policy.py | 4 +- .../services/dataset_location_service.py | 3 +- .../datasets/services/dataset_service.py | 116 ++++++++------ .../datasets_base/db/dataset_repository.py | 148 
++---------------- backend/dataall/tasks/catalog_indexer.py | 4 +- ...215e_backfill_dataset_table_permissions.py | 4 +- 6 files changed, 95 insertions(+), 184 deletions(-) diff --git a/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py index 8b7d55453..404b43db0 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py @@ -2,14 +2,14 @@ from aws_cdk import aws_iam as iam from dataall.cdkproxy.stacks.policies.data_policy import S3Policy +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService class DatasetS3Policy(S3Policy): def get_statements(self, session): - datasets = DatasetService.list_group_datasets( + datasets = DatasetRepository.list_group_datasets( session, environment_id=self.environment.environmentUri, group_uri=self.team.groupUri, diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index bf277987c..144f59ddf 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -8,7 +8,6 @@ from dataall.modules.datasets import DatasetLocationIndexer from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER, MANAGE_DATASETS, \ CREATE_DATASET_FOLDER, LIST_DATASET_FOLDERS, DELETE_DATASET_FOLDER from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository @@ -105,7 +104,7 @@ def 
publish_location_update(uri: str): context = get_context() with context.db_engine.scoped_session() as session: location = DatasetLocationRepository.get_location_by_uri(session, uri) - dataset = DatasetService.get_dataset_by_uri(session, location.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, location.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: raise Exception( diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 6b5e99bf1..9e5a35bdf 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -6,19 +6,20 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context -from dataall.core.permission_checker import has_resource_permission, has_tenant_permission -from dataall.db.api import Vote +from dataall.core.permission_checker import has_resource_permission, has_tenant_permission, has_group_permission +from dataall.db.api import Vote, ResourcePolicy, KeyValueTag, Stack from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation from dataall.db.models import Environment, Task -from dataall.modules.dataset_sharing.api.schema import ShareObject +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets import DatasetIndexer, DatasetTableIndexer from dataall.modules.datasets.api.dataset.enums import DatasetRole from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from 
dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ - CREATE_DATASET + CREATE_DATASET, DATASET_ALL, DATASET_READ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset @@ -40,6 +41,7 @@ def check_dataset_account(environment): @staticmethod @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(CREATE_DATASET) + @has_group_permission(CREATE_DATASET) def create_dataset(uri, data: dict): context = get_context() with context.db_engine.scoped_session() as session: @@ -55,42 +57,31 @@ def create_dataset(uri, data: dict): check_perm=True, ) - DatasetRepository.create_dataset_stack(session, dataset) - - DatasetIndexer.upsert( - session=session, dataset_uri=dataset.datasetUri - ) - - DatasetService._deploy_dataset_stack(dataset) - - dataset.userRoleForDataset = DatasetRole.Creator.value - - return dataset - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(CREATE_DATASET) - def import_dataset(uri, data): - context = get_context() - with context.db_engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, uri) - DatasetService.check_dataset_account(environment=environment) - - dataset = DatasetRepository.create_dataset( + ResourcePolicy.attach_resource_policy( session=session, - username=context.username, - groups=context.groups, - uri=uri, - data=data, - check_perm=True, + group=data['SamlAdminGroupName'], + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, ) - dataset.imported = True - dataset.importedS3Bucket = True if data['bucketName'] else False - dataset.importedGlueDatabase = True if 
data['glueDatabaseName'] else False - dataset.importedKmsKey = True if data['KmsKeyId'] else False - dataset.importedAdminRole = True if data['adminRoleName'] else False + if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if environment.SamlGroupName != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) - DatasetRepository.create_dataset_stack(session, dataset) + DatasetService._create_dataset_stack(session, dataset) DatasetIndexer.upsert( session=session, dataset_uri=dataset.datasetUri @@ -102,6 +93,11 @@ def import_dataset(uri, data): return dataset + @staticmethod + def import_dataset(uri, data): + data['imported'] = True + return DatasetService.create_dataset(uri=uri, data=data) + @staticmethod @has_tenant_permission(MANAGE_DATASETS) def get_dataset(uri): @@ -199,7 +195,7 @@ def get_dataset_assume_role_url(uri): with context.db_engine.scoped_session() as session: dataset = DatasetRepository.get_dataset_by_uri(session, uri) if dataset.SamlAdminGroupName not in context.groups: - share = ShareObject.get_share_by_dataset_attributes( + share = ShareObjectRepository.get_share_by_dataset_attributes( session=session, dataset_uri=uri, dataset_owner=context.username @@ -431,14 +427,28 @@ def delete_dataset(uri: str, delete_from_aws: bool = False): DatasetIndexer.delete_doc(doc_id=uri) - DatasetService.delete_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=uri, - data=None, - check_perm=True, + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + ShareObjectRepository.delete_shares_with_no_shared_items(session, uri) + 
DatasetRepository.delete_dataset_term_links(session, uri) + DatasetTableRepository.delete_dataset_tables(session, dataset.datasetUri) + DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) + KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') + Vote.delete_votes(session, dataset.datasetUri, 'dataset') + + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.SamlAdminGroupName ) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if dataset.SamlAdminGroupName != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=env.SamlGroupName + ) + if dataset.stewards: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.stewards + ) + + DatasetRepository.delete_dataset(session, dataset) if delete_from_aws: stack_helper.delete_stack( @@ -485,6 +495,22 @@ def _deploy_dataset_stack(dataset: Dataset): stack_helper.deploy_stack(dataset.datasetUri) stack_helper.deploy_stack(dataset.environmentUri) + @staticmethod + def _create_dataset_stack(session, dataset: Dataset) -> Stack: + return Stack.create_stack( + session=session, + environment_uri=dataset.environmentUri, + target_uri=dataset.datasetUri, + target_label=dataset.label, + target_type='dataset', + payload={ + 'bucket_name': dataset.S3BucketName, + 'database_name': dataset.GlueDatabaseName, + 'role_name': dataset.S3BucketName, + 'user_name': dataset.S3BucketName, + }, + ) + @staticmethod @has_resource_permission(LIST_ENVIRONMENT_DATASETS) def list_datasets_created_in_environment(uri: str, data: dict): diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 8b8369854..f20465c68 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -1,31 +1,23 @@ 
import logging -from datetime import datetime from sqlalchemy import and_, or_ from sqlalchemy.orm import Query from dataall.core.context import get_context -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import ( Environment, ResourcePolicy, - KeyValueTag, - Vote, - Stack, - has_tenant_perm, - has_resource_perm, ) from dataall.db.api import Organization from dataall.db import models, exceptions, paginate, permissions +from dataall.db.exceptions import ObjectNotFound from dataall.db.models.Enums import Language from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM from dataall.modules.datasets.db.enums import ConfidentialityClassification from dataall.core.group.services.group_resource_manager import GroupResource from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_ALL, \ - LIST_ENVIRONMENT_DATASETS, CREATE_DATASET +from dataall.modules.datasets.services.dataset_permissions import DATASET_READ, DATASET_ALL from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils.naming_convention import ( NamingConventionService, @@ -42,7 +34,7 @@ class DatasetRepository(GroupResource): def get_dataset_by_uri(session, dataset_uri) -> Dataset: dataset: Dataset = session.query(Dataset).get(dataset_uri) if not dataset: - raise exceptions.ObjectNotFound('Dataset', dataset_uri) + raise ObjectNotFound('Dataset', dataset_uri) return dataset def count_resources(self, session, environment_uri, group_uri) -> int: @@ -57,8 +49,6 @@ def count_resources(self, session, environment_uri, group_uri) -> int: ) @staticmethod - 
@has_tenant_perm(MANAGE_DATASETS) - @has_resource_perm(CREATE_DATASET) def create_dataset( session, username: str, @@ -80,15 +70,6 @@ def create_dataset( 'Dataset name', data['label'], 'less than 52 characters' ) - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlAdminGroupName'], - permission_name=CREATE_DATASET, - ) - environment = Environment.get_environment_by_uri(session, uri) organization = Organization.get_organization_by_uri( @@ -125,6 +106,7 @@ def create_dataset( session.commit() DatasetRepository._set_dataset_aws_resources(dataset, data, environment) + DatasetRepository._set_import_data(dataset, data) activity = models.Activity( action='dataset:create', @@ -135,30 +117,6 @@ def create_dataset( targetType='dataset', ) session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - if environment.SamlGroupName != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) return dataset @staticmethod @@ -211,23 +169,6 @@ def _set_dataset_aws_resources(dataset: Dataset, data, environment): return dataset @staticmethod - def create_dataset_stack(session, dataset: Dataset) -> models.Stack: - return Stack.create_stack( - session=session, - environment_uri=dataset.environmentUri, - target_uri=dataset.datasetUri, - target_label=dataset.label, - target_type='dataset', - payload={ - 'bucket_name': 
dataset.S3BucketName, - 'database_name': dataset.GlueDatabaseName, - 'role_name': dataset.S3BucketName, - 'user_name': dataset.S3BucketName, - }, - ) - - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) def get_dataset(session, uri: str) -> Dataset: return DatasetRepository.get_dataset_by_uri(session, uri) @@ -271,7 +212,7 @@ def query_user_datasets(session, username, groups, filter) -> Query: @staticmethod def paginated_user_datasets( - session, username, groups, uri, data=None, check_perm=None + session, username, groups, data=None ) -> dict: return paginate( query=DatasetRepository.query_user_datasets(session, username, groups, data), @@ -280,9 +221,7 @@ def paginated_user_datasets( ).to_dict() @staticmethod - def paginated_dataset_tables( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: + def paginated_dataset_tables(session, uri, data=None) -> dict: query = ( session.query(DatasetTable) .filter( @@ -309,8 +248,6 @@ def paginated_dataset_tables( ).to_dict() @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(UPDATE_DATASET) def update_dataset(session, uri, data=None) -> Dataset: username = get_context().username dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) @@ -487,9 +424,7 @@ def query_dataset_shares(session, dataset_uri) -> Query: ) @staticmethod - def paginated_dataset_shares( - session, username, groups, uri, data=None, check_perm=None - ) -> [ShareObject]: + def paginated_dataset_shares(session, uri, data=None) -> [ShareObject]: query = DatasetRepository.query_dataset_shares(session, uri) return paginate( query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) @@ -524,53 +459,12 @@ def list_dataset_redshift_clusters( ) @staticmethod - def delete_dataset( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - 
DatasetRepository._delete_dataset_shares_with_no_shared_items(session, uri) - DatasetRepository._delete_dataset_term_links(session, uri) - DatasetRepository._delete_dataset_tables(session, dataset.datasetUri) - DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) - KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') - Vote.delete_votes(session, dataset.datasetUri, 'dataset') + def delete_dataset(session, dataset) -> bool: session.delete(dataset) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.SamlAdminGroupName - ) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.SamlAdminGroupName != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=env.SamlGroupName - ) - if dataset.stewards: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.stewards - ) return True @staticmethod - def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): - share_objects = ( - session.query(ShareObject) - .filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.existingSharedItems.is_(False), - ) - ) - .all() - ) - for share in share_objects: - ( - session.query(ShareObjectItem) - .filter(ShareObjectItem.shareUri == share.shareUri) - .delete() - ) - session.delete(share) - - @staticmethod - def _delete_dataset_term_links(session, uri): + def delete_dataset_term_links(session, uri): tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, uri)] for tableUri in tables: term_links = ( @@ -599,21 +493,6 @@ def _delete_dataset_term_links(session, uri): for link in term_links: session.delete(link) - @staticmethod - def _delete_dataset_tables(session, dataset_uri) -> bool: - tables = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == dataset_uri, - ) - ) - .all() - ) - for table in tables: - table.deleted 
= datetime.now() - return tables - @staticmethod def list_all_datasets(session) -> [Dataset]: return session.query(Dataset).all() @@ -682,7 +561,6 @@ def query_environment_datasets(session, uri, filter) -> Query: return query @staticmethod - @has_resource_permission(LIST_ENVIRONMENT_DATASETS) def paginated_environment_datasets( session, uri, data=None, ) -> dict: @@ -719,3 +597,11 @@ def list_group_datasets(session, environment_id, group_uri): .all() ) + @staticmethod + def _set_import_data(dataset, data): + dataset.imported = True if data['imported'] else False + dataset.importedS3Bucket = True if data['bucketName'] else False + dataset.importedGlueDatabase = True if data['glueDatabaseName'] else False + dataset.importedKmsKey = True if data['KmsKeyId'] else False + dataset.importedAdminRole = True if data['adminRoleName'] else False + diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 377337410..9da3ed925 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -2,10 +2,10 @@ import os import sys +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.db import get_engine, models from dataall.searchproxy.indexers import DashboardIndexer from dataall.utils.alarm_service import AlarmService @@ -22,7 +22,7 @@ def index_objects(engine): indexed_objects_counter = 0 with engine.scoped_session() as session: - all_datasets: [Dataset] = DatasetService.list_all_active_datasets( + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets( session ) log.info(f'Found {len(all_datasets)} datasets') diff --git 
a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index c696f0896..4b7accd56 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -14,7 +14,7 @@ from datetime import datetime from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareableType, ShareItemStatus from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository -from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. @@ -86,7 +86,7 @@ def upgrade(): print('Back-filling dataset table permissions for owners/stewards...') dataset_tables: [DatasetTable] = session.query(DatasetTable).filter(DatasetTable.deleted.is_(None)).all() for table in dataset_tables: - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) env = api.Environment.get_environment_by_uri(session, dataset.environmentUri) groups = set([dataset.SamlAdminGroupName, env.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) From a420f0e92bcbd60f75277b7c84fe6270929de383 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 16:25:09 +0200 Subject: [PATCH 170/346] Some refactoring --- backend/dataall/aws/handlers/redshift.py | 6 +++--- backend/dataall/db/api/redshift_cluster.py | 4 ++-- .../db/share_object_repository.py | 21 +++++++++++++++++++ .../datasets/api/profiling/resolvers.py | 9 +------- .../datasets/db/dataset_table_repository.py | 16 ++++++++++++++ 
.../datasets/handlers/glue_table_handler.py | 4 ++-- .../datasets/indexers/dataset_indexer.py | 8 +++---- .../services/dataset_profiling_service.py | 10 ++++----- .../modules/datasets/tasks/tables_syncer.py | 4 ++-- .../datasets_base/db/dataset_repository.py | 2 -- backend/dataall/tasks/stacks_updater.py | 4 ++-- tests/api/test_dataset.py | 4 ++-- tests/api/test_redshift_cluster.py | 7 +------ tests/db/test_permission.py | 6 +----- 14 files changed, 61 insertions(+), 44 deletions(-) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index bb61c05a7..7bb1e096d 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -12,7 +12,7 @@ # TODO should be migrated in the redshift module from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService +from ...modules.datasets_base.db.dataset_repository import DatasetRepository log = logging.getLogger(__name__) @@ -372,7 +372,7 @@ def get_cluster_catalog_databases(session, task): Redshift.set_cluster_secrets(secretsmanager, cluster) catalog_databases = [] for d in cluster_datasets: - dataset = DatasetService.get_dataset_by_uri(session, d.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, d.datasetUri) if dataset.environmentUri != cluster.environmentUri: catalog_databases.append(f'{dataset.GlueDatabaseName}shared') else: @@ -446,7 +446,7 @@ def copy_data(engine, task: models.Task): task.targetUri ) - dataset: Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetRepository.get_dataset_by_uri( session, task.payload['datasetUri'] ) diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index bd092ff45..342739145 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ 
b/backend/dataall/db/api/redshift_cluster.py @@ -10,9 +10,9 @@ NamingConventionPattern, ) from dataall.utils.slugify import slugify -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM +from ...modules.datasets_base.db.dataset_repository import DatasetRepository log = logging.getLogger(__name__) @@ -396,7 +396,7 @@ def add_dataset(session, username, groups, uri, data=None, check_perm=True): message=f'Cluster {cluster.name} is not on available state ({cluster.status})', ) - dataset = DatasetService.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) exists = session.query(models.RedshiftClusterDataset).get( (uri, data['datasetUri']) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index a2c83ac10..2d06fe332 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1421,3 +1421,24 @@ def has_shared_items(session, item_uri: str) -> int: def delete_shares(session, item_uri: str): session.query(ShareObjectItem).filter(ShareObjectItem.itemUri == item_uri).delete() + @staticmethod + def delete_shares_with_no_shared_items(session, dataset_uri): + share_objects = ( + session.query(ShareObject) + .filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.existingSharedItems.is_(False), + ) + ) + .all() + ) + for share in share_objects: + ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) + .delete() + ) + session.delete(share) + + diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py 
b/backend/dataall/modules/datasets/api/profiling/resolvers.py index d59d1628f..0cf6d77e7 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -2,16 +2,9 @@ import logging from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker -from dataall.aws.handlers.sts import SessionHelper -from dataall.db import api, models -from dataall.db.api import ResourcePolicy -from dataall.modules.datasets.db.dataset_service import DatasetService -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository -from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets_base.db.models import DatasetProfilingRun -from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 73d7fd2ad..63b6e7d63 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -1,4 +1,5 @@ import logging +from datetime import datetime from sqlalchemy.sql import and_ @@ -207,3 +208,18 @@ def find_dataset_tables(session, dataset_uri): .filter(DatasetTable.datasetUri == dataset_uri) .all() ) + + @staticmethod + def delete_dataset_tables(session, dataset_uri) -> bool: + tables = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + ) + ) + .all() + ) + for table in tables: + table.deleted = datetime.now() + return tables diff --git 
a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index d822a435a..af148cc38 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -4,8 +4,8 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService log = logging.getLogger(__name__) @@ -17,7 +17,7 @@ class DatasetTableSyncHandler: @Worker.handler(path='glue.dataset.database.tables') def sync_existing_tables(engine, task: models.Task): with engine.scoped_session() as session: - dataset: Dataset = DatasetService.get_dataset_by_uri( + dataset: Dataset = DatasetRepository.get_dataset_by_uri( session, task.targetUri ) account_id = dataset.AwsAccountId diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 20c28bb1d..4036136ba 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -1,10 +1,10 @@ """Indexes Datasets in OpenSearch""" -from dataall import db from dataall.db import models +from dataall.db.api import Vote +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.searchproxy.base_indexer import BaseIndexer @@ -45,9 +45,9 @@ def upsert(cls, session, dataset_uri: str): 
.filter(Dataset.datasetUri == dataset_uri) .first() ) - count_tables = DatasetService.count_dataset_tables(session, dataset_uri) + count_tables = DatasetRepository.count_dataset_tables(session, dataset_uri) count_folders = DatasetLocationRepository.count_dataset_locations(session, dataset_uri) - count_upvotes = db.api.Vote.count_upvotes( + count_upvotes = Vote.count_upvotes( session, None, None, dataset_uri, {'targetType': 'dataset'} ) diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 9f3428406..109bbcafd 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -3,14 +3,14 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.core.context import get_context from dataall.core.permission_checker import has_resource_permission -from dataall.db.api import Environment, ResourcePolicy +from dataall.db.api import Environment from dataall.db.exceptions import ObjectNotFound from dataall.db.models import Task from dataall.modules.datasets.aws.s3_profiler_client import S3ProfilerClient from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository -from dataall.modules.datasets.db.dataset_service import DatasetService from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable @@ -20,9 +20,7 @@ class DatasetProfilingService: def start_profiling_run(uri, table_uri, glue_table_name): context = get_context() with context.db_engine.scoped_session() as session: - dataset = DatasetService.get_dataset_by_uri(session, uri) - if not 
dataset: - raise ObjectNotFound('Dataset', uri) + dataset = DatasetRepository.get_dataset_by_uri(session, uri) if table_uri and not glue_table_name: table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) @@ -95,7 +93,7 @@ def get_last_table_profiling_run(table_uri: str): if run: if not run.results: table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) - dataset = DatasetService.get_dataset_by_uri(session, table.datasetUri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) environment = Environment.get_environment_by_uri(session, dataset.environmentUri) content = S3ProfilerClient(environment).get_profiling_results_from_s3(dataset, table, run) if content: diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index 3f6e223db..c8c4eb76b 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -10,10 +10,10 @@ from dataall.db import models from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService -from dataall.modules.datasets.db.dataset_service import DatasetService root = logging.getLogger() root.setLevel(logging.INFO) @@ -25,7 +25,7 @@ def sync_tables(engine): with engine.scoped_session() as session: processed_tables = [] - all_datasets: [Dataset] = DatasetService.list_all_active_datasets( + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets( session ) log.info(f'Found 
{len(all_datasets)} datasets for tables sync') diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index f20465c68..41dc2a577 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -52,10 +52,8 @@ def count_resources(self, session, environment_uri, group_uri) -> int: def create_dataset( session, username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> Dataset: if not uri: raise exceptions.RequiredParameter('environmentUri') diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 28c5e0887..ea380cdd6 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -3,8 +3,8 @@ import sys import time +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService from .. 
import db from ..db import models from ..aws.handlers.ecs import Ecs @@ -24,7 +24,7 @@ def update_stacks(engine, envname): with engine.scoped_session() as session: - all_datasets: [Dataset] = DatasetService.list_all_active_datasets(session) + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) all_environments: [models.Environment] = db.api.Environment.list_all_active_environments(session) log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 4534f3b16..1f060701d 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -4,8 +4,8 @@ import pytest import dataall +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService @pytest.fixture(scope='module', autouse=True) @@ -487,7 +487,7 @@ def test_get_dataset_by_prefix(db, env1, org1): ) session.add(dataset) session.commit() - dataset_found: Dataset = DatasetService.get_dataset_by_bucket_name( + dataset_found: Dataset = DatasetRepository.get_dataset_by_bucket_name( session, bucket='s3a://insite-data-lake-raw-alpha-eu-west-1/booker/volume_constraints/insite_version=1/volume_constraints.delta'.split( '//' diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 7ecc12264..0a7cf3bef 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -4,9 +4,8 @@ import pytest import dataall from dataall.api.constants import RedshiftClusterRole +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService - @pytest.fixture(scope='module', autouse=True) def org1(org, user, group, 
tenant): @@ -40,12 +39,8 @@ def dataset1(db, user, env1, org1, dataset, group, group3) -> Dataset: stewards=group3.name, ) dataset = DatasetService.create_dataset( - session=session, - username=user.userName, - groups=[group.name], uri=env1.environmentUri, data=data, - check_perm=True, ) yield dataset diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index aee931076..0b32a93b3 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -5,7 +5,7 @@ from dataall.db import exceptions from dataall.db.models.Permission import PermissionType from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.db.dataset_service import DatasetService +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ @@ -259,11 +259,7 @@ def test_create_dataset(db, env, user, group, group_user, dataset, permissions, IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', ) dataset = DatasetService.create_dataset( - session=session, - username=user.userName, - groups=[group.name], uri=env_with_perm.environmentUri, data=data, - check_perm=True, ) assert dataset From f6cab09cbdd19ebe9dc5f949844778a659528ace Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 11 May 2023 17:15:08 +0200 Subject: [PATCH 171/346] Extracted code to fix circular dependency --- backend/dataall/db/api/redshift_cluster.py | 2 +- .../db/share_object_repository.py | 83 +++++++ backend/dataall/modules/datasets/db/enums.py | 7 - .../datasets/services/dataset_service.py | 115 ++++++++-- .../services/dataset_table_service.py | 2 +- .../datasets_base/db/dataset_repository.py | 208 +----------------- .../dataall/modules/datasets_base/db/enums.py | 18 ++ tests/api/conftest.py | 2 +- 8 files changed, 217 
insertions(+), 220 deletions(-) delete mode 100644 backend/dataall/modules/datasets/db/enums.py create mode 100644 backend/dataall/modules/datasets_base/db/enums.py diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 342739145..15222738b 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -12,7 +12,6 @@ from dataall.utils.slugify import slugify from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from ...modules.datasets_base.db.dataset_repository import DatasetRepository log = logging.getLogger(__name__) @@ -396,6 +395,7 @@ def add_dataset(session, username, groups, uri, data=None, check_perm=True): message=f'Cluster {cluster.name} is not on available state ({cluster.status})', ) + from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) exists = session.query(models.RedshiftClusterDataset).get( diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 2d06fe332..e2a9ddc8f 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1,6 +1,7 @@ import logging from sqlalchemy import and_, or_, func, case +from sqlalchemy.orm import Query from dataall.db.api import ( has_resource_perm, @@ -1441,4 +1442,86 @@ def delete_shares_with_no_shared_items(session, dataset_uri): ) session.delete(share) + @staticmethod + def _query_user_datasets(session, username, groups, filter) -> Query: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + query = ( + session.query(Dataset) + .outerjoin( + ShareObject, + 
ShareObject.datasetUri == Dataset.datasetUri, + ) + .outerjoin( + ShareObjectItem, + ShareObjectItem.shareUri == ShareObject.shareUri + ) + .filter( + or_( + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), + Dataset.stewards.in_(groups), + and_( + ShareObject.principalId.in_(groups), + ShareObjectItem.status.in_(share_item_shared_states), + ), + and_( + ShareObject.owner == username, + ShareObjectItem.status.in_(share_item_shared_states), + ), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + Dataset.description.ilike(filter.get('term') + '%%'), + Dataset.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + @staticmethod + def paginated_user_datasets( + session, username, groups, data=None + ) -> dict: + return paginate( + query=ShareObjectRepository._query_user_datasets(session, username, groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def find_dataset_shares(session, dataset_uri): + return ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def query_dataset_shares(session, dataset_uri) -> Query: + return session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ) + ) + + @staticmethod + def paginated_dataset_shares(session, uri, data=None) -> [ShareObject]: + query = ShareObjectRepository.query_dataset_shares(session, uri) + return paginate( + query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) + ).to_dict() + + @staticmethod + def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: + query = session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ShareObject.existingSharedItems.is_(True), + ) + ) + return query.all() diff --git a/backend/dataall/modules/datasets/db/enums.py 
b/backend/dataall/modules/datasets/db/enums.py deleted file mode 100644 index 5ef5e8170..000000000 --- a/backend/dataall/modules/datasets/db/enums.py +++ /dev/null @@ -1,7 +0,0 @@ -from enum import Enum - - -class ConfidentialityClassification(Enum): - Unclassified = 'Unclassified' - Official = 'Official' - Secret = 'Secret' diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 9e5a35bdf..ed8f4263d 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -10,9 +10,10 @@ from dataall.db.api import Vote, ResourcePolicy, KeyValueTag, Stack from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation from dataall.db.models import Environment, Task +from dataall.db.permissions import SHARE_OBJECT_APPROVER +from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets import DatasetIndexer, DatasetTableIndexer -from dataall.modules.datasets.api.dataset.enums import DatasetRole from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository @@ -21,7 +22,9 @@ SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ CREATE_DATASET, DATASET_ALL, DATASET_READ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import Dataset +from dataall.modules.datasets_base.db.enums import DatasetRole +from dataall.modules.datasets_base.db.models import Dataset, DatasetTable +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ log = 
logging.getLogger(__name__) @@ -51,10 +54,8 @@ def create_dataset(uri, data: dict): dataset = DatasetRepository.create_dataset( session=session, username=context.username, - groups=context.groups, uri=uri, data=data, - check_perm=True, ) ResourcePolicy.attach_resource_policy( @@ -118,7 +119,7 @@ def get_file_upload_presigned_url(uri: str, data: dict): def list_datasets(data: dict): context = get_context() with context.db_engine.scoped_session() as session: - return DatasetRepository.paginated_user_datasets( + return ShareObjectRepository.paginated_user_datasets( session, context.username, context.groups, data=data ) @@ -149,16 +150,38 @@ def update_dataset(uri: str, data: dict): dataset = DatasetRepository.get_dataset_by_uri(session, uri) environment = Environment.get_environment_by_uri(session, dataset.environmentUri) DatasetService.check_dataset_account(environment=environment) - updated_dataset = DatasetRepository.update_dataset( - session=session, - uri=uri, - data=data, - ) + + username = get_context().username + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if data and isinstance(data, dict): + for k in data.keys(): + if k != 'stewards': + setattr(dataset, k, data.get(k)) + if data.get('stewards') and data.get('stewards') != dataset.stewards: + if data.get('stewards') != dataset.SamlAdminGroupName: + DatasetService._transfer_stewardship_to_new_stewards( + session, dataset, data['stewards'] + ) + dataset.stewards = data['stewards'] + else: + DatasetService._transfer_stewardship_to_owners(session, dataset) + dataset.stewards = dataset.SamlAdminGroupName + + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + DatasetRepository.update_dataset_glossary_terms(session, username, uri, data) + DatasetRepository.update_dataset_activity(session, dataset, username) + DatasetIndexer.upsert(session, 
dataset_uri=uri) - DatasetService._deploy_dataset_stack(updated_dataset) + DatasetService._deploy_dataset_stack(dataset) - return updated_dataset + return dataset @staticmethod def get_dataset_statistics(dataset: Dataset): @@ -290,7 +313,7 @@ def start_crawler(uri: str, data: dict = None): @staticmethod def list_dataset_share_objects(dataset: Dataset, data: dict = None): with get_context().db_engine.scoped_session() as session: - return DatasetRepository.paginated_dataset_shares( + return ShareObjectRepository.paginated_dataset_shares( session=session, uri=dataset.datasetUri, data=data @@ -400,7 +423,7 @@ def delete_dataset(uri: str, delete_from_aws: bool = False): env: Environment = Environment.get_environment_by_uri( session, dataset.environmentUri ) - shares = DatasetRepository.list_dataset_shares_with_existing_shared_items(session, uri) + shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(session, uri) if shares: raise UnauthorizedOperation( action=DELETE_DATASET, @@ -530,3 +553,67 @@ def list_datasets_owned_by_env_group(env_uri: str, group_uri: str, data: dict): groupUri=group_uri, data=data, ) + + @staticmethod + def _transfer_stewardship_to_owners(session, dataset): + dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + return dataset + + @staticmethod + def _transfer_stewardship_to_new_stewards(session, dataset, new_stewards): + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=dataset.datasetUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + 
group=new_stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + + dataset_tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset.datasetUri)] + for tableUri in dataset_tables: + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=tableUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=DATASET_TABLE_READ, + resource_uri=tableUri, + resource_type=DatasetTable.__name__, + ) + + dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=share.shareUri, + ) + return dataset diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 29401278e..ab12d1131 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -11,7 +11,7 @@ from dataall.modules.datasets import DatasetTableIndexer from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository -from dataall.modules.datasets.db.enums import ConfidentialityClassification +from dataall.modules.datasets_base.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ DELETE_DATASET_TABLE, CREATE_DATASET_TABLE from 
dataall.modules.datasets_base.db.dataset_repository import DatasetRepository diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 41dc2a577..a7da6a930 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -3,22 +3,16 @@ from sqlalchemy import and_, or_ from sqlalchemy.orm import Query -from dataall.core.context import get_context from dataall.db.api import ( Environment, - ResourcePolicy, ) from dataall.db.api import Organization -from dataall.db import models, exceptions, paginate, permissions +from dataall.db import models, exceptions, paginate from dataall.db.exceptions import ObjectNotFound from dataall.db.models.Enums import Language -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.datasets.db.enums import ConfidentialityClassification +from dataall.modules.datasets_base.db.enums import ConfidentialityClassification from dataall.core.group.services.group_resource_manager import GroupResource from dataall.modules.datasets_base.db.models import DatasetTable, Dataset -from dataall.modules.datasets.services.dataset_permissions import DATASET_READ, DATASET_ALL -from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ from dataall.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, @@ -170,54 +164,6 @@ def _set_dataset_aws_resources(dataset: Dataset, data, environment): def get_dataset(session, uri: str) -> Dataset: return DatasetRepository.get_dataset_by_uri(session, uri) - @staticmethod - def query_user_datasets(session, username, groups, filter) -> Query: - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - query = ( - session.query(Dataset) - .outerjoin( - 
ShareObject, - ShareObject.datasetUri == Dataset.datasetUri, - ) - .outerjoin( - ShareObjectItem, - ShareObjectItem.shareUri == ShareObject.shareUri - ) - .filter( - or_( - Dataset.owner == username, - Dataset.SamlAdminGroupName.in_(groups), - Dataset.stewards.in_(groups), - and_( - ShareObject.principalId.in_(groups), - ShareObjectItem.status.in_(share_item_shared_states), - ), - and_( - ShareObject.owner == username, - ShareObjectItem.status.in_(share_item_shared_states), - ), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - Dataset.description.ilike(filter.get('term') + '%%'), - Dataset.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def paginated_user_datasets( - session, username, groups, data=None - ) -> dict: - return paginate( - query=DatasetRepository.query_user_datasets(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - @staticmethod def paginated_dataset_tables(session, uri, data=None) -> dict: query = ( @@ -246,114 +192,17 @@ def paginated_dataset_tables(session, uri, data=None) -> dict: ).to_dict() @staticmethod - def update_dataset(session, uri, data=None) -> Dataset: - username = get_context().username - dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) - if data and isinstance(data, dict): - for k in data.keys(): - if k != 'stewards': - setattr(dataset, k, data.get(k)) - if data.get('stewards') and data.get('stewards') != dataset.stewards: - if data.get('stewards') != dataset.SamlAdminGroupName: - DatasetRepository.transfer_stewardship_to_new_stewards( - session, dataset, data['stewards'] - ) - dataset.stewards = data['stewards'] - else: - DatasetRepository.transfer_stewardship_to_owners(session, dataset) - dataset.stewards = dataset.SamlAdminGroupName - - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=DATASET_ALL, - 
resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - DatasetRepository.update_dataset_glossary_terms(session, username, uri, data) - activity = models.Activity( - action='dataset:update', - label='dataset:update', - owner=username, - summary=f'{username} updated dataset {dataset.name}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - session.commit() - return dataset - - @staticmethod - def transfer_stewardship_to_owners(session, dataset): - dataset_shares = ( - session.query(ShareObject) - .filter(ShareObject.datasetUri == dataset.datasetUri) - .all() - ) - if dataset_shares: - for share in dataset_shares: - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - return dataset - - @staticmethod - def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=dataset.datasetUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - - dataset_tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset.datasetUri)] - for tableUri in dataset_tables: - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=tableUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=DATASET_TABLE_READ, - resource_uri=tableUri, - resource_type=DatasetTable.__name__, - ) - - dataset_shares = ( - session.query(ShareObject) - 
.filter(ShareObject.datasetUri == dataset.datasetUri) - .all() + def update_dataset_activity(session, dataset, username) : + activity = models.Activity( + action='dataset:update', + label='dataset:update', + owner=username, + summary=f'{username} updated dataset {dataset.name}', + targetUri=dataset.datasetUri, + targetType='dataset', ) - if dataset_shares: - for share in dataset_shares: - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=share.shareUri, - ) - return dataset + session.add(activity) + session.commit() @staticmethod def update_dataset_glossary_terms(session, username, uri, data): @@ -412,39 +261,6 @@ def get_dataset_tables(session, dataset_uri): .all() ) - @staticmethod - def query_dataset_shares(session, dataset_uri) -> Query: - return session.query(ShareObject).filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.deleted.is_(None), - ) - ) - - @staticmethod - def paginated_dataset_shares(session, uri, data=None) -> [ShareObject]: - query = DatasetRepository.query_dataset_shares(session, uri) - return paginate( - query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) - ).to_dict() - - @staticmethod - def list_dataset_shares(session, dataset_uri) -> [ShareObject]: - """return the dataset shares""" - query = DatasetRepository.query_dataset_shares(session, dataset_uri) - return query.all() - - @staticmethod - def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: - query = session.query(ShareObject).filter( - and_( - ShareObject.datasetUri == dataset_uri, - ShareObject.deleted.is_(None), - ShareObject.existingSharedItems.is_(True), - ) - ) - return query.all() - @staticmethod def list_dataset_redshift_clusters( session, dataset_uri 
diff --git a/backend/dataall/modules/datasets_base/db/enums.py b/backend/dataall/modules/datasets_base/db/enums.py new file mode 100644 index 000000000..5c2d89091 --- /dev/null +++ b/backend/dataall/modules/datasets_base/db/enums.py @@ -0,0 +1,18 @@ +from enum import Enum + + +class ConfidentialityClassification(Enum): + Unclassified = 'Unclassified' + Official = 'Official' + Secret = 'Secret' + + +class DatasetRole(Enum): + # Permissions on a dataset + BusinessOwner = '999' + DataSteward = '998' + Creator = '950' + Admin = '900' + Shared = '300' + NoPermission = '000' + diff --git a/tests/api/conftest.py b/tests/api/conftest.py index a0ba4ff0e..931fc26c9 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -27,7 +27,7 @@ def patch_check_env(module_mocker): @pytest.fixture(scope='module', autouse=True) def patch_check_dataset(module_mocker): module_mocker.patch( - 'dataall.modules.datasets.api.dataset.resolvers.check_dataset_account', return_value=True + 'dataall.modules.datasets.services.dataset_service.DatasetService.check_dataset_account', return_value=True ) From 15f2c93a22dbdcf73c78f5d03715d9feca48b3be Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 10:03:15 +0200 Subject: [PATCH 172/346] Fixed tests --- backend/dataall/core/permission_checker.py | 1 - .../modules/datasets/api/dataset/resolvers.py | 10 +++++++--- .../modules/datasets/api/profiling/resolvers.py | 11 ++++------- .../modules/datasets/services/dataset_service.py | 12 ++++++------ .../modules/datasets_base/db/dataset_repository.py | 10 +++++----- tests/api/test_dataset.py | 2 +- tests/api/test_dataset_location.py | 2 +- tests/api/test_redshift_cluster.py | 5 ++++- tests/db/test_permission.py | 4 ++++ 9 files changed, 32 insertions(+), 25 deletions(-) diff --git a/backend/dataall/core/permission_checker.py b/backend/dataall/core/permission_checker.py index 1fccd8fb4..4101a9b67 100644 --- a/backend/dataall/core/permission_checker.py +++ 
b/backend/dataall/core/permission_checker.py @@ -2,7 +2,6 @@ Contains decorators that check if user has a permission to access and interact with resources or do some actions in the app """ -import contextlib from typing import Protocol, Callable from dataall.core.context import RequestContext, get_context diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 2453ebe7c..236432ce0 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -15,7 +15,9 @@ def create_dataset(context: Context, source, input=None): - return DatasetService.create_dataset(env_uri=input['environmentUri'], data=input) + admin_group = input['SamlAdminGroupName'] + uri = input['environmentUri'] + return DatasetService.create_dataset(uri=uri, admin_group=admin_group, data=input) def import_dataset(context: Context, source, input=None): @@ -28,7 +30,9 @@ def import_dataset(context: Context, source, input=None): if not input.get('SamlAdminGroupName'): raise exceptions.RequiredParameter('group') - return DatasetService.import_dataset(uri=input['environmentUri'], data=input) + admin_group = input['SamlAdminGroupName'] + uri = input['environmentUri'] + return DatasetService.import_dataset(uri=uri, admin_group=admin_group, data=input) def get_dataset(context, source, datasetUri=None): @@ -113,7 +117,7 @@ def get_dataset_stewards_group(context, source: Dataset, **kwargs): def update_dataset(context, source, datasetUri: str = None, input: dict = None): - return DatasetService.update_dataset(uri=datasetUri) + return DatasetService.update_dataset(uri=datasetUri, data=input) def get_dataset_statistics(context: Context, source: Dataset, **kwargs): diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 0cf6d77e7..51701fb5e 100644 --- 
a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -2,8 +2,8 @@ import logging from dataall.api.context import Context -from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets_base.db.models import DatasetProfilingRun log = logging.getLogger(__name__) @@ -12,17 +12,14 @@ def resolve_dataset(context, source: DatasetProfilingRun): if not source: return None - with context.engine.scoped_session() as session: - return DatasetService.get_dataset_by_uri( - session=session, dataset_uri=source.datasetUri - ) + return DatasetService.get_dataset(uri=source.datasetUri) def start_profiling_run(context: Context, source, input: dict = None): return DatasetProfilingService.start_profiling_run( uri=input['datasetUri'], - table_uri=input['table_uri'], - glue_table_name=input['GlueTableName'] + table_uri=input.get('tableUri'), + glue_table_name=input.get('GlueTableName') ) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index ed8f4263d..689fe951e 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -7,9 +7,9 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context from dataall.core.permission_checker import has_resource_permission, has_tenant_permission, has_group_permission -from dataall.db.api import Vote, ResourcePolicy, KeyValueTag, Stack +from dataall.db.api import Vote, ResourcePolicy, KeyValueTag, Stack, Environment from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation -from dataall.db.models import Environment, Task +from 
dataall.db.models import Task from dataall.db.permissions import SHARE_OBJECT_APPROVER from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository @@ -45,7 +45,7 @@ def check_dataset_account(environment): @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(CREATE_DATASET) @has_group_permission(CREATE_DATASET) - def create_dataset(uri, data: dict): + def create_dataset(uri, admin_group, data: dict): context = get_context() with context.db_engine.scoped_session() as session: environment = Environment.get_environment_by_uri(session, uri) @@ -95,9 +95,9 @@ def create_dataset(uri, data: dict): return dataset @staticmethod - def import_dataset(uri, data): + def import_dataset(uri, admin_group, data): data['imported'] = True - return DatasetService.create_dataset(uri=uri, data=data) + return DatasetService.create_dataset(uri=uri, admin_group=admin_group, data=data) @staticmethod @has_tenant_permission(MANAGE_DATASETS) @@ -420,7 +420,7 @@ def delete_dataset(uri: str, delete_from_aws: bool = False): context = get_context() with context.db_engine.scoped_session() as session: dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) - env: Environment = Environment.get_environment_by_uri( + env = Environment.get_environment_by_uri( session, dataset.environmentUri ) shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(session, uri) diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index a7da6a930..12b0514d7 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -413,9 +413,9 @@ def list_group_datasets(session, environment_id, group_uri): @staticmethod def _set_import_data(dataset, data): - dataset.imported = True if data['imported'] else False - 
dataset.importedS3Bucket = True if data['bucketName'] else False - dataset.importedGlueDatabase = True if data['glueDatabaseName'] else False - dataset.importedKmsKey = True if data['KmsKeyId'] else False - dataset.importedAdminRole = True if data['adminRoleName'] else False + dataset.imported = True if data.get('imported') else False + dataset.importedS3Bucket = True if data.get('bucketName') else False + dataset.importedGlueDatabase = True if data.get('glueDatabaseName') else False + dataset.importedKmsKey = True if data.get('KmsKeyId') else False + dataset.importedAdminRole = True if data.get('adminRoleName') else False diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 1f060701d..f5b9e80b3 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -177,7 +177,7 @@ def test_update_dataset(dataset1, client, group, group2): def test_start_crawler(org1, env1, dataset1, client, group, module_mocker): module_mocker.patch( - 'dataall.modules.datasets.api.dataset.resolvers.DatasetCrawler', MagicMock() + 'dataall.modules.datasets.services.dataset_service.DatasetCrawler', MagicMock() ) mutation = """ mutation StartGlueCrawler($datasetUri:String, $input:CrawlerInput){ diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py index fe62eb065..8071d327e 100644 --- a/tests/api/test_dataset_location.py +++ b/tests/api/test_dataset_location.py @@ -69,7 +69,7 @@ def test_get_dataset(client, dataset1, env1, user, group): def test_create_location(client, dataset1, env1, user, group, patch_es, module_mocker): mock_client = MagicMock() - module_mocker.patch("dataall.modules.datasets.api.storage_location.resolvers.S3LocationClient", mock_client) + module_mocker.patch("dataall.modules.datasets.services.dataset_location_service.S3LocationClient", mock_client) response = client.query( """ mutation createDatasetStorageLocation($datasetUri:String!, $input:NewDatasetStorageLocationInput!){ diff --git 
a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 0a7cf3bef..d98e748f4 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -5,6 +5,7 @@ import dataall from dataall.api.constants import RedshiftClusterRole from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module', autouse=True) @@ -38,7 +39,9 @@ def dataset1(db, user, env1, org1, dataset, group, group3) -> Dataset: IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', stewards=group3.name, ) - dataset = DatasetService.create_dataset( + dataset = DatasetRepository.create_dataset( + session=session, + username=user.userName, uri=env1.environmentUri, data=data, ) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 0b32a93b3..0016888e2 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -2,6 +2,7 @@ import dataall from dataall.api.constants import OrganisationUserRole +from dataall.core.context import set_context, RequestContext from dataall.db import exceptions from dataall.db.models.Permission import PermissionType from dataall.modules.datasets_base.db.models import Dataset @@ -258,8 +259,11 @@ def test_create_dataset(db, env, user, group, group_user, dataset, permissions, IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', ) + + set_context(RequestContext(db, user.userName, [group.name])) dataset = DatasetService.create_dataset( uri=env_with_perm.environmentUri, + admin_group=group.name, data=data, ) assert dataset From 73ff8669498b3ca7ba3a5eef5ef5572fd87ffd0b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 13:24:47 +0200 Subject: [PATCH 173/346] Fixed incorrect check --- 
backend/dataall/modules/loader.py | 26 +++++++++++++++++++------- tests/modules/test_loader.py | 11 +++++++++-- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 73b78f344..8fc03c0b4 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -79,7 +79,7 @@ def _load_modules(): inactive = set() in_config = set() for name, props in modules.items(): - in_config.add(name) + if "active" not in props: raise ValueError(f"Status is not defined for {name} module") @@ -90,6 +90,7 @@ def _load_modules(): inactive.add(name) continue + in_config.add(name) if not _load_module(name): raise ValueError(f"Couldn't find module {name} under modules directory") @@ -170,23 +171,34 @@ def _check_loading_correct(in_config: Set[str], modes: List[ImportMode]): initialization. But since ModuleInterface is not initializing properly (using depends_on) some functionality may work wrongly. """ - - expected_load = set(in_config) + expected_load = set() for module in _all_modules(): - for dependency in module.depends_on(): - expected_load.add(dependency.name()) + if module.name() in in_config: + expected_load.add(module) + + to_add = list(expected_load) + while to_add: + new_to_add = [] + while to_add: + module = to_add.pop() + for dependency in module.depends_on(): + if dependency not in expected_load: + expected_load.add(dependency) + new_to_add.append(dependency) + to_add = new_to_add for module in _all_modules(): - if module.is_supported(modes) and module.name() not in expected_load: + if module.is_supported(modes) and module not in expected_load: raise ImportError( f"ModuleInterface has not been initialized for module {module.name()}. 
" "Declare the module in depends_on" ) + loaded_module_names = {module.name() for module in expected_load} for module in sys.modules.keys(): if module.startswith(_MODULE_PREFIX) and module != __name__: # skip loader name = _get_module_name(module) - if name and name not in expected_load: + if name and name not in loaded_module_names: raise ImportError(f"The package {module} has been imported, but it doesn't contain ModuleInterface") diff --git a/tests/modules/test_loader.py b/tests/modules/test_loader.py index e737c556d..f374c7763 100644 --- a/tests/modules/test_loader.py +++ b/tests/modules/test_loader.py @@ -13,6 +13,10 @@ class TestModule(ModuleInterface, ABC): def __init__(self): order.append(self.__class__) + @classmethod + def name(cls) -> str: + return cls.__name__ + class TestApiModule(TestModule): @staticmethod @@ -98,7 +102,7 @@ def patch_loading(mocker, all_modules, in_config): ) mocker.patch( 'dataall.modules.loader._load_modules', - return_value=(in_config, {}) + return_value=({module.name() for module in in_config}, {}) ) @@ -131,7 +135,7 @@ def test_many_nested_layers(mocker): def test_complex_loading(mocker): patch_loading(mocker, [ AModule, BModule, CModule, DModule, EModule, FModule, GModule, IModule, JModule, KModule - ], {AModule, CModule, JModule}) + ], {CModule, FModule, GModule, IModule, KModule}) loader.load_modules([ImportMode.API]) assert order == [AModule, JModule, BModule, DModule, EModule, GModule, FModule, IModule, KModule] @@ -142,4 +146,7 @@ def test_incorrect_loading(mocker): with pytest.raises(ImportError): loader.load_modules([ImportMode.API]) + patch_loading(mocker, [AModule, BModule], {AModule}) + with pytest.raises(ImportError): + loader.load_modules([ImportMode.API]) From cd340797e58984de2ac170422f8352e97f1c8805 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 14:56:57 +0200 Subject: [PATCH 174/346] Fixed tests --- .../modules/datasets/api/table/resolvers.py | 4 ++-- 
.../db/dataset_profiling_repository.py | 1 - .../datasets/db/dataset_table_repository.py | 2 +- .../datasets/services/dataset_table_service.py | 8 ++++---- tests/api/test_dataset_profiling.py | 18 ++++++++++-------- tests/db/test_permission.py | 8 ++++++++ 6 files changed, 25 insertions(+), 16 deletions(-) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index d400f0b74..1ecae0cde 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -23,11 +23,11 @@ def list_dataset_tables(context, source, filter: dict = None): def get_table(context, source: Dataset, tableUri: str = None): - return DatasetTableService.get_table(table_uri=tableUri) + return DatasetTableService.get_table(uri=tableUri) def update_table(context, source, tableUri: str = None, input: dict = None): - return DatasetTableService.update_table(table_uri=tableUri, input=input) + return DatasetTableService.update_table(uri=tableUri, table_data=input) def delete_table(context, source, tableUri: str = None): diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py index eb8a2b3ca..71a7c3016 100644 --- a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -95,7 +95,6 @@ def list_table_profiling_runs(session, table_uri): ) ) .order_by(DatasetProfilingRun.created.desc()) - .all() ) return paginate( q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 63b6e7d63..b34c87dfa 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py 
@@ -72,7 +72,7 @@ def create_synced_table(session, dataset: Dataset, table: dict): ) session.add(updated_table) session.commit() - return table + return updated_table @staticmethod def paginate_dataset_tables(session, dataset_uri, term, page, page_size) -> dict: diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index ab12d1131..99f3b4db1 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -62,16 +62,16 @@ def list_dataset_tables(dataset_uri: str, term=None, page=1, pageSize=10): @staticmethod @has_tenant_permission(MANAGE_DATASETS) - def get_table(table_uri: str): + def get_table(uri: str): with get_context().db_engine.scoped_session() as session: - return DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return DatasetTableRepository.get_dataset_table_by_uri(session, uri) @staticmethod @has_tenant_permission(MANAGE_DATASETS) @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) - def update_table(table_uri: str, table_data: dict = None): + def update_table(uri: str, table_data: dict = None): with get_context().db_engine.scoped_session() as session: - table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + table = DatasetTableRepository.get_dataset_table_by_uri(session, uri) for k in [attr for attr in table_data.keys() if attr != 'terms']: setattr(table, k, table_data.get(k)) diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py index 345c9541e..852b4ea0b 100644 --- a/tests/api/test_dataset_profiling.py +++ b/tests/api/test_dataset_profiling.py @@ -1,3 +1,5 @@ +from unittest.mock import MagicMock + import pytest from dataall.modules.datasets_base.db.models import DatasetProfilingRun, DatasetTable, Dataset @@ -21,6 +23,14 @@ def dataset1(env1, org1, dataset, group, 
user) -> Dataset: org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name ) +@pytest.fixture(scope='module', autouse=True) +def patch_methods(module_mocker): + mock_client = MagicMock() + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_profiling_service.S3ProfilerClient', mock_client + ) + mock_client().get_profiling_results_from_s3.return_value = '{"results": "yes"}' + def test_add_tables(table, dataset1, db): for i in range(0, 10): @@ -123,10 +133,6 @@ def test_get_profiling_run(client, dataset1, env1, module_mocker, db, group): def test_get_table_profiling_run( client, dataset1, env1, module_mocker, table, db, group ): - module_mocker.patch( - 'dataall.modules.datasets.api.profiling.resolvers.get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) runs = list_profiling_runs(client, dataset1, group) module_mocker.patch( 'dataall.aws.handlers.service_handlers.Worker.queue', @@ -163,10 +169,6 @@ def test_get_table_profiling_run( def test_list_table_profiling_runs( client, dataset1, env1, module_mocker, table, db, group ): - module_mocker.patch( - 'dataall.modules.datasets.api.profiling.resolvers.get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) module_mocker.patch('requests.post', return_value=True) runs = list_profiling_runs(client, dataset1, group) table1000 = table(dataset=dataset1, name='table1000', username=dataset1.owner) diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 0016888e2..177f8fe2d 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -114,6 +114,14 @@ def env(org, db, group): yield env +@pytest.fixture(scope='module', autouse=True) +def patch_methods(module_mocker): + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService._deploy_dataset_stack', + return_value=True + ) + + @pytest.fixture(scope='module', autouse=True) def dataset(org, env, db, group): with db.scoped_session() as session: 
From a92e8297a91836336993e8a1410a962ab40ad7f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 15:21:05 +0200 Subject: [PATCH 175/346] Moved permissions related to sharing --- backend/dataall/db/api/environment.py | 4 -- backend/dataall/db/permissions.py | 49 --------------- .../db/share_object_repository.py | 45 +++++++------- .../services/dataset_share_service.py | 3 +- .../services/share_permissions.py | 59 +++++++++++++++++++ .../datasets/services/dataset_service.py | 2 +- tests/api/conftest.py | 9 +-- 7 files changed, 91 insertions(+), 80 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/services/share_permissions.py diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 0e5c1cb86..bba9c841d 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -301,9 +301,6 @@ def validate_permissions(session, uri, g_permissions, group): if permissions.ADD_ENVIRONMENT_CONSUMPTION_ROLES in g_permissions: g_permissions.append(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) - if permissions.CREATE_SHARE_OBJECT in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - if permissions.CREATE_NETWORK in g_permissions: g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) @@ -312,7 +309,6 @@ def validate_permissions(session, uri, g_permissions, group): g_permissions.append(permissions.LIST_ENVIRONMENT_GROUPS) g_permissions.append(permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) g_permissions.append(permissions.LIST_ENVIRONMENT_REDSHIFT_CLUSTERS) - g_permissions.append(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) g_permissions.append(permissions.CREDENTIALS_ENVIRONMENT) diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index bec9fd4cd..8fc4fa8fd 100644 --- a/backend/dataall/db/permissions.py +++ 
b/backend/dataall/db/permissions.py @@ -50,8 +50,6 @@ ENABLE_ENVIRONMENT_SUBSCRIPTIONS = 'ENABLE_ENVIRONMENT_SUBSCRIPTIONS' DISABLE_ENVIRONMENT_SUBSCRIPTIONS = 'DISABLE_ENVIRONMENT_SUBSCRIPTIONS' RUN_ATHENA_QUERY = 'RUN_ATHENA_QUERY' -CREATE_SHARE_OBJECT = 'CREATE_SHARE_OBJECT' -LIST_ENVIRONMENT_SHARED_WITH_OBJECTS = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' CREATE_REDSHIFT_CLUSTER = 'CREATE_REDSHIFT_CLUSTER' LIST_ENVIRONMENT_REDSHIFT_CLUSTERS = 'LIST_ENVIRONMENT_REDSHIFT_CLUSTERS' CREATE_SGMSTUDIO_NOTEBOOK = 'CREATE_SGMSTUDIO_NOTEBOOK' @@ -69,8 +67,6 @@ GET_ENVIRONMENT, LIST_ENVIRONMENT_GROUPS, LIST_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_SHARE_OBJECT, - LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, RUN_ATHENA_QUERY, CREATE_REDSHIFT_CLUSTER, LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, @@ -88,7 +84,6 @@ ENVIRONMENT_INVITATION_REQUEST = [ INVITE_ENVIRONMENT_GROUP, ADD_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_SHARE_OBJECT, CREATE_REDSHIFT_CLUSTER, CREATE_SGMSTUDIO_NOTEBOOK, CREATE_DASHBOARD, @@ -110,10 +105,8 @@ ENABLE_ENVIRONMENT_SUBSCRIPTIONS, DISABLE_ENVIRONMENT_SUBSCRIPTIONS, RUN_ATHENA_QUERY, - CREATE_SHARE_OBJECT, CREATE_REDSHIFT_CLUSTER, LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, - LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, CREATE_SGMSTUDIO_NOTEBOOK, LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, CREATE_DASHBOARD, @@ -135,46 +128,6 @@ REMOVE_ENVIRONMENT_CONSUMPTION_ROLE ] -""" -SHARE OBJECT -""" -ADD_ITEM = 'ADD_ITEM' -REMOVE_ITEM = 'REMOVE_ITEM' -SUBMIT_SHARE_OBJECT = 'SUBMIT_SHARE_OBJECT' -APPROVE_SHARE_OBJECT = 'APPROVE_SHARE_OBJECT' -REJECT_SHARE_OBJECT = 'REJECT_SHARE_OBJECT' -DELETE_SHARE_OBJECT = 'DELETE_SHARE_OBJECT' -GET_SHARE_OBJECT = 'GET_SHARE_OBJECT' -LIST_SHARED_ITEMS = 'LIST_SHARED_ITEMS' -SHARE_OBJECT_REQUESTER = [ - ADD_ITEM, - REMOVE_ITEM, - SUBMIT_SHARE_OBJECT, - GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, - DELETE_SHARE_OBJECT, -] -SHARE_OBJECT_APPROVER = [ - ADD_ITEM, - REMOVE_ITEM, - SUBMIT_SHARE_OBJECT, - APPROVE_SHARE_OBJECT, - REJECT_SHARE_OBJECT, - DELETE_SHARE_OBJECT, - 
GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, -] -SHARE_OBJECT_ALL = [ - ADD_ITEM, - REMOVE_ITEM, - SUBMIT_SHARE_OBJECT, - APPROVE_SHARE_OBJECT, - REJECT_SHARE_OBJECT, - DELETE_SHARE_OBJECT, - GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, -] - """ GLOSSARIES """ @@ -327,7 +280,6 @@ ORGANIZATION_ALL + ENVIRONMENT_ALL + CONSUMPTION_ROLE_ALL - + SHARE_OBJECT_ALL + REDSHIFT_CLUSTER_ALL + GLOSSARY_ALL + SGMSTUDIO_NOTEBOOK_ALL @@ -343,6 +295,5 @@ RESOURCES_ALL_WITH_DESC[CREATE_SGMSTUDIO_NOTEBOOK] = 'Create ML Studio profiles on this environment' RESOURCES_ALL_WITH_DESC[INVITE_ENVIRONMENT_GROUP] = 'Invite other teams to this environment' RESOURCES_ALL_WITH_DESC[ADD_ENVIRONMENT_CONSUMPTION_ROLES] = 'Add IAM consumption roles to this environment' -RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Request datasets access for this environment' RESOURCES_ALL_WITH_DESC[CREATE_PIPELINE] = 'Create pipelines on this environment' RESOURCES_ALL_WITH_DESC[CREATE_NETWORK] = 'Create networks on this environment' diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index e2a9ddc8f..5abbe6134 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -9,11 +9,14 @@ Environment, ) from dataall.db import api, utils -from dataall.db import models, exceptions, permissions, paginate +from dataall.db import models, exceptions, paginate from dataall.db.models.Enums import PrincipalType from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER, \ + CREATE_SHARE_OBJECT, SUBMIT_SHARE_OBJECT, ADD_ITEM, GET_SHARE_OBJECT, 
APPROVE_SHARE_OBJECT, REMOVE_ITEM, \ + DELETE_SHARE_OBJECT, LIST_SHARED_ITEMS, REJECT_SHARE_OBJECT from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService @@ -321,7 +324,7 @@ def get_share_item_revokable_states(): class ShareObjectRepository: @staticmethod - @has_resource_perm(permissions.CREATE_SHARE_OBJECT) + @has_resource_perm(CREATE_SHARE_OBJECT) def create_share_object( session, username: str, @@ -355,7 +358,7 @@ def create_share_object( if environment.region != dataset.region: raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, + action=CREATE_SHARE_OBJECT, message=f'Requester Team {groupUri} works in region {environment.region} and the requested dataset is stored in region {dataset.region}', ) @@ -378,7 +381,7 @@ def create_share_object( dataset.stewards == groupUri or dataset.SamlAdminGroupName == groupUri ) and environment.environmentUri == dataset.environmentUri and principalType == models.PrincipalType.Group.value: raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, + action=CREATE_SHARE_OBJECT, message=f'Team: {groupUri} is managing the dataset {dataset.name}', ) @@ -476,14 +479,14 @@ def create_share_object( ResourcePolicy.attach_resource_policy( session=session, group=groupUri, - permissions=permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) @@ -491,7 +494,7 @@ def create_share_object( ResourcePolicy.attach_resource_policy( 
session=session, group=environment.SamlGroupName, - permissions=permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) @@ -500,7 +503,7 @@ def create_share_object( ResourcePolicy.attach_resource_policy( session=session, group=dataset.stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, + permissions=SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) @@ -512,7 +515,7 @@ def validate_group_membership( ): if share_object_group and share_object_group not in groups: raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, + action=CREATE_SHARE_OBJECT, message=f'User: {username} is not a member of the team {share_object_group}', ) if share_object_group not in Environment.list_environment_groups( @@ -524,12 +527,12 @@ def validate_group_membership( check_perm=True, ): raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, + action=CREATE_SHARE_OBJECT, message=f'Team: {share_object_group} is not a member of the environment {environment_uri}', ) @staticmethod - @has_resource_perm(permissions.SUBMIT_SHARE_OBJECT) + @has_resource_perm(SUBMIT_SHARE_OBJECT) def submit_share_object( session, username: str, @@ -567,7 +570,7 @@ def submit_share_object( return share @staticmethod - @has_resource_perm(permissions.APPROVE_SHARE_OBJECT) + @has_resource_perm(APPROVE_SHARE_OBJECT) def approve_share_object( session, username: str, @@ -613,7 +616,7 @@ def approve_share_object( return share @staticmethod - @has_resource_perm(permissions.REJECT_SHARE_OBJECT) + @has_resource_perm(REJECT_SHARE_OBJECT) def reject_share_object( session, username: str, @@ -648,7 +651,7 @@ def reject_share_object( return share @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) + @has_resource_perm(GET_SHARE_OBJECT) def revoke_items_share_object( session, username: str, @@ -692,7 +695,7 @@ def revoke_items_share_object( 
return share @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) + @has_resource_perm(GET_SHARE_OBJECT) def get_share_object( session, username: str, @@ -708,7 +711,7 @@ def get_share_object( return share @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) + @has_resource_perm(GET_SHARE_OBJECT) def get_share_item( session, username: str, @@ -744,7 +747,7 @@ def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): return share @staticmethod - @has_resource_perm(permissions.ADD_ITEM) + @has_resource_perm(ADD_ITEM) def add_share_object_item( session, username: str, @@ -770,7 +773,7 @@ def add_share_object_item( item: DatasetTable = session.query(DatasetTable).get(itemUri) if item and item.region != target_environment.region: raise exceptions.UnauthorizedOperation( - action=permissions.ADD_ITEM, + action=ADD_ITEM, message=f'Lake Formation cross region sharing is not supported. ' f'Table {item.GlueTableName} is in {item.region} and target environment ' f'{target_environment.name} is in {target_environment.region} ', @@ -821,7 +824,7 @@ def add_share_object_item( return shareItem @staticmethod - @has_resource_perm(permissions.REMOVE_ITEM) + @has_resource_perm(REMOVE_ITEM) def remove_share_object_item( session, username: str, @@ -847,7 +850,7 @@ def remove_share_object_item( return True @staticmethod - @has_resource_perm(permissions.DELETE_SHARE_OBJECT) + @has_resource_perm(DELETE_SHARE_OBJECT) def delete_share_object(session, username, groups, uri, data=None, check_perm=None): share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) share_items_states = ShareObjectRepository.get_share_items_states(session, uri) @@ -924,7 +927,7 @@ def get_share_item_by_uri(session, uri): return share_item @staticmethod - @has_resource_perm(permissions.LIST_SHARED_ITEMS) + @has_resource_perm(LIST_SHARED_ITEMS) def list_shared_items(session, username, groups, uri, data=None, check_perm=None): share: ShareObject = 
ShareObjectRepository.get_share_by_uri(session, uri) query = session.query(ShareObjectItem).filter( diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py index f40437d89..80a0d9998 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py @@ -8,13 +8,14 @@ from dataall.modules.dataset_sharing.db.enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM +from dataall.modules.dataset_sharing.services.share_permissions import LIST_ENVIRONMENT_SHARED_WITH_OBJECTS from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset class DatasetShareService: @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + @has_resource_perm(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) def paginated_shared_with_environment_datasets( session, username, groups, uri, data=None, check_perm=None ) -> dict: diff --git a/backend/dataall/modules/dataset_sharing/services/share_permissions.py b/backend/dataall/modules/dataset_sharing/services/share_permissions.py new file mode 100644 index 000000000..5ae4071da --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_permissions.py @@ -0,0 +1,59 @@ +""" +SHARE OBJECT +""" +from dataall.db.permissions import ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL, RESOURCES_ALL, \ + RESOURCES_ALL_WITH_DESC + +ADD_ITEM = 'ADD_ITEM' +REMOVE_ITEM = 'REMOVE_ITEM' +SUBMIT_SHARE_OBJECT = 'SUBMIT_SHARE_OBJECT' +APPROVE_SHARE_OBJECT = 'APPROVE_SHARE_OBJECT' +REJECT_SHARE_OBJECT = 'REJECT_SHARE_OBJECT' +DELETE_SHARE_OBJECT = 'DELETE_SHARE_OBJECT' +GET_SHARE_OBJECT = 'GET_SHARE_OBJECT' +LIST_SHARED_ITEMS = 
'LIST_SHARED_ITEMS' +SHARE_OBJECT_REQUESTER = [ + ADD_ITEM, + REMOVE_ITEM, + SUBMIT_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, + DELETE_SHARE_OBJECT, +] +SHARE_OBJECT_APPROVER = [ + ADD_ITEM, + REMOVE_ITEM, + SUBMIT_SHARE_OBJECT, + APPROVE_SHARE_OBJECT, + REJECT_SHARE_OBJECT, + DELETE_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, +] +SHARE_OBJECT_ALL = [ + ADD_ITEM, + REMOVE_ITEM, + SUBMIT_SHARE_OBJECT, + APPROVE_SHARE_OBJECT, + REJECT_SHARE_OBJECT, + DELETE_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, +] + +CREATE_SHARE_OBJECT = 'CREATE_SHARE_OBJECT' +LIST_ENVIRONMENT_SHARED_WITH_OBJECTS = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' + +ENVIRONMENT_INVITED.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) +ENVIRONMENT_ALL.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + +RESOURCES_ALL.extend(SHARE_OBJECT_ALL) +for perm in SHARE_OBJECT_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm + +RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Request datasets access for this environment' +RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SHARED_WITH_OBJECTS] = LIST_ENVIRONMENT_SHARED_WITH_OBJECTS diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 689fe951e..643bbff39 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -10,9 +10,9 @@ from dataall.db.api import Vote, ResourcePolicy, KeyValueTag, Stack, Environment from dataall.db.exceptions import AWSResourceNotFound, UnauthorizedOperation from dataall.db.models import Task -from dataall.db.permissions import SHARE_OBJECT_APPROVER from dataall.modules.dataset_sharing.db.models import ShareObject from 
dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_APPROVER from dataall.modules.datasets import DatasetIndexer, DatasetTableIndexer from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 931fc26c9..9e8ec5996 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,6 +1,7 @@ import dataall.searchproxy.indexers from dataall.modules.dataset_sharing.db.enums import ShareableType from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER from .client import * from dataall.db import models from dataall.api import constants @@ -476,21 +477,21 @@ def factory( dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=env_group.groupUri, - permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, - permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=dataset.stewards, - permissions=dataall.db.permissions.SHARE_OBJECT_APPROVER, + permissions=SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) @@ -498,7 +499,7 @@ def factory( dataall.db.api.ResourcePolicy.attach_resource_policy( session=session, group=environment.SamlGroupName, - permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, + 
permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, resource_type=ShareObject.__name__, ) From afc8a735620c492c50802cd404bbdf4bd1c72f09 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 15:32:57 +0200 Subject: [PATCH 176/346] Migrated to a new permission API --- .../modules/dataset_sharing/api/resolvers.py | 37 +---------- .../db/share_object_repository.py | 61 +++++-------------- 2 files changed, 17 insertions(+), 81 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index c21432437..94555b6ab 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -51,7 +51,6 @@ def create_share_object( groups=context.groups, uri=environment.environmentUri, data=input, - check_perm=True, ) @@ -60,10 +59,7 @@ def submit_share_object(context: Context, source, shareUri: str = None): return ShareObjectRepository.submit_share_object( session=session, username=context.username, - groups=context.groups, uri=shareUri, - data=None, - check_perm=True, ) @@ -72,10 +68,7 @@ def approve_share_object(context: Context, source, shareUri: str = None): share = ShareObjectRepository.approve_share_object( session=session, username=context.username, - groups=context.groups, uri=shareUri, - data=None, - check_perm=True, ) approve_share_task: models.Task = models.Task( @@ -95,10 +88,7 @@ def reject_share_object(context: Context, source, shareUri: str = None): return ShareObjectRepository.reject_share_object( session=session, username=context.username, - groups=context.groups, uri=shareUri, - data=None, - check_perm=True, ) @@ -107,10 +97,8 @@ def revoke_items_share_object(context: Context, source, input): share = ShareObjectRepository.revoke_items_share_object( session=session, username=context.username, - groups=context.groups, uri=input.get("shareUri"), data=input, - check_perm=True, ) 
revoke_share_task: models.Task = models.Task( @@ -131,13 +119,7 @@ def delete_share_object(context: Context, source, shareUri: str = None): if not share: raise db.exceptions.ObjectNotFound('ShareObject', shareUri) - ShareObjectRepository.delete_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - check_perm=True, - ) + ShareObjectRepository.delete_share_object(session=session, uri=shareUri) return True @@ -147,10 +129,8 @@ def add_shared_item(context, source, shareUri: str = None, input: dict = None): share_item = ShareObjectRepository.add_share_object_item( session=session, username=context.username, - groups=context.groups, uri=shareUri, data=input, - check_perm=True, ) return share_item @@ -165,15 +145,12 @@ def remove_shared_item(context, source, shareItemUri: str = None): share = ShareObjectRepository.get_share_by_uri(session, share_item.shareUri) ShareObjectRepository.remove_share_object_item( session=session, - username=context.username, - groups=context.groups, uri=share.shareUri, data={ 'shareItemUri': shareItemUri, 'share_item': share_item, 'share': share, - }, - check_perm=True, + } ) return True @@ -189,10 +166,7 @@ def list_shared_items( return ShareObjectRepository.list_shared_items( session=session, username=context.username, - groups=context.groups, - uri=source.shareUri, data=filter, - check_perm=True, ) @@ -202,11 +176,8 @@ def resolve_shared_item(context, source: ShareObjectItem, **kwargs): with context.engine.scoped_session() as session: return ShareObjectRepository.get_share_item( session=session, - username=context.username, - groups=context.groups, uri=source.shareUri, data={'share_item': source}, - check_perm=True, ) @@ -214,11 +185,7 @@ def get_share_object(context, source, shareUri: str = None): with context.engine.scoped_session() as session: return ShareObjectRepository.get_share_object( session=session, - username=context.username, - groups=context.groups, uri=shareUri, - data=None, - 
check_perm=True, ) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 5abbe6134..d90318329 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -3,8 +3,8 @@ from sqlalchemy import and_, or_, func, case from sqlalchemy.orm import Query +from dataall.core.permission_checker import has_resource_permission from dataall.db.api import ( - has_resource_perm, ResourcePolicy, Environment, ) @@ -324,14 +324,13 @@ def get_share_item_revokable_states(): class ShareObjectRepository: @staticmethod - @has_resource_perm(CREATE_SHARE_OBJECT) + @has_resource_permission(CREATE_SHARE_OBJECT) def create_share_object( session, username: str, groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> ShareObject: if not data: raise exceptions.RequiredParameter(data) @@ -532,14 +531,11 @@ def validate_group_membership( ) @staticmethod - @has_resource_perm(SUBMIT_SHARE_OBJECT) + @has_resource_permission(SUBMIT_SHARE_OBJECT) def submit_share_object( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, ) -> ShareObject: share = ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) @@ -570,14 +566,11 @@ def submit_share_object( return share @staticmethod - @has_resource_perm(APPROVE_SHARE_OBJECT) + @has_resource_permission(APPROVE_SHARE_OBJECT) def approve_share_object( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, ) -> ShareObject: share = ShareObjectRepository.get_share_by_uri(session, uri) dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) @@ -616,14 +609,11 @@ def approve_share_object( return share @staticmethod - @has_resource_perm(REJECT_SHARE_OBJECT) + 
@has_resource_permission(REJECT_SHARE_OBJECT) def reject_share_object( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, ) -> ShareObject: share = ShareObjectRepository.get_share_by_uri(session, uri) @@ -651,14 +641,12 @@ def reject_share_object( return share @staticmethod - @has_resource_perm(GET_SHARE_OBJECT) + @has_resource_permission(GET_SHARE_OBJECT) def revoke_items_share_object( session, username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> ShareObject: share = ShareObjectRepository.get_share_by_uri(session, uri) @@ -695,15 +683,8 @@ def revoke_items_share_object( return share @staticmethod - @has_resource_perm(GET_SHARE_OBJECT) - def get_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): + @has_resource_permission(GET_SHARE_OBJECT) + def get_share_object(session, uri: str): share = session.query(ShareObject).get(uri) if not share: raise exceptions.ObjectNotFound('Share', uri) @@ -711,14 +692,11 @@ def get_share_object( return share @staticmethod - @has_resource_perm(GET_SHARE_OBJECT) + @has_resource_permission(GET_SHARE_OBJECT) def get_share_item( session, - username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ): share_item: ShareObjectItem = data.get( 'share_item', @@ -747,14 +725,12 @@ def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): return share @staticmethod - @has_resource_perm(ADD_ITEM) + @has_resource_permission(ADD_ITEM) def add_share_object_item( session, username: str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> ShareObjectItem: itemType = data.get('itemType') itemUri = data.get('itemUri') @@ -824,24 +800,17 @@ def add_share_object_item( return shareItem @staticmethod - @has_resource_perm(REMOVE_ITEM) + @has_resource_permission(REMOVE_ITEM) def remove_share_object_item( session, - username: 
str, - groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> bool: share_item: ShareObjectItem = data.get( 'share_item', ShareObjectRepository.get_share_item_by_uri(session, data['shareItemUri']), ) - share: ShareObject = data.get( - 'share', - ShareObjectRepository.get_share_by_uri(session, uri), - ) Item_SM = ShareItemSM(share_item.status) newstatus = Item_SM.run_transition(ShareItemActions.RemoveItem.value) @@ -850,8 +819,8 @@ def remove_share_object_item( return True @staticmethod - @has_resource_perm(DELETE_SHARE_OBJECT) - def delete_share_object(session, username, groups, uri, data=None, check_perm=None): + @has_resource_permission(DELETE_SHARE_OBJECT) + def delete_share_object(session, uri): share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) share_items_states = ShareObjectRepository.get_share_items_states(session, uri) shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in share_items_states] @@ -927,8 +896,8 @@ def get_share_item_by_uri(session, uri): return share_item @staticmethod - @has_resource_perm(LIST_SHARED_ITEMS) - def list_shared_items(session, username, groups, uri, data=None, check_perm=None): + @has_resource_permission(LIST_SHARED_ITEMS) + def list_shared_items(session, uri, data=None): share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) query = session.query(ShareObjectItem).filter( ShareObjectItem.shareUri == share.shareUri, From bfce1cbe22de9dbe13298501c06246f48158a9fe Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 15:39:19 +0200 Subject: [PATCH 177/346] Returned filter back --- .../dataall/modules/datasets/api/table/resolvers.py | 2 +- .../modules/datasets/api/table_column/resolvers.py | 2 +- .../modules/datasets/db/dataset_column_repository.py | 11 ++++++++--- .../modules/datasets/db/dataset_table_repository.py | 12 ++++++++---- .../datasets/services/dataset_column_service.py | 4 ++-- 
.../datasets/services/dataset_table_service.py | 4 ++-- 6 files changed, 22 insertions(+), 13 deletions(-) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 1ecae0cde..c2a7ed3d1 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -19,7 +19,7 @@ def list_dataset_tables(context, source, filter: dict = None): return None if not filter: filter = {} - return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, **filter) + return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, filter=filter) def get_table(context, source: Dataset, tableUri: str = None): diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index a11a6539d..b4e6ca0df 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -14,7 +14,7 @@ def list_table_columns( tableUri = source.tableUri if not filter: filter = {} - DatasetColumnService.paginate_active_columns_for_table(tableUri, **filter) + DatasetColumnService.paginate_active_columns_for_table(tableUri, filter) def sync_table_columns(context: Context, source, tableUri: str = None): diff --git a/backend/dataall/modules/datasets/db/dataset_column_repository.py b/backend/dataall/modules/datasets/db/dataset_column_repository.py index 9caf3fa0e..b4ae5e6ed 100644 --- a/backend/dataall/modules/datasets/db/dataset_column_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_column_repository.py @@ -19,7 +19,7 @@ def save_and_commit(session, column: DatasetTableColumn): session.commit() @staticmethod - def paginate_active_columns_for_table(session, table_uri: str, term, page, page_size): + def paginate_active_columns_for_table(session, table_uri: str, filter: dict): q = ( 
session.query(DatasetTableColumn) .filter( @@ -29,7 +29,8 @@ def paginate_active_columns_for_table(session, table_uri: str, term, page, page_ .order_by(DatasetTableColumn.columnType.asc()) ) - if term: + if 'term' in filter: + term = filter['term'] q = q.filter( or_( DatasetTableColumn.label.ilike('%' + term + '%'), @@ -37,6 +38,10 @@ def paginate_active_columns_for_table(session, table_uri: str, term, page, page_ ) ).order_by(DatasetTableColumn.columnType.asc()) - return paginate(q, page=page, page_size=page_size).to_dict() + return paginate( + query=q, + page=filter.get('page', 1), + page_size=filter.get('pageSize', 10) + ).to_dict() diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index b34c87dfa..9fb6eeec7 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -75,15 +75,19 @@ def create_synced_table(session, dataset: Dataset, table: dict): return updated_table @staticmethod - def paginate_dataset_tables(session, dataset_uri, term, page, page_size) -> dict: + def paginate_dataset_tables(session, dataset_uri, filter: dict) -> dict: query = ( session.query(DatasetTable) .filter(DatasetTable.datasetUri == dataset_uri) .order_by(DatasetTable.created.desc()) ) - if term: - query = query.filter(DatasetTable.label.ilike('%' + term + '%')) - return paginate(query, page=page, page_size=page_size).to_dict() + if 'term' in filter: + query = query.filter(DatasetTable.label.ilike('%' + filter['term'] + '%')) + return paginate( + query=query, + page=filter.get('page', 1), + page_size=filter.get('pageSize', 10) + ).to_dict() @staticmethod def delete(session, table: DatasetTable): diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index c2d308846..f50c5bdef 100644 --- 
a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -11,10 +11,10 @@ class DatasetColumnService: @staticmethod - def paginate_active_columns_for_table(table_uri: str, page=1, pageSize=65, term=None): + def paginate_active_columns_for_table(table_uri: str, filter=None): # TODO THERE WAS NO PERMISSION CHECK!!! with get_context().db_engine.scoped_session() as session: - DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, term, page, pageSize) + DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, filter) @staticmethod def sync_table_columns(table_uri: str): diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 99f3b4db1..958c80b48 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -56,9 +56,9 @@ def create_table(uri: str, table_data: dict): @staticmethod @has_tenant_permission(MANAGE_DATASETS) - def list_dataset_tables(dataset_uri: str, term=None, page=1, pageSize=10): + def list_dataset_tables(dataset_uri: str, filter): with get_context().db_engine.scoped_session() as session: - return DatasetTableRepository.paginate_dataset_tables(session, dataset_uri, term, page, pageSize) + return DatasetTableRepository.paginate_dataset_tables(session, dataset_uri, filter) @staticmethod @has_tenant_permission(MANAGE_DATASETS) From 1d386da502ad635b69696b2dc48adc635156bad6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 12 May 2023 16:05:22 +0200 Subject: [PATCH 178/346] Extracted validation logic --- .../modules/dataset_sharing/api/resolvers.py | 11 ++++++ .../db/share_object_repository.py | 7 ---- .../modules/datasets/api/dataset/resolvers.py | 37 ++++++++++++++----- .../datasets/api/profiling/resolvers.py | 4 ++ 
.../api/storage_location/resolvers.py | 6 +++ .../modules/datasets/api/table/resolvers.py | 4 ++ .../datasets_base/db/dataset_repository.py | 13 ------- 7 files changed, 53 insertions(+), 29 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 94555b6ab..f15dbff9b 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -6,6 +6,7 @@ from dataall.api.context import Context from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.db.exceptions import RequiredParameter from dataall.modules.dataset_sharing.api.enums import ShareObjectPermission from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService @@ -34,6 +35,16 @@ def create_share_object( itemType: str = None, input: dict = None, ): + if not input: + raise RequiredParameter(input) + if 'principalId' not in input: + raise RequiredParameter('principalId') + if 'datasetUri' not in input: + raise RequiredParameter('datasetUri') + if 'principalType' not in input: + raise RequiredParameter('principalType') + if 'groupUri' not in input: + raise RequiredParameter('groupUri') with context.engine.scoped_session() as session: dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, datasetUri) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index d90318329..7f966c62c 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -332,13 +332,6 @@ def create_share_object( uri: str, data: dict = None, ) -> ShareObject: - if not data: - raise exceptions.RequiredParameter(data) - if not 
data.get('principalId'): - raise exceptions.RequiredParameter('principalId') - if not data.get('datasetUri'): - raise exceptions.RequiredParameter('datasetUri') - principalId = data['principalId'] principalType = data['principalType'] datasetUri = data['datasetUri'] diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 236432ce0..388be6861 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -3,9 +3,10 @@ from dataall.api.Objects.Stack import stack_helper from dataall import db from dataall.api.context import Context -from dataall.db import paginate, exceptions, models +from dataall.db import paginate, models from dataall.db.api import Environment from dataall.db.api.organization import Organization +from dataall.db.exceptions import RequiredParameter, InvalidInput from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.datasets import Dataset from dataall.modules.datasets.api.dataset.enums import DatasetRole @@ -15,20 +16,15 @@ def create_dataset(context: Context, source, input=None): + RequestValidator.validate_creation_request(input) + admin_group = input['SamlAdminGroupName'] uri = input['environmentUri'] return DatasetService.create_dataset(uri=uri, admin_group=admin_group, data=input) def import_dataset(context: Context, source, input=None): - if not input: - raise exceptions.RequiredParameter(input) - if not input.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not input.get('bucketName'): - raise exceptions.RequiredParameter('bucketName') - if not input.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('group') + RequestValidator.validate_import_request(input) admin_group = input['SamlAdminGroupName'] uri = input['environmentUri'] @@ -227,3 +223,26 @@ def list_datasets_owned_by_env_group( if not filter: filter = 
{} return DatasetService.list_datasets_owned_by_env_group(environmentUri, groupUri, filter) + + +class RequestValidator: + @staticmethod + def validate_creation_request(data): + if not data: + raise RequiredParameter(data) + if not data.get('environmentUri'): + raise RequiredParameter('environmentUri') + if not data.get('SamlAdminGroupName'): + raise RequiredParameter('group') + if not data.get('label'): + raise RequiredParameter('label') + if len(data['label']) > 52: + raise InvalidInput( + 'Dataset name', data['label'], 'less than 52 characters' + ) + + @staticmethod + def validate_import_request(data): + RequestValidator.validate_creation_request(data) + if not data.get('bucketName'): + raise RequiredParameter('bucketName') diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 51701fb5e..13cb232d9 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -2,6 +2,7 @@ import logging from dataall.api.context import Context +from dataall.db.exceptions import RequiredParameter from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService from dataall.modules.datasets.services.dataset_service import DatasetService from dataall.modules.datasets_base.db.models import DatasetProfilingRun @@ -16,6 +17,9 @@ def resolve_dataset(context, source: DatasetProfilingRun): def start_profiling_run(context: Context, source, input: dict = None): + if 'datasetUri' not in input: + raise RequiredParameter('datasetUri') + return DatasetProfilingService.start_profiling_run( uri=input['datasetUri'], table_uri=input.get('tableUri'), diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index e73ade13a..ed29097e3 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -1,5 +1,6 @@ from dataall.api.context import Context from dataall.db.api import Glossary +from dataall.db.exceptions import RequiredParameter from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset @@ -7,6 +8,11 @@ def create_storage_location( context, source, datasetUri: str = None, input: dict = None ): + if 'prefix' not in input: + raise RequiredParameter('prefix') + if 'label' not in input: + raise RequiredParameter('label') + return DatasetLocationService.create_storage_location(uri=datasetUri, data=input) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index c2a7ed3d1..5b047ab49 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -1,6 +1,7 @@ import logging from dataall import db +from dataall.db.exceptions import RequiredParameter from dataall.modules.datasets.api.dataset.resolvers import get_dataset from dataall.api.context import Context from dataall.db.api import Glossary @@ -11,6 +12,9 @@ def create_table(context, source, datasetUri: str = None, input: dict = None): + if "name" not in input: + raise RequiredParameter("name") + return DatasetTableService.create_table(dataset_uri=datasetUri, table_data=input) diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 12b0514d7..8db4cac14 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -49,19 +49,6 @@ def create_dataset( uri: str, data: dict = None, ) -> Dataset: - if not uri: - raise exceptions.RequiredParameter('environmentUri') - if not data: - raise 
exceptions.RequiredParameter('data') - if not data.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - if len(data['label']) > 52: - raise exceptions.InvalidInput( - 'Dataset name', data['label'], 'less than 52 characters' - ) - environment = Environment.get_environment_by_uri(session, uri) organization = Organization.get_organization_by_uri( From 2467144e3bbfb4d3891ba6637e6ff1c461dd4e32 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 15:22:22 +0200 Subject: [PATCH 179/346] Extracted two sharing services --- .../modules/dataset_sharing/api/resolvers.py | 203 +---- .../modules/dataset_sharing/api/schema.py | 6 +- .../db/share_object_repository.py | 764 +++--------------- .../services/share_item_service.py | 164 ++++ .../share_managers/s3_share_manager.py | 6 +- .../services/share_object_service.py | 344 ++++++++ .../lf_process_cross_account_share.py | 8 +- .../lf_process_same_account_share.py | 8 +- .../share_processors/s3_process_share.py | 12 +- .../modules/datasets/api/table/resolvers.py | 12 +- 10 files changed, 661 insertions(+), 866 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/services/share_item_service.py create mode 100644 backend/dataall/modules/dataset_sharing/services/share_object_service.py diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index f15dbff9b..350330c32 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -4,29 +4,19 @@ from dataall import utils from dataall.api.Objects.Principal.resolvers import get_principal from dataall.api.context import Context -from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.db.exceptions import RequiredParameter from dataall.modules.dataset_sharing.api.enums 
import ShareObjectPermission from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService -from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository +from dataall.modules.dataset_sharing.services.share_item_service import ShareItemService +from dataall.modules.dataset_sharing.services.share_object_service import ShareObjectService from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset log = logging.getLogger(__name__) -def get_share_object_dataset(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - share: ShareObject = session.query(ShareObject).get( - source.shareUri - ) - return session.query(Dataset).get(share.datasetUri) - - def create_share_object( context: Context, source, @@ -46,158 +36,53 @@ def create_share_object( if 'groupUri' not in input: raise RequiredParameter('groupUri') - with context.engine.scoped_session() as session: - dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, datasetUri) - environment: models.Environment = db.api.Environment.get_environment_by_uri( - session, input['environmentUri'] - ) - input['dataset'] = dataset - input['environment'] = environment - input['itemUri'] = itemUri - input['itemType'] = itemType - input['datasetUri'] = datasetUri - return ShareObjectRepository.create_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=environment.environmentUri, - data=input, - ) + return ShareObjectService.create_share_object( + uri=datasetUri, + item_uri=itemUri, + item_type=itemType, + group_uri=input['groupUri'], + principal_id=input['principalId'], + principal_type=input['principalType'] + ) def submit_share_object(context: Context, source, 
shareUri: str = None): - with context.engine.scoped_session() as session: - return ShareObjectRepository.submit_share_object( - session=session, - username=context.username, - uri=shareUri, - ) + return ShareObjectService.submit_share_object(uri=shareUri) def approve_share_object(context: Context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - share = ShareObjectRepository.approve_share_object( - session=session, - username=context.username, - uri=shareUri, - ) - - approve_share_task: models.Task = models.Task( - action='ecs.share.approve', - targetUri=shareUri, - payload={'environmentUri': share.environmentUri}, - ) - session.add(approve_share_task) - - Worker.queue(engine=context.engine, task_ids=[approve_share_task.taskUri]) - - return share + return ShareObjectService.approve_share_object(uri=shareUri) def reject_share_object(context: Context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - return ShareObjectRepository.reject_share_object( - session=session, - username=context.username, - uri=shareUri, - ) + return ShareObjectService.reject_share_object(uri=shareUri) def revoke_items_share_object(context: Context, source, input): - with context.engine.scoped_session() as session: - share = ShareObjectRepository.revoke_items_share_object( - session=session, - username=context.username, - uri=input.get("shareUri"), - data=input, - ) - - revoke_share_task: models.Task = models.Task( - action='ecs.share.revoke', - targetUri=input.get("shareUri"), - payload={'environmentUri': share.environmentUri}, - ) - session.add(revoke_share_task) - - Worker.queue(engine=context.engine, task_ids=[revoke_share_task.taskUri]) - - return share + share_uri = input.get("shareUri") + revoked_uris = input.get("revokedItemUris") + return ShareItemService.revoke_items_share_object(uri=share_uri, revoked_uris=revoked_uris) def delete_share_object(context: Context, source, shareUri: str = None): - with 
context.engine.scoped_session() as session: - share = ShareObjectRepository.get_share_by_uri(session, shareUri) - if not share: - raise db.exceptions.ObjectNotFound('ShareObject', shareUri) - - ShareObjectRepository.delete_share_object(session=session, uri=shareUri) - - return True + return ShareObjectService.delete_share_object(uri=shareUri) def add_shared_item(context, source, shareUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - share_item = ShareObjectRepository.add_share_object_item( - session=session, - username=context.username, - uri=shareUri, - data=input, - ) - return share_item + return ShareItemService.add_shared_item(uri=shareUri, data=input) def remove_shared_item(context, source, shareItemUri: str = None): - with context.engine.scoped_session() as session: - share_item: ShareObjectItem = session.query(ShareObjectItem).get( - shareItemUri - ) - if not share_item: - raise db.exceptions.ObjectNotFound('ShareObjectItem', shareItemUri) - share = ShareObjectRepository.get_share_by_uri(session, share_item.shareUri) - ShareObjectRepository.remove_share_object_item( - session=session, - uri=share.shareUri, - data={ - 'shareItemUri': shareItemUri, - 'share_item': share_item, - 'share': share, - } - ) - return True - - -def list_shared_items( - context: Context, source: ShareObject, filter: dict = None -): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return ShareObjectRepository.list_shared_items( - session=session, - username=context.username, - data=filter, - ) - + return ShareItemService.remove_shared_item(uri=shareItemUri) def resolve_shared_item(context, source: ShareObjectItem, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - return ShareObjectRepository.get_share_item( - session=session, - uri=source.shareUri, - data={'share_item': source}, - ) + return 
ShareItemService.resolve_shared_item(uri=source.shareUri, item=source) def get_share_object(context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - return ShareObjectRepository.get_share_object( - session=session, - uri=shareUri, - ) + return ShareObjectService.get_share_object(uri=shareUri) def resolve_user_role(context: Context, source: ShareObject, **kwargs): @@ -288,19 +173,13 @@ def resolve_consumption_data(context: Context, source: ShareObject, **kwargs): def resolve_share_object_statistics(context: Context, source: ShareObject, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - return ShareObjectRepository.resolve_share_object_statistics( - session, source.shareUri - ) + return ShareObjectService.resolve_share_object_statistics(uri=source.shareUri) def resolve_existing_shared_items(context: Context, source: ShareObject, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - return ShareObjectRepository.check_existing_shared_items( - session, source.shareUri - ) + return ShareItemService.check_existing_shared_items(source) def list_shareable_objects( @@ -310,43 +189,25 @@ def list_shareable_objects( return None if not filter: filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return ShareObjectRepository.list_shareable_items( - session=session, - username=context.username, - groups=context.groups, - uri=source.shareUri, - data=filter, - check_perm=True, - ) + + is_revokable = filter.get('isRevokable') + return ShareItemService.list_shareable_objects( + share=source, + is_revokable=is_revokable, + filter=filter + ) def list_shares_in_my_inbox(context: Context, source, filter: dict = None): if not filter: filter = {} - with context.engine.scoped_session() as session: - return ShareObjectRepository.list_user_received_share_requests( - session=session, - username=context.username, - groups=context.groups, - 
uri=None, - data=filter, - check_perm=None, - ) + return ShareObjectService.list_shares_in_my_inbox(filter) def list_shares_in_my_outbox(context: Context, source, filter: dict = None): if not filter: filter = {} - with context.engine.scoped_session() as session: - return ShareObjectRepository.list_user_sent_share_requests( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=None, - ) + return ShareObjectService.list_shares_in_my_outbox(filter) def list_data_items_shared_with_env_group( diff --git a/backend/dataall/modules/dataset_sharing/api/schema.py b/backend/dataall/modules/dataset_sharing/api/schema.py index 529a55cbb..891bf2ff0 100644 --- a/backend/dataall/modules/dataset_sharing/api/schema.py +++ b/backend/dataall/modules/dataset_sharing/api/schema.py @@ -1,6 +1,8 @@ from dataall.api import gql from dataall.modules.dataset_sharing.api.enums import ShareableType -from dataall.modules.dataset_sharing.api.resolvers import * +from dataall.modules.dataset_sharing.api.resolvers import union_resolver, resolve_shared_item, resolve_dataset, \ + resolve_consumption_data, resolve_existing_shared_items, resolve_share_object_statistics, resolve_principal, \ + resolve_group, list_shareable_objects, resolve_user_role from dataall.api.Objects.Environment.resolvers import resolve_environment ShareableObject = gql.Union( @@ -200,4 +202,4 @@ gql.Field(name='hasPrevious', type=gql.Boolean), gql.Field(name='nodes', type=gql.ArrayType(EnvironmentPublishedItem)), ], -) \ No newline at end of file +) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 7f966c62c..dfa27f33e 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -3,24 +3,13 @@ from sqlalchemy import and_, or_, func, case from sqlalchemy.orm import 
Query -from dataall.core.permission_checker import has_resource_permission -from dataall.db.api import ( - ResourcePolicy, - Environment, -) -from dataall.db import api, utils from dataall.db import models, exceptions, paginate from dataall.db.models.Enums import PrincipalType from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ ShareItemStatus, ShareableType from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER, \ - CREATE_SHARE_OBJECT, SUBMIT_SHARE_OBJECT, ADD_ITEM, GET_SHARE_OBJECT, APPROVE_SHARE_OBJECT, REMOVE_ITEM, \ - DELETE_SHARE_OBJECT, LIST_SHARED_ITEMS, REJECT_SHARE_OBJECT from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset -from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService -from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ logger = logging.getLogger(__name__) @@ -324,381 +313,31 @@ def get_share_item_revokable_states(): class ShareObjectRepository: @staticmethod - @has_resource_permission(CREATE_SHARE_OBJECT) - def create_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - ) -> ShareObject: - principalId = data['principalId'] - principalType = data['principalType'] - datasetUri = data['datasetUri'] - environmentUri = uri - groupUri = data['groupUri'] - itemUri = data.get('itemUri') - itemType = data.get('itemType') - - dataset: Dataset = data.get( - 'dataset', DatasetRepository.get_dataset_by_uri(session, datasetUri) - ) - environment: models.Environment = data.get( - 'environment', - api.Environment.get_environment_by_uri(session, environmentUri), - ) - - if environment.region != dataset.region: - 
raise exceptions.UnauthorizedOperation( - action=CREATE_SHARE_OBJECT, - message=f'Requester Team {groupUri} works in region {environment.region} and the requested dataset is stored in region {dataset.region}', - ) - - if principalType == models.PrincipalType.ConsumptionRole.value: - consumption_role: models.ConsumptionRole = api.Environment.get_environment_consumption_role( - session, - principalId, - environmentUri - ) - principalIAMRoleName = consumption_role.IAMRoleName - else: - env_group: models.EnvironmentGroup = api.Environment.get_environment_group( - session, - groupUri, - environmentUri - ) - principalIAMRoleName = env_group.environmentIAMRoleName - - if ( - dataset.stewards == groupUri or dataset.SamlAdminGroupName == groupUri - ) and environment.environmentUri == dataset.environmentUri and principalType == models.PrincipalType.Group.value: - raise exceptions.UnauthorizedOperation( - action=CREATE_SHARE_OBJECT, - message=f'Team: {groupUri} is managing the dataset {dataset.name}', - ) - - ShareObjectRepository.validate_group_membership( - session=session, - username=username, - groups=groups, - share_object_group=groupUri, - environment_uri=uri, - ) + def save_and_commit(session, share): + session.add(share) + session.commit() - share: ShareObject = ( + @staticmethod + def exists(session, dataset: Dataset, env, principal_id, group_uri) -> ShareObject: + return ( session.query(ShareObject) .filter( and_( - ShareObject.datasetUri == datasetUri, - ShareObject.principalId == principalId, - ShareObject.environmentUri == environmentUri, - ShareObject.groupUri == groupUri, - ) - ) - .first() - ) - if not share: - share = ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=environment.environmentUri, - owner=username, - groupUri=groupUri, - principalId=principalId, - principalType=principalType, - principalIAMRoleName=principalIAMRoleName, - status=ShareObjectStatus.Draft.value, - ) - session.add(share) - session.commit() - - if itemUri: - item = None 
- if itemType: - if itemType == ShareableType.StorageLocation.value: - item = session.query(DatasetStorageLocation).get(itemUri) - if itemType == ShareableType.Table.value: - item = session.query(DatasetTable).get(itemUri) - - share_item = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == share.shareUri, - ShareObjectItem.itemUri == itemUri, - ) - ) - .first() - ) - S3AccessPointName = utils.slugify( - share.datasetUri + '-' + share.principalId, - max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' - ) - - if not share_item and item: - new_share_item: ShareObjectItem = ShareObjectItem( - shareUri=share.shareUri, - itemUri=itemUri, - itemType=itemType, - itemName=item.name, - status=ShareItemStatus.PendingApproval.value, - owner=username, - GlueDatabaseName=dataset.GlueDatabaseName - if itemType == ShareableType.Table.value - else '', - GlueTableName=item.GlueTableName - if itemType == ShareableType.Table.value - else '', - S3AccessPointName=S3AccessPointName - if itemType == ShareableType.StorageLocation.value - else '', - ) - session.add(new_share_item) - - activity = models.Activity( - action='SHARE_OBJECT:CREATE', - label='SHARE_OBJECT:CREATE', - owner=username, - summary=f'{username} created a share object for the {dataset.name} in {environment.name} for the principal: {principalId}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - - # Attaching REQUESTER permissions to: - # requester group (groupUri) - # dataset.SamlAdminGroupName - # environment.SamlGroupName - ResourcePolicy.attach_resource_policy( - session=session, - group=groupUri, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - if 
dataset.SamlAdminGroupName != environment.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - # Attaching REQUESTER permissions to: - # dataset.stewards (includes the dataset Admins) - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - return share - - @staticmethod - def validate_group_membership( - session, environment_uri, share_object_group, username, groups - ): - if share_object_group and share_object_group not in groups: - raise exceptions.UnauthorizedOperation( - action=CREATE_SHARE_OBJECT, - message=f'User: {username} is not a member of the team {share_object_group}', - ) - if share_object_group not in Environment.list_environment_groups( - session=session, - username=username, - groups=groups, - uri=environment_uri, - data=None, - check_perm=True, - ): - raise exceptions.UnauthorizedOperation( - action=CREATE_SHARE_OBJECT, - message=f'Team: {share_object_group} is not a member of the environment {environment_uri}', - ) - - @staticmethod - @has_resource_permission(SUBMIT_SHARE_OBJECT) - def submit_share_object( - session, - username: str, - uri: str, - ) -> ShareObject: - share = ShareObjectRepository.get_share_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectRepository.get_share_items_states(session, uri) - - valid_states = [ShareItemStatus.PendingApproval.value] - valid_share_items_states = [x for x in valid_states if x in share_items_states] - - if valid_share_items_states == []: - raise exceptions.ShareItemsFound( - action='Submit Share Object', - message='The request is empty of pending items. 
Add items to share request.', - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Submit.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Submit.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, new_share_state) - - ShareNotificationService.notify_share_object_submission( - session, username, dataset, share - ) - return share - - @staticmethod - @has_resource_permission(APPROVE_SHARE_OBJECT) - def approve_share_object( - session, - username: str, - uri: str, - ) -> ShareObject: - share = ShareObjectRepository.get_share_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectRepository.get_share_items_states(session, uri) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Approve.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Approve.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, new_share_state) - - # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE - share_table_items = session.query(ShareObjectItem).filter( - ( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.itemType == ShareableType.Table.value + ShareObject.datasetUri == dataset.datasetUri, + ShareObject.principalId == principal_id, + ShareObject.environmentUri == env.environmentUri, + ShareObject.groupUri == group_uri, ) ) - ).all() - for table in share_table_items: - ResourcePolicy.attach_resource_policy( - session=session, - group=share.principalId, - permissions=DATASET_TABLE_READ, - resource_uri=table.itemUri, - resource_type=DatasetTable.__name__, - ) - - from 
dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService - ShareNotificationService.notify_share_object_approval(session, username, dataset, share) - return share - - @staticmethod - @has_resource_permission(REJECT_SHARE_OBJECT) - def reject_share_object( - session, - username: str, - uri: str, - ) -> ShareObject: - - share = ShareObjectRepository.get_share_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObjectRepository.get_share_items_states(session, uri) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Reject.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Reject.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, new_share_state) - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.groupUri, - resource_uri=dataset.datasetUri, - ) - - from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService - ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) - return share - - @staticmethod - @has_resource_permission(GET_SHARE_OBJECT) - def revoke_items_share_object( - session, - username: str, - uri: str, - data: dict = None, - ) -> ShareObject: - - share = ShareObjectRepository.get_share_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) - revoked_items_states = ShareObjectRepository.get_share_items_states(session, uri, data.get("revokedItemUris")) - revoked_items = [ShareObjectRepository.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] - - if revoked_items_states == []: - raise exceptions.ShareItemsFound( - action='Revoke Items from Share Object', - message='Nothing to be 
revoked.', - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.RevokeItems.value) - - for item_state in revoked_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.RevokeItems.value) - for item in revoked_items: - if item.status == item_state: - Item_SM.update_state_single_item(session, item, new_state) - - Share_SM.update_state(session, share, new_share_state) - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.groupUri, - resource_uri=dataset.datasetUri, + .count() ) - from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService - ShareNotificationService.notify_share_object_rejection(session, username, dataset, share) - return share - - @staticmethod - @has_resource_permission(GET_SHARE_OBJECT) - def get_share_object(session, uri: str): - share = session.query(ShareObject).get(uri) - if not share: - raise exceptions.ObjectNotFound('Share', uri) - - return share - @staticmethod - @has_resource_permission(GET_SHARE_OBJECT) - def get_share_item( - session, - uri: str, - data: dict = None, - ): - share_item: ShareObjectItem = data.get( - 'share_item', - ShareObjectRepository.get_share_item_by_uri(session, data['shareItemUri']), - ) - if share_item.itemType == ShareableType.Table.value: - return session.query(DatasetTable).get(share_item.itemUri) - if share_item.itemType == ShareableType.StorageLocation: - return session.Query(DatasetStorageLocation).get(share_item.itemUri) + def get_share_item(session, item_type, item_uri): + if item_type == ShareableType.Table.value: + return session.query(DatasetTable).get(item_uri) + if item_type == ShareableType.StorageLocation.value: + return session.query(DatasetStorageLocation).get(item_uri) @staticmethod def get_share_by_uri(session, uri): @@ -718,124 +357,10 @@ def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): return share 
@staticmethod - @has_resource_permission(ADD_ITEM) - def add_share_object_item( - session, - username: str, - uri: str, - data: dict = None, - ) -> ShareObjectItem: - itemType = data.get('itemType') - itemUri = data.get('itemUri') - item = None - share: ShareObject = session.query(ShareObject).get(uri) - dataset: Dataset = session.query(Dataset).get(share.datasetUri) - target_environment: models.Environment = session.query(models.Environment).get( - share.environmentUri - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareItemActions.AddItem.value) - Share_SM.update_state(session, share, new_share_state) - - if itemType == ShareableType.Table.value: - item: DatasetTable = session.query(DatasetTable).get(itemUri) - if item and item.region != target_environment.region: - raise exceptions.UnauthorizedOperation( - action=ADD_ITEM, - message=f'Lake Formation cross region sharing is not supported. ' - f'Table {item.GlueTableName} is in {item.region} and target environment ' - f'{target_environment.name} is in {target_environment.region} ', - ) - - elif itemType == ShareableType.StorageLocation.value: - item = session.query(DatasetStorageLocation).get(itemUri) - - if not item: - raise exceptions.ObjectNotFound('ShareObjectItem', itemUri) - - shareItem: ShareObjectItem = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.itemUri == itemUri, - ) - ) - .first() - ) - S3AccessPointName = utils.slugify( - share.datasetUri + '-' + share.principalId, - max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' - ) - logger.info(f"S3AccessPointName={S3AccessPointName}") - - if not shareItem: - shareItem = ShareObjectItem( - shareUri=uri, - itemUri=itemUri, - itemType=itemType, - itemName=item.name, - status=ShareItemStatus.PendingApproval.value, - owner=username, - GlueDatabaseName=dataset.GlueDatabaseName - if itemType == ShareableType.Table.value - else '', - 
GlueTableName=item.GlueTableName - if itemType == ShareableType.Table.value - else '', - S3AccessPointName=S3AccessPointName - if itemType == ShareableType.StorageLocation.value - else '', - ) - session.add(shareItem) - - return shareItem - - @staticmethod - @has_resource_permission(REMOVE_ITEM) - def remove_share_object_item( - session, - uri: str, - data: dict = None, - ) -> bool: - - share_item: ShareObjectItem = data.get( - 'share_item', - ShareObjectRepository.get_share_item_by_uri(session, data['shareItemUri']), - ) - - Item_SM = ShareItemSM(share_item.status) - newstatus = Item_SM.run_transition(ShareItemActions.RemoveItem.value) - + def remove_share_object_item(session, share_item): session.delete(share_item) return True - @staticmethod - @has_resource_permission(DELETE_SHARE_OBJECT) - def delete_share_object(session, uri): - share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) - share_items_states = ShareObjectRepository.get_share_items_states(session, uri) - shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in share_items_states] - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Delete.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Delete.value) - Item_SM.update_state(session, share.shareUri, new_state) - - if shared_share_items_states: - raise exceptions.ShareItemsFound( - action='Delete share object', - message='There are shared items in this request. 
Revoke access to these items before deleting the request.', - ) - if new_share_state == ShareObjectStatus.Deleted.value: - session.delete(share) - - return True - @staticmethod def check_existing_shared_items(session, uri): share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) @@ -850,6 +375,45 @@ def check_existing_shared_items(session, uri): return True return False + @staticmethod + def count_sharable_items(session, uri, share_type): + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemType == share_type, + ) + ) + .count() + ) + + @staticmethod + def find_sharable_item(session, share_uri, item_uri) -> ShareObjectItem: + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.itemUri == item_uri, + ShareObjectItem.shareUri == share_uri, + ) + ) + .first() + ) + + @staticmethod + def count_items_in_states(session, uri, states): + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_(states), + ) + ) + .count() + ) + @staticmethod def check_existing_shared_items_of_type(session, uri, item_type): share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) @@ -880,36 +444,14 @@ def check_pending_share_items(session, uri): @staticmethod def get_share_item_by_uri(session, uri): - share_item: ShareObjectItem = session.query(ShareObjectItem).get( - uri - ) + share_item: ShareObjectItem = session.query(ShareObjectItem).get(uri) if not share_item: raise exceptions.ObjectNotFound('ShareObjectItem', uri) return share_item @staticmethod - @has_resource_permission(LIST_SHARED_ITEMS) - def list_shared_items(session, uri, data=None): - share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) - query = session.query(ShareObjectItem).filter( - ShareObjectItem.shareUri == share.shareUri, - ) - return paginate( - query, page=data.get('page', 1), 
page_size=data.get('pageSize', 5) - ).to_dict() - - @staticmethod - def list_shareable_items( - session, username, groups, uri, data=None, check_perm=None - ): - - share: ShareObject = data.get( - 'share', ShareObjectRepository.get_share_by_uri(session, uri) - ) - share_item_revokable_states = ShareItemSM.get_share_item_revokable_states() - datasetUri = share.datasetUri - + def list_shareable_items(session, share, states, data): # All tables from dataset with a column isShared # marking the table as part of the shareObject tables = ( @@ -932,11 +474,10 @@ def list_shareable_items( DatasetTable.tableUri == ShareObjectItem.itemUri, ), ) - .filter(DatasetTable.datasetUri == datasetUri) + .filter(DatasetTable.datasetUri == share.datasetUri) ) - if data: - if data.get("isRevokable"): - tables = tables.filter(ShareObjectItem.status.in_(share_item_revokable_states)) + if states: + tables = tables.filter(ShareObjectItem.status.in_(states)) # All folders from the dataset with a column isShared # marking the folder as part of the shareObject @@ -961,11 +502,10 @@ def list_shareable_items( == ShareObjectItem.itemUri, ), ) - .filter(DatasetStorageLocation.datasetUri == datasetUri) + .filter(DatasetStorageLocation.datasetUri == share.datasetUri) ) - if data: - if data.get("isRevokable"): - locations = locations.filter(ShareObjectItem.status.in_(share_item_revokable_states)) + if states: + locations = locations.filter(ShareObjectItem.status.in_(states)) shareable_objects = tables.union(locations).subquery('shareable_objects') query = session.query(shareable_objects) @@ -986,9 +526,7 @@ def list_shareable_items( return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() @staticmethod - def list_user_received_share_requests( - session, username, groups, uri, data=None, check_perm=None - ): + def list_user_received_share_requests(session, username, groups, data=None): query = ( session.query(ShareObject) .join( @@ -1008,9 +546,7 @@ def 
list_user_received_share_requests( return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() @staticmethod - def list_user_sent_share_requests( - session, username, groups, uri, data=None, check_perm=None - ): + def list_user_sent_share_requests(session, username, groups, data=None): query = ( session.query(ShareObject) .join( @@ -1111,51 +647,13 @@ def update_share_item_status_batch( ) return True - @staticmethod - def find_share_item_by_table( - session, - share: ShareObject, - table: DatasetTable, - ) -> ShareObjectItem: - share_item: ShareObjectItem = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.itemUri == table.tableUri, - ShareObjectItem.shareUri == share.shareUri, - ) - ) - .first() - ) - return share_item - - @staticmethod - def find_share_item_by_folder( - session, - share: ShareObject, - folder: DatasetStorageLocation, - ) -> ShareObjectItem: - share_item: ShareObjectItem = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.itemUri == folder.locationUri, - ShareObjectItem.shareUri == share.shareUri, - ) - ) - .first() - ) - return share_item - @staticmethod def get_share_data(session, share_uri): share: ShareObject = session.query(ShareObject).get(share_uri) if not share: raise exceptions.ObjectNotFound('Share', share_uri) - dataset: Dataset = session.query(Dataset).get(share.datasetUri) - if not dataset: - raise exceptions.ObjectNotFound('Dataset', share.datasetUri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) source_environment: models.Environment = session.query(models.Environment).get( dataset.environmentUri @@ -1216,33 +714,26 @@ def get_share_data_items(session, share_uri, status): if not share: raise exceptions.ObjectNotFound('Share', share_uri) - tables = ( - session.query(DatasetTable) - .join( - ShareObjectItem, - ShareObjectItem.itemUri == DatasetTable.tableUri, - ) - .join( - ShareObject, - ShareObject.shareUri == 
ShareObjectItem.shareUri, - ) - .filter( - and_( - ShareObject.datasetUri == share.datasetUri, - ShareObject.environmentUri - == share.environmentUri, - ShareObject.shareUri == share_uri, - ShareObjectItem.status == status, - ) - ) - .all() + tables = ShareObjectRepository._find_all_share_item( + session, share, status, DatasetTable, DatasetTable.tableUri ) - folders = ( - session.query(DatasetStorageLocation) + folders = ShareObjectRepository._find_all_share_item( + session, share, status, DatasetStorageLocation, DatasetStorageLocation.locationUri + ) + + return ( + tables, + folders, + ) + + @staticmethod + def _find_all_share_item(session, share, status, share_type_model, share_type_uri): + return ( + session.query(share_type_model) .join( ShareObjectItem, - ShareObjectItem.itemUri == DatasetStorageLocation.locationUri, + ShareObjectItem.itemUri == share_type_uri, ) .join( ShareObject, @@ -1251,18 +742,25 @@ def get_share_data_items(session, share_uri, status): .filter( and_( ShareObject.datasetUri == share.datasetUri, - ShareObject.environmentUri - == share.environmentUri, - ShareObject.shareUri == share_uri, + ShareObject.environmentUri == share.environmentUri, + ShareObject.shareUri == share.shareUri, ShareObjectItem.status == status, ) ) .all() ) + @staticmethod + def find_all_share_items(session, share_uri, share_type): return ( - tables, - folders, + session.query(ShareObjectItem).filter( + ( + and_( + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.itemType == share_type + ) + ) + ).all() ) @staticmethod @@ -1297,78 +795,6 @@ def get_share_items_states(session, share_uri, item_uris=None): query = query.filter(ShareObjectItem.shareItemUri.in_(item_uris)) return [item.status for item in query.distinct(ShareObjectItem.status)] - @staticmethod - def resolve_share_object_statistics(session, uri, **kwargs): - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - tables = ( - session.query(ShareObjectItem) - .filter( - and_( - 
ShareObjectItem.shareUri == uri, - ShareObjectItem.itemType == 'DatasetTable', - ) - ) - .count() - ) - locations = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.itemType == 'DatasetStorageLocation', - ) - ) - .count() - ) - shared_items = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.status.in_(share_item_shared_states), - ) - ) - .count() - ) - revoked_items = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.status.in_([ShareItemStatus.Revoke_Succeeded.value]), - ) - ) - .count() - ) - failed_states = [ - ShareItemStatus.Share_Failed.value, - ShareItemStatus.Revoke_Failed.value - ] - failed_items = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.status.in_(failed_states), - ) - ) - .count() - ) - pending_states = [ - ShareItemStatus.PendingApproval.value - ] - pending_items = ( - session.query(ShareObjectItem) - .filter( - and_( - ShareObjectItem.shareUri == uri, - ShareObjectItem.status.in_(pending_states), - ) - ) - .count() - ) - return {'tables': tables, 'locations': locations, 'sharedItems': shared_items, 'revokedItems': revoked_items, 'failedItems': failed_items, 'pendingItems': pending_items} - @staticmethod def has_shared_items(session, item_uri: str) -> int: share_item_shared_states = ShareItemSM.get_share_item_shared_states() diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py new file mode 100644 index 000000000..db2613bba --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -0,0 +1,164 @@ +import logging + +from dataall.aws.handlers.service_handlers import Worker +from dataall.core.context import get_context +from dataall.core.permission_checker import 
has_resource_permission +from dataall.db import utils +from dataall.db.api import Environment, ResourcePolicy +from dataall.db.exceptions import ObjectNotFound, ShareItemsFound, UnauthorizedOperation +from dataall.db.models import Task +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ + ShareItemActions +from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_permissions import GET_SHARE_OBJECT, ADD_ITEM, REMOVE_ITEM +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import Dataset + + +log = logging.getLogger(__name__) + + +class ShareItemService: + @staticmethod + def _get_share_uri(session, uri): + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) + share = ShareObjectRepository.get_share_by_uri(session, share_item.shareUri) + return share.shareUri + + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def revoke_items_share_object(uri, revoked_uris): + context = get_context() + with context.db_engine.scoped_session() as session: + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + revoked_items_states = ShareObjectRepository.get_share_items_states(session, uri, revoked_uris) + revoked_items = [ShareObjectRepository.get_share_item_by_uri(session, uri) for uri in revoked_uris] + + if revoked_items_states: + raise ShareItemsFound( + action='Revoke Items from Share Object', + message='Nothing to be revoked.', + ) + + share_sm = ShareObjectSM(share.status) + new_share_state = 
share_sm.run_transition(ShareObjectActions.RevokeItems.value) + + for item_state in revoked_items_states: + item_sm = ShareItemSM(item_state) + new_state = item_sm.run_transition(ShareObjectActions.RevokeItems.value) + for item in revoked_items: + if item.status == item_state: + item_sm.update_state_single_item(session, item, new_state) + + share_sm.update_state(session, share, new_share_state) + + ResourcePolicy.delete_resource_policy( + session=session, + group=share.groupUri, + resource_uri=dataset.datasetUri, + ) + + ShareNotificationService.notify_share_object_rejection(session, context.username, dataset, share) + + revoke_share_task: Task = Task( + action='ecs.share.revoke', + targetUri=uri, + payload={'environmentUri': share.environmentUri}, + ) + session.add(revoke_share_task) + + Worker.queue(engine=context.db_engine, task_ids=[revoke_share_task.taskUri]) + + return share + + @staticmethod + @has_resource_permission(ADD_ITEM) + def add_shared_item(uri: str, data: dict = None): + context = get_context() + with context.db_engine.scoped_session() as session: + item_type = data.get('itemType') + item_uri = data.get('itemUri') + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + target_environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareItemActions.AddItem.value) + share_sm.update_state(session, share, new_share_state) + + item = ShareObjectRepository.get_share_item(session, item_type, item_uri) + if not item: + raise ObjectNotFound('ShareObjectItem', item_uri) + + if item_type == ShareableType.Table.value and item.region != target_environment.region: + raise UnauthorizedOperation( + action=ADD_ITEM, + message=f'Lake Formation cross region sharing is not supported. 
' + f'Table {item.GlueTableName} is in {item.region} and target environment ' + f'{target_environment.name} is in {target_environment.region} ', + ) + + share_item: ShareObjectItem = ShareObjectRepository.find_sharable_item(session, uri, item_uri) + + s3_access_point_name = utils.slugify( + share.datasetUri + '-' + share.principalId, + max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' + ) + log.info(f"S3AccessPointName={s3_access_point_name}") + + if not share_item: + share_item = ShareObjectItem( + shareUri=uri, + itemUri=item_uri, + itemType=item_type, + itemName=item.name, + status=ShareItemStatus.PendingApproval.value, + owner=context.username, + GlueDatabaseName=dataset.GlueDatabaseName + if item_type == ShareableType.Table.value + else '', + GlueTableName=item.GlueTableName + if item_type == ShareableType.Table.value + else '', + S3AccessPointName=s3_access_point_name + if item_type == ShareableType.StorageLocation.value + else '', + ) + session.add(share_item) + return share_item + + @staticmethod + @has_resource_permission(REMOVE_ITEM, parent_resource=_get_share_uri) + def remove_shared_item(uri: str): + with get_context().db_engine.scoped_session() as session: + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) + + item_sm = ShareItemSM(share_item.status) + item_sm.run_transition(ShareItemActions.RemoveItem.value) + ShareObjectRepository.remove_share_object_item(session, share_item) + return True + + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def resolve_shared_item(uri, item: ShareObjectItem): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.get_share_item(session, item.itemType, item.itemUri) + + @staticmethod + def check_existing_shared_items(share): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.check_existing_shared_items( + session, share.shareUri + ) + + @staticmethod + def 
list_shareable_objects(share, filter, is_revokable=False): + states = None + if is_revokable: + states = ShareItemSM.get_share_item_revokable_states() + + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.list_shareable_items(session, share, states, filter) diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py index 9662a3529..46c448643 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py @@ -39,10 +39,10 @@ def __init__( self.target_folder = target_folder self.source_environment = source_environment self.target_environment = target_environment - self.share_item = ShareObjectRepository.find_share_item_by_folder( + self.share_item = ShareObjectRepository.find_sharable_item( session, - share, - target_folder, + share.shareUri, + target_folder.locationUri, ) self.access_point_name = self.share_item.S3AccessPointName diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py new file mode 100644 index 000000000..5752a3d37 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -0,0 +1,344 @@ +from sqlalchemy import and_ + +from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission +from dataall.db import utils +from dataall.db.api import ResourcePolicy, Environment +from dataall.db.exceptions import ShareItemsFound, UnauthorizedOperation +from dataall.db.models import Activity, PrincipalType, EnvironmentGroup, ConsumptionRole +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ + ShareObjectStatus +from 
dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_permissions import REJECT_SHARE_OBJECT, APPROVE_SHARE_OBJECT, \ + SUBMIT_SHARE_OBJECT, SHARE_OBJECT_APPROVER, SHARE_OBJECT_REQUESTER, CREATE_SHARE_OBJECT, DELETE_SHARE_OBJECT, \ + GET_SHARE_OBJECT +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import DatasetTable, Dataset +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + + +class ShareObjectService: + @staticmethod + def _get_env_uri(session, uri): + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + return environment.environmentUri + + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def get_share_object(uri): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.get_share_by_uri(session, uri) + + @staticmethod + @has_resource_permission(CREATE_SHARE_OBJECT, parent_resource=_get_env_uri) + def create_share_object( + uri: str, + item_uri: str, + item_type: str, + group_uri, + principal_id, + principal_type + ): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + + if environment.region != dataset.region: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Requester Team {group_uri} works in region {environment.region} ' + + f'and the requested dataset is stored in region {dataset.region}', + ) 
+ + if principal_type == PrincipalType.ConsumptionRole.value: + consumption_role: ConsumptionRole = Environment.get_environment_consumption_role( + session, + principal_id, + environment.environmentUri + ) + principal_iam_role_name = consumption_role.IAMRoleName + else: + env_group: EnvironmentGroup = Environment.get_environment_group( + session, + group_uri, + environment.environmentUri + ) + principal_iam_role_name = env_group.environmentIAMRoleName + + if ( + dataset.stewards == group_uri or dataset.SamlAdminGroupName == group_uri + ) and environment.environmentUri == dataset.environmentUri and principal_type == PrincipalType.Group.value: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Team: {group_uri} is managing the dataset {dataset.name}', + ) + + ShareObjectService._validate_group_membership(session, group_uri, environment.environmentUri) + + has_share = ShareObjectRepository.exists(session, dataset, environment, principal_id, group_uri) + if not has_share: + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=environment.environmentUri, + owner=context.username, + groupUri=group_uri, + principalId=principal_id, + principalType=principal_type, + principalIAMRoleName=principal_iam_role_name, + status=ShareObjectStatus.Draft.value, + ) + ShareObjectRepository.save_and_commit(session, share) + + if item_uri: + item = ShareObjectRepository.get_share_item(session, item_type, item_uri) + share_item = ShareObjectRepository.find_sharable_item(session, share.shareUri, item_uri) + + s3_access_point_name = utils.slugify( + share.datasetUri + '-' + share.principalId, + max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' + ) + + if not share_item and item: + new_share_item: ShareObjectItem = ShareObjectItem( + shareUri=share.shareUri, + itemUri=item_uri, + itemType=item_type, + itemName=item.name, + status=ShareItemStatus.PendingApproval.value, + owner=context.username, + 
GlueDatabaseName=dataset.GlueDatabaseName + if item_type == ShareableType.Table.value + else '', + GlueTableName=item.GlueTableName + if item_type == ShareableType.Table.value + else '', + S3AccessPointName=s3_access_point_name + if item_type == ShareableType.StorageLocation.value + else '', + ) + session.add(new_share_item) + + activity = Activity( + action='SHARE_OBJECT:CREATE', + label='SHARE_OBJECT:CREATE', + owner=context.username, + summary=f'{context.username} created a share object for the {dataset.name} in {environment.name} for the principal: {principal_id}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + + # Attaching REQUESTER permissions to: + # requester group (groupUri) + # dataset.SamlAdminGroupName + # environment.SamlGroupName + ShareObjectService._attach_share_resource_policy(session, share, group_uri) + ShareObjectService._attach_share_resource_policy(session, share, dataset.SamlGroupName) + if dataset.SamlAdminGroupName != environment.SamlGroupName: + ShareObjectService._attach_share_resource_policy(session, share, environment.SamlGroupName) + + # Attaching REQUESTER permissions to: + # dataset.stewards (includes the dataset Admins) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + return share + + @staticmethod + @has_resource_permission(SUBMIT_SHARE_OBJECT) + def submit_share_object(uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = ShareObjectService._get_share_data(session, uri) + + valid_states = [ShareItemStatus.PendingApproval.value] + valid_share_items_states = [x for x in valid_states if x in states] + + if valid_share_items_states: + raise ShareItemsFound( + action='Submit Share Object', + message='The request is empty of pending items. 
Add items to share request.', + ) + + ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Submit) + ShareNotificationService.notify_share_object_submission( + session, context.username, dataset, share + ) + + @staticmethod + @has_resource_permission(APPROVE_SHARE_OBJECT) + def approve_share_object(uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = ShareObjectService._get_share_data(session, uri) + ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Approve) + + # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE + share_table_items = ShareObjectRepository.find_all_share_items(session, uri, ShareableType.Table.value) + for table in share_table_items: + ResourcePolicy.attach_resource_policy( + session=session, + group=share.principalId, + permissions=DATASET_TABLE_READ, + resource_uri=table.itemUri, + resource_type=DatasetTable.__name__, + ) + + ShareNotificationService.notify_share_object_approval(session, context.username, dataset, share) + return share + + approve_share_task: Task = Task( + action='ecs.share.approve', + targetUri=uri, + payload={'environmentUri': share.environmentUri}, + ) + session.add(approve_share_task) + + Worker.queue(engine=context.db_engine, task_ids=[approve_share_task.taskUri]) + + return share + + @staticmethod + @has_resource_permission(REJECT_SHARE_OBJECT) + def reject_share_object(uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = ShareObjectService._get_share_data(session, uri) + ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Reject) + ResourcePolicy.delete_resource_policy( + session=session, + group=share.groupUri, + resource_uri=dataset.datasetUri, + ) + + ShareNotificationService.notify_share_object_rejection(session, context.username, dataset, share) + return share + + @staticmethod + 
@has_resource_permission(DELETE_SHARE_OBJECT) + def delete_share_object(uri: str): + with get_context().db_engine.scoped_session() as session: + share, dataset, states = ShareObjectService._get_share_data(session, uri) + shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in states] + + if shared_share_items_states: + raise ShareItemsFound( + action='Delete share object', + message='There are shared items in this request. ' + + 'Revoke access to these items before deleting the request.', + ) + + new_state = ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Delete) + if new_state == ShareObjectStatus.Deleted.value: + session.delete(share) + + return True + + @staticmethod + def resolve_share_object_statistics(uri): + with get_context().db_engine.scoped_session() as session: + tables = ShareObjectRepository.count_sharable_items(session, uri, 'DatasetTable') + locations = ShareObjectRepository.count_sharable_items(session, uri, 'DatasetStorageLocation') + shared_items = ShareObjectRepository.count_items_in_states( + session, uri, ShareItemSM.get_share_item_shared_states() + ) + revoked_items = ShareObjectRepository.count_items_in_states( + session, uri, [ShareItemStatus.Revoke_Succeeded.value] + ) + failed_states = [ + ShareItemStatus.Share_Failed.value, + ShareItemStatus.Revoke_Failed.value + ] + failed_items = ShareObjectRepository.count_items_in_states( + session, uri, failed_states + ) + pending_items = ShareObjectRepository.count_items_in_states( + session, uri, [ShareItemStatus.PendingApproval.value] + ) + return {'tables': tables, 'locations': locations, 'sharedItems': shared_items, 'revokedItems': revoked_items, + 'failedItems': failed_items, 'pendingItems': pending_items} + + @staticmethod + def list_shares_in_my_inbox(filter: dict): + # TODO THERE WAS NO PERMISSION CHECK + context = get_context() + with context.db_engine.scoped_session() as session: + return 
ShareObjectRepository.list_user_received_share_requests( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + @staticmethod + def list_shares_in_my_outbox(filter): + # TODO THERE WAS NO PERMISSION CHECK + context = get_context() + with context.db_engine.scoped_session() as session: + return ShareObjectRepository.list_user_sent_share_requests( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + @staticmethod + def _run_transitions(session, share, share_items_states, action): + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(action.value) + + for item_state in share_items_states: + item_sm = ShareItemSM(item_state) + new_state = item_sm.run_transition(action.value) + item_sm.update_state(session, share.shareUri, new_state) + + share_sm.update_state(session, share, new_share_state) + return new_share_state + + @staticmethod + def _get_share_data(session, uri): + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) + return share, dataset, share_items_states + + @staticmethod + def _attach_share_resource_policy(session, share, group): + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + + @staticmethod + def _validate_group_membership( + session, share_object_group, environment_uri + ): + context = get_context() + if share_object_group and share_object_group not in context.groups: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'User: {context.username} is not a member of the team {share_object_group}', + ) + if share_object_group not in Environment.list_environment_groups( + session=session, + username=context.username, + 
groups=context.groups, + uri=environment_uri, + data=None, + check_perm=True, + ): + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Team: {share_object_group} is not a member of the environment {environment_uri}', + ) diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py index 6e96fa0b5..64e46eb91 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py @@ -74,8 +74,8 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: log.info(f"Sharing table {table.GlueTableName}...") - share_item = ShareObjectRepository.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: @@ -140,8 +140,8 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = ShareObjectRepository.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py index b6e11482d..7f7968366 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py @@ 
-70,8 +70,8 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: - share_item = ShareObjectRepository.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: @@ -121,8 +121,8 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = ShareObjectRepository.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: log.info( diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py index 7b79f4013..bd9eb9298 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py @@ -64,10 +64,10 @@ def process_approved_shares( success = True for folder in share_folders: log.info(f'sharing folder: {folder}') - sharing_item = ShareObjectRepository.find_share_item_by_folder( + sharing_item = ShareObjectRepository.find_sharable_item( session, - share, - folder, + share.shareUri, + folder.locationUri, ) shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) @@ -129,10 +129,10 @@ def process_revoked_shares( success = True for folder in revoke_folders: log.info(f'revoking access to folder: {folder}') - removing_item = ShareObjectRepository.find_share_item_by_folder( + removing_item = ShareObjectRepository.find_sharable_item( session, - share, - folder, + share.shareUri, + folder.locationUri, ) revoked_item_SM = 
ShareItemSM(ShareItemStatus.Revoke_Approved.value) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 5b047ab49..8fb41b064 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -55,12 +55,10 @@ def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): def resolve_dataset(context, source: DatasetTable, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - dataset_with_role = get_dataset( - context, source=None, datasetUri=source.datasetUri - ) - if not dataset_with_role: - return None + + dataset_with_role = get_dataset(context, source=None, datasetUri=source.datasetUri) + if not dataset_with_role: + return None return dataset_with_role @@ -95,5 +93,5 @@ def resolve_redshift_copy_location( ).dataLocation -def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): +def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str): return DatasetTableService.list_shared_tables_by_env_dataset(datasetUri, envUri) From dd0fc0ac0c0c45d9fa6d4850a40a0358d151edd9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 16:03:53 +0200 Subject: [PATCH 180/346] Review remarks --- backend/dataall/modules/loader.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 8fc03c0b4..7354ebeec 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -184,6 +184,9 @@ def _check_loading_correct(in_config: Set[str], modes: List[ImportMode]): for dependency in module.depends_on(): if dependency not in expected_load: expected_load.add(dependency) + if not dependency.is_supported(modes): + raise ImportError(f"Dependency {dependency.name()} doesn't support {modes}") + 
new_to_add.append(dependency) to_add = new_to_add From 6765aaa009e9bd803a30dc80bfe1f253f1277c89 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 16:24:36 +0200 Subject: [PATCH 181/346] Removed dataset_share_service.py --- .../modules/dataset_sharing/api/resolvers.py | 15 +- .../db/share_object_repository.py | 99 +++++++++ .../services/dataset_share_service.py | 207 ------------------ .../services/share_item_service.py | 14 +- 4 files changed, 116 insertions(+), 219 deletions(-) delete mode 100644 backend/dataall/modules/dataset_sharing/services/dataset_share_service.py diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 350330c32..f6e5347d2 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -8,7 +8,6 @@ from dataall.db.exceptions import RequiredParameter from dataall.modules.dataset_sharing.api.enums import ShareObjectPermission from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.services.dataset_share_service import DatasetShareService from dataall.modules.dataset_sharing.services.share_item_service import ShareItemService from dataall.modules.dataset_sharing.services.share_object_service import ShareObjectService from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository @@ -216,14 +215,11 @@ def list_data_items_shared_with_env_group( if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetShareService.paginated_shared_with_environment_group_datasets( + return ShareItemService.paginated_shared_with_environment_group_datasets( session=session, - username=context.username, - groups=context.groups, - envUri=environmentUri, - groupUri=groupUri, + env_uri=environmentUri, + group_uri=groupUri, data=filter, - check_perm=True, ) @@ -233,11 +229,8 @@ def 
list_shared_with_environment_data_items( if not filter: filter = {} with context.engine.scoped_session() as session: - return DatasetShareService.paginated_shared_with_environment_datasets( + return ShareItemService.paginated_shared_with_environment_datasets( session=session, - username=context.username, - groups=context.groups, uri=environmentUri, data=filter, - check_perm=True, ) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index dfa27f33e..29d59fc9a 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -916,3 +916,102 @@ def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [Sha ) return query.all() + @staticmethod + def paginate_shared_datasets(session, env_uri, group_uri, data): + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + q = ( + session.query( + ShareObjectItem.shareUri.label('shareUri'), + Dataset.datasetUri.label('datasetUri'), + Dataset.name.label('datasetName'), + Dataset.description.label('datasetDescription'), + models.Environment.environmentUri.label('environmentUri'), + models.Environment.name.label('environmentName'), + ShareObject.created.label('created'), + ShareObject.principalId.label('principalId'), + ShareObject.principalType.label('principalType'), + ShareObjectItem.itemType.label('itemType'), + ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + ShareObjectItem.GlueTableName.label('GlueTableName'), + ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + models.Organization.organizationUri.label('organizationUri'), + models.Organization.name.label('organizationName'), + case( + [ + ( + ShareObjectItem.itemType + == ShareableType.Table.value, + func.concat( + DatasetTable.GlueDatabaseName, + '.', + DatasetTable.GlueTableName, + ), + ), + ( + 
ShareObjectItem.itemType + == ShareableType.StorageLocation.value, + func.concat(DatasetStorageLocation.name), + ), + ], + else_='XXX XXXX', + ).label('itemAccess'), + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + Dataset, + ShareObject.datasetUri == Dataset.datasetUri, + ) + .join( + models.Environment, + models.Environment.environmentUri == Dataset.environmentUri, + ) + .join( + models.Organization, + models.Organization.organizationUri + == models.Environment.organizationUri, + ) + .outerjoin( + DatasetTable, + ShareObjectItem.itemUri == DatasetTable.tableUri, + ) + .outerjoin( + DatasetStorageLocation, + ShareObjectItem.itemUri + == DatasetStorageLocation.locationUri, + ) + .filter( + and_( + ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.environmentUri == env_uri, + ShareObject.principalId == group_uri if group_uri else True, + ) + ) + ) + + if data.get('datasetUri'): + dataset_uri = data.get('datasetUri') + q = q.filter(ShareObject.datasetUri == dataset_uri) + + if data.get('itemTypes', None): + item_types = data.get('itemTypes') + q = q.filter( + or_(*[ShareObjectItem.itemType == t for t in item_types]) + ) + + if data.get("uniqueShares", False): + q = q.filter(ShareObject.principalType != PrincipalType.ConsumptionRole.value) + q = q.distinct(ShareObject.shareUri) + + if data.get('term'): + term = data.get('term') + q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) + + return paginate( + query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + + diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py deleted file mode 100644 index 80a0d9998..000000000 --- a/backend/dataall/modules/dataset_sharing/services/dataset_share_service.py +++ /dev/null @@ -1,207 +0,0 @@ -from sqlalchemy import or_, case, func -from sqlalchemy.sql import and_ - -from 
dataall.api.constants import PrincipalType -from dataall.db import models, permissions -from dataall.db.api import has_resource_perm -from dataall.db.paginator import paginate -from dataall.modules.dataset_sharing.db.enums import ShareableType -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.dataset_sharing.services.share_permissions import LIST_ENVIRONMENT_SHARED_WITH_OBJECTS -from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset - - -class DatasetShareService: - - @staticmethod - @has_resource_perm(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - def paginated_shared_with_environment_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - ShareObjectItem.shareUri.label('shareUri'), - Dataset.datasetUri.label('datasetUri'), - Dataset.name.label('datasetName'), - Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - ShareObject.created.label('created'), - ShareObject.principalId.label('principalId'), - ShareObject.principalType.label('principalType'), - ShareObjectItem.itemType.label('itemType'), - ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - ShareObjectItem.GlueTableName.label('GlueTableName'), - ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - DatasetTable.GlueDatabaseName, - '.', - DatasetTable.GlueTableName, - ), - ), - ( - ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - 
func.concat(DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - ShareObject, - ShareObject.shareUri == ShareObjectItem.shareUri, - ) - .join( - Dataset, - ShareObject.datasetUri == Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - DatasetTable, - ShareObjectItem.itemUri == DatasetTable.tableUri, - ) - .outerjoin( - DatasetStorageLocation, - ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri, - ) - .filter( - and_( - ShareObjectItem.status.in_(share_item_shared_states), - ShareObject.environmentUri == uri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[ShareObjectItem.itemType == t for t in itemTypes]) - ) - - if data.get("uniqueShares", False): - q = q.filter(ShareObject.principalType != PrincipalType.ConsumptionRole.value) - q = q.distinct(ShareObject.shareUri) - - if data.get('term'): - term = data.get('term') - q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def paginated_shared_with_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - ShareObjectItem.shareUri.label('shareUri'), - Dataset.datasetUri.label('datasetUri'), - Dataset.name.label('datasetName'), - Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - 
ShareObject.created.label('created'), - ShareObject.principalId.label('principalId'), - ShareObjectItem.itemType.label('itemType'), - ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - ShareObjectItem.GlueTableName.label('GlueTableName'), - ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - DatasetTable.GlueDatabaseName, - '.', - DatasetTable.GlueTableName, - ), - ), - ( - ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - ShareObject, - ShareObject.shareUri == ShareObjectItem.shareUri, - ) - .join( - Dataset, - ShareObject.datasetUri == Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - DatasetTable, - ShareObjectItem.itemUri == DatasetTable.tableUri, - ) - .outerjoin( - DatasetStorageLocation, - ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri, - ) - .filter( - and_( - ShareObjectItem.status.in_(share_item_shared_states), - ShareObject.environmentUri == envUri, - ShareObject.principalId == groupUri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[ShareObjectItem.itemType == t for t in itemTypes]) - ) - if data.get('term'): - term = data.get('term') - q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() 
diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py index db2613bba..604924797 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_item_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -12,7 +12,8 @@ from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareObjectSM, ShareItemSM from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService -from dataall.modules.dataset_sharing.services.share_permissions import GET_SHARE_OBJECT, ADD_ITEM, REMOVE_ITEM +from dataall.modules.dataset_sharing.services.share_permissions import GET_SHARE_OBJECT, ADD_ITEM, REMOVE_ITEM, \ + LIST_ENVIRONMENT_SHARED_WITH_OBJECTS from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset @@ -162,3 +163,14 @@ def list_shareable_objects(share, filter, is_revokable=False): with get_context().db_engine.scoped_session() as session: return ShareObjectRepository.list_shareable_items(session, share, states, filter) + + @staticmethod + @has_resource_permission(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + def paginated_shared_with_environment_datasets(session, uri, data) -> dict: + return ShareObjectRepository.paginate_shared_datasets(session, uri, None, data) + + @staticmethod + def paginated_shared_with_environment_group_datasets(session, env_uri, group_uri, data) -> dict: + # TODO THERE WAS NOT PERMISSION + return ShareObjectRepository.paginate_shared_datasets(session, env_uri, group_uri, data) + From 4d2db02d618b34fb9968c4df977266c8fbb711e5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 17:13:49 +0200 Subject: [PATCH 182/346] Small fix --- 
.../modules/dataset_sharing/api/resolvers.py | 5 ++--- .../services/share_object_service.py | 15 +++++---------- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index f6e5347d2..9d32aed3d 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -28,15 +28,14 @@ def create_share_object( raise RequiredParameter(input) if 'principalId' not in input: raise RequiredParameter('principalId') - if 'datasetUri' not in input: - raise RequiredParameter('datasetUri') if 'principalType' not in input: raise RequiredParameter('principalType') if 'groupUri' not in input: raise RequiredParameter('groupUri') return ShareObjectService.create_share_object( - uri=datasetUri, + uri=input['environmentUri'], + dataset_uri=datasetUri, item_uri=itemUri, item_type=itemType, group_uri=input['groupUri'], diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index 5752a3d37..154b129f5 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -20,12 +20,6 @@ class ShareObjectService: - @staticmethod - def _get_env_uri(session, uri): - dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - return environment.environmentUri - @staticmethod @has_resource_permission(GET_SHARE_OBJECT) def get_share_object(uri): @@ -33,9 +27,10 @@ def get_share_object(uri): return ShareObjectRepository.get_share_by_uri(session, uri) @staticmethod - @has_resource_permission(CREATE_SHARE_OBJECT, parent_resource=_get_env_uri) + @has_resource_permission(CREATE_SHARE_OBJECT) def create_share_object( uri: 
str, + dataset_uri: str, item_uri: str, item_type: str, group_uri, @@ -44,8 +39,8 @@ def create_share_object( ): context = get_context() with context.db_engine.scoped_session() as session: - dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + environment = Environment.get_environment_by_uri(session, uri) if environment.region != dataset.region: raise UnauthorizedOperation( @@ -137,7 +132,7 @@ def create_share_object( # dataset.SamlAdminGroupName # environment.SamlGroupName ShareObjectService._attach_share_resource_policy(session, share, group_uri) - ShareObjectService._attach_share_resource_policy(session, share, dataset.SamlGroupName) + ShareObjectService._attach_share_resource_policy(session, share, dataset.SamlAdminGroupName) if dataset.SamlAdminGroupName != environment.SamlGroupName: ShareObjectService._attach_share_resource_policy(session, share, environment.SamlGroupName) From d0e9f648deace336b296c04711b7ce376e547047 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 17:23:11 +0200 Subject: [PATCH 183/346] Fix test with a new check --- tests/modules/test_loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/test_loader.py b/tests/modules/test_loader.py index f374c7763..9db12b408 100644 --- a/tests/modules/test_loader.py +++ b/tests/modules/test_loader.py @@ -119,7 +119,7 @@ def test_import_with_one_dependency(mocker): def test_load_with_cdk_mode(mocker): - patch_loading(mocker, [DModule, CModule, BModule], {BModule, CModule}) + patch_loading(mocker, [DModule, CModule, BModule], {CModule}) loader.load_modules([ImportMode.CDK]) assert order == [CModule] From 345f71de650b5c28942aad8de7dd869e67362b24 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 15 May 2023 17:54:15 +0200 Subject: [PATCH 184/346] Fixed small 
issues --- .../dataset_sharing/db/share_object_repository.py | 4 ++-- .../dataset_sharing/services/share_item_service.py | 2 +- .../dataset_sharing/services/share_object_service.py | 9 +++++---- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 29d59fc9a..036534efd 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -318,7 +318,7 @@ def save_and_commit(session, share): session.commit() @staticmethod - def exists(session, dataset: Dataset, env, principal_id, group_uri) -> ShareObject: + def find_share(session, dataset: Dataset, env, principal_id, group_uri) -> ShareObject: return ( session.query(ShareObject) .filter( @@ -329,7 +329,7 @@ def exists(session, dataset: Dataset, env, principal_id, group_uri) -> ShareObje ShareObject.groupUri == group_uri, ) ) - .count() + .first() ) @staticmethod diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py index 604924797..4d5faf18b 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_item_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -38,7 +38,7 @@ def revoke_items_share_object(uri, revoked_uris): revoked_items_states = ShareObjectRepository.get_share_items_states(session, uri, revoked_uris) revoked_items = [ShareObjectRepository.get_share_item_by_uri(session, uri) for uri in revoked_uris] - if revoked_items_states: + if not revoked_items_states: raise ShareItemsFound( action='Revoke Items from Share Object', message='Nothing to be revoked.', diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py 
index 154b129f5..cdc8651bd 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -74,8 +74,8 @@ def create_share_object( ShareObjectService._validate_group_membership(session, group_uri, environment.environmentUri) - has_share = ShareObjectRepository.exists(session, dataset, environment, principal_id, group_uri) - if not has_share: + share = ShareObjectRepository.find_share(session, dataset, environment, principal_id, group_uri) + if not share: share = ShareObject( datasetUri=dataset.datasetUri, environmentUri=environment.environmentUri, @@ -157,7 +157,7 @@ def submit_share_object(uri: str): valid_states = [ShareItemStatus.PendingApproval.value] valid_share_items_states = [x for x in valid_states if x in states] - if valid_share_items_states: + if not valid_share_items_states: raise ShareItemsFound( action='Submit Share Object', message='The request is empty of pending items. 
Add items to share request.', @@ -167,6 +167,7 @@ def submit_share_object(uri: str): ShareNotificationService.notify_share_object_submission( session, context.username, dataset, share ) + return share @staticmethod @has_resource_permission(APPROVE_SHARE_OBJECT) @@ -224,6 +225,7 @@ def delete_share_object(uri: str): share, dataset, states = ShareObjectService._get_share_data(session, uri) shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in states] + new_state = ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Delete) if shared_share_items_states: raise ShareItemsFound( action='Delete share object', @@ -231,7 +233,6 @@ def delete_share_object(uri: str): 'Revoke access to these items before deleting the request.', ) - new_state = ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Delete) if new_state == ShareObjectStatus.Deleted.value: session.delete(share) From e9ebb086dd03c1bb8c4e0db9ac194ea26f298c79 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 May 2023 13:05:02 +0200 Subject: [PATCH 185/346] Bump pymdown-extensions from 8.1.1 to 10.0 in /documentation/userguide (#456) Bumps pymdown-extensions from 8.1.1 to 10.0. 
--- documentation/userguide/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/documentation/userguide/requirements.txt b/documentation/userguide/requirements.txt index 8dcd4add6..116221455 100644 --- a/documentation/userguide/requirements.txt +++ b/documentation/userguide/requirements.txt @@ -1,2 +1,2 @@ mkdocs-material==6.1.7 -pymdown-extensions==8.1.1 +pymdown-extensions==10.0 From 45da50f34880df289f385ac959926d369cb413ac Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 16 May 2023 13:07:58 +0200 Subject: [PATCH 186/346] Moved datasets and share tests into dataset folder --- tests/api/client.py | 31 -- tests/api/conftest.py | 310 --------------- tests/api/test_environment.py | 1 + tests/api/test_keyvaluetag.py | 90 ++--- tests/api/test_organization.py | 6 +- tests/api/test_redshift_cluster.py | 4 +- tests/api/test_stack.py | 6 - tests/api/test_vote.py | 55 +-- tests/modules/datasets/conftest.py | 363 ++++++++++++++++++ .../{api => modules/datasets}/test_dataset.py | 7 - .../datasets/test_dataset_count_votes.py | 48 +++ .../datasets/test_dataset_key_value_tag.py | 59 +++ .../datasets}/test_dataset_location.py | 0 .../datasets}/test_dataset_profiling.py | 0 tests/modules/datasets/test_dataset_stack.py | 7 + .../datasets}/test_dataset_table.py | 0 tests/{api => modules/datasets}/test_share.py | 0 .../modules/notebooks/test_notebook_stack.py | 18 +- 18 files changed, 520 insertions(+), 485 deletions(-) create mode 100644 tests/modules/datasets/conftest.py rename tests/{api => modules/datasets}/test_dataset.py (98%) create mode 100644 tests/modules/datasets/test_dataset_count_votes.py create mode 100644 tests/modules/datasets/test_dataset_key_value_tag.py rename tests/{api => modules/datasets}/test_dataset_location.py (100%) rename tests/{api => modules/datasets}/test_dataset_profiling.py (100%) create mode 100644 tests/modules/datasets/test_dataset_stack.py rename tests/{api => modules/datasets}/test_dataset_table.py 
(100%) rename tests/{api => modules/datasets}/test_share.py (100%) diff --git a/tests/api/client.py b/tests/api/client.py index 36c6bd8e8..8544d4e1c 100644 --- a/tests/api/client.py +++ b/tests/api/client.py @@ -1,4 +1,3 @@ -import random import typing import json import pytest @@ -95,33 +94,3 @@ def wrapper(*args, **kwargs): print(fn.__name__, 'is deprecated') return wrapper - - -def random_email(): - names = ['andy', 'bill', 'satya', 'sundar'] - corps = ['google.com', 'amazon.com', 'microsoft.com'] - return f'{random.choice(names)}@{random.choice(corps)}' - - -def random_emails(): - emails = [] - for i in range(1, 2 + random.choice([2, 3, 4])): - emails.append(random_email()) - return emails - - -def random_group(): - prefixes = ['big', 'small', 'pretty', 'shiny'] - names = ['team', 'people', 'group'] - lands = ['snow', 'ice', 'green', 'high'] - return f'{random.choice(prefixes).capitalize()}{random.choice(names).capitalize()}From{random.choice(lands).capitalize()}land' - - -def random_tag(): - return random.choice( - ['sales', 'finances', 'sites', 'people', 'products', 'partners', 'operations'] - ) - - -def random_tags(): - return [random_tag() for i in range(1, random.choice([2, 3, 4, 5]))] diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 9e8ec5996..791357e66 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,11 +1,6 @@ import dataall.searchproxy.indexers -from dataall.modules.dataset_sharing.db.enums import ShareableType -from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER from .client import * from dataall.db import models -from dataall.api import constants -from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -36,16 +31,6 @@ def patch_check_dataset(module_mocker): def 
patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch( - 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', - return_value={} - ) - module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) - module_mocker.patch('dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert', return_value={}) - module_mocker.patch( - 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', - return_value={} - ) module_mocker.patch('dataall.searchproxy.indexers.DashboardIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer.delete_doc', return_value={}) @@ -187,127 +172,6 @@ def tenant(db, group, group2, permissions, user, user2, user3, group3, group4): yield tenant -@pytest.fixture(scope='module', autouse=True) -def dataset(client, patch_es): - cache = {} - - def factory( - org: models.Organization, - env: models.Environment, - name: str, - owner: str, - group: str, - ) -> Dataset: - key = f'{org.organizationUri}-{env.environmentUri}-{name}-{group}' - if cache.get(key): - print('found in cache ', cache[key]) - return cache.get(key) - response = client.query( - """ - mutation CreateDataset($input:NewDatasetInput){ - createDataset( - input:$input - ){ - datasetUri - label - description - AwsAccountId - S3BucketName - GlueDatabaseName - owner - region, - businessOwnerEmail - businessOwnerDelegationEmails - SamlAdminGroupName - GlueCrawlerName - tables{ - nodes{ - tableUri - } - } - locations{ - nodes{ - locationUri - } - } - stack{ - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - - } - topics - language - confidentiality - organization{ - organizationUri - label - } - shares{ - nodes{ - shareUri - } - } - terms{ - count - nodes{ - 
__typename - ...on Term { - nodeUri - path - label - } - } - } - environment{ - environmentUri - label - region - subscriptionsEnabled - subscriptionsProducersTopicImported - subscriptionsConsumersTopicImported - subscriptionsConsumersTopicName - subscriptionsProducersTopicName - organization{ - organizationUri - label - } - } - statistics{ - tables - locations - upvotes - } - } - } - """, - username=owner, - groups=[group], - input={ - 'owner': owner, - 'label': f'{name}', - 'description': 'test dataset {name}', - 'businessOwnerEmail': 'jeff@amazon.com', - 'tags': random_tags(), - 'businessOwnerDelegationEmails': random_emails(), - 'environmentUri': env.environmentUri, - 'SamlAdminGroupName': group or random_group(), - 'organizationUri': org.organizationUri, - }, - ) - print('==>', response) - return response.data.createDataset - - yield factory - - @pytest.fixture(scope='module', autouse=True) def env(client): cache = {} @@ -391,37 +255,6 @@ def factory( yield factory -@pytest.fixture(scope="module") -def dataset_model(db): - def factory( - organization: models.Organization, - environment: models.Environment, - label: str, - ) -> Dataset: - with db.scoped_session() as session: - dataset = Dataset( - organizationUri=organization.organizationUri, - environmentUri=environment.environmentUri, - label=label, - owner=environment.owner, - stewards=environment.SamlGroupName, - SamlAdminGroupName=environment.SamlGroupName, - businessOwnerDelegationEmails=["foo@amazon.com"], - name=label, - S3BucketName=label, - GlueDatabaseName="gluedatabase", - KmsAlias="kmsalias", - AwsAccountId=environment.AwsAccountId, - region=environment.region, - IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", - IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", - ) - session.add(dataset) - session.commit() - return dataset - - yield factory - @pytest.fixture(scope="module") def environment_group(db): @@ -452,138 +285,6 @@ def factory( 
yield factory -@pytest.fixture(scope="module") -def share(db): - def factory( - dataset: Dataset, - environment: models.Environment, - env_group: models.EnvironmentGroup, - owner: str, - status: str - ) -> ShareObject: - with db.scoped_session() as session: - share = ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=environment.environmentUri, - owner=owner, - groupUri=env_group.groupUri, - principalId=env_group.groupUri, - principalType=constants.PrincipalType.Group.value, - principalIAMRoleName=env_group.environmentIAMRoleName, - status=status, - ) - session.add(share) - session.commit() - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=env_group.groupUri, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - if dataset.SamlAdminGroupName != environment.SamlGroupName: - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=ShareObject.__name__, - ) - session.commit() - return share - - yield factory - - -@pytest.fixture(scope="module") -def share_item(db): - def factory( - share: ShareObject, - table: DatasetTable, - status: str - ) -> ShareObjectItem: - with db.scoped_session() as session: - share_item = ShareObjectItem( - shareUri=share.shareUri, - owner="alice", - itemUri=table.tableUri, - itemType=ShareableType.Table.value, - itemName=table.name, - status=status, - ) - session.add(share_item) - 
session.commit() - return share_item - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def location(db): - cache = {} - - def factory(dataset: Dataset, name, username) -> DatasetStorageLocation: - key = f'{dataset.datasetUri}-{name}' - if cache.get(key): - return cache.get(key) - with db.scoped_session() as session: - ds_location = DatasetStorageLocation( - name=name, - label=name, - owner=username, - datasetUri=dataset.datasetUri, - S3BucketName=dataset.S3BucketName, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3Prefix=f'{name}', - ) - session.add(ds_location) - return ds_location - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def table(db): - cache = {} - - def factory(dataset: Dataset, name, username) -> DatasetTable: - key = f'{dataset.datasetUri}-{name}' - if cache.get(key): - return cache.get(key) - with db.scoped_session() as session: - table = DatasetTable( - name=name, - label=name, - owner=username, - datasetUri=dataset.datasetUri, - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableName=name, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, - S3Prefix=f'{name}', - ) - session.add(table) - return table - - yield factory - - @pytest.fixture(scope='module', autouse=True) def org(client): cache = {} @@ -635,17 +336,6 @@ def env_fixture(env, org_fixture, user, group, tenant, module_mocker): yield env1 -@pytest.fixture(scope='module') -def dataset_fixture(env_fixture, org_fixture, dataset, group, module_mocker) -> Dataset: - yield dataset( - org=org_fixture, - env=env_fixture, - name='dataset1', - owner=env_fixture.owner, - group=group.name, - ) - - @pytest.fixture(scope='module') def cluster(env_fixture, org_fixture, client, group): ouri = org_fixture.organizationUri diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index 7b6e4965d..d3a88d086 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py 
@@ -51,6 +51,7 @@ def get_env(client, env1, group): groups=[group.name], ) + def test_get_environment(client, org1, env1, group): response = get_env(client, env1, group) assert ( diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py index cf254a515..be386d6a4 100644 --- a/tests/api/test_keyvaluetag.py +++ b/tests/api/test_keyvaluetag.py @@ -20,15 +20,7 @@ def env1( yield env1 -@pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset, module_mocker) -> Dataset: - with db.scoped_session() as session: - yield dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - - -def list_tags_query(client, dataset1, target_type=None): +def list_tags_query(client, target_uri, target_type, group): query = client.query( """ query listKeyValueTags($targetUri:String!, $targetType:String!){ @@ -42,72 +34,36 @@ def list_tags_query(client, dataset1, target_type=None): } } """, - targetUri=dataset1.datasetUri, - targetType=target_type or 'dataset', + targetUri=target_uri, + targetType=target_type, username='alice', - groups=[dataset1.SamlAdminGroupName], + groups=[group], ) return query -def test_empty_key_value_tags(client, dataset1): - response = list_tags_query(client, dataset1) - print(response) - assert len(response.data.listKeyValueTags) == 0 - - -def test_unsupported_target_type(db, dataset1): +def test_unsupported_target_type(db): with pytest.raises(exceptions.InvalidInput): assert dataall.db.api.TargetType.is_supported_target_type('unknown') -def test_update_key_value_tags(client, dataset1): - response = client.query( - """ - mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ - updateKeyValueTags(input:$input){ - tagUri - targetUri - targetType - key - value - cascade +def update_key_value_tags(client, target_uri, target_type, tags, group): + return ( + client.query( + """ + mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ + updateKeyValueTags(input:$input){ + 
tagUri + targetUri + targetType + key + value + cascade + } } - } - """, - input=dict( - targetUri=dataset1.datasetUri, - targetType='dataset', - tags=[{'key': 'tag1', 'value': 'value1', 'cascade': False}], - ), - username='alice', - groups=[dataset1.SamlAdminGroupName], - ) - assert len(response.data.updateKeyValueTags) == 1 - - response = list_tags_query(client, dataset1) - assert response.data.listKeyValueTags[0].key == 'tag1' - assert response.data.listKeyValueTags[0].value == 'value1' - assert response.data.listKeyValueTags[0].cascade == False - - response = client.query( - """ - mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ - updateKeyValueTags(input:$input){ - tagUri - targetUri - targetType - key - value - cascade - } - } - """, - input=dict(targetUri=dataset1.datasetUri, targetType='dataset', tags=[]), - username='alice', - groups=[dataset1.SamlAdminGroupName], + """, + input=dict(targetUri=target_uri, targetType=target_type, tags=tags), + username='alice', + groups=[group], + ) ) - assert len(response.data.updateKeyValueTags) == 0 - - response = list_tags_query(client, dataset1) - assert len(response.data.listKeyValueTags) == 0 diff --git a/tests/api/test_organization.py b/tests/api/test_organization.py index fd414af31..9b74e52a2 100644 --- a/tests/api/test_organization.py +++ b/tests/api/test_organization.py @@ -182,7 +182,7 @@ def test_list_organizations_anyone(client, org1): assert response.data.listOrganizations.count == 0 -def test_group_invitation(db, client, org1, group2, user, group3, group, dataset, env): +def test_group_invitation(db, client, org1, group2, user, group3, group, env): response = client.query( """ mutation inviteGroupToOrganization($input:InviteGroupToOrganizationInput){ @@ -280,8 +280,8 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset assert 'OrganizationResourcesFound' in response.errors[0].message with db.scoped_session() as session: - dataset = 
session.query(dataall.db.models.Environment).get(env2.environmentUri) - session.delete(dataset) + env = session.query(dataall.db.models.Environment).get(env2.environmentUri) + session.delete(env) session.commit() response = client.query( diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index d98e748f4..502c2ef02 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -4,7 +4,9 @@ import pytest import dataall from dataall.api.constants import RedshiftClusterRole -from dataall.modules.datasets.services.dataset_service import DatasetService + +from tests.modules.datasets.conftest import dataset, table + from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset diff --git a/tests/api/test_stack.py b/tests/api/test_stack.py index bcb9f28f0..d99e1ca6a 100644 --- a/tests/api/test_stack.py +++ b/tests/api/test_stack.py @@ -4,7 +4,6 @@ def test_update_stack( group, pipeline, env_fixture, - dataset_fixture, sgm_studio, cluster, ): @@ -13,11 +12,6 @@ def test_update_stack( ) assert response.data.updateStack.targetUri == env_fixture.environmentUri - response = update_stack_query( - client, dataset_fixture.datasetUri, 'dataset', group.name - ) - assert response.data.updateStack.targetUri == dataset_fixture.datasetUri - response = update_stack_query( client, sgm_studio.sagemakerStudioUserProfileUri, 'mlstudio', group.name ) diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index efe0a7d4e..e0db05c74 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -18,14 +18,6 @@ def env1( yield env1 -@pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> Dataset: - with db.scoped_session() as session: - yield dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - - @pytest.fixture(scope='module') def dashboard(client, env1, org1, 
group, module_mocker, patch_es): module_mocker.patch( @@ -63,13 +55,9 @@ def dashboard(client, env1, org1, group, module_mocker, patch_es): yield response.data.importDashboard -def test_count_votes(client, dataset1, dashboard): - response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 0 +def test_count_votes(client, dashboard, env1): response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName ) assert response.data.countUpVotes == 0 @@ -106,57 +94,34 @@ def get_vote_query(client, target_uri, target_type, group): return response -def test_upvote(patch_es, client, dataset1, dashboard): - response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', True, dataset1.SamlAdminGroupName - ) - assert response.data.upVote.upvote - response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 1 - response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.getVote.upvote +def test_upvote(patch_es, client, env1, dashboard): response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', True, dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', True, env1.SamlGroupName ) assert response.data.upVote.upvote response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName ) assert response.data.countUpVotes == 1 response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName ) assert response.data.getVote.upvote response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', False, 
dataset1.SamlAdminGroupName - ) - assert not response.data.upVote.upvote - response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', False, dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', False, env1.SamlGroupName ) assert not response.data.upVote.upvote + response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert not response.data.getVote.upvote - response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName ) assert not response.data.getVote.upvote response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 0 - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName ) assert response.data.countUpVotes == 0 diff --git a/tests/modules/datasets/conftest.py b/tests/modules/datasets/conftest.py new file mode 100644 index 000000000..56a1f33fb --- /dev/null +++ b/tests/modules/datasets/conftest.py @@ -0,0 +1,363 @@ +import random +import pytest + +from tests.api.client import * +from tests.api.conftest import * + +from dataall.db.api import ResourcePolicy +from dataall.db.models import ShareableType, ShareObjectItem, ShareObject, Environment, EnvironmentGroup, Organization, \ + PrincipalType +from dataall.db.permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER +from dataall.modules.datasets import Dataset, DatasetTable, DatasetStorageLocation + + +@pytest.fixture(scope='module', autouse=True) +def patch_es(module_mocker): + module_mocker.patch('dataall.searchproxy.connect', return_value={}) + module_mocker.patch('dataall.searchproxy.search', return_value={}) + module_mocker.patch( + 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', + 
return_value={} + ) + module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) + module_mocker.patch('dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert', return_value={}) + module_mocker.patch( + 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', + return_value={} + ) + module_mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer.delete_doc', return_value={}) + + +@pytest.fixture(scope='module', autouse=True) +def dataset(client, patch_es): + cache = {} + + def factory( + org: Organization, + env: Environment, + name: str, + owner: str, + group: str, + ) -> Dataset: + key = f'{org.organizationUri}-{env.environmentUri}-{name}-{group}' + if cache.get(key): + print('found in cache ', cache[key]) + return cache.get(key) + response = client.query( + """ + mutation CreateDataset($input:NewDatasetInput){ + createDataset( + input:$input + ){ + datasetUri + label + description + AwsAccountId + S3BucketName + GlueDatabaseName + owner + region, + businessOwnerEmail + businessOwnerDelegationEmails + SamlAdminGroupName + GlueCrawlerName + tables{ + nodes{ + tableUri + } + } + locations{ + nodes{ + locationUri + } + } + stack{ + stack + status + stackUri + targetUri + accountid + region + stackid + link + outputs + resources + + } + topics + language + confidentiality + organization{ + organizationUri + label + } + shares{ + nodes{ + shareUri + } + } + terms{ + count + nodes{ + __typename + ...on Term { + nodeUri + path + label + } + } + } + environment{ + environmentUri + label + region + subscriptionsEnabled + subscriptionsProducersTopicImported + subscriptionsConsumersTopicImported + subscriptionsConsumersTopicName + subscriptionsProducersTopicName + organization{ + organizationUri + label + } + } + statistics{ + tables + locations + upvotes + } + } + } + """, + username=owner, + groups=[group], + input={ + 'owner': owner, + 'label': f'{name}', + 
'description': 'test dataset {name}', + 'businessOwnerEmail': 'jeff@amazon.com', + 'tags': random_tags(), + 'businessOwnerDelegationEmails': random_emails(), + 'environmentUri': env.environmentUri, + 'SamlAdminGroupName': group or random_group(), + 'organizationUri': org.organizationUri, + }, + ) + print('==>', response) + return response.data.createDataset + + yield factory + + +@pytest.fixture(scope='module') +def dataset1(env1, org1, dataset, group) -> Dataset: + yield dataset( + org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name + ) + + +@pytest.fixture(scope='module', autouse=True) +def table(db): + cache = {} + + def factory(dataset: Dataset, name, username) -> DatasetTable: + key = f'{dataset.datasetUri}-{name}' + if cache.get(key): + return cache.get(key) + with db.scoped_session() as session: + table = DatasetTable( + name=name, + label=name, + owner=username, + datasetUri=dataset.datasetUri, + GlueDatabaseName=dataset.GlueDatabaseName, + GlueTableName=name, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.S3BucketName, + S3Prefix=f'{name}', + ) + session.add(table) + return table + + yield factory + + +@pytest.fixture(scope='module') +def dataset_fixture(env_fixture, org_fixture, dataset, group) -> Dataset: + yield dataset( + org=org_fixture, + env=env_fixture, + name='dataset1', + owner=env_fixture.owner, + group=group.name, + ) + + +@pytest.fixture(scope="module") +def dataset_model(db): + def factory( + organization: Organization, + environment: Environment, + label: str, + ) -> Dataset: + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=organization.organizationUri, + environmentUri=environment.environmentUri, + label=label, + owner=environment.owner, + stewards=environment.SamlGroupName, + SamlAdminGroupName=environment.SamlGroupName, + businessOwnerDelegationEmails=["foo@amazon.com"], + name=label, + S3BucketName=label, + GlueDatabaseName="gluedatabase", + 
KmsAlias="kmsalias", + AwsAccountId=environment.AwsAccountId, + region=environment.region, + IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", + IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", + ) + session.add(dataset) + session.commit() + return dataset + + yield factory + + +@pytest.fixture(scope='module', autouse=True) +def location(db): + cache = {} + + def factory(dataset: Dataset, name, username) -> DatasetStorageLocation: + key = f'{dataset.datasetUri}-{name}' + if cache.get(key): + return cache.get(key) + with db.scoped_session() as session: + ds_location = DatasetStorageLocation( + name=name, + label=name, + owner=username, + datasetUri=dataset.datasetUri, + S3BucketName=dataset.S3BucketName, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3Prefix=f'{name}', + ) + session.add(ds_location) + return ds_location + + yield factory + + +@pytest.fixture(scope="module") +def share_item(db): + def factory( + share: ShareObject, + table: DatasetTable, + status: str + ) -> ShareObjectItem: + with db.scoped_session() as session: + share_item = ShareObjectItem( + shareUri=share.shareUri, + owner="alice", + itemUri=table.tableUri, + itemType=ShareableType.Table.value, + itemName=table.name, + status=status, + ) + session.add(share_item) + session.commit() + return share_item + + yield factory + + +@pytest.fixture(scope="module") +def share(db): + def factory( + dataset: Dataset, + environment: Environment, + env_group: EnvironmentGroup, + owner: str, + status: str + ) -> ShareObject: + with db.scoped_session() as session: + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=environment.environmentUri, + owner=owner, + groupUri=env_group.groupUri, + principalId=env_group.groupUri, + principalType=PrincipalType.Group.value, + principalIAMRoleName=env_group.environmentIAMRoleName, + status=status, + ) + session.add(share) + session.commit() + + 
ResourcePolicy.attach_resource_policy( + session=session, + group=env_group.groupUri, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + if dataset.SamlAdminGroupName != environment.SamlGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + session.commit() + return share + + yield factory + + +def random_email(): + names = ['andy', 'bill', 'satya', 'sundar'] + corps = ['google.com', 'amazon.com', 'microsoft.com'] + return f'{random.choice(names)}@{random.choice(corps)}' + + +def random_emails(): + emails = [] + for i in range(1, 2 + random.choice([2, 3, 4])): + emails.append(random_email()) + return emails + + +def random_group(): + prefixes = ['big', 'small', 'pretty', 'shiny'] + names = ['team', 'people', 'group'] + lands = ['snow', 'ice', 'green', 'high'] + return f'{random.choice(prefixes).capitalize()}{random.choice(names).capitalize()}From{random.choice(lands).capitalize()}land' + + +def random_tag(): + return random.choice( + ['sales', 'finances', 'sites', 'people', 'products', 'partners', 'operations'] + ) + + +def random_tags(): + return [random_tag() for i in range(1, random.choice([2, 3, 4, 5]))] + diff --git a/tests/api/test_dataset.py b/tests/modules/datasets/test_dataset.py similarity index 98% rename from tests/api/test_dataset.py rename to tests/modules/datasets/test_dataset.py index f5b9e80b3..1724fc0a5 100644 --- 
a/tests/api/test_dataset.py +++ b/tests/modules/datasets/test_dataset.py @@ -20,13 +20,6 @@ def env1(env, org1, user, group, tenant): yield env1 -@pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name - ) - - @pytest.fixture(scope='module') def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: yield org('org2', user2.userName, group2.name) diff --git a/tests/modules/datasets/test_dataset_count_votes.py b/tests/modules/datasets/test_dataset_count_votes.py new file mode 100644 index 000000000..2212d8ad9 --- /dev/null +++ b/tests/modules/datasets/test_dataset_count_votes.py @@ -0,0 +1,48 @@ +import pytest + +from dataall.modules.datasets import Dataset +from tests.api.test_vote import * + + +@pytest.fixture(scope='module', autouse=True) +def dataset1(db, env1, org1, group, user, dataset) -> Dataset: + yield dataset( + org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name + ) + + +def test_count_votes(client, dataset1, dashboard): + response = count_votes_query( + client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + ) + assert response.data.countUpVotes == 0 + + +def test_upvote(patch_es, client, dataset1): + response = upvote_mutation( + client, dataset1.datasetUri, 'dataset', True, dataset1.SamlAdminGroupName + ) + assert response.data.upVote.upvote + response = count_votes_query( + client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + ) + assert response.data.countUpVotes == 1 + response = get_vote_query( + client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + ) + assert response.data.getVote.upvote + + response = upvote_mutation( + client, dataset1.datasetUri, 'dataset', False, dataset1.SamlAdminGroupName + ) + assert not response.data.upVote.upvote + + response = get_vote_query( + client, dataset1.datasetUri, 'dataset', 
dataset1.SamlAdminGroupName + ) + assert not response.data.getVote.upvote + + response = count_votes_query( + client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + ) + assert response.data.countUpVotes == 0 \ No newline at end of file diff --git a/tests/modules/datasets/test_dataset_key_value_tag.py b/tests/modules/datasets/test_dataset_key_value_tag.py new file mode 100644 index 000000000..a17f5b59f --- /dev/null +++ b/tests/modules/datasets/test_dataset_key_value_tag.py @@ -0,0 +1,59 @@ +from dataall.db import models +import pytest + +from dataall.modules.datasets.db.models import Dataset +from tests.api.test_keyvaluetag import update_key_value_tags, list_tags_query + + +@pytest.fixture(scope='module') +def org1(db, org, tenant, user, group) -> models.Organization: + org = org('testorg', user.userName, group.name) + yield org + + +@pytest.fixture(scope='module') +def env1( + db, org1: models.Organization, user, group, module_mocker, env +) -> models.Environment: + module_mocker.patch('requests.post', return_value=True) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True + ) + env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') + yield env1 + + +@pytest.fixture(scope='module', autouse=True) +def dataset1(db, env1, org1, group, user, dataset) -> Dataset: + with db.scoped_session(): + yield dataset( + org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name + ) + + +def list_dataset_tags_query(client, dataset): + return list_tags_query(client, dataset.datasetUri, 'dataset', dataset.SamlAdminGroupName) + + +def test_empty_key_value_tags(client, dataset1): + response = list_dataset_tags_query(client, dataset1.datasetUri) + print(response) + assert len(response.data.listKeyValueTags) == 0 + + +def test_update_key_value_tags(client, dataset1): + tags = [{'key': 'tag1', 'value': 'value1', 'cascade': False}] + response = update_key_value_tags(client, 
dataset1.datasetUri, 'dataset', tags, dataset1.SamlAdminGroupName) + + assert len(response.data.updateKeyValueTags) == 1 + + response = list_dataset_tags_query(client, dataset1) + assert response.data.listKeyValueTags[0].key == 'tag1' + assert response.data.listKeyValueTags[0].value == 'value1' + assert not response.data.listKeyValueTags[0].cascade + + response = update_key_value_tags(client, dataset1.datasetUri, 'dataset', [], dataset1.SamlAdminGroupName) + assert len(response.data.updateKeyValueTags) == 0 + + response = list_dataset_tags_query(client, dataset1) + assert len(response.data.listKeyValueTags) == 0 diff --git a/tests/api/test_dataset_location.py b/tests/modules/datasets/test_dataset_location.py similarity index 100% rename from tests/api/test_dataset_location.py rename to tests/modules/datasets/test_dataset_location.py diff --git a/tests/api/test_dataset_profiling.py b/tests/modules/datasets/test_dataset_profiling.py similarity index 100% rename from tests/api/test_dataset_profiling.py rename to tests/modules/datasets/test_dataset_profiling.py diff --git a/tests/modules/datasets/test_dataset_stack.py b/tests/modules/datasets/test_dataset_stack.py new file mode 100644 index 000000000..81860e17b --- /dev/null +++ b/tests/modules/datasets/test_dataset_stack.py @@ -0,0 +1,7 @@ +from tests.api.test_stack import update_stack_query + + +def test_notebook_stack(client, dataset_fixture, group): + dataset = dataset_fixture + response = update_stack_query(client, dataset.datasetUri, 'dataset', dataset.SamlAdminGroupName) + assert response.data.updateStack.targetUri == dataset.datasetUri diff --git a/tests/api/test_dataset_table.py b/tests/modules/datasets/test_dataset_table.py similarity index 100% rename from tests/api/test_dataset_table.py rename to tests/modules/datasets/test_dataset_table.py diff --git a/tests/api/test_share.py b/tests/modules/datasets/test_share.py similarity index 100% rename from tests/api/test_share.py rename to 
tests/modules/datasets/test_share.py diff --git a/tests/modules/notebooks/test_notebook_stack.py b/tests/modules/notebooks/test_notebook_stack.py index 1c41c46c6..b7f7ce153 100644 --- a/tests/modules/notebooks/test_notebook_stack.py +++ b/tests/modules/notebooks/test_notebook_stack.py @@ -1,18 +1,6 @@ +from tests.api.test_stack import update_stack_query + def test_notebook_stack(client, sgm_notebook, group): - response = client.query( - """ - mutation updateStack($targetUri:String!, $targetType:String!){ - updateStack(targetUri:$targetUri, targetType:$targetType){ - stackUri - targetUri - name - } - } - """, - targetUri=sgm_notebook.notebookUri, - targetType="notebook", - username="alice", - groups=[group.name], - ) + response = update_stack_query(client, sgm_notebook.notebookUri, 'notebook', group.name) assert response.data.updateStack.targetUri == sgm_notebook.notebookUri From 054e1b6a77ba6d5ab706460a3686e0fd0d854361 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 16 May 2023 17:17:31 +0200 Subject: [PATCH 187/346] Fixing tests after moving them --- tests/api/test_dashboards.py | 12 -- tests/api/test_environment.py | 32 +--- tests/api/test_keyvaluetag.py | 1 - tests/api/test_vote.py | 1 - tests/modules/datasets/conftest.py | 6 +- .../datasets/test_dataset_key_value_tag.py | 4 +- .../datasets/test_dataset_resource_found.py | 154 ++++++++++++++++++ 7 files changed, 161 insertions(+), 49 deletions(-) create mode 100644 tests/modules/datasets/test_dataset_resource_found.py diff --git a/tests/api/test_dashboards.py b/tests/api/test_dashboards.py index f38ea2419..cd0da17bd 100644 --- a/tests/api/test_dashboards.py +++ b/tests/api/test_dashboards.py @@ -2,7 +2,6 @@ import pytest import dataall -from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module', autouse=True) @@ -24,17 +23,6 @@ def env1(env, org1, user, group, tenant, module_mocker): yield env1 -@pytest.fixture(scope='module') -def dataset1( - org1: 
dataall.db.models.Organization, - env1: dataall.db.models.Environment, - dataset: typing.Callable, -) -> Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group='dataset1admins' - ) - - @pytest.fixture(scope='module') def dashboard(client, env1, org1, group, module_mocker, patch_es): module_mocker.patch( diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index d3a88d086..01770b8d9 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -1,8 +1,6 @@ import pytest import dataall -from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET @pytest.fixture(scope='module', autouse=True) @@ -394,7 +392,7 @@ def test_paging(db, client, org1, env1, user, group): first_id = response.data.listEnvironments.nodes[0].environmentUri -def test_group_invitation(db, client, env1, org1, group2, user, group3, group, dataset): +def test_group_invitation(db, client, env1, org1, group2, user, group3, group): response = client.query( """ query listResourcePermissions($filter:ResourcePermissionFilter){ @@ -433,7 +431,6 @@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d env_permissions = [ p.name for p in response.data.listEnvironmentGroupInvitationPermissions ] - assert CREATE_DATASET in env_permissions response = client.query( """ @@ -471,7 +468,6 @@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d environmentUri=env1.environmentUri, ) env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] - assert CREATE_DATASET in env_permissions response = client.query( """ @@ -575,32 +571,6 @@ def test_group_invitation(db, client, env1, org1, group2, user, group3, group, d assert response.data.listAllEnvironmentGroups.count == 2 - dataset = dataset( - org=org1, env=env1, name='dataset1', owner='bob', group=group2.name - ) - assert 
dataset.datasetUri - - response = client.query( - """ - mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ - removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ - environmentUri - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - groupUri=group2.name, - groups=[group.name, group2.name], - ) - print(response) - - assert 'EnvironmentResourcesFound' in response.errors[0].message - with db.scoped_session() as session: - dataset = session.query(Dataset).get(dataset.datasetUri) - session.delete(dataset) - session.commit() - response = client.query( """ mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py index be386d6a4..2e996f341 100644 --- a/tests/api/test_keyvaluetag.py +++ b/tests/api/test_keyvaluetag.py @@ -3,7 +3,6 @@ import pytest from dataall.db import exceptions -from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module') diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index e0db05c74..513f0b903 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -1,7 +1,6 @@ import pytest from dataall.db import models -from dataall.modules.datasets_base.db.models import Dataset @pytest.fixture(scope='module') diff --git a/tests/modules/datasets/conftest.py b/tests/modules/datasets/conftest.py index 56a1f33fb..af895ccd3 100644 --- a/tests/modules/datasets/conftest.py +++ b/tests/modules/datasets/conftest.py @@ -1,13 +1,15 @@ import random import pytest +from dataall.modules.dataset_sharing.db.enums import ShareableType +from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER from tests.api.client import * from tests.api.conftest import * from dataall.db.api import ResourcePolicy -from 
dataall.db.models import ShareableType, ShareObjectItem, ShareObject, Environment, EnvironmentGroup, Organization, \ +from dataall.db.models import Environment, EnvironmentGroup, Organization, \ PrincipalType -from dataall.db.permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER from dataall.modules.datasets import Dataset, DatasetTable, DatasetStorageLocation diff --git a/tests/modules/datasets/test_dataset_key_value_tag.py b/tests/modules/datasets/test_dataset_key_value_tag.py index a17f5b59f..472a3c400 100644 --- a/tests/modules/datasets/test_dataset_key_value_tag.py +++ b/tests/modules/datasets/test_dataset_key_value_tag.py @@ -1,7 +1,7 @@ from dataall.db import models import pytest -from dataall.modules.datasets.db.models import Dataset +from dataall.modules.datasets_base.db.models import Dataset from tests.api.test_keyvaluetag import update_key_value_tags, list_tags_query @@ -36,7 +36,7 @@ def list_dataset_tags_query(client, dataset): def test_empty_key_value_tags(client, dataset1): - response = list_dataset_tags_query(client, dataset1.datasetUri) + response = list_dataset_tags_query(client, dataset1) print(response) assert len(response.data.listKeyValueTags) == 0 diff --git a/tests/modules/datasets/test_dataset_resource_found.py b/tests/modules/datasets/test_dataset_resource_found.py new file mode 100644 index 000000000..e8f056a61 --- /dev/null +++ b/tests/modules/datasets/test_dataset_resource_found.py @@ -0,0 +1,154 @@ +import pytest + +from dataall.modules.datasets_base.db.models import Dataset +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET + + +@pytest.fixture(scope='module', autouse=True) +def org1(org, user, group, tenant): + org1 = org('testorg', user.userName, group.name) + yield org1 + + +@pytest.fixture(scope='module', autouse=True) +def env1(env, org1, user, group, tenant): + env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') + yield env1 + + +def get_env(client, env1, 
group): + return client.query( + """ + query GetEnv($environmentUri:String!){ + getEnvironment(environmentUri:$environmentUri){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + region + SamlGroupName + owner + dashboardsEnabled + mlStudiosEnabled + pipelinesEnabled + warehousesEnabled + stack{ + EcsTaskArn + EcsTaskId + } + parameters { + key + value + } + } + } + """, + username='alice', + environmentUri=env1.environmentUri, + groups=[group.name], + ) + + +def test_dataset_resource_found(db, client, env1, org1, group2, user, group3, group, dataset): + response = client.query( + """ + query listEnvironmentGroupInvitationPermissions($environmentUri:String){ + listEnvironmentGroupInvitationPermissions(environmentUri:$environmentUri){ + permissionUri + name + type + } + } + """, + username=user.userName, + groups=[group.name, group2.name], + filter={}, + ) + + env_permissions = [ + p.name for p in response.data.listEnvironmentGroupInvitationPermissions + ] + assert CREATE_DATASET in env_permissions + + response = client.query( + """ + mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ + inviteGroupOnEnvironment(input:$input){ + environmentUri + } + } + """, + username='alice', + input=dict( + environmentUri=env1.environmentUri, + groupUri=group2.name, + permissions=env_permissions, + environmentIAMRoleName='myteamrole', + ), + groups=[group.name, group2.name], + ) + print(response) + assert response.data.inviteGroupOnEnvironment + + response = client.query( + """ + query getGroup($groupUri:String!, $environmentUri:String){ + getGroup(groupUri:$groupUri){ + environmentPermissions(environmentUri:$environmentUri){ + name + } + } + } + """, + username=user.userName, + groups=[group2.name], + groupUri=group2.name, + environmentUri=env1.environmentUri, + ) + env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] + assert CREATE_DATASET in env_permissions + + dataset = dataset( + org=org1, 
env=env1, name='dataset1', owner='bob', group=group2.name + ) + assert dataset.datasetUri + + response = client.query( + """ + mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ + removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ + environmentUri + } + } + """, + username='alice', + environmentUri=env1.environmentUri, + groupUri=group2.name, + groups=[group.name, group2.name], + ) + print(response) + + assert 'EnvironmentResourcesFound' in response.errors[0].message + with db.scoped_session() as session: + dataset = session.query(Dataset).get(dataset.datasetUri) + session.delete(dataset) + session.commit() + + response = client.query( + """ + mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ + removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ + environmentUri + } + } + """, + username='alice', + environmentUri=env1.environmentUri, + groupUri=group2.name, + groups=[group.name, group2.name], + ) + print(response) + assert response.data.removeGroupFromEnvironment + From e891ad98f513bb9faf353142b0d444db742b17cd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 16 May 2023 17:43:33 +0200 Subject: [PATCH 188/346] Fixing tests after moving them --- tests/api/test_glossary.py | 150 +--------------- .../modules/datasets/test_dataset_glossary.py | 164 ++++++++++++++++++ 2 files changed, 165 insertions(+), 149 deletions(-) create mode 100644 tests/modules/datasets/test_dataset_glossary.py diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 4802afb81..83c710c19 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -1,7 +1,5 @@ from datetime import datetime -from typing import List from dataall.db import models -from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset import pytest @@ -11,7 +9,7 @@ def _org(db, org, tenant, user, group) -> models.Organization: 
yield org -@pytest.fixture(scope='module') +@pytest.fixture(scope='module', autouse=True) def _env( db, _org: models.Organization, user, group, env ) -> models.Environment: @@ -19,53 +17,6 @@ def _env( yield env1 -@pytest.fixture(scope='module', autouse=True) -def _dataset(db, _env, _org, group, user, dataset) -> Dataset: - with db.scoped_session() as session: - yield dataset( - org=_org, env=_env, name='dataset1', owner=user.userName, group=group.name - ) - - -@pytest.fixture(scope='module', autouse=True) -def _table(db, _dataset) -> DatasetTable: - with db.scoped_session() as session: - t = DatasetTable( - datasetUri=_dataset.datasetUri, - label='table', - AWSAccountId=_dataset.AwsAccountId, - region=_dataset.region, - S3BucketName=_dataset.S3BucketName, - S3Prefix='/raw', - GlueTableName='table', - owner='alice', - GlueDatabaseName=_dataset.GlueDatabaseName, - ) - session.add(t) - yield t - - -@pytest.fixture(scope='module', autouse=True) -def _columns(db, _dataset, _table) -> List[DatasetTableColumn]: - with db.scoped_session() as session: - cols = [] - for i in range(0, 10): - c = DatasetTableColumn( - datasetUri=_dataset.datasetUri, - tableUri=_table.tableUri, - label=f'c{i+1}', - AWSAccountId=_dataset.AwsAccountId, - region=_dataset.region, - GlueTableName='table', - typeName='String', - owner='user', - GlueDatabaseName=_dataset.GlueDatabaseName, - ) - session.add(c) - cols.append(c) - yield cols - - @pytest.fixture(scope='module', autouse=True) def g1(client, group): r = client.query( @@ -300,57 +251,6 @@ def test_get_term(client, t1): assert r.data.getTerm.readme == t1.readme -def test_dataset_term_link_approval(db, client, t1, _dataset, user, group): - response = client.query( - """ - mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ - updateDataset(datasetUri:$datasetUri,input:$input){ - datasetUri - label - tags - } - } - """, - username='alice', - groups=[group.name], - datasetUri=_dataset.datasetUri, - input={'terms': 
[t1.nodeUri]}, - ) - with db.scoped_session() as session: - link: models.TermLink = ( - session.query(models.TermLink) - .filter(models.TermLink.nodeUri == t1.nodeUri) - .first() - ) - r = client.query( - """ - mutation ApproveTermAssociation($linkUri:String!){ - approveTermAssociation(linkUri:$linkUri) - } - """, - linkUri=link.linkUri, - username='alice', - groups=[group.name], - ) - assert r - link: models.TermLink = session.query(models.TermLink).get(link.linkUri) - assert link.approvedBySteward - - r = client.query( - """ - mutation DismissTermAssociation($linkUri:String!){ - dismissTermAssociation(linkUri:$linkUri) - } - """, - linkUri=link.linkUri, - username='alice', - groups=[group.name], - ) - assert r - link: models.TermLink = session.query(models.TermLink).get(link.linkUri) - assert not link.approvedBySteward - - def test_glossary_categories(client, g1, c1): r = client.query( """ @@ -497,54 +397,6 @@ def test_delete_subcategory(client, subcategory, group): print(r) -def test_link_term(client, t1, _columns, group): - col = _columns[0] - r = client.query( - """ - mutation LinkTerm( - $nodeUri:String!, - $targetUri:String!, - $targetType:String!, - ){ - linkTerm( - nodeUri:$nodeUri, - targetUri:$targetUri, - targetType:$targetType - ){ - linkUri - } - } - """, - nodeUri=t1.nodeUri, - targetUri=col.columnUri, - targetType='Column', - username='alice', - groups=[group.name], - ) - linkUri = r.data.linkTerm.linkUri - - r = client.query( - """ - query GetGlossaryTermLink($linkUri:String!){ - getGlossaryTermLink(linkUri:$linkUri){ - linkUri - created - target{ - __typename - ... 
on DatasetTableColumn{ - label - columnUri - } - } - } - } - """, - linkUri=linkUri, - username='alice', - ) - print(r) - - def test_get_term_associations(t1, db, client): r = client.query( """ diff --git a/tests/modules/datasets/test_dataset_glossary.py b/tests/modules/datasets/test_dataset_glossary.py new file mode 100644 index 000000000..07cdeeb4f --- /dev/null +++ b/tests/modules/datasets/test_dataset_glossary.py @@ -0,0 +1,164 @@ +import pytest +from typing import List + +from dataall.db.models import Environment, Organization +from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetTable, Dataset +from tests.api.test_glossary import * + + +@pytest.fixture(scope='module') +def _org(db, org, tenant, user, group) -> Organization: + org = org('testorg', user.userName, group.name) + yield org + + +@pytest.fixture(scope='module') +def _env(db, _org: Organization, user, group, env) -> Environment: + env1 = env(_org, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') + yield env1 + + +@pytest.fixture(scope='module', autouse=True) +def _dataset(db, _env, _org, group, user, dataset) -> Dataset: + with db.scoped_session() as session: + yield dataset( + org=_org, env=_env, name='dataset1', owner=user.userName, group=group.name + ) + + +@pytest.fixture(scope='module', autouse=True) +def _table(db, _dataset) -> DatasetTable: + with db.scoped_session() as session: + t = DatasetTable( + datasetUri=_dataset.datasetUri, + label='table', + AWSAccountId=_dataset.AwsAccountId, + region=_dataset.region, + S3BucketName=_dataset.S3BucketName, + S3Prefix='/raw', + GlueTableName='table', + owner='alice', + GlueDatabaseName=_dataset.GlueDatabaseName, + ) + session.add(t) + yield t + + +@pytest.fixture(scope='module', autouse=True) +def _columns(db, _dataset, _table) -> List[DatasetTableColumn]: + with db.scoped_session() as session: + cols = [] + for i in range(0, 10): + c = DatasetTableColumn( + datasetUri=_dataset.datasetUri, + 
tableUri=_table.tableUri, + label=f'c{i+1}', + AWSAccountId=_dataset.AwsAccountId, + region=_dataset.region, + GlueTableName='table', + typeName='String', + owner='user', + GlueDatabaseName=_dataset.GlueDatabaseName, + ) + session.add(c) + cols.append(c) + yield cols + + +def test_dataset_link_term(client, t1, _columns, group): + col = _columns[0] + r = client.query( + """ + mutation LinkTerm( + $nodeUri:String!, + $targetUri:String!, + $targetType:String!, + ){ + linkTerm( + nodeUri:$nodeUri, + targetUri:$targetUri, + targetType:$targetType + ){ + linkUri + } + } + """, + nodeUri=t1.nodeUri, + targetUri=col.columnUri, + targetType='Column', + username='alice', + groups=[group.name], + ) + link_uri = r.data.linkTerm.linkUri + + r = client.query( + """ + query GetGlossaryTermLink($linkUri:String!){ + getGlossaryTermLink(linkUri:$linkUri){ + linkUri + created + target{ + __typename + ... on DatasetTableColumn{ + label + columnUri + } + } + } + } + """, + linkUri=link_uri, + username='alice', + ) + print(r) + + +def test_dataset_term_link_approval(db, client, t1, _dataset, user, group): + response = client.query( + """ + mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ + updateDataset(datasetUri:$datasetUri,input:$input){ + datasetUri + label + tags + } + } + """, + username='alice', + groups=[group.name], + datasetUri=_dataset.datasetUri, + input={'terms': [t1.nodeUri]}, + ) + with db.scoped_session() as session: + link: models.TermLink = ( + session.query(models.TermLink) + .filter(models.TermLink.nodeUri == t1.nodeUri) + .first() + ) + r = client.query( + """ + mutation ApproveTermAssociation($linkUri:String!){ + approveTermAssociation(linkUri:$linkUri) + } + """, + linkUri=link.linkUri, + username='alice', + groups=[group.name], + ) + assert r + link: models.TermLink = session.query(models.TermLink).get(link.linkUri) + assert link.approvedBySteward + + r = client.query( + """ + mutation DismissTermAssociation($linkUri:String!){ + 
dismissTermAssociation(linkUri:$linkUri) + } + """, + linkUri=link.linkUri, + username='alice', + groups=[group.name], + ) + assert r + link: models.TermLink = session.query(models.TermLink).get(link.linkUri) + assert not link.approvedBySteward From 71d1ccbd1bcdae479423fd53f463d0894bd3d338 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 16 May 2023 18:17:32 +0200 Subject: [PATCH 189/346] Moving cdk stack tests --- tests/api/conftest.py | 7 -- tests/api/test_redshift_cluster.py | 8 ++ tests/api/test_tenant.py | 5 +- tests/cdkproxy/conftest.py | 82 ----------------- tests/cdkproxy/test_dataset_stack.py | 54 ------------ tests/modules/datasets/conftest.py | 7 ++ tests/modules/datasets/test_dataset.py | 7 ++ tests/modules/datasets/test_dataset_stack.py | 88 +++++++++++++++++-- .../test_environment_stack_with_dataset.py} | 38 ++++++++ 9 files changed, 145 insertions(+), 151 deletions(-) delete mode 100644 tests/cdkproxy/test_dataset_stack.py rename tests/{cdkproxy/test_environment_stack.py => modules/datasets/test_environment_stack_with_dataset.py} (61%) diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 791357e66..a71099f54 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -20,13 +20,6 @@ def patch_check_env(module_mocker): ) -@pytest.fixture(scope='module', autouse=True) -def patch_check_dataset(module_mocker): - module_mocker.patch( - 'dataall.modules.datasets.services.dataset_service.DatasetService.check_dataset_account', return_value=True - ) - - @pytest.fixture(scope='module', autouse=True) def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py index 502c2ef02..1fe8c7d08 100644 --- a/tests/api/test_redshift_cluster.py +++ b/tests/api/test_redshift_cluster.py @@ -10,6 +10,14 @@ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from 
dataall.modules.datasets_base.db.models import Dataset + +@pytest.fixture(scope='module', autouse=True) +def patch_check_dataset(module_mocker): + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService.check_dataset_account', return_value=True + ) + + @pytest.fixture(scope='module', autouse=True) def org1(org, user, group, tenant): org1 = org('testorg', user.userName, group.name) diff --git a/tests/api/test_tenant.py b/tests/api/test_tenant.py index 443110b8a..217c620eb 100644 --- a/tests/api/test_tenant.py +++ b/tests/api/test_tenant.py @@ -1,5 +1,4 @@ from dataall.db import permissions -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS def test_list_tenant_permissions(client, user, group, tenant): @@ -61,7 +60,7 @@ def test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_GROUPS], ), groups=[group.name, 'DAAdministrators'], ) @@ -93,7 +92,7 @@ def test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_GROUPS], ), groups=[group.name, 'DAAdministrators'], ) diff --git a/tests/cdkproxy/conftest.py b/tests/cdkproxy/conftest.py index fa0680594..aa90279bf 100644 --- a/tests/cdkproxy/conftest.py +++ b/tests/cdkproxy/conftest.py @@ -1,7 +1,6 @@ import pytest from dataall.db import models, api -from dataall.modules.datasets_base.db.models import DatasetTable, Dataset @pytest.fixture(scope='module', autouse=True) @@ -59,87 +58,6 @@ def env(db, org: models.Organization) -> models.Environment: yield env -@pytest.fixture(scope='module', autouse=True) -def another_group(db, env): - with db.scoped_session() as session: - env_group: 
models.EnvironmentGroup = models.EnvironmentGroup( - environmentUri=env.environmentUri, - groupUri='anothergroup', - environmentIAMRoleArn='aontherGroupArn', - environmentIAMRoleName='anotherGroupRole', - environmentAthenaWorkGroup='workgroup', - ) - session.add(env_group) - dataset = Dataset( - label='thisdataset', - environmentUri=env.environmentUri, - organizationUri=env.organizationUri, - name='anotherdataset', - description='test', - AwsAccountId=env.AwsAccountId, - region=env.region, - S3BucketName='bucket', - GlueDatabaseName='db', - IAMDatasetAdminRoleArn='role', - IAMDatasetAdminUserArn='xxx', - KmsAlias='xxx', - owner='me', - confidentiality='C1', - businessOwnerEmail='jeff', - businessOwnerDelegationEmails=['andy'], - SamlAdminGroupName=env_group.groupUri, - GlueCrawlerName='dhCrawler', - ) - session.add(dataset) - yield env_group - - -@pytest.fixture(scope='module', autouse=True) -def dataset(db, env: models.Environment) -> Dataset: - with db.scoped_session() as session: - dataset = Dataset( - label='thisdataset', - environmentUri=env.environmentUri, - organizationUri=env.organizationUri, - name='thisdataset', - description='test', - AwsAccountId=env.AwsAccountId, - region=env.region, - S3BucketName='bucket', - GlueDatabaseName='db', - IAMDatasetAdminRoleArn='role', - IAMDatasetAdminUserArn='xxx', - KmsAlias='xxx', - owner='me', - confidentiality='C1', - businessOwnerEmail='jeff', - businessOwnerDelegationEmails=['andy'], - SamlAdminGroupName='admins', - GlueCrawlerName='dhCrawler', - ) - session.add(dataset) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(db, dataset: Dataset) -> DatasetTable: - with db.scoped_session() as session: - table = DatasetTable( - label='thistable', - owner='me', - datasetUri=dataset.datasetUri, - AWSAccountId=dataset.AwsAccountId, - region=dataset.region, - GlueDatabaseName=dataset.GlueDatabaseName, - S3BucketName=dataset.S3BucketName, - GlueTableName='asimpletesttable', - 
S3Prefix='/raw/asimpletesttable/', - ) - - session.add(table) - yield table - - @pytest.fixture(scope='module', autouse=True) def sgm_studio(db, env: models.Environment) -> models.SagemakerStudioUserProfile: with db.scoped_session() as session: diff --git a/tests/cdkproxy/test_dataset_stack.py b/tests/cdkproxy/test_dataset_stack.py deleted file mode 100644 index 173d81ef4..000000000 --- a/tests/cdkproxy/test_dataset_stack.py +++ /dev/null @@ -1,54 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.modules.datasets.cdk.dataset_stack import DatasetStack - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, dataset, env, org): - mocker.patch('dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_engine', return_value=db) - mocker.patch( - 'dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_target', return_value=dataset - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.aws.handlers.lakeformation.LakeFormation.check_existing_lf_registered_location', - return_value=False, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=dataset, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(dataset): - app = App() - DatasetStack(app, 'Dataset', target_uri=dataset.datasetUri) - return json.dumps(app.synth().get_stack_by_name('Dataset').template) - - -def test_resources_created(template): - assert 'AWS::S3::Bucket' in template - assert 'AWS::KMS::Key' in template - assert 'AWS::IAM::Role' in template - assert 
'AWS::IAM::Policy' in template - assert 'AWS::S3::BucketPolicy' in template - assert 'AWS::Glue::Job' in template diff --git a/tests/modules/datasets/conftest.py b/tests/modules/datasets/conftest.py index af895ccd3..8c5181297 100644 --- a/tests/modules/datasets/conftest.py +++ b/tests/modules/datasets/conftest.py @@ -13,6 +13,13 @@ from dataall.modules.datasets import Dataset, DatasetTable, DatasetStorageLocation +@pytest.fixture(scope='module', autouse=True) +def patch_check_dataset(module_mocker): + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService.check_dataset_account', return_value=True + ) + + @pytest.fixture(scope='module', autouse=True) def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) diff --git a/tests/modules/datasets/test_dataset.py b/tests/modules/datasets/test_dataset.py index 1724fc0a5..41380b3ee 100644 --- a/tests/modules/datasets/test_dataset.py +++ b/tests/modules/datasets/test_dataset.py @@ -6,6 +6,7 @@ import dataall from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset +from tests.api.test_stack import update_stack_query @pytest.fixture(scope='module', autouse=True) @@ -533,3 +534,9 @@ def test_stewardship(client, dataset, env1, org1, db, group2, group, user, patch }, ) assert response.data.createDataset.stewards == group2.name + + +def test_dataset_stack(client, dataset_fixture, group): + dataset = dataset_fixture + response = update_stack_query(client, dataset.datasetUri, 'dataset', dataset.SamlAdminGroupName) + assert response.data.updateStack.targetUri == dataset.datasetUri diff --git a/tests/modules/datasets/test_dataset_stack.py b/tests/modules/datasets/test_dataset_stack.py index 81860e17b..48484f3d3 100644 --- a/tests/modules/datasets/test_dataset_stack.py +++ b/tests/modules/datasets/test_dataset_stack.py @@ -1,7 +1,85 @@ -from 
tests.api.test_stack import update_stack_query +import json +import pytest +from aws_cdk import App -def test_notebook_stack(client, dataset_fixture, group): - dataset = dataset_fixture - response = update_stack_query(client, dataset.datasetUri, 'dataset', dataset.SamlAdminGroupName) - assert response.data.updateStack.targetUri == dataset.datasetUri +from dataall.db.models import Environment +from dataall.modules.datasets.cdk.dataset_stack import DatasetStack +from dataall.modules.datasets_base.db.models import Dataset + +from tests.cdkproxy.conftest import * + + +@pytest.fixture(scope='module', autouse=True) +def dataset(db, env: Environment) -> Dataset: + with db.scoped_session() as session: + dataset = Dataset( + label='thisdataset', + environmentUri=env.environmentUri, + organizationUri=env.organizationUri, + name='thisdataset', + description='test', + AwsAccountId=env.AwsAccountId, + region=env.region, + S3BucketName='bucket', + GlueDatabaseName='db', + IAMDatasetAdminRoleArn='role', + IAMDatasetAdminUserArn='xxx', + KmsAlias='xxx', + owner='me', + confidentiality='C1', + businessOwnerEmail='jeff', + businessOwnerDelegationEmails=['andy'], + SamlAdminGroupName='admins', + GlueCrawlerName='dhCrawler', + ) + session.add(dataset) + yield dataset + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker, db, dataset, env, org): + mocker.patch('dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_engine', return_value=db) + mocker.patch( + 'dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_target', return_value=dataset + ) + mocker.patch( + 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', + return_value="dataall-pivot-role-name-pytest", + ) + mocker.patch( + 'dataall.aws.handlers.lakeformation.LakeFormation.check_existing_lf_registered_location', + return_value=False, + ) + mocker.patch( + 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', + return_value=dataset, + ) + mocker.patch( + 
'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', + return_value=db, + ) + mocker.patch( + 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', + return_value=env, + ) + mocker.patch( + 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', + return_value=org, + ) + + +@pytest.fixture(scope='function', autouse=True) +def template(dataset): + app = App() + DatasetStack(app, 'Dataset', target_uri=dataset.datasetUri) + return json.dumps(app.synth().get_stack_by_name('Dataset').template) + + +def test_resources_created(template): + assert 'AWS::S3::Bucket' in template + assert 'AWS::KMS::Key' in template + assert 'AWS::IAM::Role' in template + assert 'AWS::IAM::Policy' in template + assert 'AWS::S3::BucketPolicy' in template + assert 'AWS::Glue::Job' in template diff --git a/tests/cdkproxy/test_environment_stack.py b/tests/modules/datasets/test_environment_stack_with_dataset.py similarity index 61% rename from tests/cdkproxy/test_environment_stack.py rename to tests/modules/datasets/test_environment_stack_with_dataset.py index f5dceccdf..bf724b9f8 100644 --- a/tests/cdkproxy/test_environment_stack.py +++ b/tests/modules/datasets/test_environment_stack_with_dataset.py @@ -4,6 +4,9 @@ from aws_cdk import App from dataall.cdkproxy.stacks import EnvironmentSetup +from dataall.db.models import EnvironmentGroup +from dataall.modules.datasets_base.db.models import Dataset +from tests.cdkproxy.conftest import * @pytest.fixture(scope='function', autouse=True) @@ -51,6 +54,41 @@ def patch_methods(mocker, db, env, another_group, permissions): ) +@pytest.fixture(scope='module', autouse=True) +def another_group(db, env): + with db.scoped_session() as session: + env_group: EnvironmentGroup = EnvironmentGroup( + environmentUri=env.environmentUri, + groupUri='anothergroup', + environmentIAMRoleArn='aontherGroupArn', + environmentIAMRoleName='anotherGroupRole', + environmentAthenaWorkGroup='workgroup', + ) + session.add(env_group) + dataset = Dataset( + 
label='thisdataset', + environmentUri=env.environmentUri, + organizationUri=env.organizationUri, + name='anotherdataset', + description='test', + AwsAccountId=env.AwsAccountId, + region=env.region, + S3BucketName='bucket', + GlueDatabaseName='db', + IAMDatasetAdminRoleArn='role', + IAMDatasetAdminUserArn='xxx', + KmsAlias='xxx', + owner='me', + confidentiality='C1', + businessOwnerEmail='jeff', + businessOwnerDelegationEmails=['andy'], + SamlAdminGroupName=env_group.groupUri, + GlueCrawlerName='dhCrawler', + ) + session.add(dataset) + yield env_group + + @pytest.fixture(scope='function', autouse=True) def template(env): app = App() From 012a01f8d21242807fe2ebdd10d182966333ba94 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 10:45:30 +0200 Subject: [PATCH 190/346] Moving next part of dataset tests --- tests/db/test_permission.py | 179 ++---------------- tests/modules/datasets/conftest.py | 17 -- .../modules/datasets/test_dataset_glossary.py | 32 ++++ .../datasets/test_dataset_indexers.py} | 6 +- .../datasets/test_dataset_permissions.py | 179 ++++++++++++++++++ tests/searchproxy/__init__.py | 0 tests/utils/factories/__init__.py | 3 - 7 files changed, 228 insertions(+), 188 deletions(-) rename tests/{searchproxy/test_indexers.py => modules/datasets/test_dataset_indexers.py} (97%) create mode 100644 tests/modules/datasets/test_dataset_permissions.py delete mode 100644 tests/searchproxy/__init__.py delete mode 100644 tests/utils/factories/__init__.py diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py index 177f8fe2d..ce6a75461 100644 --- a/tests/db/test_permission.py +++ b/tests/db/test_permission.py @@ -2,23 +2,16 @@ import dataall from dataall.api.constants import OrganisationUserRole -from dataall.core.context import set_context, RequestContext from dataall.db import exceptions +from dataall.db.api import TenantPolicy, Tenant from dataall.db.models.Permission import PermissionType -from 
dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.services.dataset_service import DatasetService -from dataall.modules.datasets.services.dataset_permissions import MANAGE_DATASETS, UPDATE_DATASET, DATASET_READ, DATASET_WRITE -from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ +from dataall.db.permissions import MANAGE_GROUPS, ENVIRONMENT_ALL, ORGANIZATION_ALL -@pytest.fixture(scope='module') -def permissions(db): + +def permissions(db, all_perms): with db.scoped_session() as session: permissions = [] - for p in ( - DATASET_READ + DATASET_WRITE + DATASET_TABLE_READ - + dataall.db.permissions.ORGANIZATION_ALL - + dataall.db.permissions.ENVIRONMENT_ALL - ): + for p in all_perms: permissions.append( dataall.db.api.Permission.save_permission( session, @@ -37,13 +30,12 @@ def permissions(db): ) ) session.commit() - yield permissions @pytest.fixture(scope='module') def tenant(db): with db.scoped_session() as session: - tenant = dataall.db.api.Tenant.save_tenant( + tenant = Tenant.save_tenant( session, name='dataall', description='Tenant dataall' ) yield tenant @@ -67,17 +59,6 @@ def group(db, user): yield group -@pytest.fixture(scope='module') -def group_user(db, group, user): - with db.scoped_session() as session: - member = dataall.db.models.GroupMember( - userName=user.userName, - groupUri=group.groupUri, - ) - session.add(member) - yield member - - @pytest.fixture(scope='module', autouse=True) def org(db, group): with db.scoped_session() as session: @@ -114,164 +95,32 @@ def env(org, db, group): yield env -@pytest.fixture(scope='module', autouse=True) -def patch_methods(module_mocker): - module_mocker.patch( - 'dataall.modules.datasets.services.dataset_service.DatasetService._deploy_dataset_stack', - return_value=True - ) - - -@pytest.fixture(scope='module', autouse=True) -def dataset(org, env, db, group): +def test_attach_tenant_policy(db, user, group, tenant): + permissions(db, ORGANIZATION_ALL + 
ENVIRONMENT_ALL) with db.scoped_session() as session: - dataset = Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName=group.name, - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield dataset - - -def test_attach_resource_policy(db, user, group, group_user, dataset, permissions): - with db.scoped_session() as session: - - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=group.name, - permissions=DATASET_WRITE, - resource_uri=dataset.datasetUri, - resource_type=Dataset.__name__, - ) - assert dataall.db.api.ResourcePolicy.check_user_resource_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name=UPDATE_DATASET, - resource_uri=dataset.datasetUri, - ) - - -def test_attach_tenant_policy( - db, user, group, group_user, dataset, permissions, tenant -): - with db.scoped_session() as session: - - dataall.db.api.TenantPolicy.attach_group_tenant_policy( + TenantPolicy.attach_group_tenant_policy( session=session, group=group.name, - permissions=[MANAGE_DATASETS], + permissions=[MANAGE_GROUPS], tenant_name='dataall', ) - assert dataall.db.api.TenantPolicy.check_user_tenant_permission( + assert TenantPolicy.check_user_tenant_permission( session=session, username=user.userName, groups=[group.name], - permission_name=MANAGE_DATASETS, + permission_name=MANAGE_GROUPS, tenant_name='dataall', ) -def test_unauthorized_resource_policy( - db, user, group_user, group, dataset, permissions -): - with pytest.raises(exceptions.ResourceUnauthorized): - with 
db.scoped_session() as session: - assert dataall.db.api.ResourcePolicy.check_user_resource_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name='UNKNOW_PERMISSION', - resource_uri=dataset.datasetUri, - ) - - -def test_unauthorized_tenant_policy( - db, user, group, group_user, dataset, permissions, tenant -): +def test_unauthorized_tenant_policy(db, user, group): with pytest.raises(exceptions.TenantUnauthorized): with db.scoped_session() as session: - assert dataall.db.api.TenantPolicy.check_user_tenant_permission( + assert TenantPolicy.check_user_tenant_permission( session=session, username=user.userName, groups=[group.name], permission_name='UNKNOW_PERMISSION', tenant_name='dataall', ) - - -def test_create_dataset(db, env, user, group, group_user, dataset, permissions, tenant): - with db.scoped_session() as session: - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - org_with_perm = dataall.db.api.Organization.create_organization( - session=session, - username=user.userName, - groups=[group.name], - uri=None, - data={ - 'label': 'OrgWithPerm', - 'SamlGroupName': group.name, - 'description': 'desc', - 'tags': [], - }, - check_perm=True, - ) - env_with_perm = dataall.db.api.Environment.create_environment( - session=session, - username=user.userName, - groups=[group.name], - uri=org_with_perm.organizationUri, - data={ - 'label': 'EnvWithPerm', - 'organizationUri': org_with_perm.organizationUri, - 'SamlGroupName': group.name, - 'description': 'desc', - 'AwsAccountId': '123456789012', - 'region': 'eu-west-1', - 'cdk_role_name': 'cdkrole', - }, - check_perm=True, - ) - - data = dict( - label='label', - owner='foo', - SamlAdminGroupName=group.name, - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - 
GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - - set_context(RequestContext(db, user.userName, [group.name])) - dataset = DatasetService.create_dataset( - uri=env_with_perm.environmentUri, - admin_group=group.name, - data=data, - ) - assert dataset diff --git a/tests/modules/datasets/conftest.py b/tests/modules/datasets/conftest.py index 8c5181297..d6c5ce3ff 100644 --- a/tests/modules/datasets/conftest.py +++ b/tests/modules/datasets/conftest.py @@ -20,23 +20,6 @@ def patch_check_dataset(module_mocker): ) -@pytest.fixture(scope='module', autouse=True) -def patch_es(module_mocker): - module_mocker.patch('dataall.searchproxy.connect', return_value={}) - module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch( - 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', - return_value={} - ) - module_mocker.patch('dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value={}) - module_mocker.patch('dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert', return_value={}) - module_mocker.patch( - 'dataall.modules.datasets.indexers.location_indexer.DatasetLocationIndexer.upsert', - return_value={} - ) - module_mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer.delete_doc', return_value={}) - - @pytest.fixture(scope='module', autouse=True) def dataset(client, patch_es): cache = {} diff --git a/tests/modules/datasets/test_dataset_glossary.py b/tests/modules/datasets/test_dataset_glossary.py index 07cdeeb4f..7ae12299e 100644 --- a/tests/modules/datasets/test_dataset_glossary.py +++ b/tests/modules/datasets/test_dataset_glossary.py @@ -162,3 +162,35 @@ def test_dataset_term_link_approval(db, client, t1, _dataset, user, group): assert r link: models.TermLink = 
session.query(models.TermLink).get(link.linkUri) assert not link.approvedBySteward + + +def test_get_column_term_associations(t1, db, client): + r = client.query( + """ + query GetTerm($nodeUri:String!){ + getTerm(nodeUri:$nodeUri){ + nodeUri + label + readme + associations{ + count + nodes{ + linkUri + target{ + ... on DatasetTableColumn{ + label + columnUri + } + } + } + } + } + + } + """, + nodeUri=t1.nodeUri, + username='alice', + ) + assert r.data.getTerm.nodeUri == t1.nodeUri + assert r.data.getTerm.label == t1.label + assert r.data.getTerm.readme == t1.readme diff --git a/tests/searchproxy/test_indexers.py b/tests/modules/datasets/test_dataset_indexers.py similarity index 97% rename from tests/searchproxy/test_indexers.py rename to tests/modules/datasets/test_dataset_indexers.py index 006060e24..4dc979681 100644 --- a/tests/searchproxy/test_indexers.py +++ b/tests/modules/datasets/test_dataset_indexers.py @@ -1,6 +1,6 @@ import pytest -import dataall +from dataall.db.models import Organization, Environment from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset @@ -10,7 +10,7 @@ @pytest.fixture(scope='module', autouse=True) def org(db): with db.scoped_session() as session: - org = dataall.db.models.Organization( + org = Organization( label='org', owner='alice', tags=[], @@ -25,7 +25,7 @@ def org(db): @pytest.fixture(scope='module', autouse=True) def env(org, db): with db.scoped_session() as session: - env = dataall.db.models.Environment( + env = Environment( organizationUri=org.organizationUri, AwsAccountId='12345678901', region='eu-west-1', diff --git a/tests/modules/datasets/test_dataset_permissions.py b/tests/modules/datasets/test_dataset_permissions.py new file mode 100644 index 000000000..015f0fd4a --- /dev/null +++ 
b/tests/modules/datasets/test_dataset_permissions.py @@ -0,0 +1,179 @@ +import pytest + +from dataall.core.context import set_context, RequestContext +from dataall.db.api import ResourcePolicy, TenantPolicy, Environment, Organization, Tenant +from dataall.db.exceptions import ResourceUnauthorized +from dataall.db.models import GroupMember +from dataall.db.permissions import TENANT_ALL +from dataall.modules.datasets.services.dataset_permissions import DATASET_WRITE, UPDATE_DATASET, MANAGE_DATASETS, \ + DATASET_READ +from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets_base.db.models import Dataset +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + +from tests.db.test_permission import * + + +@pytest.fixture(scope='module', autouse=True) +def patch_methods(module_mocker): + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService._deploy_dataset_stack', + return_value=True + ) + + +@pytest.fixture(scope='module') +def tenant(db): + with db.scoped_session() as session: + tenant = Tenant.save_tenant( + session, name='dataall', description='Tenant dataall' + ) + yield tenant + + +@pytest.fixture(scope='module') +def group_user(db, group, user): + with db.scoped_session() as session: + member = GroupMember(userName=user.userName, groupUri=group.groupUri) + session.add(member) + yield member + + +@pytest.fixture(scope='module', autouse=True) +def dataset(org, env, db, group): + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=org.organizationUri, + environmentUri=env.environmentUri, + label='label', + owner='foo', + SamlAdminGroupName=group.name, + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + 
IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + ) + session.add(dataset) + yield dataset + + +def test_attach_resource_policy(db, user, group, group_user, dataset): + permissions(db, ENVIRONMENT_ALL + ORGANIZATION_ALL + DATASET_READ + DATASET_WRITE + DATASET_TABLE_READ) + with db.scoped_session() as session: + ResourcePolicy.attach_resource_policy( + session=session, + group=group.name, + permissions=DATASET_WRITE, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + assert ResourcePolicy.check_user_resource_permission( + session=session, + username=user.userName, + groups=[group.name], + permission_name=UPDATE_DATASET, + resource_uri=dataset.datasetUri, + ) + + +def test_attach_tenant_policy( + db, user, group, group_user, dataset, permissions, tenant +): + with db.scoped_session() as session: + TenantPolicy.attach_group_tenant_policy( + session=session, + group=group.name, + permissions=[MANAGE_DATASETS], + tenant_name='dataall', + ) + + assert TenantPolicy.check_user_tenant_permission( + session=session, + username=user.userName, + groups=[group.name], + permission_name=MANAGE_DATASETS, + tenant_name='dataall', + ) + + +def test_unauthorized_resource_policy( + db, user, group_user, group, dataset, permissions +): + with pytest.raises(ResourceUnauthorized): + with db.scoped_session() as session: + assert ResourcePolicy.check_user_resource_permission( + session=session, + username=user.userName, + groups=[group.name], + permission_name='UNKNOWN_PERMISSION', + resource_uri=dataset.datasetUri, + ) + + +def test_create_dataset(db, env, user, group, group_user, dataset, permissions, tenant): + with db.scoped_session() as session: + TenantPolicy.attach_group_tenant_policy( + session=session, + group=group.name, + permissions=TENANT_ALL, + tenant_name='dataall', + ) + org_with_perm = Organization.create_organization( + session=session, + username=user.userName, 
+ groups=[group.name], + uri=None, + data={ + 'label': 'OrgWithPerm', + 'SamlGroupName': group.name, + 'description': 'desc', + 'tags': [], + }, + check_perm=True, + ) + env_with_perm = Environment.create_environment( + session=session, + username=user.userName, + groups=[group.name], + uri=org_with_perm.organizationUri, + data={ + 'label': 'EnvWithPerm', + 'organizationUri': org_with_perm.organizationUri, + 'SamlGroupName': group.name, + 'description': 'desc', + 'AwsAccountId': '123456789012', + 'region': 'eu-west-1', + 'cdk_role_name': 'cdkrole', + }, + check_perm=True, + ) + + data = dict( + label='label', + owner='foo', + SamlAdminGroupName=group.name, + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + ) + + set_context(RequestContext(db, user.userName, [group.name])) + dataset = DatasetService.create_dataset( + uri=env_with_perm.environmentUri, + admin_group=group.name, + data=data, + ) + assert dataset diff --git a/tests/searchproxy/__init__.py b/tests/searchproxy/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/utils/factories/__init__.py b/tests/utils/factories/__init__.py deleted file mode 100644 index ff6dabec2..000000000 --- a/tests/utils/factories/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .org import org -from .env import env -from .dataset import dataset From 929861ca1bc8c528fa6dd144eebf5f054a98387f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 11:09:23 +0200 Subject: [PATCH 191/346] Extracted LakeFormationDatasetClient --- backend/dataall/aws/handlers/lakeformation.py | 22 -------- backend/dataall/aws/handlers/quicksight.py | 53 ------------------- 
.../modules/datasets/aws/lf_dataset_client.py | 41 ++++++++++++++ .../modules/datasets/cdk/dataset_stack.py | 7 +-- tests/api/test_glossary.py | 32 ----------- tests/modules/datasets/test_dataset_stack.py | 7 ++- 6 files changed, 48 insertions(+), 114 deletions(-) create mode 100644 backend/dataall/modules/datasets/aws/lf_dataset_client.py diff --git a/backend/dataall/aws/handlers/lakeformation.py b/backend/dataall/aws/handlers/lakeformation.py index e1939c536..1746bdad3 100644 --- a/backend/dataall/aws/handlers/lakeformation.py +++ b/backend/dataall/aws/handlers/lakeformation.py @@ -6,34 +6,12 @@ from .sts import SessionHelper log = logging.getLogger('aws:lakeformation') -PIVOT_ROLE_NAME_PREFIX = "datallPivotRole" class LakeFormation: def __init__(self): pass - @staticmethod - def check_existing_lf_registered_location(resource_arn, accountid, region): - """ - Checks if there is a non-dataall-created registered location for the Dataset - Returns False is already existing location else return the resource info - """ - try: - session = SessionHelper.remote_session(accountid) - lf_client = session.client('lakeformation', region_name=region) - response = lf_client.describe_resource(ResourceArn=resource_arn) - registered_role_name = response['ResourceInfo']['RoleArn'].lstrip(f"arn:aws:iam::{accountid}:role/") - log.info(f'LF data location already registered: {response}, registered with role {registered_role_name}') - if registered_role_name.startswith(PIVOT_ROLE_NAME_PREFIX): - log.info('The existing data location was created as part of the dataset stack. 
There was no pre-existing data location.') - return False - return response['ResourceInfo'] - - except ClientError as e: - log.info(f'LF data location for resource {resource_arn} not found due to {e}') - return False - @staticmethod def grant_pivot_role_all_database_permissions(accountid, region, database): LakeFormation.grant_permissions_to_database( diff --git a/backend/dataall/aws/handlers/quicksight.py b/backend/dataall/aws/handlers/quicksight.py index 749c53829..886482f3f 100644 --- a/backend/dataall/aws/handlers/quicksight.py +++ b/backend/dataall/aws/handlers/quicksight.py @@ -452,59 +452,6 @@ def create_data_source_vpc(AwsAccountId, region, UserName, vpcConnectionId): return "dataall-metadata-db" - @staticmethod - def create_data_set_from_source(AwsAccountId, region, UserName, dataSourceId, tablesToImport): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - data_source = client.describe_data_source( - AwsAccountId=AwsAccountId, - DataSourceId=dataSourceId - ) - - if not data_source: - return False - - for table in tablesToImport: - - response = client.create_data_set( - AwsAccountId=AwsAccountId, - DataSetId=f"dataall-imported-{table}", - Name=f"dataall-imported-{table}", - PhysicalTableMap={ - 'string': { - 'RelationalTable': { - 'DataSourceArn': data_source.get('DataSource').get('Arn'), - 'Catalog': 'string', - 'Schema': 'dev', - 'Name': table, - 'InputColumns': [ - { - 'Name': 'string', - 'Type': 'STRING' - }, - ] - } - }}, - ImportMode='DIRECT_QUERY', - Permissions=[ - { - 'Principal': user.get('Arn'), - 'Actions': [ - "quicksight:DescribeDataSet", - "quicksight:DescribeDataSetPermissions", - "quicksight:PassDataSet", - "quicksight:DescribeIngestion", - "quicksight:ListIngestions" - ] - }, - ], - ) - - return True - @staticmethod def create_analysis(AwsAccountId, region, UserName): client = Quicksight.get_quicksight_client(AwsAccountId, region) 
diff --git a/backend/dataall/modules/datasets/aws/lf_dataset_client.py b/backend/dataall/modules/datasets/aws/lf_dataset_client.py new file mode 100644 index 000000000..d89a099b8 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/lf_dataset_client.py @@ -0,0 +1,41 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper +from dataall.db.models import Environment +from dataall.modules.datasets_base.db.models import Dataset + +log = logging.getLogger(__name__) +PIVOT_ROLE_NAME_PREFIX = "datallPivotRole" + + +class LakeFormationDatasetClient: + + def __init__(self, env: Environment, dataset: Dataset): + session = SessionHelper.remote_session(env.AwsAccountId) + self._client = session.client('lakeformation', region_name=env.region) + self._dataset = dataset + self._env = env + + def check_existing_lf_registered_location(self): + """ + Checks if there is a non-dataall-created registered location for the Dataset + Returns False is already existing location else return the resource info + """ + + resource_arn = f'arn:aws:s3:::{self._dataset.S3BucketName}' + try: + + response = self._client.describe_resource(ResourceArn=resource_arn) + registered_role_name = response['ResourceInfo']['RoleArn'].lstrip(f"arn:aws:iam::{self._env}:role/") + log.info(f'LF data location already registered: {response}, registered with role {registered_role_name}') + if registered_role_name.startswith(PIVOT_ROLE_NAME_PREFIX): + log.info( + 'The existing data location was created as part of the dataset stack. 
' + 'There was no pre-existing data location.') + return False + return response['ResourceInfo'] + + except ClientError as e: + log.info(f'LF data location for resource {resource_arn} not found due to {e}') + return False diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 73c2aedc5..3e45734e7 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -24,6 +24,7 @@ from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.db.api import Environment +from dataall.modules.datasets.aws.lf_dataset_client import LakeFormationDatasetClient from dataall.utils.cdk_nag_utils import CDKNagUtil from dataall.utils.runtime_stacks_tagging import TagsUtil from dataall.modules.datasets_base.db.models import Dataset @@ -306,11 +307,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): dataset_admin_policy.attach_to_role(dataset_admin_role) # Datalake location custom resource: registers the S3 location in LakeFormation - registered_location = LakeFormation.check_existing_lf_registered_location( - resource_arn=f'arn:aws:s3:::{dataset.S3BucketName}', - accountid=env.AwsAccountId, - region=env.region - ) + registered_location = LakeFormationDatasetClient(env, dataset).check_existing_lf_registered_location() if not registered_location: storage_location = CfnResource( diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py index 83c710c19..7505196f7 100644 --- a/tests/api/test_glossary.py +++ b/tests/api/test_glossary.py @@ -397,38 +397,6 @@ def test_delete_subcategory(client, subcategory, group): print(r) -def test_get_term_associations(t1, db, client): - r = client.query( - """ - query GetTerm($nodeUri:String!){ - getTerm(nodeUri:$nodeUri){ - nodeUri - label - readme - associations{ - count - nodes{ - linkUri - target{ - ... 
on DatasetTableColumn{ - label - columnUri - } - } - } - } - } - - } - """, - nodeUri=t1.nodeUri, - username='alice', - ) - assert r.data.getTerm.nodeUri == t1.nodeUri - assert r.data.getTerm.label == t1.label - assert r.data.getTerm.readme == t1.readme - - def test_delete_category(client, db, c1, group): now = datetime.now() r = client.query( diff --git a/tests/modules/datasets/test_dataset_stack.py b/tests/modules/datasets/test_dataset_stack.py index 48484f3d3..ded48ee91 100644 --- a/tests/modules/datasets/test_dataset_stack.py +++ b/tests/modules/datasets/test_dataset_stack.py @@ -1,4 +1,5 @@ import json +from unittest.mock import MagicMock import pytest from aws_cdk import App @@ -47,10 +48,12 @@ def patch_methods(mocker, db, dataset, env, org): 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', return_value="dataall-pivot-role-name-pytest", ) + lf_client = MagicMock() mocker.patch( - 'dataall.aws.handlers.lakeformation.LakeFormation.check_existing_lf_registered_location', - return_value=False, + 'dataall.modules.datasets.cdk.dataset_stack.LakeFormationDatasetClient', + return_value=lf_client, ) + lf_client.return_value.check_existing_lf_registered_location = False mocker.patch( 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', return_value=dataset, From 3b10f32bee42d338d04502e4db0599b38b1ebc86 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 11:13:24 +0200 Subject: [PATCH 192/346] Deleted dead code --- backend/dataall/aws/handlers/glue.py | 170 --------------------------- 1 file changed, 170 deletions(-) diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index 6c79c84a1..a8f319b85 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -51,12 +51,6 @@ def _create_glue_database(accountid, database, region, location): log.debug(f'Failed to create database {database}', e) raise e - @staticmethod - def get_database_arn(**data): - return 
'arn:aws:glue:{}:{}:database/{}'.format( - data.get('region', 'eu-west-1'), data.get('accountid'), data.get('database') - ) - @staticmethod def database_exists(**data): accountid = data['accountid'] @@ -122,89 +116,6 @@ def table_exists(**data): log.info(f'Glue table not found: {data}') return None - @staticmethod - def _create_table(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - database = data.get('database', 'UnknownDatabaseName') - - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - log.info( - 'Creating table {} in database {}'.format( - data['tablename'], data['database'] - ) - ) - if not Glue.database_exists( - database=database, region=region, accountid=accountid - ): - Glue.create_database(accountid, database, region, None) - if 'table_input' not in data: - table_input = { - 'Name': data['tablename'], - 'Description': data.get('Description', 'Not available'), - 'Parameters': {'classification': 'csv', 'skip.header.line.count': '1'}, - 'StorageDescriptor': { - 'Columns': [ - {'Name': c['Name'], 'Type': c['Type']} - for c in data.get('columns') - ], - 'Location': data.get('location'), - 'InputFormat': 'org.apache.hadoop.mapred.TextInputFormat', - 'OutputFormat': 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat', - 'SerdeInfo': { - 'SerializationLibrary': 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe', - 'Parameters': { - 'serialization.format': ',', - 'field.delim': ',', - 'escape.delim': '\\', - }, - }, - }, - 'TableType': 'EXTERNAL_TABLE', - 'PartitionKeys': data.get('partition_keys') or [], - } - else: - table_input = data['table_input'] - - found_table = Glue.table_exists(**data) - - if not found_table: - response = glue.create_table( - CatalogId=accountid, - DatabaseName=data.get('database'), - TableInput=table_input, - ) - log.info(f'Successfully Created table {table_input} on account {accountid}') - return response - - else: - - if 
Glue.is_resource_link(found_table): - - log.info( - f'Table is a Resource Link {found_table} ' - f'on account {accountid} and is managed by source account' - ) - return found_table - - elif Glue.is_resource_link(table_input): - - return Glue.delete_table_and_create_resourcelink( - glue, database, accountid, table_input - ) - - else: - response = glue.update_table( - CatalogId=accountid, - DatabaseName=data.get('database'), - TableInput=table_input, - ) - log.info( - f'Successfully Updated table {found_table} on account {accountid}' - ) - return response - @staticmethod def delete_table(accountid, region, database, tablename): session = SessionHelper.remote_session(accountid=accountid) @@ -263,52 +174,6 @@ def create_resource_link(**data): ) raise e - @staticmethod - def is_resource_link(table_input: dict): - """ - Verifies if a Glue table or Glue table input contains the block "TargetTable" - if it is the case it means it is a Resource Link - to a shared table by Lake Formation cross account or from the same account - :param table_input: - :return: - """ - if 'TargetTable' in table_input.keys(): - log.info( - f"Table {table_input['Name']} is a resource link " - f"from account {table_input['TargetTable']['CatalogId']} and will not be updated" - ) - return True - return False - - @staticmethod - def delete_table_and_create_resourcelink(glue, database, accountid, table_input): - """ - When table exists before Lake Formation introduction it needs to be deleted - And transformed to a resource link - :param glue: - :param database: - :param accountid: - :param table_input: - :return: - """ - try: - glue.delete_table( - CatalogId=accountid, DatabaseName=database, Name=table_input['Name'] - ) - log.debug( - f'Successfully Deleted table {table_input} on account {accountid}' - ) - response = glue.create_table( - CatalogId=accountid, DatabaseName=database, TableInput=table_input - ) - log.info(f'Successfully Changed table to resource link {response}') - return response - 
except ClientError as e: - log.warning( - f'Failed to change table to resource link {table_input} due to: {e}' - ) - raise e - @staticmethod def delete_database(**data): accountid = data['accountid'] @@ -333,41 +198,6 @@ def delete_database(**data): ) raise e - @staticmethod - def batch_delete_tables(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - tables = data['tables'] - - if not tables: - log.info('No tables to delete exiting method...') - return - - log.info(f'Batch deleting tables: {tables}') - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - if Glue.database_exists( - accountid=accountid, - region=region, - database=database, - ): - glue.batch_delete_table( - CatalogId=accountid, DatabaseName=database, TablesToDelete=tables - ) - log.debug( - f'Batch deleted tables {len(tables)} from database {database} successfully' - ) - return True - except ClientError as e: - log.error( - f'Could not batch delete tables {tables} ' - f'in database {accountid}://{database} ' - f'due to: {e}' - ) - raise e - @staticmethod @Worker.handler(path='glue.job.runs') def get_job_runs(engine, task: models.Task): From 931c778badfb847b27380259bff3c28625404110 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 12:14:47 +0200 Subject: [PATCH 193/346] Moved lakeformation.py --- .../modules/dataset_sharing/aws/__init__.py | 0 .../aws/lakeformation_client.py} | 12 +++++----- .../share_managers/lf_share_manager.py | 22 +++++++++---------- .../modules/datasets/cdk/dataset_stack.py | 1 - tests/tasks/test_lf_share_manager.py | 22 ++++++++++--------- 5 files changed, 29 insertions(+), 28 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/aws/__init__.py rename backend/dataall/{aws/handlers/lakeformation.py => modules/dataset_sharing/aws/lakeformation_client.py} (96%) diff --git 
a/backend/dataall/modules/dataset_sharing/aws/__init__.py b/backend/dataall/modules/dataset_sharing/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/aws/handlers/lakeformation.py b/backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py similarity index 96% rename from backend/dataall/aws/handlers/lakeformation.py rename to backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py index 1746bdad3..bec615958 100644 --- a/backend/dataall/aws/handlers/lakeformation.py +++ b/backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py @@ -3,18 +3,18 @@ from botocore.exceptions import ClientError -from .sts import SessionHelper +from dataall.aws.handlers.sts import SessionHelper log = logging.getLogger('aws:lakeformation') -class LakeFormation: +class LakeFormationClient: def __init__(self): pass @staticmethod def grant_pivot_role_all_database_permissions(accountid, region, database): - LakeFormation.grant_permissions_to_database( + LakeFormationClient.grant_permissions_to_database( client=SessionHelper.remote_session(accountid=accountid).client( 'lakeformation', region_name=region ), @@ -106,7 +106,7 @@ def revoke_iamallowedgroups_super_permission_from_table( f'Revoking IAMAllowedGroups Super ' f'permission for table {database}|{table}' ) - LakeFormation.batch_revoke_permissions( + LakeFormationClient.batch_revoke_permissions( client, accountid, entries=[ @@ -164,11 +164,11 @@ def batch_revoke_permissions(client, accountid, entries): raise ClientError( error_response={ 'Error': { - 'Code': 'LakeFormation.batch_revoke_permissions', + 'Code': 'LakeFormationClient.batch_revoke_permissions', 'Message': f'Operation ended with failures: {failures}', } }, - operation_name='LakeFormation.batch_revoke_permissions', + operation_name='LakeFormationClient.batch_revoke_permissions', ) except ClientError as e: diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py 
b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 2e3b830ff..5cf6171dc 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -6,7 +6,7 @@ from botocore.exceptions import ClientError from dataall.aws.handlers.glue import Glue -from dataall.aws.handlers.lakeformation import LakeFormation +from dataall.modules.dataset_sharing.aws.lakeformation_client import LakeFormationClient from dataall.aws.handlers.quicksight import Quicksight from dataall.aws.handlers.sts import SessionHelper from dataall.aws.handlers.ram import Ram @@ -146,7 +146,7 @@ def grant_pivot_role_all_database_permissions(self) -> bool: """ Grants 'ALL' database Lake Formation permissions to data.all PivotRole """ - LakeFormation.grant_pivot_role_all_database_permissions( + LakeFormationClient.grant_pivot_role_all_database_permissions( self.source_environment.AwsAccountId, self.source_environment.region, self.dataset.GlueDatabaseName, @@ -191,11 +191,11 @@ def create_shared_database( f's3://{dataset.S3BucketName}', ) - LakeFormation.grant_pivot_role_all_database_permissions( + LakeFormationClient.grant_pivot_role_all_database_permissions( target_environment.AwsAccountId, target_environment.region, shared_db_name ) - LakeFormation.grant_permissions_to_database( + LakeFormationClient.grant_permissions_to_database( client=SessionHelper.remote_session( accountid=target_environment.AwsAccountId ).client('lakeformation', region_name=target_environment.region), @@ -258,11 +258,11 @@ def create_resource_link(cls, **data) -> dict: resource_link_input=resource_link_input, ) - LakeFormation.grant_resource_link_permission( + LakeFormationClient.grant_resource_link_permission( lakeformation_client, source, target, target_database ) - LakeFormation.grant_resource_link_permission_on_target( + 
LakeFormationClient.grant_resource_link_permission_on_target( lakeformation_client, source, target ) @@ -306,7 +306,7 @@ def revoke_table_resource_link_access(self, table: DatasetTable, principals: [st f'for principal {principal}' ) - LakeFormation.batch_revoke_permissions( + LakeFormationClient.batch_revoke_permissions( SessionHelper.remote_session(self.target_environment.AwsAccountId).client( 'lakeformation', region_name=self.target_environment.region ), @@ -359,7 +359,7 @@ def revoke_source_table_access(self, table, principals: [str]): f'on {self.source_environment.AwsAccountId}/{self.dataset.GlueDatabaseName}/{table.GlueTableName} ' f'for principals {principals}' ) - LakeFormation.revoke_source_table_access( + LakeFormationClient.revoke_source_table_access( target_accountid=self.target_environment.AwsAccountId, region=self.target_environment.region, source_database=self.dataset.GlueDatabaseName, @@ -407,7 +407,7 @@ def share_table_with_target_account(cls, **data): ) try: - LakeFormation.revoke_iamallowedgroups_super_permission_from_table( + LakeFormationClient.revoke_iamallowedgroups_super_permission_from_table( source_lf_client, source_accountid, data['source']['database'], @@ -415,7 +415,7 @@ def share_table_with_target_account(cls, **data): ) time.sleep(1) - LakeFormation.grant_permissions_to_table( + LakeFormationClient.grant_permissions_to_table( source_lf_client, target_accountid, data['source']['database'], @@ -479,7 +479,7 @@ def revoke_external_account_access_on_source_account(self) -> [dict]: 'PermissionsWithGrantOption': ['DESCRIBE', 'SELECT'], } ) - LakeFormation.batch_revoke_permissions( + LakeFormationClient.batch_revoke_permissions( client, self.source_environment.AwsAccountId, revoke_entries ) return revoke_entries diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 3e45734e7..30727154b 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ 
b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -20,7 +20,6 @@ from dataall.cdkproxy.stacks.manager import stack from dataall import db from dataall.aws.handlers.quicksight import Quicksight -from dataall.aws.handlers.lakeformation import LakeFormation from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.db.api import Environment diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 4c4e0f6b8..75cfef909 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -3,6 +3,8 @@ Remarks """ +from unittest.mock import MagicMock + import boto3 import pytest @@ -25,6 +27,8 @@ TARGET_ACCOUNT_ENV = "2" * 12 TARGET_ACCOUNT_ENV_ROLE_NAME = "dataall-ConsumersEnvironment-r71ucp4m" +LF_CLIENT = "dataall.modules.dataset_sharing.aws.lakeformation_client.LakeFormationClient" + @pytest.fixture(scope="module") def org1(org: Callable) -> models.Organization: @@ -253,7 +257,7 @@ def test_create_shared_database( return_value=True, ) lf_mock_pr = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_pivot_role_all_database_permissions", + f"{LF_CLIENT}.grant_pivot_role_all_database_permissions", return_value=True, ) mocker.patch( @@ -261,7 +265,7 @@ def test_create_shared_database( return_value=boto3.Session(), ) lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_permissions_to_database", + f"{LF_CLIENT}.grant_permissions_to_database", return_value=True, ) # When @@ -397,11 +401,11 @@ def test_create_resource_link( return_value=True, ) lf_mock_1 = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_resource_link_permission", + f"{LF_CLIENT}.grant_resource_link_permission", return_value=True, ) lf_mock_2 = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_resource_link_permission_on_target", + f"{LF_CLIENT}.grant_resource_link_permission_on_target", return_value=True, ) @@ 
-459,6 +463,7 @@ def test_create_resource_link( pass + def test_revoke_table_resource_link_access( db, processor_same_account: ProcessLFSameAccountShare, @@ -482,7 +487,7 @@ def test_revoke_table_resource_link_access( ) lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.batch_revoke_permissions", + f"{LF_CLIENT}.batch_revoke_permissions", return_value=True, ) @@ -525,7 +530,7 @@ def test_revoke_source_table_access( ) lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.revoke_source_table_access", + f"{LF_CLIENT}.revoke_source_table_access", return_value=True, ) @@ -634,10 +639,7 @@ def test_revoke_external_account_access_on_source_account( table2: DatasetTable, mocker, ): - lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.batch_revoke_permissions", - return_value=True, - ) + lf_mock = mocker.patch(f"{LF_CLIENT}.batch_revoke_permissions", return_value=True) mocker.patch( "dataall.aws.handlers.sts.SessionHelper.remote_session", From e462507c1f0a3d48e595402898ef54219c677315 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 12:37:23 +0200 Subject: [PATCH 194/346] Moved a method to DatasetCrawler --- backend/dataall/aws/handlers/glue.py | 31 ------------------ .../datasets/aws/glue_dataset_client.py | 32 +++++++++++++++++++ .../datasets/handlers/glue_table_handler.py | 7 ++-- .../modules/datasets/tasks/tables_syncer.py | 6 ++-- tests/tasks/test_tables_sync.py | 10 +++--- 5 files changed, 41 insertions(+), 45 deletions(-) diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index a8f319b85..dedeb0516 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -65,37 +65,6 @@ def database_exists(**data): log.info(f'Database {database} does not exist on account {accountid}...') return False - @staticmethod - def list_glue_database_tables(accountid, database, region): - aws_session = 
SessionHelper.remote_session(accountid=accountid) - glue = aws_session.client('glue', region_name=region) - found_tables = [] - try: - log.debug(f'Looking for {database} tables') - - if not Glue.database_exists( - accountid=accountid, database=database, region=region - ): - return found_tables - - paginator = glue.get_paginator('get_tables') - - pages = paginator.paginate( - DatabaseName=database, - CatalogId=accountid, - ) - for page in pages: - found_tables.extend(page['TableList']) - - log.debug(f'Retrieved all database {database} tables: {found_tables}') - - except ClientError as e: - log.error( - f'Failed to retrieve tables for database {accountid}|{database}: {e}', - exc_info=True, - ) - return found_tables - @staticmethod def table_exists(**data): accountid = data['accountid'] diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py index 310acd2b6..2657d6a45 100644 --- a/backend/dataall/modules/datasets/aws/glue_dataset_client.py +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -1,6 +1,7 @@ import logging from botocore.exceptions import ClientError +from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.sts import SessionHelper from dataall.modules.datasets_base.db.models import Dataset @@ -65,4 +66,35 @@ def update_crawler(self, targets): else: raise e + def list_glue_database_tables(self): + dataset = self._dataset + database = dataset.GlueDatabaseName + account_id = dataset.AwsAccountId + found_tables = [] + try: + log.debug(f'Looking for {database} tables') + + if not Glue.database_exists( + accountid=account_id, database=database, region=dataset.region + ): + return found_tables + + paginator = self._client.get_paginator('get_tables') + + pages = paginator.paginate( + DatabaseName=database, + CatalogId=account_id, + ) + for page in pages: + found_tables.extend(page['TableList']) + + log.debug(f'Retrieved all database {database} tables: 
{found_tables}') + + except ClientError as e: + log.error( + f'Failed to retrieve tables for database {account_id}|{database}: {e}', + exc_info=True, + ) + return found_tables + diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py index af148cc38..c67b8423d 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_handler.py @@ -3,6 +3,7 @@ from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.service_handlers import Worker from dataall.db import models +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset @@ -20,10 +21,6 @@ def sync_existing_tables(engine, task: models.Task): dataset: Dataset = DatasetRepository.get_dataset_by_uri( session, task.targetUri ) - account_id = dataset.AwsAccountId - region = dataset.region - tables = Glue.list_glue_database_tables( - account_id, dataset.GlueDatabaseName, region - ) + tables = DatasetCrawler(dataset).list_glue_database_tables() DatasetTableService.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) return tables diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py index c8c4eb76b..55812565e 100644 --- a/backend/dataall/modules/datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -4,10 +4,10 @@ from operator import and_ from dataall import db -from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine from dataall.db import models +from dataall.modules.datasets.aws.glue_dataset_client import 
DatasetCrawler from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository @@ -56,9 +56,7 @@ def sync_tables(engine): ) else: - tables = Glue.list_glue_database_tables( - dataset.AwsAccountId, dataset.GlueDatabaseName, dataset.region - ) + tables = DatasetCrawler(dataset).list_glue_database_tables() log.info( f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py index 3206d2f4e..05ec1f09d 100644 --- a/tests/tasks/test_tables_sync.py +++ b/tests/tasks/test_tables_sync.py @@ -103,9 +103,9 @@ def permissions(db): def test_tables_sync(db, org, env, sync_dataset, table, mocker): - mocker.patch( - 'dataall.aws.handlers.glue.Glue.list_glue_database_tables', - return_value=[ + mock_crawler = MagicMock() + mocker.patch('dataall.modules.datasets.tasks.tables_syncer.DatasetCrawler', mock_crawler) + mock_crawler().list_glue_database_tables.return_value = [ { 'Name': 'new_table', 'DatabaseName': sync_dataset.GlueDatabaseName, @@ -154,8 +154,8 @@ def test_tables_sync(db, org, env, sync_dataset, table, mocker): }, ], }, - ], - ) + ] + mocker.patch( 'dataall.modules.datasets.tasks.tables_syncer.is_assumable_pivot_role', return_value=True ) From 4ad8ce78e6ab74790fe0c5621b4dcaddbf8b29c9 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Wed, 17 May 2023 13:02:19 +0200 Subject: [PATCH 195/346] Bump starlette from 0.25.0 to 0.27.0 and upgrade fastapi (#460) ### Feature or Bugfix - Bugfix ### Detail - Solve vulnerabilities found in starlette 0.25.0 ### Relates By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- backend/dataall/cdkproxy/requirements.txt | 4 ++-- backend/requirements.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/cdkproxy/requirements.txt index ccd390c61..b4630dc27 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/cdkproxy/requirements.txt @@ -5,8 +5,8 @@ boto3-stubs==1.24.85 botocore==1.27.85 cdk-nag==2.7.2 constructs==10.0.73 -starlette==0.25.0 -fastapi == 0.92.0 +starlette==0.27.0 +fastapi == 0.95.2 Flask==2.3.2 PyYAML==6.0 requests==2.27.1 diff --git a/backend/requirements.txt b/backend/requirements.txt index 09d8a7abe..ee569f2d8 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -2,7 +2,7 @@ ariadne==0.17.0 aws-xray-sdk==2.4.3 boto3==1.26.95 botocore==1.29.95 -fastapi == 0.92.0 +fastapi == 0.95.2 Flask==2.3.2 flask-cors==3.0.10 nanoid==2.0.0 @@ -14,4 +14,4 @@ PyYAML==6.0 requests==2.27.1 requests_aws4auth==1.1.1 sqlalchemy==1.3.24 -starlette==0.25.0 \ No newline at end of file +starlette==0.27.0 \ No newline at end of file From 06644dcfbb475ebc5fbbb16a8e75e70572d1095a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 14:08:09 +0200 Subject: [PATCH 196/346] Moved glue methods into sharing --- backend/dataall/aws/handlers/glue.py | 154 ------------------ .../dataset_sharing/aws/glue_client.py | 132 +++++++++++++++ .../share_managers/lf_share_manager.py | 67 +++----- .../datasets/aws/glue_dataset_client.py | 13 +- tests/tasks/test_lf_share_manager.py | 83 +++++----- 5 files changed, 206 insertions(+), 243 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/aws/glue_client.py diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py index dedeb0516..c93c76d06 100644 --- a/backend/dataall/aws/handlers/glue.py +++ b/backend/dataall/aws/handlers/glue.py @@ -13,160 +13,6 @@ class Glue: def __init__(self): pass - @staticmethod - def 
create_database(accountid, database, region, location): - try: - existing_database = Glue.database_exists( - accountid=accountid, database=database, region=region - ) - if existing_database: - glue_database_created = True - else: - Glue._create_glue_database(accountid, database, region, location) - glue_database_created = True - return glue_database_created - except ClientError as e: - log.error( - f'Failed to create database {database} on account {accountid} due to {e}' - ) - raise e - - @staticmethod - def _create_glue_database(accountid, database, region, location): - try: - aws_session = SessionHelper.remote_session(accountid=accountid) - glue = aws_session.client('glue', region_name=region) - db_input = { - 'Name': database, - 'Description': 'dataall database {} '.format(database), - 'CreateTableDefaultPermissions': [], - } - if location: - db_input['LocationUri'] = location - log.info(f'Creating Glue database with input: {db_input}') - response = glue.create_database(CatalogId=accountid, DatabaseInput=db_input) - log.info(f'response Create Database: {response}') - return response - except ClientError as e: - log.debug(f'Failed to create database {database}', e) - raise e - - @staticmethod - def database_exists(**data): - accountid = data['accountid'] - database = data.get('database', 'UnknownDatabaseName') - region = data.get('region', 'eu-west-1') - session = SessionHelper.remote_session(accountid) - try: - glue_client = session.client('glue', region_name=region) - glue_client.get_database(CatalogId=data['accountid'], Name=database) - return True - except ClientError: - log.info(f'Database {database} does not exist on account {accountid}...') - return False - - @staticmethod - def table_exists(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - database = data.get('database', 'UndefinedDatabaseName') - table_name = data.get('tablename', 'UndefinedTableName') - try: - table = ( - SessionHelper.remote_session(accountid) - 
.client('glue', region_name=region) - .get_table( - CatalogId=data['accountid'], DatabaseName=database, Name=table_name - ) - ) - log.info(f'Glue table found: {data}') - return table - except ClientError: - log.info(f'Glue table not found: {data}') - return None - - @staticmethod - def delete_table(accountid, region, database, tablename): - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=region) - log.info( - 'Deleting table {} in database {}'.format( - tablename, database - ) - ) - response = client.delete_table( - CatalogId=accountid, - DatabaseName=database, - Name=tablename - ) - - return response - - @staticmethod - def create_resource_link(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - resource_link_name = data['resource_link_name'] - resource_link_input = data['resource_link_input'] - log.info( - f'Creating ResourceLink {resource_link_name} in database {accountid}://{database}' - ) - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - resource_link = Glue.table_exists( - accountid=accountid, - region=region, - database=database, - tablename=resource_link_name, - ) - if resource_link: - log.info( - f'ResourceLink {resource_link_name} already exists in database {accountid}://{database}' - ) - else: - resource_link = glue.create_table( - CatalogId=accountid, - DatabaseName=database, - TableInput=resource_link_input, - ) - log.info( - f'Successfully created ResourceLink {resource_link_name} in database {accountid}://{database}' - ) - return resource_link - except ClientError as e: - log.error( - f'Could not create ResourceLink {resource_link_name} ' - f'in database {accountid}://{database} ' - f'due to: {e}' - ) - raise e - - @staticmethod - def delete_database(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - log.info(f'Deleting database 
{accountid}://{database} ...') - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - if Glue.database_exists( - accountid=accountid, - region=region, - database=database, - ): - glue.delete_database(CatalogId=accountid, Name=database) - return True - except ClientError as e: - log.error( - f'Could not delete database {database} ' - f'in account {accountid} ' - f'due to: {e}' - ) - raise e - @staticmethod @Worker.handler(path='glue.job.runs') def get_job_runs(engine, task: models.Task): diff --git a/backend/dataall/modules/dataset_sharing/aws/glue_client.py b/backend/dataall/modules/dataset_sharing/aws/glue_client.py new file mode 100644 index 000000000..11141c799 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/glue_client.py @@ -0,0 +1,132 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.aws.handlers.sts import SessionHelper + +log = logging.getLogger(__name__) + + +class GlueClient: + def __init__(self, account_id, region, database): + aws_session = SessionHelper.remote_session(accountid=account_id) + self._client = aws_session.client('glue', region_name=region) + self._database = database + self._account_id = account_id + + def create_database(self, location): + try: + existing_database = self.database_exists() + if existing_database: + glue_database_created = True + else: + self._create_glue_database(location) + glue_database_created = True + return glue_database_created + except ClientError as e: + log.error( + f'Failed to create database {self._database} on account {self._account_id} due to {e}' + ) + raise e + + def _create_glue_database(self, location): + database = self._database + try: + db_input = { + 'Name': database, + 'Description': 'dataall database {} '.format(database), + 'CreateTableDefaultPermissions': [], + } + if location: + db_input['LocationUri'] = location + log.info(f'Creating Glue database with input: {db_input}') + response 
= self._client.create_database(CatalogId=self._account_id, DatabaseInput=db_input) + log.info(f'response Create Database: {response}') + return response + except ClientError as e: + log.debug(f'Failed to create database {database}', e) + raise e + + def database_exists(self): + try: + self._client.get_database(CatalogId=self._account_id, Name=self._database) + return True + except ClientError: + log.info(f'Database {self._database} does not exist on account {self._account_id}...') + return False + + def table_exists(self, table_name): + try: + table = ( + self._client.get_table( + CatalogId=self._account_id, DatabaseName=self._database, Name=table_name + ) + ) + log.info(f'Glue table found: {table_name}') + return table + except ClientError: + log.info(f'Glue table not found: {table_name}') + return None + + def delete_table(self, table_name): + database = self._database + log.info( + 'Deleting table {} in database {}'.format( + table_name, database + ) + ) + response = self._client.delete_table( + CatalogId=self._account_id, + DatabaseName=database, + Name=table_name + ) + + return response + + def create_resource_link(self, resource_link_name, resource_link_input): + account_id = self._account_id + database = self._database + + log.info( + f'Creating ResourceLink {resource_link_name} in database {account_id}://{database}' + ) + try: + resource_link = self.table_exists(resource_link_name) + if resource_link: + log.info( + f'ResourceLink {resource_link_name} already exists in database {account_id}://{database}' + ) + else: + resource_link = self._client.create_table( + CatalogId=account_id, + DatabaseName=database, + TableInput=resource_link_input, + ) + log.info( + f'Successfully created ResourceLink {resource_link_name} in database {account_id}://{database}' + ) + return resource_link + except ClientError as e: + log.error( + f'Could not create ResourceLink {resource_link_name} ' + f'in database {account_id}://{database} ' + f'due to: {e}' + ) + raise e + + def 
delete_database(self): + account_id = self._account_id + database = self._database + + log.info(f'Deleting database {account_id}://{database} ...') + try: + if self.database_exists(): + self._client.delete_database(CatalogId=account_id, Name=database) + return True + except ClientError as e: + log.error( + f'Could not delete database {database} ' + f'in account {account_id} ' + f'due to: {e}' + ) + raise e diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 5cf6171dc..8321021e2 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -5,7 +5,7 @@ from botocore.exceptions import ClientError -from dataall.aws.handlers.glue import Glue +from dataall.modules.dataset_sharing.aws.glue_client import GlueClient from dataall.modules.dataset_sharing.aws.lakeformation_client import LakeFormationClient from dataall.aws.handlers.quicksight import Quicksight from dataall.aws.handlers.sts import SessionHelper @@ -128,12 +128,7 @@ def check_share_item_exists_on_glue_catalog( ------- exceptions.AWSResourceNotFound """ - if not Glue.table_exists( - accountid=self.source_environment.AwsAccountId, - region=self.source_environment.region, - database=table.GlueDatabaseName, - tablename=table.GlueTableName, - ): + if not self._create_glue_client().table_exists(table.GlueTableName): raise exceptions.AWSResourceNotFound( action='ProcessShare', message=( @@ -184,12 +179,11 @@ def create_shared_database( f'{target_environment.AwsAccountId}://{shared_db_name}' ) - database = Glue.create_database( + database = GlueClient( target_environment.AwsAccountId, shared_db_name, - target_environment.region, - f's3://{dataset.S3BucketName}', - ) + target_environment.region + ).create_database(f's3://{dataset.S3BucketName}') 
LakeFormationClient.grant_pivot_role_all_database_permissions( target_environment.AwsAccountId, target_environment.region, shared_db_name @@ -215,11 +209,7 @@ def delete_shared_database(self) -> bool: bool """ logger.info(f'Deleting shared database {self.shared_db_name}') - return Glue.delete_database( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - ) + return self._create_glue_client().delete_database() @classmethod def create_resource_link(cls, **data) -> dict: @@ -250,10 +240,8 @@ def create_resource_link(cls, **data) -> dict: } try: - resource_link = Glue.create_resource_link( - accountid=target['accountid'], - region=target['region'], - database=target_database, + glue_client = GlueClient(target['accountid'], target['region'], target_database) + resource_link = glue_client.create_resource_link( resource_link_name=source['tablename'], resource_link_input=resource_link_input, ) @@ -286,12 +274,8 @@ def revoke_table_resource_link_access(self, table: DatasetTable, principals: [st ------- True if revoke is successful """ - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + glue_client = self._create_glue_client() + if not glue_client.table_exists(table.GlueTableName): logger.info( f'Resource link could not be found ' f'on {self.target_environment.AwsAccountId}/{self.shared_db_name}/{table.GlueTableName} ' @@ -341,12 +325,8 @@ def revoke_source_table_access(self, table, principals: [str]): ------- True if revoke is successful """ - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + glue_client = self._create_glue_client() + if not glue_client.table_exists(table.GlueTableName): logger.info( f'Source table could not be 
found ' f'on {self.source_environment.AwsAccountId}/{self.dataset.GlueDatabaseName}/{table.GlueTableName} ' @@ -371,20 +351,12 @@ def revoke_source_table_access(self, table, principals: [str]): def delete_resource_link_table(self, table: DatasetTable): logger.info(f'Deleting shared table {table.GlueTableName}') + glue_client = self._create_glue_client() - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + if not glue_client.table_exists(table.GlueTableName): return True - Glue.delete_table( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName - ) + + glue_client.delete_table(table.GlueTableName) return True @classmethod @@ -555,3 +527,10 @@ def handle_revoke_failure( table, self.share, self.target_environment ) return True + + def _create_glue_client(self): + return GlueClient( + self.target_environment.AwsAccountId, + self.target_environment.region, + self.shared_db_name, + ) diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py index 2657d6a45..cf5d432b1 100644 --- a/backend/dataall/modules/datasets/aws/glue_dataset_client.py +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -1,7 +1,6 @@ import logging from botocore.exceptions import ClientError -from dataall.aws.handlers.glue import Glue from dataall.aws.handlers.sts import SessionHelper from dataall.modules.datasets_base.db.models import Dataset @@ -74,9 +73,7 @@ def list_glue_database_tables(self): try: log.debug(f'Looking for {database} tables') - if not Glue.database_exists( - accountid=account_id, database=database, region=dataset.region - ): + if not self.database_exists(): return found_tables paginator = self._client.get_paginator('get_tables') @@ -97,4 +94,12 @@ def 
list_glue_database_tables(self): ) return found_tables + def database_exists(self): + dataset = self._dataset + try: + self._client.get_database(CatalogId=dataset.AwsAccountId, Name=dataset.GlueDatabaseName) + return True + except ClientError: + log.info(f'Database {dataset.GlueDatabaseName} does not exist on account {dataset.AwsAccountId}...') + return False diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py index 75cfef909..5c1d8b067 100644 --- a/tests/tasks/test_lf_share_manager.py +++ b/tests/tasks/test_lf_share_manager.py @@ -209,6 +209,16 @@ def processor_same_account(db, dataset1, share_same_account, table1, source_envi yield processor +@pytest.fixture(scope="function") +def mock_glue_client(mocker): + mock_client = MagicMock() + mocker.patch( + "dataall.modules.dataset_sharing.services.share_managers.lf_share_manager.GlueClient", + mock_client + ) + yield mock_client + + def test_init(processor_same_account, processor_cross_account): assert processor_same_account.dataset assert processor_same_account.share @@ -251,11 +261,10 @@ def test_create_shared_database( target_environment: models.Environment, dataset1: Dataset, mocker, + mock_glue_client ): - create_db_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.create_database", - return_value=True, - ) + mock_glue_client().create_database.return_value = True + lf_mock_pr = mocker.patch( f"{LF_CLIENT}.grant_pivot_role_all_database_permissions", return_value=True, @@ -277,12 +286,12 @@ def test_create_shared_database( ) # Then - create_db_mock.assert_called_once() + mock_glue_client().create_database.assert_called_once() lf_mock_pr.assert_called_once() lf_mock.assert_called_once() # Reset mocks - create_db_mock.reset_mock() + mock_glue_client().create_database.reset_mock() lf_mock_pr.reset_mock() lf_mock.reset_mock() @@ -295,10 +304,11 @@ def test_create_shared_database( ) # Then - create_db_mock.assert_called_once() + 
mock_glue_client().create_database.assert_called_once() lf_mock_pr.assert_called_once() lf_mock.assert_called_once() + def test_check_share_item_exists_on_glue_catalog( db, processor_same_account: ProcessLFSameAccountShare, @@ -307,20 +317,19 @@ def test_check_share_item_exists_on_glue_catalog( share_item_same_account: ShareObjectItem, share_item_cross_account: ShareObjectItem, mocker, + mock_glue_client, ): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) + mock_glue_client().table_exists.return_value = True + # When processor_same_account.check_share_item_exists_on_glue_catalog( share_item=share_item_same_account, table=table1 ) # Then - glue_mock.assert_called_once() - glue_mock.reset_mock() + mock_glue_client().table_exists.assert_called_once() + mock_glue_client().table_exists.reset_mock() # When processor_cross_account.check_share_item_exists_on_glue_catalog( @@ -328,8 +337,7 @@ def test_check_share_item_exists_on_glue_catalog( table=table1 ) # Then - glue_mock.assert_called_once() - + mock_glue_client().table_exists.assert_called_once() def test_build_share_data( @@ -391,15 +399,15 @@ def test_create_resource_link( dataset1: Dataset, table1: DatasetTable, mocker, + mock_glue_client, ): sts_mock = mocker.patch( "dataall.aws.handlers.sts.SessionHelper.remote_session", return_value=boto3.Session(), ) - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.create_resource_link", - return_value=True, - ) + glue_mock = mock_glue_client().create_resource_link + glue_mock.return_value = True + lf_mock_1 = mocker.patch( f"{LF_CLIENT}.grant_resource_link_permission", return_value=True, @@ -475,11 +483,11 @@ def test_revoke_table_resource_link_access( dataset1: Dataset, table2: DatasetTable, mocker, + mock_glue_client ): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) + + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True 
mocker.patch( "dataall.aws.handlers.sts.SessionHelper.remote_session", @@ -523,11 +531,10 @@ def test_revoke_source_table_access( dataset1: Dataset, table2: DatasetTable, mocker, + mock_glue_client ): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True lf_mock = mocker.patch( f"{LF_CLIENT}.revoke_source_table_access", @@ -565,17 +572,13 @@ def test_delete_resource_link_table( target_environment: models.Environment, dataset1: Dataset, table2: DatasetTable, - mocker, + mock_glue_client ): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True, - glue_mock2 = mocker.patch( - "dataall.aws.handlers.glue.Glue.delete_table", - return_value=True, - ) + glue_mock2 = mock_glue_client().delete_table + glue_mock2.return_value = True, processor_same_account.delete_resource_link_table( @@ -607,12 +610,10 @@ def test_delete_shared_database( target_environment: models.Environment, dataset1: Dataset, table1: DatasetTable, - mocker, + mock_glue_client ): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.delete_database", - return_value=True, - ) + glue_mock = mock_glue_client().delete_database + glue_mock.return_value = True processor_same_account.delete_shared_database() # Then From fadb54f27d473fb7f951eb12cfee65e7b6ba1c0c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 14:13:51 +0200 Subject: [PATCH 197/346] Moved filter used only in sharing --- .../dataall/api/Objects/Environment/input_types.py | 12 ------------ .../modules/dataset_sharing/api/input_types.py | 11 +++++++++++ 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/backend/dataall/api/Objects/Environment/input_types.py b/backend/dataall/api/Objects/Environment/input_types.py index 88e3e293b..05f890e74 100644 --- 
a/backend/dataall/api/Objects/Environment/input_types.py +++ b/backend/dataall/api/Objects/Environment/input_types.py @@ -100,18 +100,6 @@ class EnvironmentSortField(GraphQLEnumMapper): ) -EnvironmentDataItemFilter = gql.InputType( - name='EnvironmentDataItemFilter', - arguments=[ - gql.Argument('itemTypes', gql.ArrayType(gql.String)), - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('uniqueShares', gql.Boolean) - ], -) - - InviteGroupOnEnvironmentInput = gql.InputType( name='InviteGroupOnEnvironmentInput', arguments=[ diff --git a/backend/dataall/modules/dataset_sharing/api/input_types.py b/backend/dataall/modules/dataset_sharing/api/input_types.py index a631736bf..3f1193e77 100644 --- a/backend/dataall/modules/dataset_sharing/api/input_types.py +++ b/backend/dataall/modules/dataset_sharing/api/input_types.py @@ -74,3 +74,14 @@ class ShareSortField(GraphQLEnumMapper): gql.Argument('pageSize', gql.Integer), ], ) + +EnvironmentDataItemFilter = gql.InputType( + name='EnvironmentDataItemFilter', + arguments=[ + gql.Argument('itemTypes', gql.ArrayType(gql.String)), + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + gql.Argument('uniqueShares', gql.Boolean) + ], +) From 03c7fb9340a0c5e5345f03d501f2334a802ae878 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 14:37:47 +0200 Subject: [PATCH 198/346] Solved merge conflict after the merge --- .../dataall/modules/datasets_base/db/dataset_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 8db4cac14..3b99a5f27 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -31,12 +31,12 @@ def 
get_dataset_by_uri(session, dataset_uri) -> Dataset: raise ObjectNotFound('Dataset', dataset_uri) return dataset - def count_resources(self, session, environment_uri, group_uri) -> int: + def count_resources(self, session, environment, group_uri) -> int: return ( session.query(Dataset) .filter( and_( - Dataset.environmentUri == environment_uri, + Dataset.environmentUri == environment.environmentUri, Dataset.SamlAdminGroupName == group_uri )) .count() From 8418d316539650415940028dcd3206dc35c0198c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 14:41:15 +0200 Subject: [PATCH 199/346] Found a share related exception --- backend/dataall/db/exceptions.py | 12 ------------ .../dataset_sharing/services/share_exceptions.py | 12 ++++++++++++ .../dataset_sharing/services/share_item_service.py | 3 ++- .../dataset_sharing/services/share_object_service.py | 3 ++- 4 files changed, 16 insertions(+), 14 deletions(-) create mode 100644 backend/dataall/modules/dataset_sharing/services/share_exceptions.py diff --git a/backend/dataall/db/exceptions.py b/backend/dataall/db/exceptions.py index 3453327a8..20b8c9973 100644 --- a/backend/dataall/db/exceptions.py +++ b/backend/dataall/db/exceptions.py @@ -161,18 +161,6 @@ def __str__(self): return f'{self.message}' -class ShareItemsFound(Exception): - def __init__(self, action, message): - self.action = action - self.message = f""" - An error occurred (ShareItemsFound) when calling {self.action} operation: - {message} - """ - - def __str__(self): - return f'{self.message}' - - class OrganizationResourcesFound(Exception): def __init__(self, action, message): self.action = action diff --git a/backend/dataall/modules/dataset_sharing/services/share_exceptions.py b/backend/dataall/modules/dataset_sharing/services/share_exceptions.py new file mode 100644 index 000000000..706c61ccf --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_exceptions.py @@ -0,0 +1,12 @@ + + +class 
ShareItemsFound(Exception): + def __init__(self, action, message): + self.action = action + self.message = f""" + An error occurred (ShareItemsFound) when calling {self.action} operation: + {message} + """ + + def __str__(self): + return f'{self.message}' diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py index 4d5faf18b..0cd683aac 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_item_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -5,12 +5,13 @@ from dataall.core.permission_checker import has_resource_permission from dataall.db import utils from dataall.db.api import Environment, ResourcePolicy -from dataall.db.exceptions import ObjectNotFound, ShareItemsFound, UnauthorizedOperation +from dataall.db.exceptions import ObjectNotFound, UnauthorizedOperation from dataall.db.models import Task from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ ShareItemActions from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_exceptions import ShareItemsFound from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService from dataall.modules.dataset_sharing.services.share_permissions import GET_SHARE_OBJECT, ADD_ITEM, REMOVE_ITEM, \ LIST_ENVIRONMENT_SHARED_WITH_OBJECTS diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index cdc8651bd..d96265b60 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ 
-4,12 +4,13 @@ from dataall.core.permission_checker import has_resource_permission from dataall.db import utils from dataall.db.api import ResourcePolicy, Environment -from dataall.db.exceptions import ShareItemsFound, UnauthorizedOperation +from dataall.db.exceptions import UnauthorizedOperation from dataall.db.models import Activity, PrincipalType, EnvironmentGroup, ConsumptionRole from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_exceptions import ShareItemsFound from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService from dataall.modules.dataset_sharing.services.share_permissions import REJECT_SHARE_OBJECT, APPROVE_SHARE_OBJECT, \ SUBMIT_SHARE_OBJECT, SHARE_OBJECT_APPROVER, SHARE_OBJECT_REQUESTER, CREATE_SHARE_OBJECT, DELETE_SHARE_OBJECT, \ From ee4f34cfa2db0e4704e0018e20468d3f8a19c6d8 Mon Sep 17 00:00:00 2001 From: Gezim Musliaj <102723839+gmuslia@users.noreply.github.com> Date: Wed, 17 May 2023 15:42:43 +0200 Subject: [PATCH 200/346] Fixes issue with existing cognito callbacks (#464) ### Feature or Bugfix - BugFix ### Detail - In line (https://github.com/awslabs/aws-dataall/blob/13a2fc082694600a0dacaa7e88d0d61ec950d753/deploy/configs/cognito_urls_config.py#L61) It checks for example.com where instead the right callback to check is ```https://example.com``` and that's why it doesn't get replaced during the configuration phase. ### Relates - https://github.com/awslabs/aws-dataall/issues/454 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- deploy/configs/cognito_urls_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy/configs/cognito_urls_config.py b/deploy/configs/cognito_urls_config.py index d71e8189b..52c8f9155 100644 --- a/deploy/configs/cognito_urls_config.py +++ b/deploy/configs/cognito_urls_config.py @@ -58,8 +58,8 @@ def setup_cognito( f'https://{user_guide_link}/parseauth', ] existing_callbacks = user_pool['UserPoolClient'].get('CallbackURLs', []) - if 'example.com' in existing_callbacks: - existing_callbacks.remove('example.com') + if 'https://example.com' in existing_callbacks: + existing_callbacks.remove('https://example.com') updated_callbacks = existing_callbacks + list( set(config_callbacks) - set(existing_callbacks) ) From d7b6b51713ae9fea6249c231f3cfd724960feafd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 15:50:28 +0200 Subject: [PATCH 201/346] Added delete environment listener --- .../group/services/group_resource_manager.py | 25 +++++++++++++----- backend/dataall/db/api/environment.py | 20 +++----------- .../db/share_object_repository.py | 26 +++++++++++++++++++ backend/dataall/modules/datasets/__init__.py | 4 +-- .../datasets_base/db/dataset_repository.py | 7 ++--- .../dataall/modules/worksheets/__init__.py | 4 +-- .../modules/worksheets/db/repositories.py | 7 ++--- 7 files changed, 59 insertions(+), 34 deletions(-) diff --git a/backend/dataall/core/group/services/group_resource_manager.py b/backend/dataall/core/group/services/group_resource_manager.py index a5aeaba4a..950d66a7a 100644 --- a/backend/dataall/core/group/services/group_resource_manager.py +++ b/backend/dataall/core/group/services/group_resource_manager.py @@ -2,24 +2,35 @@ from typing import List -class GroupResource(ABC): - def count_resources(self, session, environment, group_uri) -> int: +class EnvironmentResource(ABC): + @staticmethod + def count_resources(session, environment, group_uri) -> int: raise NotImplementedError() + @staticmethod + def 
delete_env(session, environment): + pass + -class GroupResourceManager: +class EnvironmentResourceManager: """ API for managing group resources """ - _resources: List[GroupResource] = [] + _resources: List[EnvironmentResource] = [] @staticmethod - def register(resource: GroupResource): - GroupResourceManager._resources.append(resource) + def register(resource: EnvironmentResource): + EnvironmentResourceManager._resources.append(resource) @staticmethod def count_group_resources(session, environment, group_uri) -> int: counter = 0 - for resource in GroupResourceManager._resources: + for resource in EnvironmentResourceManager._resources: counter += resource.count_resources(session, environment, group_uri) return counter + + @staticmethod + def delete_env(session, environment): + for resource in EnvironmentResourceManager._resources: + resource.delete_env(session, environment) + diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 961418f03..360a07d3c 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -27,9 +27,7 @@ NamingConventionService, NamingConventionPattern, ) -from dataall.core.group.services.group_resource_manager import GroupResourceManager -# TODO get rid of it -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager log = logging.getLogger(__name__) @@ -381,7 +379,7 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): .count() ) - group_env_objects_count += GroupResourceManager.count_group_resources( + group_env_objects_count += EnvironmentResourceManager.count_group_resources( session=session, environment=environment, group_uri=group @@ -979,19 +977,7 @@ def delete_environment(session, username, groups, uri, data=None, check_perm=Non session, environment.environmentUri, 'environment' ) - env_shared_with_objects = ( - 
session.query(ShareObject) - .filter(ShareObject.environmentUri == environment.environmentUri) - .all() - ) - for share in env_shared_with_objects: - ( - session.query(ShareObjectItem) - .filter(ShareObjectItem.shareUri == share.shareUri) - .delete() - ) - session.delete(share) - + EnvironmentResourceManager.delete_env(session, environment) EnvironmentParameterRepository(session).delete_params(environment.environmentUri) return session.delete(environment) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 036534efd..30c7d4972 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -3,6 +3,7 @@ from sqlalchemy import and_, or_, func, case from sqlalchemy.orm import Query +from dataall.core.group.services.group_resource_manager import EnvironmentResource from dataall.db import models, exceptions, paginate from dataall.db.models.Enums import PrincipalType from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ @@ -311,6 +312,16 @@ def get_share_item_revokable_states(): ] +class ShareEnvironmentResource(EnvironmentResource): + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return 0 + + @staticmethod + def delete_env(session, environment): + ShareObjectRepository.delete_all_share_items(session, environment.environmentUri) + + class ShareObjectRepository: @staticmethod def save_and_commit(session, share): @@ -916,6 +927,21 @@ def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [Sha ) return query.all() + @staticmethod + def delete_all_share_items(session, env_uri): + env_shared_with_objects = ( + session.query(ShareObject) + .filter(ShareObject.environmentUri == env_uri) + .all() + ) + for share in env_shared_with_objects: + ( + 
session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) + .delete() + ) + session.delete(share) + @staticmethod def paginate_shared_datasets(session, env_uri, group_uri, data): share_item_shared_states = ShareItemSM.get_share_item_shared_states() diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index e923124a7..1c70609ae 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,7 +2,7 @@ import logging from typing import List, Type -from dataall.core.group.services.group_resource_manager import GroupResourceManager +from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset @@ -67,7 +67,7 @@ def __init__(self): TargetType("dataset", GET_DATASET, UPDATE_DATASET) - GroupResourceManager.register(DatasetRepository()) + EnvironmentResourceManager.register(DatasetRepository()) log.info("API of datasets has been imported") diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 3b99a5f27..aea0bb8bc 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -11,7 +11,7 @@ from dataall.db.exceptions import ObjectNotFound from dataall.db.models.Enums import Language from dataall.modules.datasets_base.db.enums import ConfidentialityClassification -from dataall.core.group.services.group_resource_manager import GroupResource +from dataall.core.group.services.group_resource_manager import EnvironmentResource from dataall.modules.datasets_base.db.models import 
DatasetTable, Dataset from dataall.utils.naming_convention import ( NamingConventionService, @@ -21,7 +21,7 @@ logger = logging.getLogger(__name__) -class DatasetRepository(GroupResource): +class DatasetRepository(EnvironmentResource): """DAO layer for Datasets""" @staticmethod @@ -31,7 +31,8 @@ def get_dataset_by_uri(session, dataset_uri) -> Dataset: raise ObjectNotFound('Dataset', dataset_uri) return dataset - def count_resources(self, session, environment, group_uri) -> int: + @staticmethod + def count_resources(session, environment, group_uri) -> int: return ( session.query(Dataset) .filter( diff --git a/backend/dataall/modules/worksheets/__init__.py b/backend/dataall/modules/worksheets/__init__.py index 900c55b2a..9c9acdaf8 100644 --- a/backend/dataall/modules/worksheets/__init__.py +++ b/backend/dataall/modules/worksheets/__init__.py @@ -1,7 +1,7 @@ """Contains the code related to worksheets""" import logging -from dataall.core.group.services.group_resource_manager import GroupResourceManager +from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager from dataall.modules.loader import ImportMode, ModuleInterface from dataall.modules.worksheets.db.models import Worksheet from dataall.modules.worksheets.db.repositories import WorksheetRepository @@ -23,6 +23,6 @@ def __init__(self): FeedRegistry.register(FeedDefinition("Worksheet", Worksheet)) - GroupResourceManager.register(WorksheetRepository()) + EnvironmentResourceManager.register(WorksheetRepository()) log.info("API of worksheets has been imported") \ No newline at end of file diff --git a/backend/dataall/modules/worksheets/db/repositories.py b/backend/dataall/modules/worksheets/db/repositories.py index 420c06909..d8828c0cb 100644 --- a/backend/dataall/modules/worksheets/db/repositories.py +++ b/backend/dataall/modules/worksheets/db/repositories.py @@ -4,17 +4,18 @@ from sqlalchemy import or_ from sqlalchemy.orm import Query -from 
dataall.core.group.services.group_resource_manager import GroupResource +from dataall.core.group.services.group_resource_manager import EnvironmentResource from dataall.db import paginate from dataall.modules.worksheets.db.models import Worksheet, WorksheetQueryResult -class WorksheetRepository(GroupResource): +class WorksheetRepository(EnvironmentResource): """DAO layer for worksheets""" _DEFAULT_PAGE = 1 _DEFAULT_PAGE_SIZE = 10 - def count_resources(self, session, environment, group_uri) -> int: + @staticmethod + def count_resources(session, environment, group_uri) -> int: return ( session.query(WorksheetQueryResult) .filter( From 62cbb12a6d3b757151cad270375e92f9d238666e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 16:25:16 +0200 Subject: [PATCH 202/346] Added two import mode dedicated to tasks loading and refactored these tasks --- backend/dataall/modules/datasets/__init__.py | 40 +++++++++++++++-- .../datasets/indexers/catalog_indexer.py | 21 +++++++++ backend/dataall/modules/loader.py | 2 + backend/dataall/tasks/catalog_indexer.py | 35 ++++++++------- backend/dataall/tasks/stacks_updater.py | 44 ++++++++++++------- 5 files changed, 107 insertions(+), 35 deletions(-) create mode 100644 backend/dataall/modules/datasets/indexers/catalog_indexer.py diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 1c70609ae..371e76709 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,13 +2,9 @@ import logging from typing import List, Type -from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset -from 
dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer -from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer -from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_permissions import GET_DATASET, UPDATE_DATASET from dataall.modules.loader import ModuleInterface, ImportMode @@ -34,6 +30,10 @@ def __init__(self): from dataall.api.Objects.Vote.resolvers import add_vote_type from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition + from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager + from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer + from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer + from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer import dataall.modules.datasets.api @@ -109,3 +109,35 @@ def __init__(self): @staticmethod def depends_on() -> List[Type['ModuleInterface']]: return [DatasetBaseModuleInterface] + + +class DatasetStackUpdaterModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.STACK_UPDATER_TASK in modes + + def __init__(self): + from dataall.tasks.stacks_updater import StackFinder + from dataall.tasks.stacks_updater import register_stack_finder + + class DatasetStackFinder(StackFinder): + def find_stack_uris(self, session) -> List[str]: + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) + log.info(f'Found {len(all_datasets)} datasets') + return [dataset.datasetUri for dataset in all_datasets] + + register_stack_finder(DatasetStackFinder()) + + +class DatasetCatalogIndexerModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return 
ImportMode.CATALOG_INDEXER_TASK in modes + + def __init__(self): + from dataall.tasks.catalog_indexer import register_catalog_indexer + from dataall.modules.datasets.indexers.catalog_indexer import DatasetCatalogIndexer + + register_catalog_indexer(DatasetCatalogIndexer()) diff --git a/backend/dataall/modules/datasets/indexers/catalog_indexer.py b/backend/dataall/modules/datasets/indexers/catalog_indexer.py new file mode 100644 index 000000000..cf573548b --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/catalog_indexer.py @@ -0,0 +1,21 @@ +import logging + +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import Dataset +from dataall.tasks.catalog_indexer import CatalogIndexer + +log = logging.getLogger(__name__) + + +class DatasetCatalogIndexer(CatalogIndexer): + + def index(self, session) -> int: + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) + log.info(f'Found {len(all_datasets)} datasets') + dataset: Dataset + for dataset in all_datasets: + tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) + folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) + return len(tables) + len(folders) + 1 diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 7354ebeec..df6c83db9 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -24,6 +24,8 @@ class ImportMode(Enum): API = auto() CDK = auto() HANDLERS = auto() + STACK_UPDATER_TASK = auto() + CATALOG_INDEXER_TASK = auto() class ModuleInterface(ABC): diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 9da3ed925..bbdc07df8 100644 --- a/backend/dataall/tasks/catalog_indexer.py 
+++ b/backend/dataall/tasks/catalog_indexer.py @@ -1,12 +1,11 @@ import logging import os import sys +from abc import ABC +from typing import List -from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import Dataset -from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer -from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.db import get_engine, models +from dataall.modules.loader import load_modules, ImportMode from dataall.searchproxy.indexers import DashboardIndexer from dataall.utils.alarm_service import AlarmService @@ -16,23 +15,27 @@ root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) +load_modules([ImportMode.CATALOG_INDEXER_TASK]) + + +class CatalogIndexer(ABC): + def index(self, session) -> int: + raise NotImplementedError("index is not implemented") + + +_indexers: List[CatalogIndexer] = [] + + +def register_catalog_indexer(indexer: CatalogIndexer): + _indexers.append(indexer) + def index_objects(engine): try: indexed_objects_counter = 0 with engine.scoped_session() as session: - - all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets( - session - ) - log.info(f'Found {len(all_datasets)} datasets') - dataset: Dataset - for dataset in all_datasets: - tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) - folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) - indexed_objects_counter = ( - indexed_objects_counter + len(tables) + len(folders) + 1 - ) + for indexer in _indexers: + indexed_objects_counter += indexer.index(session) all_dashboards: [models.Dashboard] = session.query(models.Dashboard).all() log.info(f'Found {len(all_dashboards)} dashboards') diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index ea380cdd6..77bf6fc47 100644 --- 
a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -2,14 +2,15 @@ import os import sys import time +from abc import ABC +from typing import List -from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import Dataset -from .. import db -from ..db import models -from ..aws.handlers.ecs import Ecs -from ..db import get_engine -from ..utils import Parameter +from dataall.modules.loader import ImportMode, load_modules +from dataall import db +from dataall.db import models +from dataall.aws.handlers.ecs import Ecs +from dataall.db import get_engine +from dataall.utils import Parameter root = logging.getLogger() root.setLevel(logging.INFO) @@ -21,23 +22,36 @@ SLEEP_TIME = 30 +load_modules([ImportMode.STACK_UPDATER_TASK]) + + +class StackFinder(ABC): + def find_stack_uris(self, session) -> List[str]: + """Finds stacks to update""" + raise NotImplementedError("retrieve_stack_uris is not implemented") + + +_finders: List[StackFinder] = [] + + +def register_stack_finder(finder: StackFinder): + _finders.append(finder) + + def update_stacks(engine, envname): with engine.scoped_session() as session: - - all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) all_environments: [models.Environment] = db.api.Environment.list_all_active_environments(session) + additional_stacks = [] + for finder in _finders: + additional_stacks.extend(finder.find_stack_uris(session)) log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') environment: models.Environment for environment in all_environments: update_stack(session=session, envname=envname, target_uri=environment.environmentUri, wait=True) - log.info(f'Found {len(all_datasets)} datasets') - dataset: Dataset - for dataset in all_datasets: - update_stack(session=session, envname=envname, target_uri=dataset.datasetUri, wait=False) - - return all_environments, all_datasets + 
for stack_uri in additional_stacks: + update_stack(session=session, envname=envname, target_uri=stack_uri, wait=False) def update_stack(session, envname, target_uri, wait=False): From 34ac1cd3e8475246b51b2c047b65cdad0fdf8e54 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 16:47:01 +0200 Subject: [PATCH 203/346] Fixed imports --- .../modules/datasets/services/dataset_location_service.py | 2 +- backend/dataall/modules/datasets/services/dataset_service.py | 3 ++- .../dataall/modules/datasets/services/dataset_table_service.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 144f59ddf..00cac9bb3 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -5,9 +5,9 @@ from dataall.db.exceptions import ResourceShared, ResourceAlreadyExists from dataall.db.models import Task from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository -from dataall.modules.datasets import DatasetLocationIndexer from dataall.modules.datasets.aws.s3_location_client import S3LocationClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER, MANAGE_DATASETS, \ CREATE_DATASET_FOLDER, LIST_DATASET_FOLDERS, DELETE_DATASET_FOLDER from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 643bbff39..3ba1cac19 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ 
b/backend/dataall/modules/datasets/services/dataset_service.py @@ -13,11 +13,12 @@ from dataall.modules.dataset_sharing.db.models import ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_APPROVER -from dataall.modules.datasets import DatasetIndexer, DatasetTableIndexer from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ CREATE_DATASET, DATASET_ALL, DATASET_READ diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 958c80b48..521eb182a 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -8,9 +8,9 @@ from dataall.db.api import ResourcePolicy, Environment, Glossary from dataall.db.exceptions import ResourceShared, ResourceAlreadyExists from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository -from dataall.modules.datasets import DatasetTableIndexer from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from 
dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets_base.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ DELETE_DATASET_TABLE, CREATE_DATASET_TABLE From 1891f538e51c7aa707abf2ae3eb1703da3d75ed1 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 17 May 2023 17:57:09 +0200 Subject: [PATCH 204/346] Fixed issue with multiply loading --- backend/api_handler.py | 2 +- backend/aws_handler.py | 2 +- backend/cdkproxymain.py | 2 +- backend/dataall/cdkproxy/app.py | 2 +- .../modules/dataset_sharing/__init__.py | 4 +-- backend/dataall/modules/datasets/__init__.py | 10 +++--- .../dataall/modules/datasets_base/__init__.py | 4 +-- backend/dataall/modules/loader.py | 35 +++++++++++++++---- .../modules/sagemaker_base/__init__.py | 4 +-- .../dataall/modules/worksheets/__init__.py | 4 +-- backend/dataall/tasks/__init__.py | 1 - backend/dataall/tasks/catalog_indexer.py | 2 +- backend/dataall/tasks/stacks_updater.py | 4 ++- backend/local_graphql_server.py | 2 +- tests/conftest.py | 2 +- tests/modules/test_loader.py | 28 +++++++++------ tests/tasks/test_stacks_updater.py | 4 +-- 17 files changed, 72 insertions(+), 40 deletions(-) diff --git a/backend/api_handler.py b/backend/api_handler.py index 714e107b2..e8d5e184c 100644 --- a/backend/api_handler.py +++ b/backend/api_handler.py @@ -24,7 +24,7 @@ for name in ['boto3', 's3transfer', 'botocore', 'boto']: logging.getLogger(name).setLevel(logging.ERROR) -load_modules(modes=[ImportMode.API]) +load_modules(modes={ImportMode.API}) SCHEMA = bootstrap_schema() TYPE_DEFS = gql(SCHEMA.gql(with_directives=False)) ENVNAME = os.getenv('envname', 'local') diff --git a/backend/aws_handler.py b/backend/aws_handler.py index ec5382b6f..7101a6da2 100644 --- a/backend/aws_handler.py +++ b/backend/aws_handler.py @@ -14,7 +14,7 @@ engine = get_engine(envname=ENVNAME) 
-load_modules(modes=[ImportMode.HANDLERS]) +load_modules(modes={ImportMode.HANDLERS}) def handler(event, context=None): diff --git a/backend/cdkproxymain.py b/backend/cdkproxymain.py index 602b580c8..21f0e1349 100644 --- a/backend/cdkproxymain.py +++ b/backend/cdkproxymain.py @@ -21,7 +21,7 @@ f"Application started for envname= `{ENVNAME}` DH_DOCKER_VERSION:{os.environ.get('DH_DOCKER_VERSION')}" ) -load_modules(modes=[ImportMode.CDK]) +load_modules(modes={ImportMode.CDK}) StackManager.registered_stacks() diff --git a/backend/dataall/cdkproxy/app.py b/backend/dataall/cdkproxy/app.py index fde8fa2eb..297dfe116 100644 --- a/backend/dataall/cdkproxy/app.py +++ b/backend/dataall/cdkproxy/app.py @@ -13,7 +13,7 @@ logger = logging.getLogger('cdkapp process') logger.setLevel('INFO') -load_modules(modes=[ImportMode.CDK]) +load_modules(modes={ImportMode.CDK}) class CdkRunner: diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 80ab16edc..140fb4e0c 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -1,5 +1,5 @@ import logging -from typing import List, Type +from typing import List, Type, Set from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.loader import ModuleInterface, ImportMode @@ -10,7 +10,7 @@ class SharingApiModuleInterface(ModuleInterface): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.API in modes @staticmethod diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 371e76709..61161caf1 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -1,6 +1,6 @@ """Contains the code related to datasets""" import logging -from typing import List, Type +from typing import List, Type, Set from 
dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface @@ -76,7 +76,7 @@ class DatasetAsyncHandlersModuleInterface(ModuleInterface): """Implements ModuleInterface for dataset async lambda""" @staticmethod - def is_supported(modes: List[ImportMode]): + def is_supported(modes: Set[ImportMode]): return ImportMode.HANDLERS in modes def __init__(self): @@ -94,7 +94,7 @@ class DatasetCdkModuleInterface(ModuleInterface): """Loads dataset cdk stacks """ @staticmethod - def is_supported(modes: List[ImportMode]): + def is_supported(modes: Set[ImportMode]): return ImportMode.CDK in modes def __init__(self): @@ -114,7 +114,7 @@ def depends_on() -> List[Type['ModuleInterface']]: class DatasetStackUpdaterModuleInterface(ModuleInterface): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.STACK_UPDATER_TASK in modes def __init__(self): @@ -133,7 +133,7 @@ def find_stack_uris(self, session) -> List[str]: class DatasetCatalogIndexerModuleInterface(ModuleInterface): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.CATALOG_INDEXER_TASK in modes def __init__(self): diff --git a/backend/dataall/modules/datasets_base/__init__.py b/backend/dataall/modules/datasets_base/__init__.py index 7906c0307..57ac7afaf 100644 --- a/backend/dataall/modules/datasets_base/__init__.py +++ b/backend/dataall/modules/datasets_base/__init__.py @@ -1,9 +1,9 @@ -from typing import List +from typing import Set from dataall.modules.loader import ModuleInterface, ImportMode class DatasetBaseModuleInterface(ModuleInterface): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return True diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 
df6c83db9..49dbfe346 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -13,6 +13,9 @@ _MODULE_PREFIX = "dataall.modules" +# This needed not to load the same module twice. Should happen only in tests +_ACTIVE_MODES = set() + class ImportMode(Enum): """Defines importing mode @@ -27,6 +30,10 @@ class ImportMode(Enum): STACK_UPDATER_TASK = auto() CATALOG_INDEXER_TASK = auto() + @staticmethod + def all(): + return {mode for mode in ImportMode} + class ModuleInterface(ABC): """ @@ -35,7 +42,7 @@ class ModuleInterface(ABC): """ @staticmethod @abstractmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: """ Return True if the module interface supports any of the ImportMode and should be loaded """ @@ -60,19 +67,35 @@ def depends_on() -> List[Type['ModuleInterface']]: return [] -def load_modules(modes: List[ImportMode]) -> None: +def load_modules(modes: Set[ImportMode]) -> None: """ Loads all modules from the config Loads only requested functionality (submodules) using the mode parameter """ + + to_load = _new_modules(modes) + if not to_load: + return + in_config, inactive = _load_modules() - _check_loading_correct(in_config, modes) - _initialize_modules(modes) + _check_loading_correct(in_config, to_load) + _initialize_modules(to_load) _describe_loading(in_config, inactive) log.info("All modules have been imported") +def _new_modules(modes: Set[ImportMode]): + """ + Extracts only new modules to load. It's needed to avoid multiply loading + """ + all_modes = _ACTIVE_MODES + + to_load = modes - all_modes # complement of set + all_modes |= modes + return to_load + + def _load_modules(): """ Loads modules but not initializing them @@ -121,7 +144,7 @@ def _load_module(name: str): return False -def _initialize_modules(modes: List[ImportMode]): +def _initialize_modules(modes: Set[ImportMode]): """ Initialize all modules for supported modes. 
This method is using topological sorting for a graph of module dependencies. It's needed to load module in a specific order: first modules to load are without dependencies. @@ -165,7 +188,7 @@ def _initialize_module(module): module() # call a constructor for initialization -def _check_loading_correct(in_config: Set[str], modes: List[ImportMode]): +def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): """ To avoid unintentional loading (without ModuleInterface) we can check all loaded modules. Unintentional/incorrect loading might happen if module A has a direct reference to module B without declaring it diff --git a/backend/dataall/modules/sagemaker_base/__init__.py b/backend/dataall/modules/sagemaker_base/__init__.py index 20589ebf0..6a40c6c75 100644 --- a/backend/dataall/modules/sagemaker_base/__init__.py +++ b/backend/dataall/modules/sagemaker_base/__init__.py @@ -1,10 +1,10 @@ """Common code for machine learning studio and notebooks""" -from typing import List +from typing import Set from dataall.modules.loader import ModuleInterface, ImportMode class SagemakerCdkModuleInterface(ModuleInterface): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.CDK in modes diff --git a/backend/dataall/modules/worksheets/__init__.py b/backend/dataall/modules/worksheets/__init__.py index 9c9acdaf8..c4354a8b3 100644 --- a/backend/dataall/modules/worksheets/__init__.py +++ b/backend/dataall/modules/worksheets/__init__.py @@ -12,8 +12,8 @@ class WorksheetApiModuleInterface(ModuleInterface): """Implements ModuleInterface for worksheet GraphQl lambda""" - @classmethod - def is_supported(cls, modes): + @staticmethod + def is_supported(modes): return ImportMode.API in modes def __init__(self): diff --git a/backend/dataall/tasks/__init__.py b/backend/dataall/tasks/__init__.py index 89cb28e27..e69de29bb 100644 --- a/backend/dataall/tasks/__init__.py +++ 
b/backend/dataall/tasks/__init__.py @@ -1 +0,0 @@ -from .catalog_indexer import index_objects diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index bbdc07df8..99313d5a8 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -15,7 +15,7 @@ root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -load_modules([ImportMode.CATALOG_INDEXER_TASK]) +load_modules({ImportMode.CATALOG_INDEXER_TASK}) class CatalogIndexer(ABC): diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 77bf6fc47..535e42d30 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -22,7 +22,7 @@ SLEEP_TIME = 30 -load_modules([ImportMode.STACK_UPDATER_TASK]) +load_modules({ImportMode.STACK_UPDATER_TASK}) class StackFinder(ABC): @@ -53,6 +53,8 @@ def update_stacks(engine, envname): for stack_uri in additional_stacks: update_stack(session=session, envname=envname, target_uri=stack_uri, wait=False) + return len(all_environments), len(additional_stacks) + def update_stack(session, envname, target_uri, wait=False): stack: models.Stack = db.api.Stack.get_stack_by_target_uri( diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py index 98e99cd73..114fdce4b 100644 --- a/backend/local_graphql_server.py +++ b/backend/local_graphql_server.py @@ -30,7 +30,7 @@ es = connect(envname=ENVNAME) logger.info('Connected') # create_schema_and_tables(engine, envname=ENVNAME) -load_modules(modes=[ImportMode.API, ImportMode.HANDLERS]) +load_modules(modes={ImportMode.API, ImportMode.HANDLERS}) Base.metadata.create_all(engine.engine) CDKPROXY_URL = ( 'http://cdkproxy:2805' if ENVNAME == 'dkrcompose' else 'http://localhost:2805' diff --git a/tests/conftest.py b/tests/conftest.py index 2767a66a3..e9d0138b2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import dataall 
from dataall.modules.loader import load_modules, ImportMode -load_modules(modes=[ImportMode.HANDLERS, ImportMode.API, ImportMode.CDK]) +load_modules(modes=ImportMode.all()) ENVNAME = os.environ.get('envname', 'pytest') diff --git a/tests/modules/test_loader.py b/tests/modules/test_loader.py index 9db12b408..8e834b0bb 100644 --- a/tests/modules/test_loader.py +++ b/tests/modules/test_loader.py @@ -1,5 +1,5 @@ from abc import ABC -from typing import List, Type +from typing import List, Type, Set import pytest @@ -20,7 +20,7 @@ def name(cls) -> str: class TestApiModule(TestModule): @staticmethod - def is_supported(modes: List[ImportMode]) -> bool: + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.API in modes @@ -106,27 +106,35 @@ def patch_loading(mocker, all_modules, in_config): ) +@pytest.fixture(scope="function", autouse=True) +def patch_modes(mocker): + mocker.patch( + 'dataall.modules.loader._ACTIVE_MODES', set() + ) + yield + + def test_nothing_to_load(mocker): patch_loading(mocker, [], set()) - loader.load_modules([ImportMode.API, ImportMode.CDK]) + loader.load_modules({ImportMode.API, ImportMode.CDK}) assert len(order) == 0 def test_import_with_one_dependency(mocker): patch_loading(mocker, [AModule, BModule], {BModule}) - loader.load_modules([ImportMode.API]) + loader.load_modules({ImportMode.API}) assert order == [AModule, BModule] def test_load_with_cdk_mode(mocker): patch_loading(mocker, [DModule, CModule, BModule], {CModule}) - loader.load_modules([ImportMode.CDK]) + loader.load_modules({ImportMode.CDK}) assert order == [CModule] def test_many_nested_layers(mocker): patch_loading(mocker, [BModule, CModule, AModule, DModule], {DModule, CModule}) - loader.load_modules([ImportMode.API]) + loader.load_modules({ImportMode.API}) correct_order = [AModule, BModule, DModule] assert order == correct_order assert CModule not in correct_order @@ -137,16 +145,16 @@ def test_complex_loading(mocker): AModule, BModule, CModule, DModule, EModule, 
FModule, GModule, IModule, JModule, KModule ], {CModule, FModule, GModule, IModule, KModule}) - loader.load_modules([ImportMode.API]) + loader.load_modules({ImportMode.API}) assert order == [AModule, JModule, BModule, DModule, EModule, GModule, FModule, IModule, KModule] def test_incorrect_loading(mocker): - patch_loading(mocker, [AModule], set()) # A is not specified in config, but was found + patch_loading(mocker, [CModule], set()) # A is not specified in config, but was found with pytest.raises(ImportError): - loader.load_modules([ImportMode.API]) + loader.load_modules({ImportMode.CDK}) patch_loading(mocker, [AModule, BModule], {AModule}) with pytest.raises(ImportError): - loader.load_modules([ImportMode.API]) + loader.load_modules({ImportMode.API}) diff --git a/tests/tasks/test_stacks_updater.py b/tests/tasks/test_stacks_updater.py index 701d7c50d..88c5dbd71 100644 --- a/tests/tasks/test_stacks_updater.py +++ b/tests/tasks/test_stacks_updater.py @@ -72,5 +72,5 @@ def test_stacks_update(db, org, env, sync_dataset, mocker): envs, datasets = dataall.tasks.stacks_updater.update_stacks( engine=db, envname='local' ) - assert len(envs) == 1 - assert len(datasets) == 1 + assert envs == 1 + assert datasets == 1 From 4c8b91c0306829b6320b4924b6c133b71a392196 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 10:29:40 +0200 Subject: [PATCH 205/346] Moved share/dataset related tests --- .../datasets/test_dataset_catalog_indexer.py} | 0 .../datasets/test_dataset_policies.py} | 0 .../datasets/test_dataset_subscriptions.py} | 0 .../datasets/test_dataset_tables_sync.py} | 0 .../datasets}/test_lf_share_manager.py | 0 .../datasets}/test_s3_share_manager.py | 0 .../test_stacks_updater_with_datasets.py | 75 +++++++++++++++++++ tests/tasks/test_stacks_updater.py | 34 +-------- 8 files changed, 79 insertions(+), 30 deletions(-) rename tests/{tasks/test_catalog_indexer.py => modules/datasets/test_dataset_catalog_indexer.py} (100%) rename 
tests/{tasks/test_policies.py => modules/datasets/test_dataset_policies.py} (100%) rename tests/{tasks/test_subscriptions.py => modules/datasets/test_dataset_subscriptions.py} (100%) rename tests/{tasks/test_tables_sync.py => modules/datasets/test_dataset_tables_sync.py} (100%) rename tests/{tasks => modules/datasets}/test_lf_share_manager.py (100%) rename tests/{tasks => modules/datasets}/test_s3_share_manager.py (100%) create mode 100644 tests/modules/datasets/test_stacks_updater_with_datasets.py diff --git a/tests/tasks/test_catalog_indexer.py b/tests/modules/datasets/test_dataset_catalog_indexer.py similarity index 100% rename from tests/tasks/test_catalog_indexer.py rename to tests/modules/datasets/test_dataset_catalog_indexer.py diff --git a/tests/tasks/test_policies.py b/tests/modules/datasets/test_dataset_policies.py similarity index 100% rename from tests/tasks/test_policies.py rename to tests/modules/datasets/test_dataset_policies.py diff --git a/tests/tasks/test_subscriptions.py b/tests/modules/datasets/test_dataset_subscriptions.py similarity index 100% rename from tests/tasks/test_subscriptions.py rename to tests/modules/datasets/test_dataset_subscriptions.py diff --git a/tests/tasks/test_tables_sync.py b/tests/modules/datasets/test_dataset_tables_sync.py similarity index 100% rename from tests/tasks/test_tables_sync.py rename to tests/modules/datasets/test_dataset_tables_sync.py diff --git a/tests/tasks/test_lf_share_manager.py b/tests/modules/datasets/test_lf_share_manager.py similarity index 100% rename from tests/tasks/test_lf_share_manager.py rename to tests/modules/datasets/test_lf_share_manager.py diff --git a/tests/tasks/test_s3_share_manager.py b/tests/modules/datasets/test_s3_share_manager.py similarity index 100% rename from tests/tasks/test_s3_share_manager.py rename to tests/modules/datasets/test_s3_share_manager.py diff --git a/tests/modules/datasets/test_stacks_updater_with_datasets.py 
b/tests/modules/datasets/test_stacks_updater_with_datasets.py new file mode 100644 index 000000000..823b62e3f --- /dev/null +++ b/tests/modules/datasets/test_stacks_updater_with_datasets.py @@ -0,0 +1,75 @@ +import pytest +import dataall +from dataall.api.constants import OrganisationUserRole +from dataall.modules.datasets_base.db.models import Dataset +from dataall.tasks.stacks_updater import update_stacks + + +@pytest.fixture(scope='module', autouse=True) +def org(db): + with db.scoped_session() as session: + org = dataall.db.models.Organization( + label='org', + owner='alice', + tags=[], + description='desc', + SamlGroupName='admins', + userRoleInOrganization=OrganisationUserRole.Owner.value, + ) + session.add(org) + yield org + + +@pytest.fixture(scope='module', autouse=True) +def env(org, db): + with db.scoped_session() as session: + env = dataall.db.models.Environment( + organizationUri=org.organizationUri, + AwsAccountId='12345678901', + region='eu-west-1', + label='org', + owner='alice', + tags=[], + description='desc', + SamlGroupName='admins', + EnvironmentDefaultIAMRoleName='EnvRole', + EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', + CDKRoleArn='arn:aws::123456789012:role/EnvRole', + userRoleInEnvironment='999', + ) + session.add(env) + yield env + + +@pytest.fixture(scope='module', autouse=True) +def sync_dataset(org, env, db): + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=org.organizationUri, + environmentUri=env.environmentUri, + label='label', + owner='foo', + SamlAdminGroupName='foo', + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + ) + 
session.add(dataset) + yield dataset + + +def test_stacks_update(db, org, env, sync_dataset, mocker): + mocker.patch( + 'dataall.tasks.stacks_updater.update_stack', + return_value=True, + ) + envs, datasets = update_stacks(engine=db, envname='local') + assert envs == 1 + assert datasets == 1 diff --git a/tests/tasks/test_stacks_updater.py b/tests/tasks/test_stacks_updater.py index 88c5dbd71..124768683 100644 --- a/tests/tasks/test_stacks_updater.py +++ b/tests/tasks/test_stacks_updater.py @@ -1,7 +1,7 @@ import pytest import dataall from dataall.api.constants import OrganisationUserRole -from dataall.modules.datasets_base.db.models import Dataset +from dataall.tasks.stacks_updater import update_stacks @pytest.fixture(scope='module', autouse=True) @@ -40,37 +40,11 @@ def env(org, db): yield env -@pytest.fixture(scope='module', autouse=True) -def sync_dataset(org, env, db): - with db.scoped_session() as session: - dataset = Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield dataset - - -def test_stacks_update(db, org, env, sync_dataset, mocker): +def test_stacks_update(db, org, env, mocker): mocker.patch( 'dataall.tasks.stacks_updater.update_stack', return_value=True, ) - envs, datasets = dataall.tasks.stacks_updater.update_stacks( - engine=db, envname='local' - ) + envs, others = update_stacks(engine=db, envname='local') assert envs == 1 - assert datasets == 1 + assert others == 0 From d75ebfd410061e429537a7d3831a21e79f8039e2 Mon Sep 17 00:00:00 2001 From: 
Nikita Podshivalov Date: Fri, 19 May 2023 10:37:47 +0200 Subject: [PATCH 206/346] Moved to tasks folder --- tests/modules/datasets/tasks/__init__.py | 0 tests/{ => modules/datasets}/tasks/conftest.py | 1 + .../modules/datasets/{ => tasks}/test_dataset_catalog_indexer.py | 0 tests/modules/datasets/{ => tasks}/test_dataset_policies.py | 0 tests/modules/datasets/{ => tasks}/test_dataset_subscriptions.py | 0 tests/modules/datasets/{ => tasks}/test_dataset_tables_sync.py | 0 tests/modules/datasets/{ => tasks}/test_lf_share_manager.py | 0 tests/modules/datasets/{ => tasks}/test_s3_share_manager.py | 0 .../datasets/{ => tasks}/test_stacks_updater_with_datasets.py | 0 9 files changed, 1 insertion(+) create mode 100644 tests/modules/datasets/tasks/__init__.py rename tests/{ => modules/datasets}/tasks/conftest.py (99%) rename tests/modules/datasets/{ => tasks}/test_dataset_catalog_indexer.py (100%) rename tests/modules/datasets/{ => tasks}/test_dataset_policies.py (100%) rename tests/modules/datasets/{ => tasks}/test_dataset_subscriptions.py (100%) rename tests/modules/datasets/{ => tasks}/test_dataset_tables_sync.py (100%) rename tests/modules/datasets/{ => tasks}/test_lf_share_manager.py (100%) rename tests/modules/datasets/{ => tasks}/test_s3_share_manager.py (100%) rename tests/modules/datasets/{ => tasks}/test_stacks_updater_with_datasets.py (100%) diff --git a/tests/modules/datasets/tasks/__init__.py b/tests/modules/datasets/tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/tasks/conftest.py b/tests/modules/datasets/tasks/conftest.py similarity index 99% rename from tests/tasks/conftest.py rename to tests/modules/datasets/tasks/conftest.py index 4fbbd49e3..e3556fed6 100644 --- a/tests/tasks/conftest.py +++ b/tests/modules/datasets/tasks/conftest.py @@ -216,6 +216,7 @@ def factory( yield factory + @pytest.fixture(scope="module") def share_item_table(db): def factory( diff --git a/tests/modules/datasets/test_dataset_catalog_indexer.py 
b/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py similarity index 100% rename from tests/modules/datasets/test_dataset_catalog_indexer.py rename to tests/modules/datasets/tasks/test_dataset_catalog_indexer.py diff --git a/tests/modules/datasets/test_dataset_policies.py b/tests/modules/datasets/tasks/test_dataset_policies.py similarity index 100% rename from tests/modules/datasets/test_dataset_policies.py rename to tests/modules/datasets/tasks/test_dataset_policies.py diff --git a/tests/modules/datasets/test_dataset_subscriptions.py b/tests/modules/datasets/tasks/test_dataset_subscriptions.py similarity index 100% rename from tests/modules/datasets/test_dataset_subscriptions.py rename to tests/modules/datasets/tasks/test_dataset_subscriptions.py diff --git a/tests/modules/datasets/test_dataset_tables_sync.py b/tests/modules/datasets/tasks/test_dataset_tables_sync.py similarity index 100% rename from tests/modules/datasets/test_dataset_tables_sync.py rename to tests/modules/datasets/tasks/test_dataset_tables_sync.py diff --git a/tests/modules/datasets/test_lf_share_manager.py b/tests/modules/datasets/tasks/test_lf_share_manager.py similarity index 100% rename from tests/modules/datasets/test_lf_share_manager.py rename to tests/modules/datasets/tasks/test_lf_share_manager.py diff --git a/tests/modules/datasets/test_s3_share_manager.py b/tests/modules/datasets/tasks/test_s3_share_manager.py similarity index 100% rename from tests/modules/datasets/test_s3_share_manager.py rename to tests/modules/datasets/tasks/test_s3_share_manager.py diff --git a/tests/modules/datasets/test_stacks_updater_with_datasets.py b/tests/modules/datasets/tasks/test_stacks_updater_with_datasets.py similarity index 100% rename from tests/modules/datasets/test_stacks_updater_with_datasets.py rename to tests/modules/datasets/tasks/test_stacks_updater_with_datasets.py From 69daa0d128642ea81fdd4aca1fa34302c1042c1e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 
11:10:07 +0200 Subject: [PATCH 207/346] Updated a version of pytest --- tests/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/requirements.txt b/tests/requirements.txt index d68c8d230..c7ac3c1a2 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,5 +1,5 @@ munch==2.5.0 -pytest==6.2.5 +pytest==7.3.1 pytest-cov==3.0.0 pytest-mock==3.6.1 pytest-dependency==0.5.1 From 9c47b30d04de4d0603c969367f618386d445bf72 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 11:26:58 +0200 Subject: [PATCH 208/346] Fixed linting --- .../dataall/api/Objects/Glossary/registry.py | 2 +- .../group/services/group_resource_manager.py | 1 - .../modules/dataset_sharing/api/resolvers.py | 1 + .../modules/dataset_sharing/db/models.py | 1 - .../db/share_object_repository.py | 3 -- .../handlers/ecs_share_handler.py | 15 ++++---- .../services/dataset_alarm_service.py | 2 -- .../services/share_item_service.py | 1 - .../services/share_object_service.py | 7 ++-- .../modules/datasets/api/dataset/enums.py | 1 - .../modules/datasets/api/table/resolvers.py | 2 +- .../datasets/aws/glue_dataset_client.py | 1 - .../modules/datasets/cdk/dataset_s3_policy.py | 1 - .../datasets/db/dataset_column_repository.py | 2 -- .../db/dataset_location_repository.py | 3 +- .../handlers/glue_table_sync_handler.py | 1 - .../datasets_base/db/dataset_repository.py | 1 - .../dataall/modules/datasets_base/db/enums.py | 1 - .../modules/datasets_base/db/models.py | 2 -- backend/dataall/modules/loader.py | 4 +-- .../dataall/modules/worksheets/__init__.py | 2 +- .../modules/worksheets/api/resolvers.py | 1 + .../modules/worksheets/aws/athena_client.py | 35 ++++++++++--------- .../modules/worksheets/db/repositories.py | 2 +- .../services/worksheet_permissions.py | 2 +- .../worksheets/services/worksheet_services.py | 4 +-- backend/dataall/searchproxy/base_indexer.py | 1 - ...fc49baecea4_add_enviromental_parameters.py | 22 ++++++------ 28 files changed, 51 
insertions(+), 70 deletions(-) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 494d5d502..27c534368 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -65,4 +65,4 @@ def reindex(cls, session, target_type: str, target_uri: str): object_type="Dashboard", model=models.Dashboard, reindexer=DashboardIndexer -)) \ No newline at end of file +)) diff --git a/backend/dataall/core/group/services/group_resource_manager.py b/backend/dataall/core/group/services/group_resource_manager.py index 950d66a7a..d2f3beb96 100644 --- a/backend/dataall/core/group/services/group_resource_manager.py +++ b/backend/dataall/core/group/services/group_resource_manager.py @@ -33,4 +33,3 @@ def count_group_resources(session, environment, group_uri) -> int: def delete_env(session, environment): for resource in EnvironmentResourceManager._resources: resource.delete_env(session, environment) - diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py index 9d32aed3d..8c4a4cfe7 100644 --- a/backend/dataall/modules/dataset_sharing/api/resolvers.py +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -73,6 +73,7 @@ def add_shared_item(context, source, shareUri: str = None, input: dict = None): def remove_shared_item(context, source, shareItemUri: str = None): return ShareItemService.remove_shared_item(uri=shareItemUri) + def resolve_shared_item(context, source: ShareObjectItem, **kwargs): if not source: return None diff --git a/backend/dataall/modules/dataset_sharing/db/models.py b/backend/dataall/modules/dataset_sharing/db/models.py index 85883b00c..65807c5c9 100644 --- a/backend/dataall/modules/dataset_sharing/db/models.py +++ b/backend/dataall/modules/dataset_sharing/db/models.py @@ -56,4 +56,3 @@ class ShareObjectItem(Base): S3AccessPointName = Column(String, nullable=True) 
status = Column(String, nullable=False, default=ShareItemStatus.PendingApproval.value) action = Column(String, nullable=True) - diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 30c7d4972..27a813d1a 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1038,6 +1038,3 @@ def paginate_shared_datasets(session, env_uri, group_uri, data): return paginate( query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) ).to_dict() - - - diff --git a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py index 2707a0609..5be0c32df 100644 --- a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py +++ b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py @@ -37,11 +37,10 @@ def _manage_share(engine, task: models.Task, local_handler, ecs_handler: str): @staticmethod def _run_share_management_ecs_task(share_uri, handler): return Ecs.run_ecs_task( - task_definition_param='ecs/task_def_arn/share_management', - container_name_param='ecs/container/share_management', - context=[ - {'name': 'shareUri', 'value': share_uri}, - {'name': 'handler', 'value': handler}, - ], - ) - + task_definition_param='ecs/task_def_arn/share_management', + container_name_param='ecs/container/share_management', + context=[ + {'name': 'shareUri', 'value': share_uri}, + {'name': 'handler', 'value': handler}, + ], + ) diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py index c1a11c289..a5f2684dd 100644 --- a/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py +++ 
b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py @@ -149,5 +149,3 @@ def trigger_revoke_folder_sharing_failure_alarm( - Region: {target_environment.region} """ return self.publish_message_to_alarms_topic(subject, message) - - diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py index 0cd683aac..ee4ad3bef 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_item_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -174,4 +174,3 @@ def paginated_shared_with_environment_datasets(session, uri, data) -> dict: def paginated_shared_with_environment_group_datasets(session, env_uri, group_uri, data) -> dict: # TODO THERE WAS NOT PERMISSION return ShareObjectRepository.paginate_shared_datasets(session, env_uri, group_uri, data) - diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index d96265b60..a185443d6 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -5,7 +5,8 @@ from dataall.db import utils from dataall.db.api import ResourcePolicy, Environment from dataall.db.exceptions import UnauthorizedOperation -from dataall.db.models import Activity, PrincipalType, EnvironmentGroup, ConsumptionRole +from dataall.db.models import Activity, PrincipalType, EnvironmentGroup, ConsumptionRole, Task +from dataall.aws.handlers.service_handlers import Worker from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject @@ -46,7 +47,7 @@ def create_share_object( if environment.region != dataset.region: raise UnauthorizedOperation( 
action=CREATE_SHARE_OBJECT, - message=f'Requester Team {group_uri} works in region {environment.region} ' + + message=f'Requester Team {group_uri} works in region {environment.region} ' f'and the requested dataset is stored in region {dataset.region}', ) @@ -230,7 +231,7 @@ def delete_share_object(uri: str): if shared_share_items_states: raise ShareItemsFound( action='Delete share object', - message='There are shared items in this request. ' + + message='There are shared items in this request. ' 'Revoke access to these items before deleting the request.', ) diff --git a/backend/dataall/modules/datasets/api/dataset/enums.py b/backend/dataall/modules/datasets/api/dataset/enums.py index 7169bb5c5..a68384648 100644 --- a/backend/dataall/modules/datasets/api/dataset/enums.py +++ b/backend/dataall/modules/datasets/api/dataset/enums.py @@ -15,4 +15,3 @@ class ConfidentialityClassification(GraphQLEnumMapper): Unclassified = 'Unclassified' Official = 'Official' Secret = 'Secret' - diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 8fb41b064..c6cd6fda5 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -4,7 +4,7 @@ from dataall.db.exceptions import RequiredParameter from dataall.modules.datasets.api.dataset.resolvers import get_dataset from dataall.api.context import Context -from dataall.db.api import Glossary +from dataall.db.api import Glossary from dataall.modules.datasets.services.dataset_table_service import DatasetTableService from dataall.modules.datasets_base.db.models import DatasetTable, Dataset diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py index cf5d432b1..cd68a6c85 100644 --- a/backend/dataall/modules/datasets/aws/glue_dataset_client.py +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -102,4 
+102,3 @@ def database_exists(self): except ClientError: log.info(f'Database {dataset.GlueDatabaseName} does not exist on account {dataset.AwsAccountId}...') return False - diff --git a/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py index 404b43db0..bc68ae087 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py +++ b/backend/dataall/modules/datasets/cdk/dataset_s3_policy.py @@ -30,4 +30,3 @@ def _generate_dataset_statements(datasets: List[Dataset]): resources=buckets, ) ] - diff --git a/backend/dataall/modules/datasets/db/dataset_column_repository.py b/backend/dataall/modules/datasets/db/dataset_column_repository.py index b4ae5e6ed..8febf1c63 100644 --- a/backend/dataall/modules/datasets/db/dataset_column_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_column_repository.py @@ -43,5 +43,3 @@ def paginate_active_columns_for_table(session, table_uri: str, filter: dict): page=filter.get('page', 1), page_size=filter.get('pageSize', 10) ).to_dict() - - diff --git a/backend/dataall/modules/datasets/db/dataset_location_repository.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py index a58527c75..d5aaed547 100644 --- a/backend/dataall/modules/datasets/db/dataset_location_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -114,7 +114,7 @@ def count_dataset_locations(session, dataset_uri): def delete_dataset_locations(session, dataset_uri) -> bool: locations = ( session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == dataset_uri ) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) .all() ) for location in locations: @@ -151,4 +151,3 @@ def paginated_dataset_locations(session, uri, data=None) -> dict: return paginate( query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) ).to_dict() - diff --git 
a/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py index e376a05e2..fa7441840 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py @@ -67,4 +67,3 @@ def update_table_columns(engine, task: models.Task): ) glue_client.update_table_for_column(column.name, updated_table) - diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index aea0bb8bc..7481e33ba 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -406,4 +406,3 @@ def _set_import_data(dataset, data): dataset.importedGlueDatabase = True if data.get('glueDatabaseName') else False dataset.importedKmsKey = True if data.get('KmsKeyId') else False dataset.importedAdminRole = True if data.get('adminRoleName') else False - diff --git a/backend/dataall/modules/datasets_base/db/enums.py b/backend/dataall/modules/datasets_base/db/enums.py index 5c2d89091..7eb8b2935 100644 --- a/backend/dataall/modules/datasets_base/db/enums.py +++ b/backend/dataall/modules/datasets_base/db/enums.py @@ -15,4 +15,3 @@ class DatasetRole(Enum): Admin = '900' Shared = '300' NoPermission = '000' - diff --git a/backend/dataall/modules/datasets_base/db/models.py b/backend/dataall/modules/datasets_base/db/models.py index df15c19a4..45f7fe858 100644 --- a/backend/dataall/modules/datasets_base/db/models.py +++ b/backend/dataall/modules/datasets_base/db/models.py @@ -140,5 +140,3 @@ class Dataset(Resource, Base): def uri(self): return self.datasetUri - - diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 49dbfe346..a625681bb 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -236,10 +236,10 @@ def 
_describe_loading(in_config: Set[str], inactive: Set[str]): name = module.name() log.debug(f"The {name} module was loaded") if name in inactive: - log.info(f"There is a module that depends on {module.name()}. " + + log.info(f"There is a module that depends on {module.name()}. " "The module has been loaded despite it's inactive.") elif name not in in_config: - log.info(f"There is a module that depends on {module.name()}. " + + log.info(f"There is a module that depends on {module.name()}. " "The module has been loaded despite it's not specified in the configuration file.") diff --git a/backend/dataall/modules/worksheets/__init__.py b/backend/dataall/modules/worksheets/__init__.py index c4354a8b3..f3208fe3d 100644 --- a/backend/dataall/modules/worksheets/__init__.py +++ b/backend/dataall/modules/worksheets/__init__.py @@ -25,4 +25,4 @@ def __init__(self): EnvironmentResourceManager.register(WorksheetRepository()) - log.info("API of worksheets has been imported") \ No newline at end of file + log.info("API of worksheets has been imported") diff --git a/backend/dataall/modules/worksheets/api/resolvers.py b/backend/dataall/modules/worksheets/api/resolvers.py index 154189fd4..6a606e9e6 100644 --- a/backend/dataall/modules/worksheets/api/resolvers.py +++ b/backend/dataall/modules/worksheets/api/resolvers.py @@ -11,6 +11,7 @@ class WorksheetRole(GraphQLEnumMapper): Admin = '900' NoPermission = '000' + def create_worksheet(context: Context, source, input: dict = None): if not input: raise exceptions.RequiredParameter(input) diff --git a/backend/dataall/modules/worksheets/aws/athena_client.py b/backend/dataall/modules/worksheets/aws/athena_client.py index 88ebddfdc..cb9debe4e 100644 --- a/backend/dataall/modules/worksheets/aws/athena_client.py +++ b/backend/dataall/modules/worksheets/aws/athena_client.py @@ -1,27 +1,28 @@ from pyathena import connect from dataall.aws.handlers.sts import SessionHelper + class AthenaClient: """ Makes requests to AWS Athena """ @staticmethod def 
run_athena_query(aws_account_id, env_group, s3_staging_dir, region, sql=None): - base_session = SessionHelper.remote_session(accountid=aws_account_id) - boto3_session = SessionHelper.get_session(base_session=base_session, role_arn=env_group.environmentIAMRoleArn) - creds = boto3_session.get_credentials() - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group=env_group.environmentAthenaWorkGroup, - s3_staging_dir=s3_staging_dir, - region_name=region, - ) - cursor = connection.cursor() - cursor.execute(sql) - - return cursor - + base_session = SessionHelper.remote_session(accountid=aws_account_id) + boto3_session = SessionHelper.get_session(base_session=base_session, role_arn=env_group.environmentIAMRoleArn) + creds = boto3_session.get_credentials() + connection = connect( + aws_access_key_id=creds.access_key, + aws_secret_access_key=creds.secret_key, + aws_session_token=creds.token, + work_group=env_group.environmentAthenaWorkGroup, + s3_staging_dir=s3_staging_dir, + region_name=region, + ) + cursor = connection.cursor() + cursor.execute(sql) + + return cursor + @staticmethod def convert_query_output(cursor): columns = [] @@ -44,4 +45,4 @@ def convert_query_output(cursor): 'ElapsedTime': cursor.total_execution_time_in_millis, 'rows': rows, 'columns': columns, - } \ No newline at end of file + } diff --git a/backend/dataall/modules/worksheets/db/repositories.py b/backend/dataall/modules/worksheets/db/repositories.py index d8828c0cb..ff4267eb8 100644 --- a/backend/dataall/modules/worksheets/db/repositories.py +++ b/backend/dataall/modules/worksheets/db/repositories.py @@ -27,7 +27,7 @@ def count_resources(session, environment, group_uri) -> int: @staticmethod def find_worksheet_by_uri(session, uri) -> Worksheet: return session.query(Worksheet).get(uri) - + @staticmethod def query_user_worksheets(session, username, groups, filter) -> Query: query = 
session.query(Worksheet).filter( diff --git a/backend/dataall/modules/worksheets/services/worksheet_permissions.py b/backend/dataall/modules/worksheets/services/worksheet_permissions.py index c255c1238..cad183f42 100644 --- a/backend/dataall/modules/worksheets/services/worksheet_permissions.py +++ b/backend/dataall/modules/worksheets/services/worksheet_permissions.py @@ -37,4 +37,4 @@ ENVIRONMENT_ALL.append(RUN_ATHENA_QUERY) RESOURCES_ALL.append(RUN_ATHENA_QUERY) -RESOURCES_ALL_WITH_DESC[RUN_ATHENA_QUERY] = RUN_ATHENA_QUERY \ No newline at end of file +RESOURCES_ALL_WITH_DESC[RUN_ATHENA_QUERY] = RUN_ATHENA_QUERY diff --git a/backend/dataall/modules/worksheets/services/worksheet_services.py b/backend/dataall/modules/worksheets/services/worksheet_services.py index 22abbf27a..9947f0f9b 100644 --- a/backend/dataall/modules/worksheets/services/worksheet_services.py +++ b/backend/dataall/modules/worksheets/services/worksheet_services.py @@ -25,11 +25,9 @@ def get_worksheet_by_uri(session, uri: str) -> Worksheet: raise exceptions.ObjectNotFound('Worksheet', uri) return worksheet - @staticmethod @has_tenant_permission(MANAGE_WORKSHEETS) - def create_worksheet( - session, username, uri, data=None) -> Worksheet: + def create_worksheet(session, username, uri, data=None) -> Worksheet: worksheet = Worksheet( owner=username, label=data.get('label'), diff --git a/backend/dataall/searchproxy/base_indexer.py b/backend/dataall/searchproxy/base_indexer.py index 4f1f75322..5c6ae943e 100644 --- a/backend/dataall/searchproxy/base_indexer.py +++ b/backend/dataall/searchproxy/base_indexer.py @@ -71,4 +71,3 @@ def _get_target_glossary_terms(session, target_uri): ) ) return [t.path for t in q] - diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 7c1d13d1f..0e1aef85e 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ 
b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -141,17 +141,17 @@ def downgrade(): session.add_all(envs) op.drop_table("environment_parameters") op.create_table( - 'worksheet_share', - sa.Column('worksheetShareUri', sa.String(), nullable=False), - sa.Column('worksheetUri', sa.String(), nullable=False), - sa.Column('principalId', sa.String(), nullable=False), - sa.Column('principalType', sa.String(), nullable=False), - sa.Column('canEdit', sa.Boolean(), nullable=True), - sa.Column('owner', sa.String(), nullable=False), - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('updated', sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint('worksheetShareUri'), - ) + 'worksheet_share', + sa.Column('worksheetShareUri', sa.String(), nullable=False), + sa.Column('worksheetUri', sa.String(), nullable=False), + sa.Column('principalId', sa.String(), nullable=False), + sa.Column('principalType', sa.String(), nullable=False), + sa.Column('canEdit', sa.Boolean(), nullable=True), + sa.Column('owner', sa.String(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('updated', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('worksheetShareUri'), + ) except Exception as ex: print(f"Failed to execute the rollback script due to: {ex}") From 53037720d61a012c376b8605debfc48c974c8826 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 12:15:11 +0200 Subject: [PATCH 209/346] Moved queries to repositories --- .../db/share_object_repository.py | 31 +- .../tasks/dataset_subscription_task.py | 304 ++++++------------ 2 files changed, 116 insertions(+), 219 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 27a813d1a..3a74abea6 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ 
b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -660,9 +660,7 @@ def update_share_item_status_batch( @staticmethod def get_share_data(session, share_uri): - share: ShareObject = session.query(ShareObject).get(share_uri) - if not share: - raise exceptions.ObjectNotFound('Share', share_uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, share_uri) dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) @@ -721,9 +719,7 @@ def get_share_data(session, share_uri): @staticmethod def get_share_data_items(session, share_uri, status): - share: ShareObject = session.query(ShareObject).get(share_uri) - if not share: - raise exceptions.ObjectNotFound('Share', share_uri) + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, share_uri) tables = ShareObjectRepository._find_all_share_item( session, share, status, DatasetTable, DatasetTable.tableUri @@ -1038,3 +1034,26 @@ def paginate_shared_datasets(session, env_uri, group_uri, data): return paginate( query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) ).to_dict() + + @staticmethod + def find_share_items_by_item_uri(session, item_uri): + return ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.itemUri == item_uri) + .all() + ) + + @staticmethod + def get_approved_share_object(session, item): + share_object: ShareObject = ( + session.query(ShareObject) + .filter( + and_( + ShareObject.shareUri == item.shareUri, + ShareObject.status == ShareObjectStatus.Approved.value, + ) + ) + .first() + ) + return share_object + diff --git a/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py index 6d4c42fe2..85b02a2ae 100644 --- a/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py +++ b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py @@ -4,7 +4,6 @@ import sys from botocore.exceptions import ClientError 
-from sqlalchemy import and_ from dataall import db from dataall.aws.handlers.service_handlers import Worker @@ -12,9 +11,10 @@ from dataall.aws.handlers.sqs import SqsQueue from dataall.db import get_engine from dataall.db import models -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject -from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository +from dataall.modules.dataset_sharing.db.models import ShareObjectItem +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.tasks.subscriptions import poll_queues from dataall.utils import json_utils from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository @@ -29,8 +29,8 @@ class DatasetSubscriptionService: - def __init__(self): - pass + def __init__(self, engine): + self.engine = engine @staticmethod def get_environments(engine): @@ -52,61 +52,34 @@ def get_queues(environments: [models.Environment]): ) return queues - @staticmethod - def notify_consumers(engine, messages): - + def notify_consumers(self, engine, messages): log.info(f'Notifying consumers with messages {messages}') with engine.scoped_session() as session: - for message in messages: - - DatasetSubscriptionService.publish_table_update_message(engine, message) - - DatasetSubscriptionService.publish_location_update_message(session, message) + self.publish_table_update_message(session, message) + self.publish_location_update_message(session, message) return True - @staticmethod - def publish_table_update_message(engine, message): - with engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( - session, - message.get('prefix'), - message.get('accountid'), - 
message.get('region'), + def publish_table_update_message(self, session, message): + table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( + session, + message.get('prefix'), + message.get('accountid'), + message.get('region'), + ) + if not table: + log.info(f'No table for message {message}') + else: + log.info( + f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' ) - if not table: - log.info(f'No table for message {message}') - else: - log.info( - f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' - ) - dataset: Dataset = session.query(Dataset).get( - table.datasetUri - ) - log.info( - f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' - ) - share_items: [ShareObjectItem] = ( - session.query(ShareObjectItem) - .filter(ShareObjectItem.itemUri == table.tableUri) - .all() - ) - log.info(f'Found shared items for table {share_items}') - - return DatasetSubscriptionService.publish_sns_message( - engine, - message, - dataset, - share_items, - table.S3Prefix, - table=table, - ) + message['table'] = table.GlueTableName + self._publish_update_message(session, message, table, table) - @staticmethod - def publish_location_update_message(session, message): + def publish_location_update_message(self, session, message): location: DatasetStorageLocation = ( DatasetLocationRepository.get_location_by_s3_prefix( session, @@ -120,161 +93,79 @@ def publish_location_update_message(session, message): else: log.info(f'Found location {location.locationUri}|{location.S3Prefix}') + self._publish_update_message(session, message, location) - dataset: Dataset = session.query(Dataset).get( - location.datasetUri - ) - log.info( - f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' - ) - share_items: [ShareObjectItem] = ( - session.query(ShareObjectItem) - .filter(ShareObjectItem.itemUri == location.locationUri) - .all() - ) - log.info(f'Found shared items for location 
{share_items}') - - return DatasetSubscriptionService.publish_sns_message( - session, message, dataset, share_items, location.S3Prefix - ) - - @staticmethod - def store_dataquality_results(session, message): - - table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( - session, - message.get('prefix'), - message.get('accountid'), - message.get('region'), - ) - - run = DatasetProfilingRepository.start_profiling( - session=session, - datasetUri=table.datasetUri, - GlueTableName=table.GlueTableName, - tableUri=table.tableUri, - ) - - run.status = 'SUCCEEDED' - run.GlueTableName = table.GlueTableName - quality_results = message.get('dataQuality') - - if message.get('datasetRegionId'): - quality_results['regionId'] = message.get('datasetRegionId') - - if message.get('rows'): - quality_results['table_nb_rows'] = message.get('rows') - - DatasetSubscriptionService.set_columns_type(quality_results, message) - - data_types = DatasetSubscriptionService.set_data_types(message) - - quality_results['dataTypes'] = data_types - - quality_results['integrationDateTime'] = message.get('integrationDateTime') - - results = json.dumps(json_utils.to_json(quality_results)) + def _publish_update_message(self, session, message, entity, table: DatasetTable = None): + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, entity.datasetUri) log.info( - '>>> Stored dataQuality results received from the SNS notification: %s', - results, + f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' ) + share_items: [ShareObjectItem] = ShareObjectRepository.find_share_items_by_item_uri(session, entity.uri()) + log.info(f'Found shared items for location {share_items}') - run.results = results - - session.commit() - return True - - @staticmethod - def set_data_types(message): - data_types = [] - for field in message.get('fields'): - added = False - for d in data_types: - if d.get('type').lower() == field[1].lower(): - d['count'] = d['count'] + 1 - 
added = True - break - if not added: - data_types.append({'type': field[1], 'count': 1}) - return data_types - - @staticmethod - def set_columns_type(quality_results, message): - for c in quality_results.get('columns'): - if not c.get('Type'): - for field in message.get('fields'): - if field[0].lower() == c['Name'].lower(): - c['Type'] = field[1] + return self.publish_sns_message( + session, message, dataset, share_items, entity.S3Prefix, table + ) - @staticmethod def publish_sns_message( - engine, message, dataset, share_items, prefix, table: DatasetTable = None + self, session, message, dataset, share_items, prefix, table: DatasetTable = None ): - with engine.scoped_session() as session: - for item in share_items: - - share_object = DatasetSubscriptionService.get_approved_share_object( - session, item + for item in share_items: + share_object = ShareObjectRepository.get_approved_share_object(session, item) + if not share_object or not share_object.principalId: + log.error( + f'Share Item with no share object or no principalId ? {item.shareItemUri}' ) - - if not share_object or not share_object.principalId: + else: + environment = session.query(models.Environment).get( + share_object.principalId + ) + if not environment: log.error( - f'Share Item with no share object or no principalId ? {item.shareItemUri}' + f'Environment of share owner was deleted ? {share_object.principalId}' ) else: - environment = session.query(models.Environment).get( - share_object.principalId + log.info(f'Notifying share owner {share_object.owner}') + + log.info( + f'found environment {environment.environmentUri}|{environment.AwsAccountId} of share owner {share_object.owner}' ) - if not environment: - log.error( - f'Environment of share owner was deleted ? 
{share_object.principalId}' - ) - else: - log.info(f'Notifying share owner {share_object.owner}') + try: log.info( - f'found environment {environment.environmentUri}|{environment.AwsAccountId} of share owner {share_object.owner}' + f'Producer message before notifications: {message}' ) - try: - - if table: - message['table'] = table.GlueTableName - - log.info( - f'Producer message before notifications: {message}' - ) - - DatasetSubscriptionService.redshift_copy( - engine, message, dataset, environment, table - ) + self.redshift_copy( + session, message, dataset, environment, table + ) - message = { - 'location': prefix, - 'owner': dataset.owner, - 'message': f'Dataset owner {dataset.owner} ' - f'has updated the table shared with you {prefix}', - } + message = { + 'location': prefix, + 'owner': dataset.owner, + 'message': f'Dataset owner {dataset.owner} ' + f'has updated the table shared with you {prefix}', + } - response = DatasetSubscriptionService.sns_call( - message, environment - ) + response = DatasetSubscriptionService.sns_call( + message, environment + ) - log.info(f'SNS update publish response {response}') + log.info(f'SNS update publish response {response}') - notifications = ShareNotificationService.notify_new_data_available_from_owners( - session=session, - dataset=dataset, - share=share_object, - s3_prefix=prefix, - ) - log.info(f'Notifications for share owners {notifications}') + notifications = ShareNotificationService.notify_new_data_available_from_owners( + session=session, + dataset=dataset, + share=share_object, + s3_prefix=prefix, + ) + log.info(f'Notifications for share owners {notifications}') - except ClientError as e: - log.error( - f'Failed to deliver message {message} due to: {e}' - ) + except ClientError as e: + log.error( + f'Failed to deliver message {message} due to: {e}' + ) @staticmethod def sns_call(message, environment): @@ -286,9 +177,10 @@ def sns_call(message, environment): ) return response - @staticmethod + # TODO redshift 
related code def redshift_copy( - engine, + self, + session, message, dataset: Dataset, environment: models.Environment, @@ -299,35 +191,21 @@ def redshift_copy( f'{environment.environmentUri}|{dataset.datasetUri}' f'|{json_utils.to_json(message)}' ) - with engine.scoped_session() as session: - task = models.Task( - action='redshift.subscriptions.copy', - targetUri=environment.environmentUri, - payload={ - 'datasetUri': dataset.datasetUri, - 'message': json_utils.to_json(message), - 'tableUri': table.tableUri, - }, - ) - session.add(task) - session.commit() - response = Worker.queue(engine, [task.taskUri]) - return response - - @staticmethod - def get_approved_share_object(session, item): - share_object: ShareObject = ( - session.query(ShareObject) - .filter( - and_( - ShareObject.shareUri == item.shareUri, - ShareObject.status == 'Approved', - ) - ) - .first() + task = models.Task( + action='redshift.subscriptions.copy', + targetUri=environment.environmentUri, + payload={ + 'datasetUri': dataset.datasetUri, + 'message': json_utils.to_json(message), + 'tableUri': table.tableUri, + }, ) - return share_object + session.add(task) + session.commit() + + response = Worker.queue(self.engine, [task.taskUri]) + return response if __name__ == '__main__': @@ -335,7 +213,7 @@ def get_approved_share_object(session, item): ENGINE = get_engine(envname=ENVNAME) Worker.queue = SqsQueue.send log.info('Polling datasets updates...') - service = DatasetSubscriptionService() + service = DatasetSubscriptionService(ENGINE) queues = service.get_queues(service.get_environments(ENGINE)) messages = poll_queues(queues) service.notify_consumers(ENGINE, messages) From 9b3a179a95af2da964535afdc9cf3ee3c85ab7a0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 12:24:14 +0200 Subject: [PATCH 210/346] Moved queries to repositories --- .../db/share_object_repository.py | 70 +++++++++++++++++ .../datasets/tasks/bucket_policy_updater.py | 78 +------------------ 2 files changed, 
73 insertions(+), 75 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 3a74abea6..8218529c8 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1,4 +1,5 @@ import logging +from typing import List from sqlalchemy import and_, or_, func, case from sqlalchemy.orm import Query @@ -1057,3 +1058,72 @@ def get_approved_share_object(session, item): ) return share_object + @staticmethod + def get_shared_tables(session, dataset) -> List[ShareObjectItem]: + return ( + session.query( + DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), + DatasetTable.GlueTableName.label('GlueTableName'), + DatasetTable.S3Prefix.label('S3Prefix'), + DatasetTable.AWSAccountId.label('SourceAwsAccountId'), + DatasetTable.region.label('SourceRegion'), + models.Environment.AwsAccountId.label('TargetAwsAccountId'), + models.Environment.region.label('TargetRegion'), + ) + .join( + ShareObjectItem, + and_( + ShareObjectItem.itemUri == DatasetTable.tableUri + ), + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + models.Environment, + models.Environment.environmentUri == ShareObject.environmentUri, + ) + .filter( + and_( + DatasetTable.datasetUri == dataset.datasetUri, + DatasetTable.deleted.is_(None), + ShareObjectItem.status == ShareObjectStatus.Approved.value, + ) + ) + ).all() + + @staticmethod + def get_shared_folders(session, dataset) -> List[DatasetStorageLocation]: + return ( + session.query( + DatasetStorageLocation.locationUri.label('locationUri'), + DatasetStorageLocation.S3BucketName.label('S3BucketName'), + DatasetStorageLocation.S3Prefix.label('S3Prefix'), + models.Environment.AwsAccountId.label('AwsAccountId'), + models.Environment.region.label('region'), + ) + .join( + ShareObjectItem, + and_( + 
ShareObjectItem.itemUri + == DatasetStorageLocation.locationUri() + ), + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + models.Environment, + models.Environment.environmentUri == ShareObject.environmentUri, + ) + .filter( + and_( + DatasetStorageLocation.datasetUri == dataset.datasetUri, + DatasetStorageLocation.deleted.is_(None), + ShareObjectItem.status == ShareObjectStatus.Approved.value, + ) + ) + ).all() + diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index bfffe873c..2df2e0500 100644 --- a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -12,6 +12,7 @@ from dataall.db import models from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset root = logging.getLogger() @@ -44,12 +45,12 @@ def sync_imported_datasets_bucket_policies(self): for dataset in imported_datasets: account_prefixes = {} - shared_tables = self.get_shared_tables(dataset) + shared_tables = ShareObjectRepository.get_shared_tables(session, dataset) log.info( f'Found {len(shared_tables)} shared tables with dataset {dataset.S3BucketName}' ) - shared_folders = self.get_shared_folders(dataset) + shared_folders = ShareObjectRepository.get_shared_folders(session, dataset) log.info( f'Found {len(shared_folders)} shared folders with dataset {dataset.S3BucketName}' ) @@ -166,79 +167,6 @@ def group_prefixes_by_accountid(cls, accountid, prefix, account_prefixes): account_prefixes[accountid] = [prefix] return account_prefixes - def get_shared_tables(self, dataset) -> typing.List[ShareObjectItem]: - 
with self.engine.scoped_session() as session: - tables = ( - session.query( - DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), - DatasetTable.GlueTableName.label('GlueTableName'), - DatasetTable.S3Prefix.label('S3Prefix'), - DatasetTable.AWSAccountId.label('SourceAwsAccountId'), - DatasetTable.region.label('SourceRegion'), - models.Environment.AwsAccountId.label('TargetAwsAccountId'), - models.Environment.region.label('TargetRegion'), - ) - .join( - ShareObjectItem, - and_( - ShareObjectItem.itemUri == DatasetTable.tableUri - ), - ) - .join( - ShareObject, - ShareObject.shareUri == ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == ShareObject.environmentUri, - ) - .filter( - and_( - DatasetTable.datasetUri == dataset.datasetUri, - DatasetTable.deleted.is_(None), - ShareObjectItem.status == ShareObjectStatus.Approved.value, - ) - ) - ).all() - return tables - - def get_shared_folders(self, dataset) -> typing.List[DatasetStorageLocation]: - with self.engine.scoped_session() as session: - locations = ( - session.query( - DatasetStorageLocation.locationUri.label('locationUri'), - DatasetStorageLocation.S3BucketName.label('S3BucketName'), - DatasetStorageLocation.S3Prefix.label('S3Prefix'), - models.Environment.AwsAccountId.label('AwsAccountId'), - models.Environment.region.label('region'), - ) - .join( - ShareObjectItem, - and_( - ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri - ), - ) - .join( - ShareObject, - ShareObject.shareUri == ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == ShareObject.environmentUri, - ) - .filter( - and_( - DatasetStorageLocation.datasetUri == dataset.datasetUri, - DatasetStorageLocation.deleted.is_(None), - ShareObjectItem.status == ShareObjectStatus.Approved.value, - ) - ) - ).all() - return locations - @classmethod def init_s3_client(cls, dataset): session = 
SessionHelper.remote_session(accountid=dataset.AwsAccountId) From 4ef4dddfb4c47d6d50bdd574c6241a3141d07092 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 13:48:45 +0200 Subject: [PATCH 211/346] Fixed migration's imports --- .../versions/509997f0a51e_sharing_state_machines_v1_4_0.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py b/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py index a5f5e74b0..14cfd3f6c 100644 --- a/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py +++ b/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py @@ -11,10 +11,10 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions, utils -from dataall.db.models.Enums import ShareObjectStatus, ShareItemStatus +from dataall.db import utils from datetime import datetime +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus # revision identifiers, used by Alembic. 
revision = '509997f0a51e' From 2b7fd448f72c082de4ea58932b070d193d3aee33 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 13:50:42 +0200 Subject: [PATCH 212/346] Fixed linting --- .../modules/dataset_sharing/db/share_object_repository.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index 8218529c8..a1fec68ae 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1126,4 +1126,3 @@ def get_shared_folders(session, dataset) -> List[DatasetStorageLocation]: ) ) ).all() - From 4fc225ca78fd4c09b8cba7d4d0499b06fd8c6605 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 19 May 2023 14:01:33 +0200 Subject: [PATCH 213/346] Fixed tests --- .../modules/dataset_sharing/db/share_object_repository.py | 3 +-- tests/modules/datasets/tasks/test_dataset_subscriptions.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index a1fec68ae..ef7044283 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -1106,8 +1106,7 @@ def get_shared_folders(session, dataset) -> List[DatasetStorageLocation]: .join( ShareObjectItem, and_( - ShareObjectItem.itemUri - == DatasetStorageLocation.locationUri() + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri ), ) .join( diff --git a/tests/modules/datasets/tasks/test_dataset_subscriptions.py b/tests/modules/datasets/tasks/test_dataset_subscriptions.py index b27df8462..ee01b2b97 100644 --- a/tests/modules/datasets/tasks/test_dataset_subscriptions.py +++ 
b/tests/modules/datasets/tasks/test_dataset_subscriptions.py @@ -141,7 +141,7 @@ def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): 'dataall.modules.datasets.tasks.dataset_subscription_task.DatasetSubscriptionService.sns_call', return_value=True, ) - subscriber = DatasetSubscriptionService() + subscriber = DatasetSubscriptionService(db) messages = [ { 'prefix': 's3://dataset/testtable/csv/', From 3b85ad2acc8fd1107d7caf60d714e89b10130595 Mon Sep 17 00:00:00 2001 From: kukushking Date: Mon, 22 May 2023 08:25:17 +0100 Subject: [PATCH 214/346] Fix lambda/ECS IAM permissions for AOSS (#467) ### Feature or Bugfix - Bugfix ### Detail - Add "aoss:APIAccessAll" to lambda/ECS task IAM roles required since May 10th (see message below). Fixes 403 errors from APIs. ``` [Action required] Amazon OpenSearch Serverless requires mandatory IAM permission for access to resources Starting May 10th, 2023, OpenSearch Serverless is mandating two new IAM permissions for collection resources. The two IAM permissions are "aoss:APIAccessAll" for Data Plane API access, and "aoss:DashboardsAccessAll" for Dashboards access from the browser. You are required to add these two IAM permissions for your OpenSearch Serverless "aoss:APIAccessAll" for Data Plane API access, and "aoss:DashboardsAccessAll" for Dashboards access. You must complete this action by May 9th, 2023. Failure to add the two new IAM permissions will result in 403 errors starting on May 10th, 2023 For a sample data-plane policy [here](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/security-iam-serverless.html#security_iam_id-based-policy-examples-data-plane.html) If you have any questions or concerns, please contact [AWS Support](https://aws.amazon.com/support) ``` By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- deploy/stacks/container.py | 8 ++++++++ deploy/stacks/lambda_api.py | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index a667c6898..997ad5d76 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -426,6 +426,14 @@ def create_task_role(self, envname, resource_prefix, pivot_role_name): ], resources=['*'], ), + iam.PolicyStatement( + actions=[ + 'aoss:APIAccessAll', + ], + resources=[ + f'arn:aws:aoss:{self.region}:{self.account}:collection/*', + ], + ), ], ) task_role = iam.Role( diff --git a/deploy/stacks/lambda_api.py b/deploy/stacks/lambda_api.py index 19b42e754..95c670f78 100644 --- a/deploy/stacks/lambda_api.py +++ b/deploy/stacks/lambda_api.py @@ -243,6 +243,14 @@ def create_function_role(self, envname, resource_prefix, fn_name, pivot_role_nam ], resources=['*'], ), + iam.PolicyStatement( + actions=[ + 'aoss:APIAccessAll', + ], + resources=[ + f'arn:aws:aoss:{self.region}:{self.account}:collection/*', + ], + ), ], ) role = iam.Role( From 3097a3a30a7fd8981b409a1f7fcd1463881344c9 Mon Sep 17 00:00:00 2001 From: Rick Bernotas <97474536+rbernotas@users.noreply.github.com> Date: Mon, 22 May 2023 03:41:49 -0500 Subject: [PATCH 215/346] 465 - Update Aurora default Parameter Group to 'default.aurora-postgresql11'. (#466) ### Feature or Bugfix - Bugfix ### Detail - Update Aurora default Parameter Group to 'default.aurora-postgresql11'. Fixes an issue where the Aurora nested stack deploy in the data.all backend deploy would fail and/or block indefinitely due to 'default.aurora-postgresql10' mismatch with version 11 of the Aurora database engine. ### Relates - https://github.com/awslabs/aws-dataall/issues/465 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
Authored-by: rbernota --- deploy/stacks/aurora.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/stacks/aurora.py b/deploy/stacks/aurora.py index 9e66d3684..5a96bea1a 100644 --- a/deploy/stacks/aurora.py +++ b/deploy/stacks/aurora.py @@ -125,7 +125,7 @@ def __init__( deletion_protection=True, cluster_identifier=f'{resource_prefix}-{envname}-db', parameter_group=rds.ParameterGroup.from_parameter_group_name( - self, 'ParameterGroup', 'default.aurora-postgresql10' + self, 'ParameterGroup', 'default.aurora-postgresql11' ), enable_data_api=True, default_database_name=f'{envname}db', From efc06dd7ed08a190938f58b1e83f2842398a36d2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 22 May 2023 16:31:59 +0200 Subject: [PATCH 216/346] User requests. Deleted TODOs --- .../modules/dataset_sharing/services/share_object_service.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index a185443d6..b75b87f02 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -266,7 +266,6 @@ def resolve_share_object_statistics(uri): @staticmethod def list_shares_in_my_inbox(filter: dict): - # TODO THERE WAS NO PERMISSION CHECK context = get_context() with context.db_engine.scoped_session() as session: return ShareObjectRepository.list_user_received_share_requests( @@ -278,7 +277,6 @@ def list_shares_in_my_inbox(filter: dict): @staticmethod def list_shares_in_my_outbox(filter): - # TODO THERE WAS NO PERMISSION CHECK context = get_context() with context.db_engine.scoped_session() as session: return ShareObjectRepository.list_user_sent_share_requests( From c11082d0b1c793a9321996bbf09dc952e0ed4930 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 22 May 2023 16:32:51 +0200 Subject: [PATCH 
217/346] Fixed loading issue --- backend/dataall/modules/loader.py | 2 +- backend/dataall/modules/notebooks/__init__.py | 3 ++- backend/local_graphql_server.py | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index a625681bb..d925bde7c 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -198,7 +198,7 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): """ expected_load = set() for module in _all_modules(): - if module.name() in in_config: + if module.is_supported(modes) and module.name() in in_config: expected_load.add(module) to_add = list(expected_load) diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index 0241bd2be..f272fb7ac 100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -5,7 +5,6 @@ from dataall.db.api import TargetType from dataall.modules.loader import ImportMode, ModuleInterface from dataall.modules.notebooks.db.repositories import NotebookRepository -from dataall.modules.sagemaker_base import SagemakerCdkModuleInterface log = logging.getLogger(__name__) @@ -35,6 +34,8 @@ def is_supported(modes): @staticmethod def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.sagemaker_base import SagemakerCdkModuleInterface + return [SagemakerCdkModuleInterface] def __init__(self): diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py index 114fdce4b..428387994 100644 --- a/backend/local_graphql_server.py +++ b/backend/local_graphql_server.py @@ -8,8 +8,6 @@ from flask_cors import CORS from dataall import db - -sts = boto3.client('sts', region_name='eu-west-1') from dataall.api import get_executable_schema from dataall.aws.handlers.service_handlers import Worker from dataall.db import get_engine, Base, create_schema_and_tables, init_permissions, api 
@@ -23,6 +21,8 @@ logger = logging.getLogger('graphql') logger.propagate = False logger.setLevel(logging.INFO) + +sts = boto3.client('sts', region_name='eu-west-1') Worker.queue = Worker.process ENVNAME = os.getenv('envname', 'local') logger.warning(f'Connecting to database `{ENVNAME}`') From 3b2fb48f6f2b1e5126361b3dd2d26d0fd58b709f Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 22 May 2023 17:05:03 +0200 Subject: [PATCH 218/346] Fixed loading issue with redshift. Added more comments and removed imports --- backend/dataall/aws/handlers/redshift.py | 5 +++-- backend/dataall/db/api/redshift_cluster.py | 13 +++++++++++-- .../datasets/db/dataset_location_repository.py | 6 ------ backend/dataall/modules/loader.py | 8 ++++++-- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/backend/dataall/aws/handlers/redshift.py b/backend/dataall/aws/handlers/redshift.py index 7bb1e096d..0810cbc09 100644 --- a/backend/dataall/aws/handlers/redshift.py +++ b/backend/dataall/aws/handlers/redshift.py @@ -9,8 +9,6 @@ from .sts import SessionHelper from ... 
import db from ...db import models -# TODO should be migrated in the redshift module -from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from ...modules.datasets_base.db.dataset_repository import DatasetRepository @@ -440,6 +438,9 @@ def update_cluster_roles(engine, task: models.Task): @staticmethod @Worker.handler(path='redshift.subscriptions.copy') def copy_data(engine, task: models.Task): + # TODO should be migrated in the redshift module + from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository + with engine.scoped_session() as session: environment: models.Environment = session.query(models.Environment).get( diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py index 15222738b..9f0e31fab 100644 --- a/backend/dataall/db/api/redshift_cluster.py +++ b/backend/dataall/db/api/redshift_cluster.py @@ -10,8 +10,6 @@ NamingConventionPattern, ) from dataall.utils.slugify import slugify -from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem -from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM log = logging.getLogger(__name__) @@ -183,6 +181,11 @@ def get_redshift_cluster_by_uri(session, uri) -> models.RedshiftCluster: def list_available_datasets( session, username, groups, uri: str, data: dict = None, check_perm=None ): + + # TODO deal with it in redshift module + from dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem + from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM + cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( session, uri ) @@ -298,9 +301,15 @@ def list_cluster_datasets( def list_available_cluster_tables( session, username, groups, uri: str, data: dict = None, check_perm=None ): + + # TODO deal with it in redshift module + from 
dataall.modules.dataset_sharing.db.models import ShareObject, ShareObjectItem + from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM + cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( session, uri ) + share_item_shared_states = ShareItemSM.get_share_item_shared_states() shared = ( diff --git a/backend/dataall/modules/datasets/db/dataset_location_repository.py b/backend/dataall/modules/datasets/db/dataset_location_repository.py index d5aaed547..bfac95845 100644 --- a/backend/dataall/modules/datasets/db/dataset_location_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_location_repository.py @@ -2,14 +2,8 @@ from sqlalchemy import and_, or_ -from dataall.core.context import get_context -from dataall.db.api import Glossary from dataall.db import paginate, exceptions -from dataall.modules.dataset_sharing.db.models import ShareObjectItem -from dataall.modules.dataset_sharing.db.share_object_repository import ShareItemSM -from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset -from dataall.modules.datasets.services.dataset_permissions import DELETE_DATASET_FOLDER logger = logging.getLogger(__name__) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index d925bde7c..20f698cef 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -197,10 +197,12 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): some functionality may work wrongly. 
""" expected_load = set() + # 1) Adds all modules to load for module in _all_modules(): if module.is_supported(modes) and module.name() in in_config: expected_load.add(module) + # 2) Add all dependencies to_add = list(expected_load) while to_add: new_to_add = [] @@ -215,6 +217,7 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): new_to_add.append(dependency) to_add = new_to_add + # 3) Checks all found ModuleInterfaces for module in _all_modules(): if module.is_supported(modes) and module not in expected_load: raise ImportError( @@ -222,11 +225,12 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): "Declare the module in depends_on" ) - loaded_module_names = {module.name() for module in expected_load} + # 4) Checks all references for modules (when ModuleInterfaces don't exist or not supported) + checked_module_names = {module.name() for module in expected_load} | in_config for module in sys.modules.keys(): if module.startswith(_MODULE_PREFIX) and module != __name__: # skip loader name = _get_module_name(module) - if name and name not in loaded_module_names: + if name and name not in checked_module_names: raise ImportError(f"The package {module} has been imported, but it doesn't contain ModuleInterface") From a1c51568f0ee1da34d0405116ce558855f42052c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 22 May 2023 17:20:41 +0200 Subject: [PATCH 219/346] DatasetSummary is not used --- .../modules/datasets/api/dataset/mutations.py | 12 -- .../modules/datasets/api/dataset/queries.py | 10 -- .../modules/datasets/api/dataset/resolvers.py | 10 -- .../datasets/services/dataset_service.py | 54 ------ frontend/src/api/Dataset/getDatasetSummary.js | 14 -- .../src/api/Dataset/saveDatasetSummary.js | 15 -- frontend/src/views/Datasets/DatasetSummary.js | 156 ------------------ 7 files changed, 271 deletions(-) delete mode 100644 frontend/src/api/Dataset/getDatasetSummary.js delete mode 100644 
frontend/src/api/Dataset/saveDatasetSummary.js delete mode 100644 frontend/src/views/Datasets/DatasetSummary.js diff --git a/backend/dataall/modules/datasets/api/dataset/mutations.py b/backend/dataall/modules/datasets/api/dataset/mutations.py index a006797ce..c2e5364f4 100644 --- a/backend/dataall/modules/datasets/api/dataset/mutations.py +++ b/backend/dataall/modules/datasets/api/dataset/mutations.py @@ -9,7 +9,6 @@ update_dataset, sync_tables, generate_dataset_access_token, - save_dataset_summary, delete_dataset, import_dataset, publish_dataset_update, @@ -51,17 +50,6 @@ ) -saveDatasetSummary = gql.MutationField( - name='saveDatasetSummary', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='content', type=gql.String), - ], - type=gql.Boolean, - resolver=save_dataset_summary, -) - - deleteDataset = gql.MutationField( name='deleteDataset', args=[ diff --git a/backend/dataall/modules/datasets/api/dataset/queries.py b/backend/dataall/modules/datasets/api/dataset/queries.py index 1d1cdb137..17295505a 100644 --- a/backend/dataall/modules/datasets/api/dataset/queries.py +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -5,7 +5,6 @@ list_datasets, get_dataset_assume_role_url, get_dataset_etl_credentials, - get_dataset_summary, get_file_upload_presigned_url, list_dataset_share_objects, list_datasets_owned_by_env_group, @@ -49,15 +48,6 @@ ) -getDatasetSummary = gql.QueryField( - name='getDatasetSummary', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_dataset_summary, - test_scope='Dataset', -) - - getDatasetPresignedUrl = gql.QueryField( name='getDatasetPresignedUrl', args=[ diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 388be6861..6af8c67e5 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ 
b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -150,16 +150,6 @@ def generate_dataset_access_token(context, source, datasetUri: str = None): return DatasetService.generate_dataset_access_token(uri=datasetUri) -def get_dataset_summary(context, source, datasetUri: str = None): - return DatasetService.get_dataset_summary(uri=datasetUri) - - -def save_dataset_summary( - context: Context, source, datasetUri: str = None, content: str = None -): - return DatasetService.save_dataset_summary(uri=datasetUri, content=content) - - def get_dataset_stack(context: Context, source: Dataset, **kwargs): if not source: return None diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 3ba1cac19..891890100 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -339,60 +339,6 @@ def generate_dataset_access_token(uri): return json.dumps(credentials) - @staticmethod - def get_dataset_summary(uri: str): - # TODO THERE WAS NO PERMISSION CHECK!!! 
- with get_context().db_engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - try: - s3.head_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{uri}/summary.md', - ) - response = s3.get_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{uri}/summary.md', - ) - content = str(response['Body'].read().decode('utf-8')) - return content - except Exception as e: - raise e - - @staticmethod - @has_resource_permission(SUMMARY_DATASET) - def save_dataset_summary(uri: str, content: str): - context = get_context() - with context.db_engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - s3.put_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{uri}/summary.md', - Body=content, - ) - return True - @staticmethod def get_dataset_stack(dataset: Dataset): return stack_helper.get_stack_with_cfn_resources( diff --git a/frontend/src/api/Dataset/getDatasetSummary.js b/frontend/src/api/Dataset/getDatasetSummary.js deleted file mode 100644 index 0e6ae38d1..000000000 --- a/frontend/src/api/Dataset/getDatasetSummary.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const 
getDatasetSummary = (datasetUri) => ({ - variables: { - datasetUri - }, - query: gql` - query GetDatasetSummary($datasetUri: String!) { - getDatasetSummary(datasetUri: $datasetUri) - } - ` -}); - -export default getDatasetSummary; diff --git a/frontend/src/api/Dataset/saveDatasetSummary.js b/frontend/src/api/Dataset/saveDatasetSummary.js deleted file mode 100644 index 46f508919..000000000 --- a/frontend/src/api/Dataset/saveDatasetSummary.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const saveDatasetSummary = ({ datasetUri, content }) => ({ - variables: { - datasetUri, - content - }, - mutation: gql` - mutation SaveDatasetSummary($datasetUri: String!, $content: String) { - saveDatasetSummary(datasetUri: $datasetUri, content: $content) - } - ` -}); - -export default saveDatasetSummary; diff --git a/frontend/src/views/Datasets/DatasetSummary.js b/frontend/src/views/Datasets/DatasetSummary.js deleted file mode 100644 index d68a29ad3..000000000 --- a/frontend/src/views/Datasets/DatasetSummary.js +++ /dev/null @@ -1,156 +0,0 @@ -/* import Markdown from 'react-markdown/with-html'; -import { Box, Button, CircularProgress, Container, Paper } from '@mui/material'; -import PropTypes from 'prop-types'; -import { useEffect, useState } from 'react'; -import { useSnackbar } from 'notistack'; -import { styled } from '@mui/styles'; -import { LoadingButton } from '@mui/lab'; -import SimpleMDE from 'react-simplemde-editor'; -import useClient from '../../hooks/useClient'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getDatasetSummary from '../../api/Dataset/getDatasetSummary'; -import saveDatasetSummary from '../../api/Dataset/saveDatasetSummary'; -import PencilAlt from '../../icons/PencilAlt'; - -const MarkdownWrapper = styled('div')(({ theme }) => ({ - color: theme.palette.text.primary, - fontFamily: theme.typography.fontFamily, - '& p': { - marginBottom: theme.spacing(2) - } -})); -const 
DatasetSummary = (props) => { - const { dataset } = props; - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [content, setContent] = useState(''); - const [isEditorMode, setIsEditorMode] = useState(false); - const [ready, setReady] = useState(false); - // const canEdit = ['BusinessOwner', 'Admin', 'DataSteward', 'Creator'].indexOf(dataset.userRoleForDataset) != -1; - - const handleChange = (value) => { - setContent(value); - }; - const fetchSummary = async () => { - setReady(false); - const response = await client.query(getDatasetSummary(dataset.datasetUri)); - if (!response.errors) { - setContent(response.data.getDatasetSummary === '' ? 'No content found' : response.data.getDatasetSummary); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setReady(true); - }; - - const saveSummary = async () => { - const response = await client.mutate(saveDatasetSummary({ datasetUri: props.dataset.datasetUri, content })); - if (!response.errors) { - enqueueSnackbar('Dataset summary saved', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - setIsEditorMode(false); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - useEffect(() => { - if (client) { - fetchSummary().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client]); - - if (!ready) { - return ; - } - return ( - - - {!isEditorMode && ( - - )} - - {!isEditorMode && ( - - - - - - - - - - )} - - {isEditorMode && ( - - - - - - {isEditorMode && ( - - - Save - - - - )} - - - - )} - - ); -}; - -DatasetSummary.propTypes = { - dataset: PropTypes.object.isRequired -}; - -export default DatasetSummary; */ From fd7da756c8d73533131265950ca0ef8f0927ad69 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 08:33:31 +0200 Subject: [PATCH 220/346] Bump requests from 
2.27.1 to 2.31.0 in /backend (#469) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [requests](https://github.com/psf/requests) from 2.27.1 to 2.31.0.
Release notes

Sourced from requests's releases.

v2.31.0

2.31.0 (2023-05-22)

Security

  • Versions of Requests between v2.3.0 and v2.30.0 are vulnerable to potential forwarding of Proxy-Authorization headers to destination servers when following HTTPS redirects.

    When proxies are defined with user info (https://user:pass@proxy:8080), Requests will construct a Proxy-Authorization header that is attached to the request to authenticate with the proxy.

    In cases where Requests receives a redirect response, it previously reattached the Proxy-Authorization header incorrectly, resulting in the value being sent through the tunneled connection to the destination server. Users who rely on defining their proxy credentials in the URL are strongly encouraged to upgrade to Requests 2.31.0+ to prevent unintentional leakage and rotate their proxy credentials once the change has been fully deployed.

    Users who do not use a proxy or do not supply their proxy credentials through the user information portion of their proxy URL are not subject to this vulnerability.

    Full details can be read in our Github Security Advisory and CVE-2023-32681.

v2.30.0

2.30.0 (2023-05-03)

Dependencies

v2.29.0

2.29.0 (2023-04-26)

Improvements

  • Requests now defers chunked requests to the urllib3 implementation to improve standardization. (#6226)
  • Requests relaxes header component requirements to support bytes/str subclasses. (#6356)

... (truncated)

Changelog

Sourced from requests's changelog.

2.31.0 (2023-05-22)

Security

  • Versions of Requests between v2.3.0 and v2.30.0 are vulnerable to potential forwarding of Proxy-Authorization headers to destination servers when following HTTPS redirects.

    When proxies are defined with user info (https://user:pass@proxy:8080), Requests will construct a Proxy-Authorization header that is attached to the request to authenticate with the proxy.

    In cases where Requests receives a redirect response, it previously reattached the Proxy-Authorization header incorrectly, resulting in the value being sent through the tunneled connection to the destination server. Users who rely on defining their proxy credentials in the URL are strongly encouraged to upgrade to Requests 2.31.0+ to prevent unintentional leakage and rotate their proxy credentials once the change has been fully deployed.

    Users who do not use a proxy or do not supply their proxy credentials through the user information portion of their proxy URL are not subject to this vulnerability.

    Full details can be read in our Github Security Advisory and CVE-2023-32681.

2.30.0 (2023-05-03)

Dependencies

2.29.0 (2023-04-26)

Improvements

  • Requests now defers chunked requests to the urllib3 implementation to improve standardization. (#6226)
  • Requests relaxes header component requirements to support bytes/str subclasses. (#6356)

2.28.2 (2023-01-12)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=requests&package-manager=pip&previous-version=2.27.1&new-version=2.31.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/awslabs/aws-dataall/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/requirements.txt b/backend/requirements.txt index ee569f2d8..16f1de1bc 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -11,7 +11,7 @@ PyAthena==2.3.0 pygresql==5.2.2 pyjwt==2.4.0 PyYAML==6.0 -requests==2.27.1 +requests==2.31.0 requests_aws4auth==1.1.1 sqlalchemy==1.3.24 starlette==0.27.0 \ No newline at end of file From 65ea17b429edb3bea1be3bb00c9af836fe603f0c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 08:34:11 +0200 Subject: [PATCH 221/346] Bump requests from 2.27.1 to 2.31.0 in /backend/dataall/cdkproxy (#470) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [requests](https://github.com/psf/requests) from 2.27.1 to 2.31.0.
Release notes

Sourced from requests's releases.

v2.31.0

2.31.0 (2023-05-22)

Security

  • Versions of Requests between v2.3.0 and v2.30.0 are vulnerable to potential forwarding of Proxy-Authorization headers to destination servers when following HTTPS redirects.

    When proxies are defined with user info (https://user:pass@proxy:8080), Requests will construct a Proxy-Authorization header that is attached to the request to authenticate with the proxy.

    In cases where Requests receives a redirect response, it previously reattached the Proxy-Authorization header incorrectly, resulting in the value being sent through the tunneled connection to the destination server. Users who rely on defining their proxy credentials in the URL are strongly encouraged to upgrade to Requests 2.31.0+ to prevent unintentional leakage and rotate their proxy credentials once the change has been fully deployed.

    Users who do not use a proxy or do not supply their proxy credentials through the user information portion of their proxy URL are not subject to this vulnerability.

    Full details can be read in our Github Security Advisory and CVE-2023-32681.

v2.30.0

2.30.0 (2023-05-03)

Dependencies

v2.29.0

2.29.0 (2023-04-26)

Improvements

  • Requests now defers chunked requests to the urllib3 implementation to improve standardization. (#6226)
  • Requests relaxes header component requirements to support bytes/str subclasses. (#6356)

... (truncated)

Changelog

Sourced from requests's changelog.

2.31.0 (2023-05-22)

Security

  • Versions of Requests between v2.3.0 and v2.30.0 are vulnerable to potential forwarding of Proxy-Authorization headers to destination servers when following HTTPS redirects.

    When proxies are defined with user info (https://user:pass@proxy:8080), Requests will construct a Proxy-Authorization header that is attached to the request to authenticate with the proxy.

    In cases where Requests receives a redirect response, it previously reattached the Proxy-Authorization header incorrectly, resulting in the value being sent through the tunneled connection to the destination server. Users who rely on defining their proxy credentials in the URL are strongly encouraged to upgrade to Requests 2.31.0+ to prevent unintentional leakage and rotate their proxy credentials once the change has been fully deployed.

    Users who do not use a proxy or do not supply their proxy credentials through the user information portion of their proxy URL are not subject to this vulnerability.

    Full details can be read in our Github Security Advisory and CVE-2023-32681.

2.30.0 (2023-05-03)

Dependencies

2.29.0 (2023-04-26)

Improvements

  • Requests now defers chunked requests to the urllib3 implementation to improve standardization. (#6226)
  • Requests relaxes header component requirements to support bytes/str subclasses. (#6356)

2.28.2 (2023-01-12)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=requests&package-manager=pip&previous-version=2.27.1&new-version=2.31.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/awslabs/aws-dataall/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/dataall/cdkproxy/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/cdkproxy/requirements.txt index b4630dc27..2cb7f41a4 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/cdkproxy/requirements.txt @@ -9,7 +9,7 @@ starlette==0.27.0 fastapi == 0.95.2 Flask==2.3.2 PyYAML==6.0 -requests==2.27.1 +requests==2.31.0 tabulate==0.8.9 uvicorn==0.15.0 jinja2==3.1.2 From b59cf9ee9bfa3db63960f874d885aa6bf76077a3 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Tue, 23 May 2023 15:40:09 +0200 Subject: [PATCH 222/346] hotfix: Remove GitHub template option from data.all Pipelines (#472) ### Feature or Bugfix - Bugfix ### Detail Remove the GitHub template development strategy from the possible types of data.all pipelines. The initial idea was to use the parameter `--template` from the [AWS DDK CLI](https://awslabs.github.io/aws-ddk/release/stable/api/cli/aws_ddk.html#ddk-init) which has been deprecated after its last major release (1.0.0). Using templates would enable customers to use any cookiecutter template directly in data.all. However, from the way that it was implemented it exposed a **vulnerability** in which customers could enter code instead of a template and perform cmd code injections in data.all ECS deployment task. Given that this is a high-risk issue + AWS DDK 1.0.0 does not use CLI + `templates` are not critical for any known customer we will remove it for the moment to ensure security. In the future we will revisit other ways of providing templates and accelerating data pipeline building in a secure manner. ### Relates By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../api/Objects/DataPipeline/input_types.py | 1 - .../api/Objects/DataPipeline/resolvers.py | 10 ----- backend/dataall/cdkproxy/cdk_cli_wrapper.py | 20 ++-------- .../cdkproxy/cdkpipeline/cdk_pipeline.py | 37 ++----------------- backend/dataall/db/api/pipeline.py | 2 +- documentation/userguide/docs/pipelines.md | 19 +--------- .../src/views/Pipelines/PipelineCreateForm.js | 22 +---------- frontend/src/views/Pipelines/PipelineList.js | 2 +- tests/api/conftest.py | 1 - tests/api/test_datapipelines.py | 1 - 10 files changed, 11 insertions(+), 104 deletions(-) diff --git a/backend/dataall/api/Objects/DataPipeline/input_types.py b/backend/dataall/api/Objects/DataPipeline/input_types.py index 07cf234fe..98ccf23b3 100644 --- a/backend/dataall/api/Objects/DataPipeline/input_types.py +++ b/backend/dataall/api/Objects/DataPipeline/input_types.py @@ -9,7 +9,6 @@ gql.Argument(name='SamlGroupName', type=gql.NonNullableType(gql.String)), gql.Argument(name='tags', type=gql.ArrayType(gql.String)), gql.Argument(name='devStrategy', type=gql.NonNullableType(gql.String)), - gql.Argument(name='template', type=gql.String) ], ) diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py index d5db551bb..e431a0cbe 100644 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ b/backend/dataall/api/Objects/DataPipeline/resolvers.py @@ -7,7 +7,6 @@ from ...context import Context from ....aws.handlers.service_handlers import Worker from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.codecommit import CodeCommit from ....db import permissions, models, exceptions from ....db.api import Pipeline, Environment, ResourcePolicy, Stack, KeyValueTag @@ -33,15 +32,6 @@ def create_pipeline(context: Context, source, input=None): target_label=pipeline.label, payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, ) - elif input['devStrategy'] == 'template': - Stack.create_stack( - session=session, 
- environment_uri=pipeline.environmentUri, - target_type='template', - target_uri=pipeline.DataPipelineUri, - target_label=pipeline.label, - payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, - ) else: Stack.create_stack( session=session, diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/cdkproxy/cdk_cli_wrapper.py index fccd192cf..d8fc98f8c 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/cdkproxy/cdk_cli_wrapper.py @@ -69,7 +69,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s stack.status = 'PENDING' session.commit() - if stack.stack == 'cdkpipeline' or stack.stack == 'template': + if stack.stack == 'cdkpipeline': cdkpipeline = CDKPipelineStack(stack.targetUri) venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) @@ -106,18 +106,6 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s 'AWS_SESSION_TOKEN': creds.get('Token'), } ) - if stack.stack == 'template': - resp = subprocess.run( - ['. 
~/.nvm/nvm.sh && cdk ls'], - cwd=cwd, - text=True, - shell=True, # nosec - encoding='utf-8', - stdout=subprocess.PIPE, - env=env, - ) - logger.info(f'CDK Apps: {resp.stdout}') - stack.name = resp.stdout.split('\n')[0] app_path = app_path or './app.py' @@ -150,9 +138,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s '--verbose', ] - if stack.stack == 'template' or stack.stack == 'cdkpipeline': - if stack.stack == 'template': - cmd.insert(0, f'source {venv_name}/bin/activate;') + if stack.stack == 'cdkpipeline': aws = SessionHelper.remote_session(stack.accountid) creds = aws.get_credentials() env.update( @@ -177,7 +163,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s env=env, cwd=cwd, ) - if stack.stack == 'cdkpipeline' or stack.stack == 'template': + if stack.stack == 'cdkpipeline': CDKPipelineStack.clean_up_repo(path=f'./{pipeline.repo}') if process.returncode == 0: diff --git a/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py b/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py index 114967db7..abb5fc036 100644 --- a/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py +++ b/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py @@ -45,7 +45,6 @@ def __init__(self, target_uri): self.env, aws = CDKPipelineStack._set_env_vars(self.pipeline_environment) self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) - template = self.pipeline.template try: codecommit_client = aws.client('codecommit', region_name=self.pipeline_environment.region) @@ -82,12 +81,9 @@ def __init__(self, target_uri): else: raise Exception except Exception as e: - if len(template): - self.venv_name = self.initialize_repo_template(template) - else: - self.venv_name = self.initialize_repo() - CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) - 
CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) + self.venv_name = self.initialize_repo() + CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) + CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) self.git_push_repo() def initialize_repo(self): @@ -114,33 +110,6 @@ def initialize_repo(self): return venv_name - def initialize_repo_template(self, template): - venv_name = ".venv" - cmd_init = [ - f"git clone {template} {self.pipeline.repo}", - f"cd {self.pipeline.repo}", - "rm -rf .git", - "git init --initial-branch main", - f"python3 -m venv {venv_name} && source {venv_name}/bin/activate", - "pip install -r requirements.txt", - f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" - ] - - logger.info(f"Running Commands: {'; '.join(cmd_init)}") - - process = subprocess.run( - '; '.join(cmd_init), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=self.code_dir_path, - env=self.env - ) - if process.returncode == 0: - logger.info("Successfully Initialized New CDK/DDK App") - - return venv_name - @staticmethod def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): json_envs = "" diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 6007be39d..75cfb47d1 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -63,7 +63,7 @@ def create_pipeline( region=environment.region, 
repo=slugify(data['label']), devStrategy=data['devStrategy'], - template=data['template'] if data['devStrategy'] == 'template' else "", + template="", ) session.add(pipeline) diff --git a/documentation/userguide/docs/pipelines.md b/documentation/userguide/docs/pipelines.md index 6ec6a2f40..478a467fa 100644 --- a/documentation/userguide/docs/pipelines.md +++ b/documentation/userguide/docs/pipelines.md @@ -42,7 +42,6 @@ data.all pipelines are created from the UI, under Pipelines. We need to fill the 1. [**CDK Pipelines - Trunk-based**](#CDK-Pipelines-Overview) : A CICD pipeline based on [CDK Pipelines library](https://docs.aws.amazon.com/cdk/api/v2/python/aws_cdk.pipelines/README.html). It defines a DDK Core construct which deploys Continuous Integration and Delivery for your app. Specifically, it provisions a stack containing a self-mutating CDK code pipeline to deploy one or more copies of your CDK applications using CloudFormation with a minimal amount of effort on your part. 2. [**CodePipeline - Trunk-based**](#CodePipeline-pipelines---Trunk-based-or-GitFlow-Overview) : A CICD pipeline similar to CDK Pipelines and with a trunk-based approach but is not self-mutating. 3. [**CodePipeline - Gitflow**](#CodePipeline-pipelines---Trunk-based-or-GitFlow-Overview): A Gitflow branching strategy where each branch of the source repository has a corresponding CICD Pipeline that deploys resources for that branches environment. - 4. [**GitHub Template**](#Github-Template-Pipelines-Overview) : This is a Bring-Your-Own-Template approach where users can specify they git clone path and deploy their own pipelines and IaC rather than using one of the previous 3 strategies. Finally, we need to add **Development environments**. These are the AWS accounts and regions where the infrastructure defined in the CICD pipeline is deployed. 
@@ -168,17 +167,7 @@ The `dev` pipeline reads from the `dev` branch of the repository: ![created_pipeline](pictures/pipelines/pip_cp_gitflow2.png#zoom#shadow) ---- -### Github Template Pipelines Overview - -This pipeline strategy takes a pre-defined IaC CDK Application that exists in a github repository and deploys the pipeline to be managed by data all. An AWS CodeCommit repository with the code of the github repository is created in the CICD environment AWS account. - -**NOTE: You may have to specify a access token in the HTTPS Clone Path of the Github Repository if the repository is private** - -data.all performs the inital deployment of this pipeline by running `cdk deploy` for the code now existing in AWS CodeCommit in the CICD environment. Adding development environments here is on the responsibility of the pipeline creator to align with the deployment environments specified in the cloned repository. - -![created_pipeline](pictures/pipelines/github_template_create.png#zoom#shadow) - +--- ## Editing a Data All Pipeline For users who would like to promote their pipeline deployments to new environments managed by data all, you can do so by first bootstrapping the new environment(s) to be deployed to (as mentioned in the [Pre-requisites](#Pre-requisites)) and then adding and/or editing the development environments. @@ -189,7 +178,6 @@ Based on pipeline use case, editing a data all pipeline's development environmen - **CDK Pipelines**: On update, the `ddk.json` and `app.py` will be edited to update the new development environment information. The self-mutating, CICD Pipeline will trigger and deploy to the new environments based on the source CodeCommit repository changes. - **CodePipelines - Trunk-based**: On update the `ddk.json` will be edited. 
A new `cdk deploy` will run to update the CICD CloudFormation Stack for the AWS CodePipeline to add the new stages required for the additional environment deployment(s) (as well as manual approval steps between stages in the code pipeline). You will see these updates to the CICD stack in CloudFormaiton of the CICD environment. - **CodePipelines - Gitflow**: On update the `ddk.json` will be edited. A new `cdk deploy` will run to update the CICD CloudFormation Stack to add the new AWS CodePipelines required for the additional environment deployment(s). You will see these updates to the CICD stack in CloudFormaiton of the CICD environment. -- **Github Template Pipelines**: Editing development environments **will NOT** re-deploy the application or update the CodeCommit repository. Editing of template pipeline's development environment(s) is the responsibility of the pipeline creator for proper data all pipeline management. ## Which development strategy should I choose? @@ -204,11 +192,6 @@ Based on pipeline use case, editing a data all pipeline's development environmen 2. Developers working on the pipeline cannot modify the CICD pipeline 3. Cross-account deployments require specific definition of the environment in the code. -**Github Template Pipelines** - -1. The aforementioned pipeline strategies do not align with your desired pipeline architecture -2. 
You already have pipelines IaC written in AWS CDK and ready to be deployed rather than creating pipeline(s) and developing from scratch - **Summary** diff --git a/frontend/src/views/Pipelines/PipelineCreateForm.js b/frontend/src/views/Pipelines/PipelineCreateForm.js index 1f3ffe456..00d195265 100644 --- a/frontend/src/views/Pipelines/PipelineCreateForm.js +++ b/frontend/src/views/Pipelines/PipelineCreateForm.js @@ -44,7 +44,7 @@ const PipelineCrateForm = (props) => { const [loading, setLoading] = useState(true); const [groupOptions, setGroupOptions] = useState([]); const [environmentOptions, setEnvironmentOptions] = useState([]); - const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"},{value:"template", label:"GitHub Template"}];/*DBT Pipelines*/ + const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"}];/*DBT Pipelines*/ const [triggerEnvSubmit, setTriggerEnvSubmit] = useState(false); const [countEnvironmentsValid, setCountEnvironmentsValid] = useState(false); const [pipelineUri, setPipelineUri] = useState(''); @@ -116,8 +116,7 @@ const PipelineCrateForm = (props) => { description: values.description, SamlGroupName: values.SamlGroupName, tags: values.tags, - devStrategy: values.devStrategy, - template: values.template + devStrategy: values.devStrategy } }) ); @@ -226,7 +225,6 @@ const PipelineCrateForm = (props) => { environment: '', tags: [], devStrategy: 'cdk-trunk', - template: '', }} validationSchema={Yup.object().shape({ label: Yup.string() @@ -238,7 +236,6 @@ const PipelineCrateForm = (props) => { environment: Yup.object(), devStrategy: Yup.string().required('*A CICD strategy is required'), tags: Yup.array().nullable(), - template: Yup.string().nullable(), })} onSubmit={async ( values, @@ -434,21 +431,6 @@ const 
PipelineCrateForm = (props) => { ))} - - {values.devStrategy === "template" && ( - - )} - diff --git a/frontend/src/views/Pipelines/PipelineList.js b/frontend/src/views/Pipelines/PipelineList.js index e3ff00c95..4ea57ba81 100644 --- a/frontend/src/views/Pipelines/PipelineList.js +++ b/frontend/src/views/Pipelines/PipelineList.js @@ -86,7 +86,7 @@ const PipelineList = () => { const [inputValue, setInputValue] = useState(''); const [loading, setLoading] = useState(true); const client = useClient(); - const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"},{value:"template", label:"GitHub Template"}];/*DBT Pipelines*/ + const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"}];/*DBT Pipelines*/ const [filterItems] = useState([{title:'DevStrategy', options: devOptions},{title:'Tags'},{title: 'Region', options: AwsRegions}]); const fetchItems = useCallback(async () => { diff --git a/tests/api/conftest.py b/tests/api/conftest.py index fa3be8ade..f3666d850 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -740,7 +740,6 @@ def pipeline(client, tenant, group, env_fixture) -> models.DataPipeline: 'tags': [group.name], 'environmentUri': env_fixture.environmentUri, 'devStrategy': 'trunk', - 'template': '', }, username='alice', groups=[group.name], diff --git a/tests/api/test_datapipelines.py b/tests/api/test_datapipelines.py index 81a8b4e03..9dcfd1446 100644 --- a/tests/api/test_datapipelines.py +++ b/tests/api/test_datapipelines.py @@ -40,7 +40,6 @@ def pipeline(client, tenant, group, env1): 'tags': [group.name], 'environmentUri': env1.environmentUri, 'devStrategy': 'trunk', - 'template': '' }, username='alice', groups=[group.name], From 3340610adb1f1897b3e8251f1b9b58b7ca578a90 Mon Sep 17 00:00:00 2001 From: David Mutune 
Kimengu <57294718+kimengu-david@users.noreply.github.com> Date: Wed, 24 May 2023 10:24:11 +0100 Subject: [PATCH 223/346] fix: Upgrade aurora engine version to 11.16 (#471) ### Feature or Bugfix - Bugfix ### Detail Update Aurora engine version to 11.16. Fixes an issue where the Aurora nested stack deployment in the data.all backend, which goes to the deployment account, would fail as AuroraPostgresEngineVersion.VER_10_18 is not compatible with parameter group default.aurora-postgresql11 ### Relates - https://github.com/awslabs/aws-dataall/pull/466 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- deploy/stacks/aurora.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/stacks/aurora.py b/deploy/stacks/aurora.py index 5a96bea1a..6fe1975b0 100644 --- a/deploy/stacks/aurora.py +++ b/deploy/stacks/aurora.py @@ -120,7 +120,7 @@ def __init__( self, f'AuroraDatabase{envname}', engine=rds.DatabaseClusterEngine.aurora_postgres( - version=rds.AuroraPostgresEngineVersion.VER_10_18 + version=rds.AuroraPostgresEngineVersion.VER_11_16 ), deletion_protection=True, cluster_identifier=f'{resource_prefix}-{envname}-db', From b44125801d76e53557df4400c5d63600eedb4e9d Mon Sep 17 00:00:00 2001 From: Gezim Musliaj <102723839+gmuslia@users.noreply.github.com> Date: Wed, 24 May 2023 15:20:51 +0200 Subject: [PATCH 224/346] Updated CDK Version to fix issue with cdkproxy/ dataset stack creations (#476) ### Feature or Bugfix - BugFix ### Detail - cdkproxy was using an outdated version of aws-cdk-lib which uses NODEJS_12_X for the AWS Custom Resources Lambda Functions, which are no longer supported in AWS accounts and cause CloudFormation stack creation to fail when you create a new DataSet Stack - The version change also triggered a minor type enforcement for the AccountPrincipal AccountId to be explicitly ```string``` ### Relates - 
https://github.com/awslabs/aws-dataall/issues/475 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- backend/dataall/cdkproxy/requirements.txt | 2 +- backend/dataall/cdkproxy/stacks/dataset.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/cdkproxy/requirements.txt index 2cb7f41a4..8a3e6fb98 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/cdkproxy/requirements.txt @@ -1,4 +1,4 @@ -aws-cdk-lib==2.20.0 +aws-cdk-lib==2.61.1 aws_cdk.aws_redshift_alpha==2.14.0a0 boto3==1.24.85 boto3-stubs==1.24.85 diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py index 410d4b79d..d5306f5f0 100644 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ b/backend/dataall/cdkproxy/stacks/dataset.py @@ -296,7 +296,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): iam.ServicePrincipal('sagemaker.amazonaws.com'), iam.ServicePrincipal('lambda.amazonaws.com'), iam.ServicePrincipal('ec2.amazonaws.com'), - iam.AccountPrincipal(os.environ.get('CURRENT_AWS_ACCOUNT')), + iam.AccountPrincipal(str(os.environ.get('CURRENT_AWS_ACCOUNT'))), iam.AccountPrincipal(dataset.AwsAccountId), iam.ArnPrincipal( f'arn:aws:iam::{dataset.AwsAccountId}:role/{self.pivot_role_name}' From 8175d105c217b860ba3d443baebdaed873621ae4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 24 May 2023 16:11:30 +0200 Subject: [PATCH 225/346] Created dashboard module --- backend/dataall/modules/dashboards/__init__.py | 17 +++++++++++++++++ .../dataall/modules/dashboards/api/__init__.py | 0 config.json | 3 +++ 3 files changed, 20 insertions(+) create mode 100644 backend/dataall/modules/dashboards/__init__.py create mode 100644 backend/dataall/modules/dashboards/api/__init__.py diff --git a/backend/dataall/modules/dashboards/__init__.py 
b/backend/dataall/modules/dashboards/__init__.py new file mode 100644 index 000000000..2dddbce24 --- /dev/null +++ b/backend/dataall/modules/dashboards/__init__.py @@ -0,0 +1,17 @@ +"""Contains the code related to dashboards""" +import logging + +from dataall.modules.loader import ImportMode, ModuleInterface + +log = logging.getLogger(__name__) + + +class DashboardApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for dashboard GraphQl lambda""" + + @staticmethod + def is_supported(modes): + return ImportMode.API in modes + + def __init__(self): + import dataall.modules.dashboards.api diff --git a/backend/dataall/modules/dashboards/api/__init__.py b/backend/dataall/modules/dashboards/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/config.json b/config.json index 6f60f494f..c8fa11ab5 100644 --- a/config.json +++ b/config.json @@ -8,6 +8,9 @@ }, "worksheets": { "active": true + }, + "dashboards": { + "active": true } } } \ No newline at end of file From faebdafa3c496c33508a56bcec5e88e7de5e4f62 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 24 May 2023 16:13:33 +0200 Subject: [PATCH 226/346] Moved dashboard api into modules --- .../dataall/api/Objects/Dashboard/__init__.py | 9 --------- .../dataall/modules/dashboards/api/__init__.py | 9 +++++++++ .../dashboards/api}/input_types.py | 2 +- .../dashboards/api}/mutations.py | 4 ++-- .../dashboards/api}/queries.py | 4 ++-- .../dashboards/api}/resolvers.py | 17 ++++++++--------- .../dashboards/api}/schema.py | 6 +++--- 7 files changed, 25 insertions(+), 26 deletions(-) delete mode 100644 backend/dataall/api/Objects/Dashboard/__init__.py rename backend/dataall/{api/Objects/Dashboard => modules/dashboards/api}/input_types.py (98%) rename backend/dataall/{api/Objects/Dashboard => modules/dashboards/api}/mutations.py (95%) rename backend/dataall/{api/Objects/Dashboard => modules/dashboards/api}/queries.py (93%) rename backend/dataall/{api/Objects/Dashboard => 
modules/dashboards/api}/resolvers.py (96%) rename backend/dataall/{api/Objects/Dashboard => modules/dashboards/api}/schema.py (95%) diff --git a/backend/dataall/api/Objects/Dashboard/__init__.py b/backend/dataall/api/Objects/Dashboard/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Dashboard/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/dashboards/api/__init__.py b/backend/dataall/modules/dashboards/api/__init__.py index e69de29bb..dfa46b264 100644 --- a/backend/dataall/modules/dashboards/api/__init__.py +++ b/backend/dataall/modules/dashboards/api/__init__.py @@ -0,0 +1,9 @@ +from . import ( + input_types, + mutations, + queries, + resolvers, + schema, +) + +__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Dashboard/input_types.py b/backend/dataall/modules/dashboards/api/input_types.py similarity index 98% rename from backend/dataall/api/Objects/Dashboard/input_types.py rename to backend/dataall/modules/dashboards/api/input_types.py index 1686c31e3..93e6cb5c1 100644 --- a/backend/dataall/api/Objects/Dashboard/input_types.py +++ b/backend/dataall/modules/dashboards/api/input_types.py @@ -1,4 +1,4 @@ -from ... import gql +from dataall.api import gql ImportDashboardInput = gql.InputType( name='ImportDashboardInput', diff --git a/backend/dataall/api/Objects/Dashboard/mutations.py b/backend/dataall/modules/dashboards/api/mutations.py similarity index 95% rename from backend/dataall/api/Objects/Dashboard/mutations.py rename to backend/dataall/modules/dashboards/api/mutations.py index 7af472838..5e7072d65 100644 --- a/backend/dataall/api/Objects/Dashboard/mutations.py +++ b/backend/dataall/modules/dashboards/api/mutations.py @@ -1,5 +1,5 @@ -from ... 
import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.dashboards.api.resolvers import * importDashboard = gql.MutationField( diff --git a/backend/dataall/api/Objects/Dashboard/queries.py b/backend/dataall/modules/dashboards/api/queries.py similarity index 93% rename from backend/dataall/api/Objects/Dashboard/queries.py rename to backend/dataall/modules/dashboards/api/queries.py index d8d3b9982..c690d52ce 100644 --- a/backend/dataall/api/Objects/Dashboard/queries.py +++ b/backend/dataall/modules/dashboards/api/queries.py @@ -1,5 +1,5 @@ -from ... import gql -from .resolvers import * +from dataall.api import gql +from dataall.modules.dashboards.api.resolvers import * searchDashboards = gql.QueryField( name='searchDashboards', diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py similarity index 96% rename from backend/dataall/api/Objects/Dashboard/resolvers.py rename to backend/dataall/modules/dashboards/api/resolvers.py index 94372f5d1..b8470861c 100644 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -1,13 +1,12 @@ import os -from .... 
import db -from ....api.constants import DashboardRole -from ....api.context import Context -from ....aws.handlers.quicksight import Quicksight -from ....aws.handlers.parameter_store import ParameterStoreManager -from ....db import permissions, models -from ....db.api import ResourcePolicy, Glossary, Vote -from ....searchproxy import indexers -from ....utils import Parameter +from dataall import db +from dataall.api.constants import DashboardRole +from dataall.api.context import Context +from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.parameter_store import ParameterStoreManager +from dataall.db import permissions, models +from dataall.db.api import ResourcePolicy, Glossary, Vote +from dataall.utils import Parameter from dataall.searchproxy.indexers import DashboardIndexer param_store = Parameter() diff --git a/backend/dataall/api/Objects/Dashboard/schema.py b/backend/dataall/modules/dashboards/api/schema.py similarity index 95% rename from backend/dataall/api/Objects/Dashboard/schema.py rename to backend/dataall/modules/dashboards/api/schema.py index 58b6a30cb..9ef1682cb 100644 --- a/backend/dataall/api/Objects/Dashboard/schema.py +++ b/backend/dataall/modules/dashboards/api/schema.py @@ -1,6 +1,6 @@ -from ... 
import gql -from .resolvers import * -from ...constants import DashboardRole +from dataall.api import gql +from dataall.modules.dashboards.api.resolvers import * +from dataall.api.constants import DashboardRole from dataall.api.Objects.Environment.resolvers import resolve_environment From 92f6ca8510f0c72f0efda2cf888851b442bc2f3b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 24 May 2023 16:18:00 +0200 Subject: [PATCH 227/346] Created DashboardRepository --- backend/dataall/db/api/__init__.py | 1 - .../modules/dashboards/api/resolvers.py | 33 ++++++++++--------- .../dataall/modules/dashboards/db/__init__.py | 0 .../dashboards/db/dashboard_repository.py} | 24 +++++++------- .../modules/dashboards/services/__init__.py | 0 5 files changed, 29 insertions(+), 29 deletions(-) create mode 100644 backend/dataall/modules/dashboards/db/__init__.py rename backend/dataall/{db/api/dashboard.py => modules/dashboards/db/dashboard_repository.py} (94%) create mode 100644 backend/dataall/modules/dashboards/services/__init__.py diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py index c1b1dd964..3242a0a9e 100644 --- a/backend/dataall/db/api/__init__.py +++ b/backend/dataall/db/api/__init__.py @@ -14,5 +14,4 @@ from .redshift_cluster import RedshiftCluster from .vpc import Vpc from .sgm_studio_notebook import SgmStudioNotebook -from .dashboard import Dashboard from .pipeline import Pipeline diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index b8470861c..9fe909154 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -6,6 +6,7 @@ from dataall.aws.handlers.parameter_store import ParameterStoreManager from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary, Vote +from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.utils 
import Parameter from dataall.searchproxy.indexers import DashboardIndexer @@ -42,7 +43,7 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): domain_name=DOMAIN_URL, ) else: - shared_groups = db.api.Dashboard.query_all_user_groups_shareddashboard( + shared_groups = DashboardRepository.query_all_user_groups_shareddashboard( session=session, username=context.username, groups=context.groups, @@ -137,7 +138,7 @@ def import_dashboard(context: Context, source, input: dict = None): ) input['environment'] = env - dashboard = db.api.Dashboard.import_dashboard( + dashboard = DashboardRepository.import_dashboard( session=session, username=context.username, groups=context.groups, @@ -153,11 +154,11 @@ def import_dashboard(context: Context, source, input: dict = None): def update_dashboard(context, source, input: dict = None): with context.engine.scoped_session() as session: - dashboard = db.api.Dashboard.get_dashboard_by_uri( + dashboard = DashboardRepository.get_dashboard_by_uri( session, input['dashboardUri'] ) input['dashboard'] = dashboard - db.api.Dashboard.update_dashboard( + DashboardRepository.update_dashboard( session=session, username=context.username, groups=context.groups, @@ -175,7 +176,7 @@ def list_dashboards(context: Context, source, filter: dict = None): if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.Dashboard.paginated_user_dashboards( + return DashboardRepository.paginated_user_dashboards( session=session, username=context.username, groups=context.groups, @@ -187,7 +188,7 @@ def list_dashboards(context: Context, source, filter: dict = None): def get_dashboard(context: Context, source, dashboardUri: str = None): with context.engine.scoped_session() as session: - return db.api.Dashboard.get_dashboard( + return DashboardRepository.get_dashboard( session=session, username=context.username, groups=context.groups, @@ -218,7 +219,7 @@ def request_dashboard_share( dashboardUri: str = None, ): with 
context.engine.scoped_session() as session: - return db.api.Dashboard.request_dashboard_share( + return DashboardRepository.request_dashboard_share( session=session, username=context.username, groups=context.groups, @@ -234,9 +235,9 @@ def approve_dashboard_share( shareUri: str = None, ): with context.engine.scoped_session() as session: - share = db.api.Dashboard.get_dashboard_share_by_uri(session, shareUri) - dashboard = db.api.Dashboard.get_dashboard_by_uri(session, share.dashboardUri) - return db.api.Dashboard.approve_dashboard_share( + share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) + return DashboardRepository.approve_dashboard_share( session=session, username=context.username, groups=context.groups, @@ -252,9 +253,9 @@ def reject_dashboard_share( shareUri: str = None, ): with context.engine.scoped_session() as session: - share = db.api.Dashboard.get_dashboard_share_by_uri(session, shareUri) - dashboard = db.api.Dashboard.get_dashboard_by_uri(session, share.dashboardUri) - return db.api.Dashboard.reject_dashboard_share( + share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) + return DashboardRepository.reject_dashboard_share( session=session, username=context.username, groups=context.groups, @@ -273,7 +274,7 @@ def list_dashboard_shares( if not filter: filter = {} with context.engine.scoped_session() as session: - return db.api.Dashboard.paginated_dashboard_shares( + return DashboardRepository.paginated_dashboard_shares( session=session, username=context.username, groups=context.groups, @@ -290,7 +291,7 @@ def share_dashboard( dashboardUri: str = None, ): with context.engine.scoped_session() as session: - return db.api.Dashboard.share_dashboard( + return DashboardRepository.share_dashboard( session=session, username=context.username, 
groups=context.groups, @@ -302,7 +303,7 @@ def share_dashboard( def delete_dashboard(context: Context, source, dashboardUri: str = None): with context.engine.scoped_session() as session: - db.api.Dashboard.delete_dashboard( + DashboardRepository.delete_dashboard( session=session, username=context.username, groups=context.groups, diff --git a/backend/dataall/modules/dashboards/db/__init__.py b/backend/dataall/modules/dashboards/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/db/api/dashboard.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py similarity index 94% rename from backend/dataall/db/api/dashboard.py rename to backend/dataall/modules/dashboards/db/dashboard_repository.py index bf6950002..58da82d9e 100644 --- a/backend/dataall/db/api/dashboard.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -3,8 +3,8 @@ from sqlalchemy import or_, and_ from sqlalchemy.orm import Query -from .. import models, exceptions, permissions, paginate -from . 
import ( +from dataall.db import models, exceptions, permissions, paginate +from dataall.db.api import ( Environment, has_tenant_perm, has_resource_perm, @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) -class Dashboard: +class DashboardRepository: @staticmethod @has_tenant_perm(permissions.MANAGE_DASHBOARDS) @has_resource_perm(permissions.CREATE_DASHBOARD) @@ -76,7 +76,7 @@ def import_dashboard( ) session.add(activity) - Dashboard.set_dashboard_resource_policy( + DashboardRepository.set_dashboard_resource_policy( session, env, dashboard, data['SamlGroupName'] ) @@ -119,7 +119,7 @@ def get_dashboard( data: dict = None, check_perm: bool = False, ) -> models.Dashboard: - return Dashboard.get_dashboard_by_uri(session, uri) + return DashboardRepository.get_dashboard_by_uri(session, uri) @staticmethod def get_dashboard_by_uri(session, uri) -> models.Dashboard: @@ -162,7 +162,7 @@ def paginated_user_dashboards( session, username, groups, uri, data=None, check_perm=None ) -> dict: return paginate( - query=Dashboard.query_user_dashboards(session, username, groups, data), + query=DashboardRepository.query_user_dashboards(session, username, groups, data), page=data.get('page', 1), page_size=data.get('pageSize', 10), ).to_dict() @@ -220,7 +220,7 @@ def paginated_dashboard_shares( session, username, groups, uri, data=None, check_perm=None ) -> dict: return paginate( - query=Dashboard.query_dashboard_shares( + query=DashboardRepository.query_dashboard_shares( session, username, groups, uri, data ), page=data.get('page', 1), @@ -241,7 +241,7 @@ def update_dashboard( dashboard = data.get( 'dashboard', - Dashboard.get_dashboard_by_uri(session, data['dashboardUri']), + DashboardRepository.get_dashboard_by_uri(session, data['dashboardUri']), ) for k in data.keys(): @@ -258,7 +258,7 @@ def update_dashboard( environment: models.Environment = Environment.get_environment_by_uri( session, dashboard.environmentUri ) - Dashboard.set_dashboard_resource_policy( + 
DashboardRepository.set_dashboard_resource_policy( session, environment, dashboard, dashboard.SamlGroupName ) return dashboard @@ -267,7 +267,7 @@ def update_dashboard( def delete_dashboard( session, username, groups, uri, data=None, check_perm=None ) -> bool: - dashboard = Dashboard.get_dashboard_by_uri(session, uri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) session.delete(dashboard) ResourcePolicy.delete_resource_policy( session=session, resource_uri=uri, group=dashboard.SamlGroupName @@ -289,7 +289,7 @@ def request_dashboard_share( data: dict = None, check_perm: bool = False, ) -> models.DashboardShare: - dashboard = Dashboard.get_dashboard_by_uri(session, uri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) if dashboard.SamlGroupName == data['principalId']: raise exceptions.UnauthorizedOperation( action=permissions.CREATE_DASHBOARD, @@ -408,7 +408,7 @@ def share_dashboard( check_perm: bool = False, ) -> models.DashboardShare: - dashboard = Dashboard.get_dashboard_by_uri(session, uri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) share = models.DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, diff --git a/backend/dataall/modules/dashboards/services/__init__.py b/backend/dataall/modules/dashboards/services/__init__.py new file mode 100644 index 000000000..e69de29bb From d528544f5ec75c61f63826890f2381a9c4ef43ef Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 24 May 2023 16:27:21 +0200 Subject: [PATCH 228/346] Moved dashboard models to the dashboard module --- backend/dataall/db/models/DashboardShare.py | 24 ---- backend/dataall/db/models/__init__.py | 3 - .../modules/dashboards/api/resolvers.py | 21 +-- .../dashboards/db/dashboard_repository.py | 121 +++++++++--------- .../dashboards/db/models.py} | 23 +++- 5 files changed, 94 insertions(+), 98 deletions(-) delete mode 100644 backend/dataall/db/models/DashboardShare.py rename 
backend/dataall/{db/models/Dashboard.py => modules/dashboards/db/models.py} (54%) diff --git a/backend/dataall/db/models/DashboardShare.py b/backend/dataall/db/models/DashboardShare.py deleted file mode 100644 index a8d25dca0..000000000 --- a/backend/dataall/db/models/DashboardShare.py +++ /dev/null @@ -1,24 +0,0 @@ -from enum import Enum - -from sqlalchemy import Column, String - -from .. import Base, utils - - -class DashboardShareStatus(Enum): - REQUESTED = 'REQUESTED' - APPROVED = 'APPROVED' - REJECTED = 'REJECTED' - - -class DashboardShare(Base): - __tablename__ = 'dashboardshare' - shareUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('shareddashboard') - ) - dashboardUri = Column(String, nullable=False, default=utils.uuid('dashboard')) - SamlGroupName = Column(String, nullable=False) - owner = Column(String, nullable=True) - status = Column( - String, nullable=False, default=DashboardShareStatus.REQUESTED.value - ) diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py index 4e7e9a8b6..beb40102c 100644 --- a/backend/dataall/db/models/__init__.py +++ b/backend/dataall/db/models/__init__.py @@ -1,9 +1,6 @@ from .Enums import * from .Activity import Activity from .KeyValueTag import KeyValueTag -from .Dashboard import Dashboard -from .DashboardShare import DashboardShare -from .DashboardShare import DashboardShareStatus from .Environment import Environment from .EnvironmentGroup import EnvironmentGroup from .FeedMessage import FeedMessage diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 9fe909154..0dfc65e82 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -7,6 +7,7 @@ from dataall.db import permissions, models from dataall.db.api import ResourcePolicy, Glossary, Vote from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository 
+from dataall.modules.dashboards.db.models import Dashboard from dataall.utils import Parameter from dataall.searchproxy.indexers import DashboardIndexer @@ -18,7 +19,7 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): with context.engine.scoped_session() as session: - dash: models.Dashboard = session.query(models.Dashboard).get(dashboardUri) + dash: Dashboard = session.query(Dashboard).get(dashboardUri) env: models.Environment = session.query(models.Environment).get( dash.environmentUri ) @@ -198,7 +199,7 @@ def get_dashboard(context: Context, source, dashboardUri: str = None): ) -def resolve_user_role(context: Context, source: models.Dashboard): +def resolve_user_role(context: Context, source: Dashboard): if context.username and source.owner == context.username: return DashboardRole.Creator.value elif context.groups and source.SamlGroupName in context.groups: @@ -206,7 +207,7 @@ def resolve_user_role(context: Context, source: models.Dashboard): return DashboardRole.Shared.value -def get_dashboard_organization(context: Context, source: models.Dashboard, **kwargs): +def get_dashboard_organization(context: Context, source: Dashboard, **kwargs): with context.engine.scoped_session() as session: org = session.query(models.Organization).get(source.organizationUri) return org @@ -214,7 +215,7 @@ def get_dashboard_organization(context: Context, source: models.Dashboard, **kwa def request_dashboard_share( context: Context, - source: models.Dashboard, + source: Dashboard, principalId: str = None, dashboardUri: str = None, ): @@ -231,7 +232,7 @@ def request_dashboard_share( def approve_dashboard_share( context: Context, - source: models.Dashboard, + source: Dashboard, shareUri: str = None, ): with context.engine.scoped_session() as session: @@ -249,7 +250,7 @@ def approve_dashboard_share( def reject_dashboard_share( context: Context, - source: models.Dashboard, + source: Dashboard, shareUri: str = None, ): with context.engine.scoped_session() as 
session: @@ -267,7 +268,7 @@ def reject_dashboard_share( def list_dashboard_shares( context: Context, - source: models.Dashboard, + source: Dashboard, dashboardUri: str = None, filter: dict = None, ): @@ -286,7 +287,7 @@ def list_dashboard_shares( def share_dashboard( context: Context, - source: models.Dashboard, + source: Dashboard, principalId: str = None, dashboardUri: str = None, ): @@ -315,14 +316,14 @@ def delete_dashboard(context: Context, source, dashboardUri: str = None): return True -def resolve_glossary_terms(context: Context, source: models.Dashboard, **kwargs): +def resolve_glossary_terms(context: Context, source: Dashboard, **kwargs): with context.engine.scoped_session() as session: return Glossary.get_glossary_terms_links( session, source.dashboardUri, 'Dashboard' ) -def resolve_upvotes(context: Context, source: models.Dashboard, **kwargs): +def resolve_upvotes(context: Context, source: Dashboard, **kwargs): with context.engine.scoped_session() as session: return Vote.count_upvotes( session, None, None, source.dashboardUri, data={'targetType': 'dashboard'} diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index 58da82d9e..41984e8a5 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -12,6 +12,7 @@ Glossary, Vote, ) +from dataall.modules.dashboards.db.models import DashboardShare, DashboardShareStatus, Dashboard logger = logging.getLogger(__name__) @@ -27,7 +28,7 @@ def import_dashboard( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.Dashboard: + ) -> Dashboard: if not data: raise exceptions.RequiredParameter(data) if not data.get('environmentUri'): @@ -51,7 +52,7 @@ def import_dashboard( env: models.Environment = data.get( 'environment', Environment.get_environment_by_uri(session, uri) ) - dashboard: models.Dashboard = models.Dashboard( + dashboard: 
Dashboard = Dashboard( label=data.get('label', 'untitled'), environmentUri=data.get('environmentUri'), organizationUri=env.organizationUri, @@ -97,7 +98,7 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): group=group, permissions=permissions.DASHBOARD_ALL, resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, + resource_type=Dashboard.__name__, ) if environment.SamlGroupName != dashboard.SamlGroupName: ResourcePolicy.attach_resource_policy( @@ -105,7 +106,7 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): group=environment.SamlGroupName, permissions=permissions.DASHBOARD_ALL, resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, + resource_type=Dashboard.__name__, ) @staticmethod @@ -118,12 +119,12 @@ def get_dashboard( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.Dashboard: + ) -> Dashboard: return DashboardRepository.get_dashboard_by_uri(session, uri) @staticmethod - def get_dashboard_by_uri(session, uri) -> models.Dashboard: - dashboard: models.Dashboard = session.query(models.Dashboard).get(uri) + def get_dashboard_by_uri(session, uri) -> Dashboard: + dashboard: Dashboard = session.query(Dashboard).get(uri) if not dashboard: raise exceptions.ObjectNotFound('Dashboard', uri) return dashboard @@ -131,19 +132,19 @@ def get_dashboard_by_uri(session, uri) -> models.Dashboard: @staticmethod def query_user_dashboards(session, username, groups, filter) -> Query: query = ( - session.query(models.Dashboard) + session.query(Dashboard) .outerjoin( - models.DashboardShare, - models.Dashboard.dashboardUri == models.DashboardShare.dashboardUri, + DashboardShare, + Dashboard.dashboardUri == DashboardShare.dashboardUri, ) .filter( or_( - models.Dashboard.owner == username, - models.Dashboard.SamlGroupName.in_(groups), + Dashboard.owner == username, + Dashboard.SamlGroupName.in_(groups), and_( - models.DashboardShare.SamlGroupName.in_(groups), - 
models.DashboardShare.status - == models.DashboardShareStatus.APPROVED.value, + DashboardShare.SamlGroupName.in_(groups), + DashboardShare.status + == DashboardShareStatus.APPROVED.value, ), ) ) @@ -151,8 +152,8 @@ def query_user_dashboards(session, username, groups, filter) -> Query: if filter and filter.get('term'): query = query.filter( or_( - models.Dashboard.description.ilike(filter.get('term') + '%%'), - models.Dashboard.label.ilike(filter.get('term') + '%%'), + Dashboard.description.ilike(filter.get('term') + '%%'), + Dashboard.label.ilike(filter.get('term') + '%%'), ) ) return query @@ -170,17 +171,17 @@ def paginated_user_dashboards( @staticmethod def query_dashboard_shares(session, username, groups, uri, filter) -> Query: query = ( - session.query(models.DashboardShare) + session.query(DashboardShare) .join( - models.Dashboard, - models.Dashboard.dashboardUri == models.DashboardShare.dashboardUri, + Dashboard, + Dashboard.dashboardUri == DashboardShare.dashboardUri, ) .filter( and_( - models.DashboardShare.dashboardUri == uri, + DashboardShare.dashboardUri == uri, or_( - models.Dashboard.owner == username, - models.Dashboard.SamlGroupName.in_(groups), + Dashboard.owner == username, + Dashboard.SamlGroupName.in_(groups), ), ) ) @@ -188,10 +189,10 @@ def query_dashboard_shares(session, username, groups, uri, filter) -> Query: if filter and filter.get('term'): query = query.filter( or_( - models.DashboardShare.SamlGroupName.ilike( + DashboardShare.SamlGroupName.ilike( filter.get('term') + '%%' ), - models.Dashboard.label.ilike(filter.get('term') + '%%'), + Dashboard.label.ilike(filter.get('term') + '%%'), ) ) return query @@ -199,11 +200,11 @@ def query_dashboard_shares(session, username, groups, uri, filter) -> Query: @staticmethod def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Query: query = ( - session.query(models.DashboardShare) + session.query(DashboardShare) .filter( and_( - models.DashboardShare.dashboardUri == uri, - 
models.DashboardShare.SamlGroupName.in_(groups), + DashboardShare.dashboardUri == uri, + DashboardShare.SamlGroupName.in_(groups), ) ) ) @@ -237,7 +238,7 @@ def update_dashboard( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.Dashboard: + ) -> Dashboard: dashboard = data.get( 'dashboard', @@ -288,35 +289,35 @@ def request_dashboard_share( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DashboardShare: + ) -> DashboardShare: dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) if dashboard.SamlGroupName == data['principalId']: raise exceptions.UnauthorizedOperation( action=permissions.CREATE_DASHBOARD, message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', ) - share: models.DashboardShare = ( - session.query(models.DashboardShare) + share: DashboardShare = ( + session.query(DashboardShare) .filter( - models.DashboardShare.dashboardUri == uri, - models.DashboardShare.SamlGroupName == data['principalId'], + DashboardShare.dashboardUri == uri, + DashboardShare.SamlGroupName == data['principalId'], ) .first() ) if not share: - share = models.DashboardShare( + share = DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, SamlGroupName=data['principalId'], - status=models.DashboardShareStatus.REQUESTED.value, + status=DashboardShareStatus.REQUESTED.value, ) session.add(share) else: - if share.status not in models.DashboardShareStatus.__members__: + if share.status not in DashboardShareStatus.__members__: raise exceptions.InvalidInput( 'Share status', share.status, - str(models.DashboardShareStatus.__members__), + str(DashboardShareStatus.__members__), ) if share.status == 'REJECTED': share.status = 'REQUESTED' @@ -333,29 +334,29 @@ def approve_dashboard_share( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DashboardShare: + ) -> DashboardShare: - share: models.DashboardShare = data.get( - 'share', 
session.query(models.DashboardShare).get(data['shareUri']) + share: DashboardShare = data.get( + 'share', session.query(DashboardShare).get(data['shareUri']) ) - if share.status not in models.DashboardShareStatus.__members__: + if share.status not in DashboardShareStatus.__members__: raise exceptions.InvalidInput( 'Share status', share.status, - str(models.DashboardShareStatus.__members__), + str(DashboardShareStatus.__members__), ) - if share.status == models.DashboardShareStatus.APPROVED.value: + if share.status == DashboardShareStatus.APPROVED.value: return share - share.status = models.DashboardShareStatus.APPROVED.value + share.status = DashboardShareStatus.APPROVED.value ResourcePolicy.attach_resource_policy( session=session, group=share.SamlGroupName, permissions=[permissions.GET_DASHBOARD], resource_uri=share.dashboardUri, - resource_type=models.Dashboard.__name__, + resource_type=Dashboard.__name__, ) return share @@ -370,28 +371,28 @@ def reject_dashboard_share( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DashboardShare: + ) -> DashboardShare: - share: models.DashboardShare = data.get( - 'share', session.query(models.DashboardShare).get(data['shareUri']) + share: DashboardShare = data.get( + 'share', session.query(DashboardShare).get(data['shareUri']) ) - if share.status not in models.DashboardShareStatus.__members__: + if share.status not in DashboardShareStatus.__members__: raise exceptions.InvalidInput( 'Share status', share.status, - str(models.DashboardShareStatus.__members__), + str(DashboardShareStatus.__members__), ) - if share.status == models.DashboardShareStatus.REJECTED.value: + if share.status == DashboardShareStatus.REJECTED.value: return share - share.status = models.DashboardShareStatus.REJECTED.value + share.status = DashboardShareStatus.REJECTED.value ResourcePolicy.delete_resource_policy( session=session, group=share.SamlGroupName, resource_uri=share.dashboardUri, - resource_type=models.Dashboard.__name__, + 
resource_type=Dashboard.__name__, ) return share @@ -406,14 +407,14 @@ def share_dashboard( uri: str, data: dict = None, check_perm: bool = False, - ) -> models.DashboardShare: + ) -> DashboardShare: dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) - share = models.DashboardShare( + share = DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, SamlGroupName=data['principalId'], - status=models.DashboardShareStatus.APPROVED.value, + status=DashboardShareStatus.APPROVED.value, ) session.add(share) ResourcePolicy.attach_resource_policy( @@ -421,13 +422,13 @@ def share_dashboard( group=data['principalId'], permissions=[permissions.GET_DASHBOARD], resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, + resource_type=Dashboard.__name__, ) return share @staticmethod - def get_dashboard_share_by_uri(session, uri) -> models.DashboardShare: - share: models.DashboardShare = session.query(models.DashboardShare).get(uri) + def get_dashboard_share_by_uri(session, uri) -> DashboardShare: + share: DashboardShare = session.query(DashboardShare).get(uri) if not share: raise exceptions.ObjectNotFound('DashboardShare', uri) return share diff --git a/backend/dataall/db/models/Dashboard.py b/backend/dataall/modules/dashboards/db/models.py similarity index 54% rename from backend/dataall/db/models/Dashboard.py rename to backend/dataall/modules/dashboards/db/models.py index 0b12ecd96..2c5946f6e 100644 --- a/backend/dataall/db/models/Dashboard.py +++ b/backend/dataall/modules/dashboards/db/models.py @@ -1,7 +1,28 @@ +from enum import Enum + from sqlalchemy import Column, String, ForeignKey from sqlalchemy.orm import query_expression -from .. 
import Base, Resource, utils +from dataall.db import Base, Resource, utils + + +class DashboardShareStatus(Enum): + REQUESTED = 'REQUESTED' + APPROVED = 'APPROVED' + REJECTED = 'REJECTED' + + +class DashboardShare(Base): + __tablename__ = 'dashboardshare' + shareUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('shareddashboard') + ) + dashboardUri = Column(String, nullable=False, default=utils.uuid('dashboard')) + SamlGroupName = Column(String, nullable=False) + owner = Column(String, nullable=True) + status = Column( + String, nullable=False, default=DashboardShareStatus.REQUESTED.value + ) class Dashboard(Resource, Base): From c3e2b3715c541fc0e026bf88e30e57d54b2f0934 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 24 May 2023 16:42:23 +0200 Subject: [PATCH 229/346] Moved Dashboard's feed and glossary code --- backend/dataall/api/Objects/Feed/registry.py | 1 - backend/dataall/api/Objects/Glossary/registry.py | 11 +---------- backend/dataall/modules/dashboards/__init__.py | 14 ++++++++++++++ 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/backend/dataall/api/Objects/Feed/registry.py b/backend/dataall/api/Objects/Feed/registry.py index ad5ce108a..0f529e469 100644 --- a/backend/dataall/api/Objects/Feed/registry.py +++ b/backend/dataall/api/Objects/Feed/registry.py @@ -37,4 +37,3 @@ def types(cls): FeedRegistry.register(FeedDefinition("DataPipeline", models.DataPipeline)) -FeedRegistry.register(FeedDefinition("Dashboard", models.Dashboard)) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 27c534368..132765a69 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -3,8 +3,7 @@ from dataall.api import gql from dataall.api.gql.graphql_union_type import UnionTypeRegistry -from dataall.db import Resource, models -from dataall.searchproxy.indexers import DashboardIndexer +from 
dataall.db import Resource from dataall.searchproxy.base_indexer import BaseIndexer @@ -58,11 +57,3 @@ def reindex(cls, session, target_type: str, target_uri: str): definition = cls._DEFINITIONS[target_type] if definition.reindexer: definition.reindexer.upsert(session, target_uri) - - -GlossaryRegistry.register(GlossaryDefinition( - target_type="Dashboard", - object_type="Dashboard", - model=models.Dashboard, - reindexer=DashboardIndexer -)) diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index 2dddbce24..e1ba86ab1 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -1,6 +1,7 @@ """Contains the code related to dashboards""" import logging +from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.loader import ImportMode, ModuleInterface log = logging.getLogger(__name__) @@ -15,3 +16,16 @@ def is_supported(modes): def __init__(self): import dataall.modules.dashboards.api + from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition + from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition + from dataall.searchproxy.indexers import DashboardIndexer + + FeedRegistry.register(FeedDefinition("Dashboard", Dashboard)) + + GlossaryRegistry.register(GlossaryDefinition( + target_type="Dashboard", + object_type="Dashboard", + model=Dashboard, + reindexer=DashboardIndexer + )) + From ace2b021b2ea15ecbbe7f124b0f897ceb1c3a62c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 10:54:14 +0200 Subject: [PATCH 230/346] Moved DashboardIndexer --- backend/dataall/api/Objects/Vote/resolvers.py | 5 --- .../dataall/modules/dashboards/__init__.py | 5 ++- .../modules/dashboards/api/resolvers.py | 2 +- .../modules/dashboards/indexers/__init__.py | 0 .../dashboards/indexers/dashboard_indexer.py} | 34 +++++++++---------- 5 files changed, 22 insertions(+), 24 deletions(-) create 
mode 100644 backend/dataall/modules/dashboards/indexers/__init__.py rename backend/dataall/{searchproxy/indexers.py => modules/dashboards/indexers/dashboard_indexer.py} (69%) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py index b9a14e117..5cbcff7c2 100644 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ b/backend/dataall/api/Objects/Vote/resolvers.py @@ -2,7 +2,6 @@ from dataall import db from dataall.api.context import Context -from dataall.searchproxy.indexers import DashboardIndexer from dataall.searchproxy.base_indexer import BaseIndexer _VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} @@ -51,7 +50,3 @@ def get_vote(context: Context, source, targetUri: str = None, targetType: str = data={'targetType': targetType}, check_perm=True, ) - - -# TODO should migrate after into the Dashboard module -add_vote_type("dashboard", DashboardIndexer) diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index e1ba86ab1..ee6a15be7 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -18,7 +18,8 @@ def __init__(self): import dataall.modules.dashboards.api from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition - from dataall.searchproxy.indexers import DashboardIndexer + from dataall.api.Objects.Vote.resolvers import add_vote_type + from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer FeedRegistry.register(FeedDefinition("Dashboard", Dashboard)) @@ -29,3 +30,5 @@ def __init__(self): reindexer=DashboardIndexer )) + add_vote_type("dashboard", DashboardIndexer) + diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 0dfc65e82..5545cf219 100644 --- 
a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -9,7 +9,7 @@ from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.utils import Parameter -from dataall.searchproxy.indexers import DashboardIndexer +from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer param_store = Parameter() ENVNAME = os.getenv("envname", "local") diff --git a/backend/dataall/modules/dashboards/indexers/__init__.py b/backend/dataall/modules/dashboards/indexers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py similarity index 69% rename from backend/dataall/searchproxy/indexers.py rename to backend/dataall/modules/dashboards/indexers/dashboard_indexer.py index 4655de65a..8c17ca75d 100644 --- a/backend/dataall/searchproxy/indexers.py +++ b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py @@ -1,43 +1,43 @@ import logging -from .. 
import db -from ..db import models +from dataall import db +from dataall.db import models from dataall.searchproxy.base_indexer import BaseIndexer +from dataall.modules.dashboards.db.models import Dashboard log = logging.getLogger(__name__) -# TODO Should be moved to dashboard module class DashboardIndexer(BaseIndexer): @classmethod def upsert(cls, session, dashboard_uri: str): dashboard = ( session.query( - models.Dashboard.dashboardUri.label('uri'), - models.Dashboard.name.label('name'), - models.Dashboard.owner.label('owner'), - models.Dashboard.label.label('label'), - models.Dashboard.description.label('description'), - models.Dashboard.tags.label('tags'), - models.Dashboard.region.label('region'), + Dashboard.dashboardUri.label('uri'), + Dashboard.name.label('name'), + Dashboard.owner.label('owner'), + Dashboard.label.label('label'), + Dashboard.description.label('description'), + Dashboard.tags.label('tags'), + Dashboard.region.label('region'), models.Organization.organizationUri.label('orgUri'), models.Organization.name.label('orgName'), models.Environment.environmentUri.label('envUri'), models.Environment.name.label('envName'), - models.Dashboard.SamlGroupName.label('admins'), - models.Dashboard.created, - models.Dashboard.updated, - models.Dashboard.deleted, + Dashboard.SamlGroupName.label('admins'), + Dashboard.created, + Dashboard.updated, + Dashboard.deleted, ) .join( models.Organization, - models.Dashboard.organizationUri == models.Dashboard.organizationUri, + Dashboard.organizationUri == Dashboard.organizationUri, ) .join( models.Environment, - models.Dashboard.environmentUri == models.Environment.environmentUri, + Dashboard.environmentUri == models.Environment.environmentUri, ) - .filter(models.Dashboard.dashboardUri == dashboard_uri) + .filter(Dashboard.dashboardUri == dashboard_uri) .first() ) if dashboard: From 1f340c06c659fd10d3dcb644d734b714c240410a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 10:56:48 +0200 Subject: 
[PATCH 231/346] Renamed the file name --- backend/dataall/modules/datasets/__init__.py | 2 +- .../indexers/{catalog_indexer.py => dataset_catalog_indexer.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename backend/dataall/modules/datasets/indexers/{catalog_indexer.py => dataset_catalog_indexer.py} (100%) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 61161caf1..08301b47b 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -138,6 +138,6 @@ def is_supported(modes: Set[ImportMode]) -> bool: def __init__(self): from dataall.tasks.catalog_indexer import register_catalog_indexer - from dataall.modules.datasets.indexers.catalog_indexer import DatasetCatalogIndexer + from dataall.modules.datasets.indexers.dataset_catalog_indexer import DatasetCatalogIndexer register_catalog_indexer(DatasetCatalogIndexer()) diff --git a/backend/dataall/modules/datasets/indexers/catalog_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py similarity index 100% rename from backend/dataall/modules/datasets/indexers/catalog_indexer.py rename to backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py From 880963e632055b5a77291760df0ffbb6749c968e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 11:55:50 +0200 Subject: [PATCH 232/346] Created dashboard_catalog_indexer --- .../indexers/dashboard_catalog_indexer.py | 19 +++++++++++++++++++ backend/dataall/tasks/catalog_indexer.py | 9 +++++---- 2 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py diff --git a/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py new file mode 100644 index 000000000..74fa18fa0 --- /dev/null +++ 
b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py @@ -0,0 +1,19 @@ +import logging + +from dataall.modules.dashboards import Dashboard +from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer +from dataall.tasks.catalog_indexer import CatalogIndexer + +log = logging.getLogger(__name__) + + +class DashboardCatalogIndexer(CatalogIndexer): + + def index(self, session) -> int: + all_dashboards: [Dashboard] = session.query(Dashboard).all() + log.info(f'Found {len(all_dashboards)} dashboards') + dashboard: Dashboard + for dashboard in all_dashboards: + DashboardIndexer.upsert(session=session, dashboard_uri=dashboard.dashboardUri) + + return len(all_dashboards) diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 99313d5a8..4f291941e 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -4,9 +4,10 @@ from abc import ABC from typing import List -from dataall.db import get_engine, models +from dataall.db import get_engine +from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.loader import load_modules, ImportMode -from dataall.searchproxy.indexers import DashboardIndexer +from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer from dataall.utils.alarm_service import AlarmService root = logging.getLogger() @@ -37,9 +38,9 @@ def index_objects(engine): for indexer in _indexers: indexed_objects_counter += indexer.index(session) - all_dashboards: [models.Dashboard] = session.query(models.Dashboard).all() + all_dashboards: [Dashboard] = session.query(Dashboard).all() log.info(f'Found {len(all_dashboards)} dashboards') - dashboard: models.Dashboard + dashboard: Dashboard for dashboard in all_dashboards: DashboardIndexer.upsert(session=session, dashboard_uri=dashboard.dashboardUri) indexed_objects_counter = indexed_objects_counter + 1 From c97b5e98c2f13a8d1d135ea74212d65978ac72f6 
Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 12:02:37 +0200 Subject: [PATCH 233/346] Implemented EnvironmentResource for dashboards --- backend/dataall/db/api/environment.py | 5 ----- .../dataall/modules/dashboards/__init__.py | 19 ++++++++++++++++++- .../dashboards/db/dashboard_repository.py | 16 +++++++++++++++- 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index 360a07d3c..be33a07c2 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -361,10 +361,6 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): models.DataPipeline, models.DataPipeline.environmentUri == models.Environment.environmentUri, ) - .outerjoin( - models.Dashboard, - models.Dashboard.environmentUri == models.Environment.environmentUri, - ) .filter( and_( models.Environment.environmentUri == environment.environmentUri, @@ -372,7 +368,6 @@ def remove_group(session, username, groups, uri, data=None, check_perm=None): models.RedshiftCluster.SamlGroupName == group, models.SagemakerStudioUserProfile.SamlAdminGroupName == group, models.DataPipeline.SamlGroupName == group, - models.Dashboard.SamlGroupName == group, ), ) ) diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index ee6a15be7..d85e9f9fe 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -1,6 +1,9 @@ """Contains the code related to dashboards""" import logging +from typing import Set +from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager +from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.loader import ImportMode, ModuleInterface @@ -11,7 +14,7 @@ class 
DashboardApiModuleInterface(ModuleInterface): """Implements ModuleInterface for dashboard GraphQl lambda""" @staticmethod - def is_supported(modes): + def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.API in modes def __init__(self): @@ -32,3 +35,17 @@ def __init__(self): add_vote_type("dashboard", DashboardIndexer) + EnvironmentResourceManager.register(DashboardRepository()) + + +class DatasetCatalogIndexerModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.CATALOG_INDEXER_TASK in modes + + def __init__(self): + from dataall.tasks.catalog_indexer import register_catalog_indexer + from dataall.modules.dashboards.indexers.dashboard_catalog_indexer import DashboardCatalogIndexer + + register_catalog_indexer(DashboardCatalogIndexer()) diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index 41984e8a5..ac8cdd069 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -3,6 +3,7 @@ from sqlalchemy import or_, and_ from sqlalchemy.orm import Query +from dataall.core.group.services.group_resource_manager import EnvironmentResource from dataall.db import models, exceptions, permissions, paginate from dataall.db.api import ( Environment, @@ -17,7 +18,20 @@ logger = logging.getLogger(__name__) -class DashboardRepository: +class DashboardRepository(EnvironmentResource): + + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return ( + session.query(Dashboard) + .filter( + and_( + Dashboard.environmentUri == environment.environmentUri, + Dashboard.SamlGroupName == group_uri + )) + .count() + ) + @staticmethod @has_tenant_perm(permissions.MANAGE_DASHBOARDS) @has_resource_perm(permissions.CREATE_DASHBOARD) From 5d71bd6de1340dd7c744c767338447dd1f0606eb Mon Sep 17 00:00:00 2001 From: Nikita 
Podshivalov Date: Thu, 25 May 2023 12:05:34 +0200 Subject: [PATCH 234/346] Fixed copy error --- backend/dataall/db/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py index 6007be39d..02b313ee0 100644 --- a/backend/dataall/db/api/pipeline.py +++ b/backend/dataall/db/api/pipeline.py @@ -83,7 +83,7 @@ def create_pipeline( action='PIPELINE:CREATE', label='PIPELINE:CREATE', owner=username, - summary=f'{username} created dashboard {pipeline.label} in {environment.label}', + summary=f'{username} created pipeline {pipeline.label} in {environment.label}', targetUri=pipeline.DataPipelineUri, targetType='pipeline', ) From d0ea832bc8176baa7dc87624692e2f53701d9199 Mon Sep 17 00:00:00 2001 From: dlpzx <71252798+dlpzx@users.noreply.github.com> Date: Thu, 25 May 2023 12:08:18 +0200 Subject: [PATCH 235/346] update auth-at-edge semantic version to latest 2.1.5 (#480) ### Feature or Bugfix - Bugfix ### Detail Custom resources created by the [cloudfront-authorization-at-edge](https://github.com/aws-samples/cloudfront-authorization-at-edge/blob/master/example-serverless-app-reuse/README.md) application used in data.all use node12 for the version of the application previously used (2.0.4). By upgrading to the latest version (2.1.5) the Lambda custom resources used also use node14 at runtime. After upgrading the semantic version, I performed the following tests: - [X] upgrade a pre-existing deployment (Lambdas node12) and check that the runtime has been updated to node14. See screenshot below. 
- [X] open userguide (where auth at edge is used) in pre-existing deployment - [X] execute GraphQL APIs in pre-existing deployment - [X] execute ES APIs in pre-existing deployment ![image](https://github.com/awslabs/aws-dataall/assets/71252798/4d50a8fb-0084-48ee-adb8-d11b20dd6b4a) - [X] deploy data.all from scratch and check that the Lambdas deployed use node14 - [X] open userguide (where auth at edge is used) in new deployment - [X] execute GraphQL APIs in new deployment - [X] execute ES APIs in new deployment ![image](https://github.com/awslabs/aws-dataall/assets/71252798/341124bd-c4c7-4a94-b53b-e451306e2653) ### Relates - #479 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- deploy/stacks/auth_at_edge.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/stacks/auth_at_edge.py b/deploy/stacks/auth_at_edge.py index c6d434867..052eb04b0 100644 --- a/deploy/stacks/auth_at_edge.py +++ b/deploy/stacks/auth_at_edge.py @@ -23,7 +23,7 @@ def __init__(self, scope, id, envname='dev', resource_prefix='dataall', **kwargs f'{resource_prefix}-{envname}-authatedge', location={ 'applicationId': 'arn:aws:serverlessrepo:us-east-1:520945424137:applications/cloudfront-authorization-at-edge', - 'semanticVersion': '2.0.4', + 'semanticVersion': '2.1.5', }, parameters={ 'UserPoolArn': userpool_arn, From f1a52a17fea4b9b4ee009aa866e66cba8fb07adb Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 14:02:14 +0200 Subject: [PATCH 236/346] Added ShareEnvironmentResource --- backend/dataall/modules/dataset_sharing/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 140fb4e0c..450d66047 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -1,6 +1,8 @@ import logging from typing import 
List, Type, Set +from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager +from dataall.modules.dataset_sharing.db.share_object_repository import ShareEnvironmentResource from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.loader import ModuleInterface, ImportMode @@ -19,6 +21,8 @@ def depends_on() -> List[Type['ModuleInterface']]: def __init__(self): from dataall.modules.dataset_sharing import api + + EnvironmentResourceManager.register(ShareEnvironmentResource()) log.info("API of dataset sharing has been imported") From 15d1a68f482cfb73c37cbcefe777cc8a1f8ba617 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 15:46:40 +0200 Subject: [PATCH 237/346] Minor changes --- backend/dataall/modules/datasets/services/dataset_service.py | 4 ++-- .../dataall/modules/datasets_base/db/dataset_repository.py | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 891890100..f89f98b7c 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -20,7 +20,7 @@ from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ - SUMMARY_DATASET, DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ + DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ CREATE_DATASET, DATASET_ALL, DATASET_READ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.enums import DatasetRole @@ -105,7 +105,7 @@ def 
import_dataset(uri, admin_group, data): def get_dataset(uri): context = get_context() with context.db_engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset(session, uri=uri) + dataset = DatasetRepository.get_dataset_by_uri(session, uri) if dataset.SamlAdminGroupName in context.groups: dataset.userRoleForDataset = DatasetRole.Admin.value return dataset diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index 7481e33ba..d3697ca3f 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -148,10 +148,6 @@ def _set_dataset_aws_resources(dataset: Dataset, data, environment): dataset.GlueDataQualityTriggerName = f'{dataset.S3BucketName}-{dataset.datasetUri}-dqtrigger' return dataset - @staticmethod - def get_dataset(session, uri: str) -> Dataset: - return DatasetRepository.get_dataset_by_uri(session, uri) - @staticmethod def paginated_dataset_tables(session, uri, data=None) -> dict: query = ( From 27076a35308e8c9e838710886c929317306928e3 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 15:59:20 +0200 Subject: [PATCH 238/346] Moved dashboard permissions --- backend/dataall/db/permissions.py | 28 ----------- .../modules/dashboards/api/resolvers.py | 19 ++++---- .../dashboards/db/dashboard_repository.py | 46 ++++++++++--------- .../services/dashboard_permissions.py | 43 +++++++++++++++++ 4 files changed, 77 insertions(+), 59 deletions(-) create mode 100644 backend/dataall/modules/dashboards/services/dashboard_permissions.py diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py index 969509de7..e0ca1708b 100644 --- a/backend/dataall/db/permissions.py +++ b/backend/dataall/db/permissions.py @@ -23,7 +23,6 @@ TENANT PERMISSIONS """ MANAGE_REDSHIFT_CLUSTERS = 'MANAGE_REDSHIFT_CLUSTERS' -MANAGE_DASHBOARDS = 'MANAGE_DASHBOARDS' 
MANAGE_PIPELINES = 'MANAGE_PIPELINES' MANAGE_GROUPS = 'MANAGE_GROUPS' MANAGE_ENVIRONMENT = 'MANAGE_ENVIRONMENT' @@ -52,8 +51,6 @@ LIST_ENVIRONMENT_REDSHIFT_CLUSTERS = 'LIST_ENVIRONMENT_REDSHIFT_CLUSTERS' CREATE_SGMSTUDIO_NOTEBOOK = 'CREATE_SGMSTUDIO_NOTEBOOK' LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS = 'LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS' -CREATE_DASHBOARD = 'CREATE_DASHBOARD' -LIST_ENVIRONMENT_DASHBOARDS = 'LIST_ENVIRONMENT_DASHBOARDS' CREATE_PIPELINE = 'CREATE_PIPELINE' LIST_PIPELINES = 'LIST_PIPELINES' CREATE_NETWORK = 'CREATE_NETWORK' @@ -69,8 +66,6 @@ LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, CREATE_SGMSTUDIO_NOTEBOOK, LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, - CREATE_DASHBOARD, - LIST_ENVIRONMENT_DASHBOARDS, INVITE_ENVIRONMENT_GROUP, ADD_ENVIRONMENT_CONSUMPTION_ROLES, CREATE_PIPELINE, @@ -83,7 +78,6 @@ ADD_ENVIRONMENT_CONSUMPTION_ROLES, CREATE_REDSHIFT_CLUSTER, CREATE_SGMSTUDIO_NOTEBOOK, - CREATE_DASHBOARD, CREATE_PIPELINE, CREATE_NETWORK, ] @@ -105,8 +99,6 @@ LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, CREATE_SGMSTUDIO_NOTEBOOK, LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, - CREATE_DASHBOARD, - LIST_ENVIRONMENT_DASHBOARDS, CREATE_PIPELINE, LIST_PIPELINES, CREATE_NETWORK, @@ -145,7 +137,6 @@ TENANT_ALL = [ MANAGE_REDSHIFT_CLUSTERS, - MANAGE_DASHBOARDS, MANAGE_PIPELINES, MANAGE_GLOSSARIES, MANAGE_GROUPS, @@ -155,7 +146,6 @@ ] TENANT_ALL_WITH_DESC = {k: k for k in TENANT_ALL} -TENANT_ALL_WITH_DESC[MANAGE_DASHBOARDS] = 'Manage dashboards' TENANT_ALL_WITH_DESC[MANAGE_REDSHIFT_CLUSTERS] = 'Manage Redshift clusters' TENANT_ALL_WITH_DESC[MANAGE_GLOSSARIES] = 'Manage glossaries' TENANT_ALL_WITH_DESC[MANAGE_ENVIRONMENTS] = 'Manage environments' @@ -208,22 +198,6 @@ SGMSTUDIO_NOTEBOOK_URL, ] -""" -DASHBOARDS -""" -GET_DASHBOARD = 'GET_DASHBOARD' -UPDATE_DASHBOARD = 'UPDATE_DASHBOARD' -DELETE_DASHBOARD = 'DELETE_DASHBOARD' -DASHBOARD_URL = 'DASHBOARD_URL' -SHARE_DASHBOARD = 'SHARE_DASHBOARD' -DASHBOARD_ALL = [ - GET_DASHBOARD, - UPDATE_DASHBOARD, - DELETE_DASHBOARD, - DASHBOARD_URL, - 
SHARE_DASHBOARD, -] - """ PIPELINES """ @@ -260,13 +234,11 @@ + REDSHIFT_CLUSTER_ALL + GLOSSARY_ALL + SGMSTUDIO_NOTEBOOK_ALL - + DASHBOARD_ALL + PIPELINE_ALL + NETWORK_ALL ) RESOURCES_ALL_WITH_DESC = {k: k for k in RESOURCES_ALL} -RESOURCES_ALL_WITH_DESC[CREATE_DASHBOARD] = 'Create dashboards on this environment' RESOURCES_ALL_WITH_DESC[CREATE_REDSHIFT_CLUSTER] = 'Create Redshift clusters on this environment' RESOURCES_ALL_WITH_DESC[CREATE_SGMSTUDIO_NOTEBOOK] = 'Create ML Studio profiles on this environment' RESOURCES_ALL_WITH_DESC[INVITE_ENVIRONMENT_GROUP] = 'Invite other teams to this environment' diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 5545cf219..9b1ed5d5e 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -4,10 +4,11 @@ from dataall.api.context import Context from dataall.aws.handlers.quicksight import Quicksight from dataall.aws.handlers.parameter_store import ParameterStoreManager -from dataall.db import permissions, models +from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary, Vote from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard +from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD from dataall.utils import Parameter from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer @@ -28,11 +29,11 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): username=context.username, groups=context.groups, resource_uri=dash.dashboardUri, - permission_name=permissions.GET_DASHBOARD, + permission_name=GET_DASHBOARD, ) if not env.dashboardsEnabled: raise db.exceptions.UnauthorizedOperation( - action=permissions.GET_DASHBOARD, + action=GET_DASHBOARD, message=f'Dashboards feature is disabled for the 
environment {env.label}', ) if dash.SamlGroupName in context.groups: @@ -52,7 +53,7 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): ) if not shared_groups: raise db.exceptions.UnauthorizedOperation( - action=permissions.GET_DASHBOARD, + action=GET_DASHBOARD, message='Dashboard has not been shared with your Teams', ) @@ -87,12 +88,12 @@ def get_quicksight_designer_url( username=context.username, groups=context.groups, resource_uri=environmentUri, - permission_name=permissions.CREATE_DASHBOARD, + permission_name=CREATE_DASHBOARD, ) env: models.Environment = session.query(models.Environment).get(environmentUri) if not env.dashboardsEnabled: raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, + action=CREATE_DASHBOARD, message=f'Dashboards feature is disabled for the environment {env.label}', ) @@ -113,7 +114,7 @@ def import_dashboard(context: Context, source, input: dict = None): username=context.username, groups=context.groups, resource_uri=input['environmentUri'], - permission_name=permissions.CREATE_DASHBOARD, + permission_name=CREATE_DASHBOARD, ) env: models.Environment = db.api.Environment.get_environment_by_uri( session, input['environmentUri'] @@ -121,7 +122,7 @@ def import_dashboard(context: Context, source, input: dict = None): if not env.dashboardsEnabled: raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, + action=CREATE_DASHBOARD, message=f'Dashboards feature is disabled for the environment {env.label}', ) @@ -134,7 +135,7 @@ def import_dashboard(context: Context, source, input: dict = None): if not can_import: raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, + action=CREATE_DASHBOARD, message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', ) diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index 
ac8cdd069..7ba72c86a 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -4,7 +4,7 @@ from sqlalchemy.orm import Query from dataall.core.group.services.group_resource_manager import EnvironmentResource -from dataall.db import models, exceptions, permissions, paginate +from dataall.db import models, exceptions, paginate from dataall.db.api import ( Environment, has_tenant_perm, @@ -14,6 +14,8 @@ Vote, ) from dataall.modules.dashboards.db.models import DashboardShare, DashboardShareStatus, Dashboard +from dataall.modules.dashboards.services.dashboard_permissions import MANAGE_DASHBOARDS, CREATE_DASHBOARD, \ + DASHBOARD_ALL, GET_DASHBOARD, SHARE_DASHBOARD, UPDATE_DASHBOARD logger = logging.getLogger(__name__) @@ -33,8 +35,8 @@ def count_resources(session, environment, group_uri) -> int: ) @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.CREATE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(CREATE_DASHBOARD) def import_dashboard( session, username: str, @@ -60,7 +62,7 @@ def import_dashboard( groups=groups, uri=uri, group=data['SamlGroupName'], - permission_name=permissions.CREATE_DASHBOARD, + permission_name=CREATE_DASHBOARD, ) env: models.Environment = data.get( @@ -110,7 +112,7 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): ResourcePolicy.attach_resource_policy( session=session, group=group, - permissions=permissions.DASHBOARD_ALL, + permissions=DASHBOARD_ALL, resource_uri=dashboard.dashboardUri, resource_type=Dashboard.__name__, ) @@ -118,14 +120,14 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): ResourcePolicy.attach_resource_policy( session=session, group=environment.SamlGroupName, - permissions=permissions.DASHBOARD_ALL, + permissions=DASHBOARD_ALL, resource_uri=dashboard.dashboardUri, resource_type=Dashboard.__name__, ) @staticmethod - 
@has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.GET_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(GET_DASHBOARD) def get_dashboard( session, username: str, @@ -229,8 +231,8 @@ def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Que ] @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(SHARE_DASHBOARD) def paginated_dashboard_shares( session, username, groups, uri, data=None, check_perm=None ) -> dict: @@ -243,8 +245,8 @@ def paginated_dashboard_shares( ).to_dict() @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.UPDATE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(UPDATE_DASHBOARD) def update_dashboard( session, username: str, @@ -295,7 +297,7 @@ def delete_dashboard( return True @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) + @has_tenant_perm(MANAGE_DASHBOARDS) def request_dashboard_share( session, username: str, @@ -307,7 +309,7 @@ def request_dashboard_share( dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) if dashboard.SamlGroupName == data['principalId']: raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, + action=CREATE_DASHBOARD, message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', ) share: DashboardShare = ( @@ -339,8 +341,8 @@ def request_dashboard_share( return share @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(SHARE_DASHBOARD) def approve_dashboard_share( session, username: str, @@ -368,7 +370,7 @@ def approve_dashboard_share( ResourcePolicy.attach_resource_policy( session=session, group=share.SamlGroupName, - permissions=[permissions.GET_DASHBOARD], + 
permissions=[GET_DASHBOARD], resource_uri=share.dashboardUri, resource_type=Dashboard.__name__, ) @@ -376,8 +378,8 @@ def approve_dashboard_share( return share @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(SHARE_DASHBOARD) def reject_dashboard_share( session, username: str, @@ -412,8 +414,8 @@ def reject_dashboard_share( return share @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) + @has_tenant_perm(MANAGE_DASHBOARDS) + @has_resource_perm(SHARE_DASHBOARD) def share_dashboard( session, username: str, @@ -434,7 +436,7 @@ def share_dashboard( ResourcePolicy.attach_resource_policy( session=session, group=data['principalId'], - permissions=[permissions.GET_DASHBOARD], + permissions=[GET_DASHBOARD], resource_uri=dashboard.dashboardUri, resource_type=Dashboard.__name__, ) diff --git a/backend/dataall/modules/dashboards/services/dashboard_permissions.py b/backend/dataall/modules/dashboards/services/dashboard_permissions.py new file mode 100644 index 000000000..ac09a7780 --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_permissions.py @@ -0,0 +1,43 @@ +from dataall.db.permissions import ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL, TENANT_ALL, \ + TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC + +""" +DASHBOARDS +""" +GET_DASHBOARD = 'GET_DASHBOARD' +UPDATE_DASHBOARD = 'UPDATE_DASHBOARD' +DELETE_DASHBOARD = 'DELETE_DASHBOARD' +DASHBOARD_URL = 'DASHBOARD_URL' +SHARE_DASHBOARD = 'SHARE_DASHBOARD' +DASHBOARD_ALL = [ + GET_DASHBOARD, + UPDATE_DASHBOARD, + DELETE_DASHBOARD, + DASHBOARD_URL, + SHARE_DASHBOARD, +] + +RESOURCES_ALL.extend(DASHBOARD_ALL) +for perm in DASHBOARD_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm + +""" +TENANT PERMISSIONS +""" +MANAGE_DASHBOARDS = 'MANAGE_DASHBOARDS' + +TENANT_ALL.append(MANAGE_DASHBOARDS) 
+TENANT_ALL_WITH_DESC[MANAGE_DASHBOARDS] = 'Manage dashboards' + + +""" +ENVIRONMENT PERMISSIONS +""" +CREATE_DASHBOARD = 'CREATE_DASHBOARD' + + +ENVIRONMENT_INVITED.append(CREATE_DASHBOARD) +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_DASHBOARD) +ENVIRONMENT_ALL.append(CREATE_DASHBOARD) +RESOURCES_ALL.append(CREATE_DASHBOARD) +RESOURCES_ALL_WITH_DESC[CREATE_DASHBOARD] = 'Create dashboards on this environment' From f14d78997f41767a40cd0347ee502d6f3319a834 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 16:03:09 +0200 Subject: [PATCH 239/346] Migrated dashboards to a new API for permission check --- .../dashboards/db/dashboard_repository.py | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index 7ba72c86a..ee2c46ffc 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -4,11 +4,10 @@ from sqlalchemy.orm import Query from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db import models, exceptions, paginate from dataall.db.api import ( Environment, - has_tenant_perm, - has_resource_perm, ResourcePolicy, Glossary, Vote, @@ -35,8 +34,8 @@ def count_resources(session, environment, group_uri) -> int: ) @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(CREATE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(CREATE_DASHBOARD) def import_dashboard( session, username: str, @@ -126,8 +125,8 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): ) @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(GET_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + 
@has_resource_permission(GET_DASHBOARD) def get_dashboard( session, username: str, @@ -231,8 +230,8 @@ def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Que ] @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(SHARE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) def paginated_dashboard_shares( session, username, groups, uri, data=None, check_perm=None ) -> dict: @@ -245,8 +244,8 @@ def paginated_dashboard_shares( ).to_dict() @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(UPDATE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(UPDATE_DASHBOARD) def update_dashboard( session, username: str, @@ -297,7 +296,7 @@ def delete_dashboard( return True @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) + @has_tenant_permission(MANAGE_DASHBOARDS) def request_dashboard_share( session, username: str, @@ -341,8 +340,8 @@ def request_dashboard_share( return share @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(SHARE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) def approve_dashboard_share( session, username: str, @@ -378,8 +377,8 @@ def approve_dashboard_share( return share @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(SHARE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) def reject_dashboard_share( session, username: str, @@ -414,8 +413,8 @@ def reject_dashboard_share( return share @staticmethod - @has_tenant_perm(MANAGE_DASHBOARDS) - @has_resource_perm(SHARE_DASHBOARD) + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) def share_dashboard( session, username: str, From ecb16b8e03d129f2100fec9f71fe6e8133f68b80 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 18:01:38 +0200 Subject: [PATCH 240/346] Removed 
unused parameters --- .../modules/dashboards/api/resolvers.py | 64 +++++-------- .../dashboards/db/dashboard_repository.py | 93 ++++--------------- 2 files changed, 41 insertions(+), 116 deletions(-) diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 9b1ed5d5e..f18ac9d4d 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -6,8 +6,9 @@ from dataall.aws.handlers.parameter_store import ParameterStoreManager from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary, Vote +from dataall.db.exceptions import InvalidInput from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository -from dataall.modules.dashboards.db.models import Dashboard +from dataall.modules.dashboards.db.models import Dashboard, DashboardShareStatus from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD from dataall.utils import Parameter from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer @@ -47,7 +48,6 @@ def get_quicksight_reader_url(context, source, dashboardUri: str = None): else: shared_groups = DashboardRepository.query_all_user_groups_shareddashboard( session=session, - username=context.username, groups=context.groups, uri=dashboardUri ) @@ -146,7 +146,6 @@ def import_dashboard(context: Context, source, input: dict = None): groups=context.groups, uri=env.environmentUri, data=input, - check_perm=True, ) DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) @@ -159,14 +158,12 @@ def update_dashboard(context, source, input: dict = None): dashboard = DashboardRepository.get_dashboard_by_uri( session, input['dashboardUri'] ) - input['dashboard'] = dashboard DashboardRepository.update_dashboard( session=session, username=context.username, - groups=context.groups, uri=dashboard.dashboardUri, - data=input, - 
check_perm=True, + dashboard=dashboard, + data=input ) DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) @@ -182,22 +179,13 @@ def list_dashboards(context: Context, source, filter: dict = None): session=session, username=context.username, groups=context.groups, - uri=None, data=filter, - check_perm=True, ) def get_dashboard(context: Context, source, dashboardUri: str = None): with context.engine.scoped_session() as session: - return DashboardRepository.get_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=None, - check_perm=True, - ) + return DashboardRepository.get_dashboard(session=session, uri=dashboardUri) def resolve_user_role(context: Context, source: Dashboard): @@ -224,10 +212,8 @@ def request_dashboard_share( return DashboardRepository.request_dashboard_share( session=session, username=context.username, - groups=context.groups, uri=dashboardUri, - data={'principalId': principalId}, - check_perm=True, + principal_id=principalId, ) @@ -238,14 +224,18 @@ def approve_dashboard_share( ): with context.engine.scoped_session() as session: share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) + if share.status not in DashboardShareStatus.__members__: + raise InvalidInput( + 'Share status', + share.status, + str(DashboardShareStatus.__members__), + ) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) return DashboardRepository.approve_dashboard_share( session=session, - username=context.username, - groups=context.groups, uri=dashboard.dashboardUri, - data={'share': share, 'shareUri': shareUri}, - check_perm=True, + share=share ) @@ -256,14 +246,18 @@ def reject_dashboard_share( ): with context.engine.scoped_session() as session: share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) + if share.status not in DashboardShareStatus.__members__: + raise InvalidInput( + 'Share status', + share.status, + 
str(DashboardShareStatus.__members__), + ) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) return DashboardRepository.reject_dashboard_share( session=session, - username=context.username, - groups=context.groups, uri=dashboard.dashboardUri, - data={'share': share, 'shareUri': shareUri}, - check_perm=True, + share=share ) @@ -282,7 +276,6 @@ def list_dashboard_shares( groups=context.groups, uri=dashboardUri, data=filter, - check_perm=True, ) @@ -296,23 +289,14 @@ def share_dashboard( return DashboardRepository.share_dashboard( session=session, username=context.username, - groups=context.groups, uri=dashboardUri, - data={'principalId': principalId}, - check_perm=True, + principal_id=principalId, ) def delete_dashboard(context: Context, source, dashboardUri: str = None): with context.engine.scoped_session() as session: - DashboardRepository.delete_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=None, - check_perm=True, - ) + DashboardRepository.delete_dashboard(session=session, uri=dashboardUri) DashboardIndexer.delete_doc(doc_id=dashboardUri) return True diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index ee2c46ffc..cf2492add 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -42,7 +42,6 @@ def import_dashboard( groups: [str], uri: str, data: dict = None, - check_perm: bool = False, ) -> Dashboard: if not data: raise exceptions.RequiredParameter(data) @@ -127,14 +126,7 @@ def set_dashboard_resource_policy(session, environment, dashboard, group): @staticmethod @has_tenant_permission(MANAGE_DASHBOARDS) @has_resource_permission(GET_DASHBOARD) - def get_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> Dashboard: + 
def get_dashboard(session, uri: str) -> Dashboard: return DashboardRepository.get_dashboard_by_uri(session, uri) @staticmethod @@ -175,7 +167,7 @@ def query_user_dashboards(session, username, groups, filter) -> Query: @staticmethod def paginated_user_dashboards( - session, username, groups, uri, data=None, check_perm=None + session, username, groups, data=None ) -> dict: return paginate( query=DashboardRepository.query_user_dashboards(session, username, groups, data), @@ -213,7 +205,7 @@ def query_dashboard_shares(session, username, groups, uri, filter) -> Query: return query @staticmethod - def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Query: + def query_all_user_groups_shareddashboard(session, groups, uri) -> [str]: query = ( session.query(DashboardShare) .filter( @@ -224,16 +216,13 @@ def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Que ) ) - return [ - share.SamlGroupName - for share in query.all() - ] + return [share.SamlGroupName for share in query.all()] @staticmethod @has_tenant_permission(MANAGE_DASHBOARDS) @has_resource_permission(SHARE_DASHBOARD) def paginated_dashboard_shares( - session, username, groups, uri, data=None, check_perm=None + session, username, groups, uri, data=None ) -> dict: return paginate( query=DashboardRepository.query_dashboard_shares( @@ -249,17 +238,10 @@ def paginated_dashboard_shares( def update_dashboard( session, username: str, - groups: [str], uri: str, + dashboard: Dashboard, data: dict = None, - check_perm: bool = False, ) -> Dashboard: - - dashboard = data.get( - 'dashboard', - DashboardRepository.get_dashboard_by_uri(session, data['dashboardUri']), - ) - for k in data.keys(): setattr(dashboard, k, data.get(k)) @@ -280,9 +262,8 @@ def update_dashboard( return dashboard @staticmethod - def delete_dashboard( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: + def delete_dashboard(session, uri) -> bool: + # TODO THERE WAS NO PERMISSION CHECK 
dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) session.delete(dashboard) ResourcePolicy.delete_resource_policy( @@ -300,13 +281,11 @@ def delete_dashboard( def request_dashboard_share( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, + principal_id: str ) -> DashboardShare: dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) - if dashboard.SamlGroupName == data['principalId']: + if dashboard.SamlGroupName == principal_id: raise exceptions.UnauthorizedOperation( action=CREATE_DASHBOARD, message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', @@ -315,7 +294,7 @@ def request_dashboard_share( session.query(DashboardShare) .filter( DashboardShare.dashboardUri == uri, - DashboardShare.SamlGroupName == data['principalId'], + DashboardShare.SamlGroupName == principal_id, ) .first() ) @@ -323,7 +302,7 @@ def request_dashboard_share( share = DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, - SamlGroupName=data['principalId'], + SamlGroupName=principal_id, status=DashboardShareStatus.REQUESTED.value, ) session.add(share) @@ -342,25 +321,7 @@ def request_dashboard_share( @staticmethod @has_tenant_permission(MANAGE_DASHBOARDS) @has_resource_permission(SHARE_DASHBOARD) - def approve_dashboard_share( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> DashboardShare: - - share: DashboardShare = data.get( - 'share', session.query(DashboardShare).get(data['shareUri']) - ) - - if share.status not in DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(DashboardShareStatus.__members__), - ) + def approve_dashboard_share(session, uri: str, share) -> DashboardShare: if share.status == DashboardShareStatus.APPROVED.value: return share @@ -379,25 +340,7 @@ def approve_dashboard_share( @staticmethod 
@has_tenant_permission(MANAGE_DASHBOARDS) @has_resource_permission(SHARE_DASHBOARD) - def reject_dashboard_share( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> DashboardShare: - - share: DashboardShare = data.get( - 'share', session.query(DashboardShare).get(data['shareUri']) - ) - - if share.status not in DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(DashboardShareStatus.__members__), - ) + def reject_dashboard_share( session, uri: str, share) -> DashboardShare: if share.status == DashboardShareStatus.REJECTED.value: return share @@ -418,23 +361,21 @@ def reject_dashboard_share( def share_dashboard( session, username: str, - groups: [str], uri: str, - data: dict = None, - check_perm: bool = False, + principal_id: str ) -> DashboardShare: dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) share = DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, - SamlGroupName=data['principalId'], + SamlGroupName=principal_id, status=DashboardShareStatus.APPROVED.value, ) session.add(share) ResourcePolicy.attach_resource_policy( session=session, - group=data['principalId'], + group=principal_id, permissions=[GET_DASHBOARD], resource_uri=dashboard.dashboardUri, resource_type=Dashboard.__name__, From dfa7b6d3b70e7d9103b566f1adde34b2277c08db Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 25 May 2023 18:03:53 +0200 Subject: [PATCH 241/346] Moved validation to resolvers.py --- backend/dataall/modules/dashboards/api/resolvers.py | 13 ++++++++++++- .../modules/dashboards/db/dashboard_repository.py | 11 ----------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index f18ac9d4d..f37766ce6 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ 
b/backend/dataall/modules/dashboards/api/resolvers.py @@ -6,7 +6,7 @@ from dataall.aws.handlers.parameter_store import ParameterStoreManager from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary, Vote -from dataall.db.exceptions import InvalidInput +from dataall.db.exceptions import InvalidInput, RequiredParameter from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard, DashboardShareStatus from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD @@ -108,6 +108,17 @@ def get_quicksight_designer_url( def import_dashboard(context: Context, source, input: dict = None): + if not input: + raise RequiredParameter(input) + if not input.get('environmentUri'): + raise RequiredParameter('environmentUri') + if not input.get('SamlGroupName'): + raise RequiredParameter('group') + if not input.get('dashboardId'): + raise RequiredParameter('dashboardId') + if not input.get('label'): + raise RequiredParameter('label') + with context.engine.scoped_session() as session: ResourcePolicy.check_user_resource_permission( session=session, diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index cf2492add..e668f6738 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -43,17 +43,6 @@ def import_dashboard( uri: str, data: dict = None, ) -> Dashboard: - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('dashboardId'): - raise exceptions.RequiredParameter('dashboardId') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - 
Environment.check_group_environment_permission( session=session, username=username, From 07e315522ffd989311b65484650a0ec1cb3c9a53 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 26 May 2023 12:05:01 +0200 Subject: [PATCH 242/346] Introduced DashboardShareService --- .../modules/dashboards/api/resolvers.py | 74 ++--------- .../dashboards/db/dashboard_repository.py | 120 +++-------------- .../services/dashboard_share_service.py | 123 ++++++++++++++++++ 3 files changed, 153 insertions(+), 164 deletions(-) create mode 100644 backend/dataall/modules/dashboards/services/dashboard_share_service.py diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index f37766ce6..eb4a300eb 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -6,10 +6,11 @@ from dataall.aws.handlers.parameter_store import ParameterStoreManager from dataall.db import models from dataall.db.api import ResourcePolicy, Glossary, Vote -from dataall.db.exceptions import InvalidInput, RequiredParameter +from dataall.db.exceptions import RequiredParameter from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository -from dataall.modules.dashboards.db.models import Dashboard, DashboardShareStatus +from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD +from dataall.modules.dashboards.services.dashboard_share_service import DashboardShareService from dataall.utils import Parameter from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer @@ -219,57 +220,15 @@ def request_dashboard_share( principalId: str = None, dashboardUri: str = None, ): - with context.engine.scoped_session() as session: - return DashboardRepository.request_dashboard_share( - session=session, - username=context.username, - uri=dashboardUri, - 
principal_id=principalId, - ) - - -def approve_dashboard_share( - context: Context, - source: Dashboard, - shareUri: str = None, -): - with context.engine.scoped_session() as session: - share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) - if share.status not in DashboardShareStatus.__members__: - raise InvalidInput( - 'Share status', - share.status, - str(DashboardShareStatus.__members__), - ) + return DashboardShareService.request_dashboard_share(uri=dashboardUri, principal_id=principalId) - dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) - return DashboardRepository.approve_dashboard_share( - session=session, - uri=dashboard.dashboardUri, - share=share - ) +def approve_dashboard_share(context: Context, source: Dashboard, shareUri: str = None): + return DashboardShareService.approve_dashboard_share(uri=shareUri) -def reject_dashboard_share( - context: Context, - source: Dashboard, - shareUri: str = None, -): - with context.engine.scoped_session() as session: - share = DashboardRepository.get_dashboard_share_by_uri(session, shareUri) - if share.status not in DashboardShareStatus.__members__: - raise InvalidInput( - 'Share status', - share.status, - str(DashboardShareStatus.__members__), - ) - dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) - return DashboardRepository.reject_dashboard_share( - session=session, - uri=dashboard.dashboardUri, - share=share - ) +def reject_dashboard_share(context: Context,source: Dashboard, shareUri: str = None): + return DashboardShareService.reject_dashboard_share(uri=shareUri) def list_dashboard_shares( @@ -280,14 +239,7 @@ def list_dashboard_shares( ): if not filter: filter = {} - with context.engine.scoped_session() as session: - return DashboardRepository.paginated_dashboard_shares( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=filter, - ) + return 
DashboardShareService.list_dashboard_shares(uri=dashboardUri, data=filter) def share_dashboard( @@ -296,13 +248,7 @@ def share_dashboard( principalId: str = None, dashboardUri: str = None, ): - with context.engine.scoped_session() as session: - return DashboardRepository.share_dashboard( - session=session, - username=context.username, - uri=dashboardUri, - principal_id=principalId, - ) + return DashboardShareService.share_dashboard(uri=dashboardUri, principal_id=principalId) def delete_dashboard(context: Context, source, dashboardUri: str = None): diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index e668f6738..d25dc65c4 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -126,7 +126,7 @@ def get_dashboard_by_uri(session, uri) -> Dashboard: return dashboard @staticmethod - def query_user_dashboards(session, username, groups, filter) -> Query: + def _query_user_dashboards(session, username, groups, filter) -> Query: query = ( session.query(Dashboard) .outerjoin( @@ -159,13 +159,13 @@ def paginated_user_dashboards( session, username, groups, data=None ) -> dict: return paginate( - query=DashboardRepository.query_user_dashboards(session, username, groups, data), + query=DashboardRepository._query_user_dashboards(session, username, groups, data), page=data.get('page', 1), page_size=data.get('pageSize', 10), ).to_dict() @staticmethod - def query_dashboard_shares(session, username, groups, uri, filter) -> Query: + def _query_dashboard_shares(session, username, groups, uri, filter) -> Query: query = ( session.query(DashboardShare) .join( @@ -208,13 +208,11 @@ def query_all_user_groups_shareddashboard(session, groups, uri) -> [str]: return [share.SamlGroupName for share in query.all()] @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(SHARE_DASHBOARD) def 
paginated_dashboard_shares( session, username, groups, uri, data=None ) -> dict: return paginate( - query=DashboardRepository.query_dashboard_shares( + query=DashboardRepository._query_dashboard_shares( session, username, groups, uri, data ), page=data.get('page', 1), @@ -266,109 +264,20 @@ def delete_dashboard(session, uri) -> bool: return True @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - def request_dashboard_share( - session, - username: str, - uri: str, - principal_id: str - ) -> DashboardShare: - dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) - if dashboard.SamlGroupName == principal_id: - raise exceptions.UnauthorizedOperation( - action=CREATE_DASHBOARD, - message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', - ) - share: DashboardShare = ( - session.query(DashboardShare) - .filter( - DashboardShare.dashboardUri == uri, - DashboardShare.SamlGroupName == principal_id, - ) - .first() - ) - if not share: - share = DashboardShare( - owner=username, - dashboardUri=dashboard.dashboardUri, - SamlGroupName=principal_id, - status=DashboardShareStatus.REQUESTED.value, - ) - session.add(share) - else: - if share.status not in DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(DashboardShareStatus.__members__), - ) - if share.status == 'REJECTED': - share.status = 'REQUESTED' - - return share - - @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(SHARE_DASHBOARD) - def approve_dashboard_share(session, uri: str, share) -> DashboardShare: - if share.status == DashboardShareStatus.APPROVED.value: - return share - - share.status = DashboardShareStatus.APPROVED.value - - ResourcePolicy.attach_resource_policy( - session=session, - group=share.SamlGroupName, - permissions=[GET_DASHBOARD], - resource_uri=share.dashboardUri, - resource_type=Dashboard.__name__, - ) - - return share - - @staticmethod - 
@has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(SHARE_DASHBOARD) - def reject_dashboard_share( session, uri: str, share) -> DashboardShare: - if share.status == DashboardShareStatus.REJECTED.value: - return share - - share.status = DashboardShareStatus.REJECTED.value - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.SamlGroupName, - resource_uri=share.dashboardUri, - resource_type=Dashboard.__name__, - ) - - return share - - @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(SHARE_DASHBOARD) - def share_dashboard( + def create_share( session, username: str, - uri: str, - principal_id: str + dashboard: Dashboard, + principal_id: str, + init_status: DashboardShareStatus = DashboardShareStatus.REQUESTED ) -> DashboardShare: - - dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) share = DashboardShare( owner=username, dashboardUri=dashboard.dashboardUri, SamlGroupName=principal_id, - status=DashboardShareStatus.APPROVED.value, + status=init_status.value, ) session.add(share) - ResourcePolicy.attach_resource_policy( - session=session, - group=principal_id, - permissions=[GET_DASHBOARD], - resource_uri=dashboard.dashboardUri, - resource_type=Dashboard.__name__, - ) return share @staticmethod @@ -377,3 +286,14 @@ def get_dashboard_share_by_uri(session, uri) -> DashboardShare: if not share: raise exceptions.ObjectNotFound('DashboardShare', uri) return share + + @staticmethod + def find_share_for_group(session, dashboard_uri, group) -> DashboardShare: + return ( + session.query(DashboardShare) + .filter( + DashboardShare.dashboardUri == dashboard_uri, + DashboardShare.SamlGroupName == group, + ) + .first() + ) diff --git a/backend/dataall/modules/dashboards/services/dashboard_share_service.py b/backend/dataall/modules/dashboards/services/dashboard_share_service.py new file mode 100644 index 000000000..445a28285 --- /dev/null +++ 
b/backend/dataall/modules/dashboards/services/dashboard_share_service.py @@ -0,0 +1,123 @@ +from dataall.core.context import get_context +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission +from dataall.db.api import ResourcePolicy +from dataall.db.exceptions import InvalidInput, UnauthorizedOperation +from dataall.modules.dashboards import DashboardRepository +from dataall.modules.dashboards.db.models import DashboardShareStatus, Dashboard +from dataall.modules.dashboards.services.dashboard_permissions import SHARE_DASHBOARD, MANAGE_DASHBOARDS, GET_DASHBOARD, \ + CREATE_DASHBOARD + + +class DashboardShareService: + @staticmethod + def _get_dashboard_uri_by_share_uri(session, uri): + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) + return dashboard.dashboardUri + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + def request_dashboard_share(uri: str, principal_id: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + if dashboard.SamlGroupName == principal_id: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', + ) + + share = DashboardRepository.find_share_for_group(session, dashboard.dashboardUri, principal_id) + if not share: + DashboardRepository.create_share(session, context.username, dashboard, principal_id) + else: + DashboardShareService._check_stare_status(share) + + if share.status == DashboardShareStatus.REJECTED.value: + share.status = DashboardShareStatus.REQUESTED.value + + return share + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD, parent_resource=_get_dashboard_uri_by_share_uri) + def approve_dashboard_share(uri: str): + with 
get_context().db_engine.scoped_session() as session: + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + DashboardShareService._change_share_status(share, DashboardShareStatus.APPROVED) + DashboardShareService._create_share_policy(session, share.SamlGroupName, share.dashboardUri) + return share + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD, parent_resource=_get_dashboard_uri_by_share_uri) + def reject_dashboard_share(uri: str): + with get_context().db_engine.scoped_session() as session: + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + DashboardShareService._change_share_status(share, DashboardShareStatus.REJECTED) + + ResourcePolicy.delete_resource_policy( + session=session, + group=share.SamlGroupName, + resource_uri=share.dashboardUri, + resource_type=Dashboard.__name__, + ) + + return share + + @staticmethod + def list_dashboard_shares(uri: str, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return DashboardRepository.paginated_dashboard_shares( + session=session, + username=context.username, + groups=context.groups, + uri=uri, + data=data, + ) + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) + def share_dashboard(uri: str, principal_id: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + share = DashboardRepository.create_share( + session=session, + username=context.username, + dashboard=dashboard, + principal_id=principal_id, + init_status=DashboardShareStatus.APPROVED + ) + + DashboardShareService._create_share_policy(session, principal_id, dashboard.dashboardUri) + return share + + @staticmethod + def _change_share_status(share, status): + DashboardShareService._check_stare_status(share) + if share.status == status.value: + return share + + 
share.status = status.value + + @staticmethod + def _check_stare_status(share): + if share.status not in DashboardShareStatus.__members__: + raise InvalidInput( + 'Share status', + share.status, + str(DashboardShareStatus.__members__), + ) + + @staticmethod + def _create_share_policy(session, principal_id, dashboard_uri): + ResourcePolicy.attach_resource_policy( + session=session, + group=principal_id, + permissions=[GET_DASHBOARD], + resource_uri=dashboard_uri, + resource_type=Dashboard.__name__, + ) From d174b435266119922a963b97043bd2c36be6743b Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 26 May 2023 16:06:21 +0200 Subject: [PATCH 243/346] Added missed handlers --- .../dataall/modules/dataset_sharing/handlers/__init__.py | 3 +++ backend/dataall/modules/datasets/handlers/__init__.py | 7 +++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/handlers/__init__.py b/backend/dataall/modules/dataset_sharing/handlers/__init__.py index e69de29bb..eb86af3de 100644 --- a/backend/dataall/modules/dataset_sharing/handlers/__init__.py +++ b/backend/dataall/modules/dataset_sharing/handlers/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.dataset_sharing.handlers import ecs_share_handler + +__all__ = ["ecs_share_handler"] diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 91d4aeff6..688f78fc3 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -6,7 +6,10 @@ glue_table_sync_handler, glue_table_handler, glue_profiling_handler, - s3_folder_creator_handler + s3_folder_creator_handler, + sns_dataset_handler, + glue_dataset_handler ) -__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "s3_folder_creator_handler"] +__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "s3_folder_creator_handler", 
+ "sns_dataset_handler", "glue_dataset_handler"] From 45194d58c6f6a91f8f601071020620e999cd816e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 13:20:16 +0200 Subject: [PATCH 244/346] Migrated quicksight policy to dashboards --- backend/dataall/api/Objects/__init__.py | 1 - backend/dataall/modules/dashboards/__init__.py | 12 +++++++++++- backend/dataall/modules/dashboards/cdk/__init__.py | 3 +++ .../dashboards/cdk/dashboard_quicksight_policy.py} | 6 +++--- 4 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 backend/dataall/modules/dashboards/cdk/__init__.py rename backend/dataall/{cdkproxy/stacks/policies/quicksight.py => modules/dashboards/cdk/dashboard_quicksight_policy.py} (89%) diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py index 5ac0bccd3..d80dec4c0 100644 --- a/backend/dataall/api/Objects/__init__.py +++ b/backend/dataall/api/Objects/__init__.py @@ -19,7 +19,6 @@ Activity, Group, Principal, - Dashboard, Organization, Stack, Test, diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index d85e9f9fe..3b4b150c3 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -38,7 +38,17 @@ def __init__(self): EnvironmentResourceManager.register(DashboardRepository()) -class DatasetCatalogIndexerModuleInterface(ModuleInterface): +class DashboardCdkModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.dashboards.cdk + + +class DashboardCatalogIndexerModuleInterface(ModuleInterface): @staticmethod def is_supported(modes: Set[ImportMode]) -> bool: diff --git a/backend/dataall/modules/dashboards/cdk/__init__.py b/backend/dataall/modules/dashboards/cdk/__init__.py new file mode 100644 index 000000000..50217cecc --- /dev/null +++ 
b/backend/dataall/modules/dashboards/cdk/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.dashboards.cdk import dashboard_quicksight_policy + +__all__ = ['dashboard_quicksight_policy'] diff --git a/backend/dataall/cdkproxy/stacks/policies/quicksight.py b/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py similarity index 89% rename from backend/dataall/cdkproxy/stacks/policies/quicksight.py rename to backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py index e67b3436c..36f22748d 100644 --- a/backend/dataall/cdkproxy/stacks/policies/quicksight.py +++ b/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py @@ -1,12 +1,12 @@ from aws_cdk import aws_iam as iam -from dataall.db import permissions -from .service_policy import ServicePolicy +from dataall.cdkproxy.stacks.policies.service_policy import ServicePolicy +from dataall.modules.dashboards.services.dashboard_permissions import CREATE_DASHBOARD class QuickSight(ServicePolicy): def get_statements(self, group_permissions, **kwargs): - if permissions.CREATE_DASHBOARD not in group_permissions: + if CREATE_DASHBOARD not in group_permissions: return [] return [ From 27ccfe360e588ae37bb6fa590dc38cf3ce04c398 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 13:40:47 +0200 Subject: [PATCH 245/346] Moved dashboard API to the dashboard module --- .../dataall/api/Objects/Tenant/mutations.py | 9 -- backend/dataall/api/Objects/Tenant/queries.py | 32 +----- .../dataall/api/Objects/Tenant/resolvers.py | 83 -------------- .../modules/dashboards/api/mutations.py | 9 ++ .../dataall/modules/dashboards/api/queries.py | 29 +++++ .../modules/dashboards/api/resolvers.py | 102 +++++++++++++++++- 6 files changed, 139 insertions(+), 125 deletions(-) diff --git a/backend/dataall/api/Objects/Tenant/mutations.py b/backend/dataall/api/Objects/Tenant/mutations.py index 7f57a5050..f01033c09 100644 --- a/backend/dataall/api/Objects/Tenant/mutations.py +++ 
b/backend/dataall/api/Objects/Tenant/mutations.py @@ -13,15 +13,6 @@ resolver=update_group_permissions, ) -createQuicksightDataSourceSet = gql.MutationField( - name='createQuicksightDataSourceSet', - args=[ - gql.Argument(name='vpcConnectionId', type=gql.NonNullableType(gql.String)) - ], - type=gql.String, - resolver=create_quicksight_data_source_set, -) - updateSSMParameter = gql.MutationField( name='updateSSMParameter', args=[ diff --git a/backend/dataall/api/Objects/Tenant/queries.py b/backend/dataall/api/Objects/Tenant/queries.py index 62cac727f..8b4218443 100644 --- a/backend/dataall/api/Objects/Tenant/queries.py +++ b/backend/dataall/api/Objects/Tenant/queries.py @@ -15,34 +15,4 @@ ], type=gql.Ref('GroupSearchResult'), resolver=list_tenant_groups, -) - -getMonitoringDashboardId = gql.QueryField( - name='getMonitoringDashboardId', - type=gql.String, - resolver=get_monitoring_dashboard_id, -) - -getMonitoringVpcConnectionId = gql.QueryField( - name='getMonitoringVPCConnectionId', - type=gql.String, - resolver=get_monitoring_vpc_connection_id, -) - -getPlatformAuthorSession = gql.QueryField( - name='getPlatformAuthorSession', - args=[ - gql.Argument(name='awsAccount', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_author_session, -) - -getPlatformReaderSession = gql.QueryField( - name='getPlatformReaderSession', - args=[ - gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_reader_session, -) +) \ No newline at end of file diff --git a/backend/dataall/api/Objects/Tenant/resolvers.py b/backend/dataall/api/Objects/Tenant/resolvers.py index 8bc57be62..5cdba0dee 100644 --- a/backend/dataall/api/Objects/Tenant/resolvers.py +++ b/backend/dataall/api/Objects/Tenant/resolvers.py @@ -47,86 +47,3 @@ def update_ssm_parameter(context, source, name: str = None, value: str = None): print(name) response = 
ParameterStoreManager.update_parameter(AwsAccountId=current_account, region=region, parameter_name=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/{name}', parameter_value=value) return response - - -def get_monitoring_dashboard_id(context, source): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - dashboard_id = ParameterStoreManager.get_parameter_value(AwsAccountId=current_account, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId') - if not dashboard_id: - raise exceptions.AWSResourceNotFound( - action='GET_DASHBOARD_ID', - message='Dashboard Id could not be found on AWS Parameter Store', - ) - return dashboard_id - - -def get_monitoring_vpc_connection_id(context, source): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - vpc_connection_id = ParameterStoreManager.get_parameter_value(AwsAccountId=current_account, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId') - if not vpc_connection_id: - raise exceptions.AWSResourceNotFound( - action='GET_VPC_CONNECTION_ID', - message='Dashboard Id could not be found on AWS Parameter Store', - ) - return vpc_connection_id - - -def create_quicksight_data_source_set(context, source, vpcConnectionId: str = None): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - user = Quicksight.register_user_in_group(AwsAccountId=current_account, UserName=context.username, GroupName='dataall', UserRole='AUTHOR') - - datasourceId = Quicksight.create_data_source_vpc(AwsAccountId=current_account, region=region, UserName=context.username, vpcConnectionId=vpcConnectionId) - # Data sets are not created programmatically. Too much overhead for the value added. 
However, an example API is provided: - # datasets = Quicksight.create_data_set_from_source(AwsAccountId=current_account, region=region, UserName='dataallTenantUser', dataSourceId=datasourceId, tablesToImport=['organization', 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object']) - - return datasourceId - - -def get_quicksight_author_session(context, source, awsAccount: str = None): - with context.engine.scoped_session() as session: - admin = db.api.TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - raise db.exceptions.TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - region = os.getenv('AWS_REGION', 'eu-west-1') - - url = Quicksight.get_author_session( - AwsAccountId=awsAccount, - region=region, - UserName=context.username, - UserRole='AUTHOR', - ) - - return url - - -def get_quicksight_reader_session(context, source, dashboardId: str = None): - with context.engine.scoped_session() as session: - admin = db.api.TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - raise db.exceptions.TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - - region = os.getenv('AWS_REGION', 'eu-west-1') - current_account = SessionHelper.get_account() - - url = Quicksight.get_reader_session( - AwsAccountId=current_account, - region=region, - UserName=context.username, - UserRole='READER', - DashboardId=dashboardId - ) - - return url diff --git a/backend/dataall/modules/dashboards/api/mutations.py b/backend/dataall/modules/dashboards/api/mutations.py index 5e7072d65..3e92b1995 100644 --- a/backend/dataall/modules/dashboards/api/mutations.py +++ b/backend/dataall/modules/dashboards/api/mutations.py @@ -70,3 +70,12 @@ ], resolver=reject_dashboard_share, ) + +createQuicksightDataSourceSet = gql.MutationField( + name='createQuicksightDataSourceSet', + args=[ + gql.Argument(name='vpcConnectionId', 
type=gql.NonNullableType(gql.String)) + ], + type=gql.String, + resolver=create_quicksight_data_source_set, +) diff --git a/backend/dataall/modules/dashboards/api/queries.py b/backend/dataall/modules/dashboards/api/queries.py index c690d52ce..9dae022bb 100644 --- a/backend/dataall/modules/dashboards/api/queries.py +++ b/backend/dataall/modules/dashboards/api/queries.py @@ -15,6 +15,35 @@ resolver=get_dashboard, ) +getMonitoringDashboardId = gql.QueryField( + name='getMonitoringDashboardId', + type=gql.String, + resolver=get_monitoring_dashboard_id, +) + +getMonitoringVpcConnectionId = gql.QueryField( + name='getMonitoringVPCConnectionId', + type=gql.String, + resolver=get_monitoring_vpc_connection_id, +) + +getPlatformAuthorSession = gql.QueryField( + name='getPlatformAuthorSession', + args=[ + gql.Argument(name='awsAccount', type=gql.NonNullableType(gql.String)), + ], + type=gql.String, + resolver=get_quicksight_author_session, +) + +getPlatformReaderSession = gql.QueryField( + name='getPlatformReaderSession', + args=[ + gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), + ], + type=gql.String, + resolver=get_quicksight_reader_session, +) getAuthorSession = gql.QueryField( name='getAuthorSession', diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index eb4a300eb..387530d95 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -4,9 +4,10 @@ from dataall.api.context import Context from dataall.aws.handlers.quicksight import Quicksight from dataall.aws.handlers.parameter_store import ParameterStoreManager +from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from dataall.db.api import ResourcePolicy, Glossary, Vote -from dataall.db.exceptions import RequiredParameter +from dataall.db.api import ResourcePolicy, Glossary, Vote, TenantPolicy +from dataall.db.exceptions import 
RequiredParameter, AWSResourceNotFound, TenantUnauthorized from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD @@ -270,3 +271,100 @@ def resolve_upvotes(context: Context, source: Dashboard, **kwargs): return Vote.count_upvotes( session, None, None, source.dashboardUri, data={'targetType': 'dashboard'} ) + + +def get_monitoring_dashboard_id(context, source): + current_account = SessionHelper.get_account() + region = os.getenv('AWS_REGION', 'eu-west-1') + dashboard_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=region, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId' + ) + + if not dashboard_id: + raise AWSResourceNotFound( + action='GET_DASHBOARD_ID', + message='Dashboard Id could not be found on AWS Parameter Store', + ) + return dashboard_id + + +def get_monitoring_vpc_connection_id(context, source): + current_account = SessionHelper.get_account() + region = os.getenv('AWS_REGION', 'eu-west-1') + vpc_connection_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=region, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId' + ) + + if not vpc_connection_id: + raise AWSResourceNotFound( + action='GET_VPC_CONNECTION_ID', + message='VPC Connection Id could not be found on AWS Parameter Store', + ) + return vpc_connection_id + + +def create_quicksight_data_source_set(context, source, vpcConnectionId: str = None): + current_account = SessionHelper.get_account() + region = os.getenv('AWS_REGION', 'eu-west-1') + user = Quicksight.register_user_in_group( + AwsAccountId=current_account, + UserName=context.username, + GroupName='dataall', + UserRole='AUTHOR') + + datasourceId = 
Quicksight.create_data_source_vpc(AwsAccountId=current_account, region=region, UserName=context.username, vpcConnectionId=vpcConnectionId) + # Data sets are not created programmatically. Too much overhead for the value added. However, an example API is provided: + # datasets = Quicksight.create_data_set_from_source(AwsAccountId=current_account, region=region, UserName='dataallTenantUser', dataSourceId=datasourceId, tablesToImport=['organization', 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object']) + + return datasourceId + + +def get_quicksight_author_session(context, source, awsAccount: str = None): + with context.engine.scoped_session() as session: + admin = TenantPolicy.is_tenant_admin(context.groups) + + if not admin: + raise TenantUnauthorized( + username=context.username, + action=db.permissions.TENANT_ALL, + tenant_name=context.username, + ) + region = os.getenv('AWS_REGION', 'eu-west-1') + + url = Quicksight.get_author_session( + AwsAccountId=awsAccount, + region=region, + UserName=context.username, + UserRole='AUTHOR', + ) + + return url + + +def get_quicksight_reader_session(context, source, dashboardId: str = None): + with context.engine.scoped_session() as session: + admin = db.api.TenantPolicy.is_tenant_admin(context.groups) + + if not admin: + raise TenantUnauthorized( + username=context.username, + action=db.permissions.TENANT_ALL, + tenant_name=context.username, + ) + + region = os.getenv('AWS_REGION', 'eu-west-1') + current_account = SessionHelper.get_account() + + url = Quicksight.get_reader_session( + AwsAccountId=current_account, + region=region, + UserName=context.username, + UserRole='READER', + DashboardId=dashboardId + ) + + return url From 4fb4a11ca90036b7ba538c08020dcf8e39e1a0c4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 15:05:57 +0200 Subject: [PATCH 246/346] Introduced DashboardService --- .../modules/dashboards/api/resolvers.py | 72 +-------- .../dashboards/db/dashboard_repository.py | 
124 +-------------- .../dashboards/services/dashboard_service.py | 143 ++++++++++++++++++ 3 files changed, 154 insertions(+), 185 deletions(-) create mode 100644 backend/dataall/modules/dashboards/services/dashboard_service.py diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 387530d95..f9f9ef0b9 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -11,6 +11,7 @@ from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD +from dataall.modules.dashboards.services.dashboard_service import DashboardService from dataall.modules.dashboards.services.dashboard_share_service import DashboardShareService from dataall.utils import Parameter from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer @@ -121,68 +122,11 @@ def import_dashboard(context: Context, source, input: dict = None): if not input.get('label'): raise RequiredParameter('label') - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=input['environmentUri'], - permission_name=CREATE_DASHBOARD, - ) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, input['environmentUri'] - ) - - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=CREATE_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - - can_import = Quicksight.can_import_dashboard( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=input.get('dashboardId'), - ) - - if not can_import: - raise db.exceptions.UnauthorizedOperation( 
- action=CREATE_DASHBOARD, - message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', - ) - - input['environment'] = env - dashboard = DashboardRepository.import_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=env.environmentUri, - data=input, - ) - - DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) - - return dashboard + return DashboardService.import_dashboard(uri=input['environmentUri'], data=input) def update_dashboard(context, source, input: dict = None): - with context.engine.scoped_session() as session: - dashboard = DashboardRepository.get_dashboard_by_uri( - session, input['dashboardUri'] - ) - DashboardRepository.update_dashboard( - session=session, - username=context.username, - uri=dashboard.dashboardUri, - dashboard=dashboard, - data=input - ) - - DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) - - return dashboard - + return DashboardService.update_dashboard(uri=input['dashboardUri'], data=input) def list_dashboards(context: Context, source, filter: dict = None): if not filter: @@ -197,8 +141,7 @@ def list_dashboards(context: Context, source, filter: dict = None): def get_dashboard(context: Context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - return DashboardRepository.get_dashboard(session=session, uri=dashboardUri) + return DashboardService.get_dashboard(uri=dashboardUri) def resolve_user_role(context: Context, source: Dashboard): @@ -253,10 +196,7 @@ def share_dashboard( def delete_dashboard(context: Context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - DashboardRepository.delete_dashboard(session=session, uri=dashboardUri) - DashboardIndexer.delete_doc(doc_id=dashboardUri) - return True + return DashboardService.delete_dashboard(uri=dashboardUri) def resolve_glossary_terms(context: Context, source: Dashboard, **kwargs): @@ -347,7 
+287,7 @@ def get_quicksight_author_session(context, source, awsAccount: str = None): def get_quicksight_reader_session(context, source, dashboardId: str = None): with context.engine.scoped_session() as session: - admin = db.api.TenantPolicy.is_tenant_admin(context.groups) + admin = TenantPolicy.is_tenant_admin(context.groups) if not admin: raise TenantUnauthorized( diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index d25dc65c4..f59235cde 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -4,17 +4,9 @@ from sqlalchemy.orm import Query from dataall.core.group.services.group_resource_manager import EnvironmentResource -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission -from dataall.db import models, exceptions, paginate -from dataall.db.api import ( - Environment, - ResourcePolicy, - Glossary, - Vote, -) +from dataall.db import exceptions, paginate +from dataall.db.models import Environment from dataall.modules.dashboards.db.models import DashboardShare, DashboardShareStatus, Dashboard -from dataall.modules.dashboards.services.dashboard_permissions import MANAGE_DASHBOARDS, CREATE_DASHBOARD, \ - DASHBOARD_ALL, GET_DASHBOARD, SHARE_DASHBOARD, UPDATE_DASHBOARD logger = logging.getLogger(__name__) @@ -34,27 +26,7 @@ def count_resources(session, environment, group_uri) -> int: ) @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(CREATE_DASHBOARD) - def import_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - ) -> Dashboard: - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlGroupName'], - permission_name=CREATE_DASHBOARD, - ) - - env: models.Environment = data.get( - 'environment', 
Environment.get_environment_by_uri(session, uri) - ) + def create_dashboard(session, env: Environment, username: str, data: dict = None) -> Dashboard: dashboard: Dashboard = Dashboard( label=data.get('label', 'untitled'), environmentUri=data.get('environmentUri'), @@ -69,55 +41,8 @@ def import_dashboard( ) session.add(dashboard) session.commit() - - activity = models.Activity( - action='DASHBOARD:CREATE', - label='DASHBOARD:CREATE', - owner=username, - summary=f'{username} created dashboard {dashboard.label} in {env.label}', - targetUri=dashboard.dashboardUri, - targetType='dashboard', - ) - session.add(activity) - - DashboardRepository.set_dashboard_resource_policy( - session, env, dashboard, data['SamlGroupName'] - ) - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - username, - dashboard.dashboardUri, - 'Dashboard', - data.get('terms', []), - ) return dashboard - @staticmethod - def set_dashboard_resource_policy(session, environment, dashboard, group): - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=DASHBOARD_ALL, - resource_uri=dashboard.dashboardUri, - resource_type=Dashboard.__name__, - ) - if environment.SamlGroupName != dashboard.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=DASHBOARD_ALL, - resource_uri=dashboard.dashboardUri, - resource_type=Dashboard.__name__, - ) - - @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(GET_DASHBOARD) - def get_dashboard(session, uri: str) -> Dashboard: - return DashboardRepository.get_dashboard_by_uri(session, uri) - @staticmethod def get_dashboard_by_uri(session, uri) -> Dashboard: dashboard: Dashboard = session.query(Dashboard).get(uri) @@ -220,47 +145,8 @@ def paginated_dashboard_shares( ).to_dict() @staticmethod - @has_tenant_permission(MANAGE_DASHBOARDS) - @has_resource_permission(UPDATE_DASHBOARD) - def update_dashboard( - session, - 
username: str, - uri: str, - dashboard: Dashboard, - data: dict = None, - ) -> Dashboard: - for k in data.keys(): - setattr(dashboard, k, data.get(k)) - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - username, - dashboard.dashboardUri, - 'Dashboard', - data.get('terms', []), - ) - environment: models.Environment = Environment.get_environment_by_uri( - session, dashboard.environmentUri - ) - DashboardRepository.set_dashboard_resource_policy( - session, environment, dashboard, dashboard.SamlGroupName - ) - return dashboard - - @staticmethod - def delete_dashboard(session, uri) -> bool: - # TODO THERE WAS NO PERMISSION CHECK - dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) - session.delete(dashboard) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dashboard.SamlGroupName - ) - Glossary.delete_glossary_terms_links( - session, target_uri=dashboard.dashboardUri, target_type='Dashboard' - ) - Vote.delete_votes(session, dashboard.dashboardUri, 'dashboard') - session.commit() + def delete_dashboard(session, dashboard_uri) -> bool: + session.delete(dashboard_uri) return True @staticmethod diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py new file mode 100644 index 000000000..10416b75b --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_service.py @@ -0,0 +1,143 @@ +from dataall.aws.handlers.quicksight import Quicksight +from dataall.core.context import get_context +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission, has_group_permission +from dataall.db.api import ResourcePolicy, Glossary, Vote, Environment +from dataall.db.exceptions import UnauthorizedOperation +from dataall.db.models import Activity +from dataall.modules.dashboards import DashboardRepository, Dashboard +from dataall.modules.dashboards.indexers.dashboard_indexer 
import DashboardIndexer +from dataall.modules.dashboards.services.dashboard_permissions import MANAGE_DASHBOARDS, GET_DASHBOARD, \ + UPDATE_DASHBOARD, CREATE_DASHBOARD, DASHBOARD_ALL + + +class DashboardService: + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(GET_DASHBOARD) + def get_dashboard(uri: str) -> Dashboard: + with get_context().db_engine.scoped_session() as session: + return DashboardRepository.get_dashboard_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(CREATE_DASHBOARD) + def import_dashboard(uri: str, data: dict = None) -> Dashboard: + context = get_context() + with context.db_engine.scoped_session() as session: + env = Environment.get_environment_by_uri(session, data['environmentUri']) + + if not env.dashboardsEnabled: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'Dashboards feature is disabled for the environment {env.label}', + ) + + can_import = Quicksight.can_import_dashboard( + AwsAccountId=env.AwsAccountId, + region=env.region, + UserName=context.username, + DashboardId=data.get('dashboardId'), + ) + + if not can_import: + raise db.exceptions.UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', + ) + Environment.check_group_environment_permission( + session=session, + username=username, + groups=groups, + uri=uri, + group=data['SamlGroupName'], + permission_name=CREATE_DASHBOARD, + ) + + env = data.get( + 'environment', Environment.get_environment_by_uri(session, uri) + ) + + dashboard = DashboardRepository.create_dashboard(session, env, context.username, data) + + activity = Activity( + action='DASHBOARD:CREATE', + label='DASHBOARD:CREATE', + owner=username, + summary=f'{username} created dashboard {dashboard.label} in {env.label}', + targetUri=dashboard.dashboardUri, + targetType='dashboard', + ) + 
session.add(activity) + + DashboardService._set_dashboard_resource_policy( + session, env, dashboard, data['SamlGroupName'] + ) + + DashboardService._update_glossary(session, dashboard, data) + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) + return dashboard + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(UPDATE_DASHBOARD) + def update_dashboard(uri: str, data: dict = None) -> Dashboard: + with get_context().db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + for k in data.keys(): + setattr(dashboard, k, data.get(k)) + + DashboardService._update_glossary(session, dashboard, data) + environment = Environment.get_environment_by_uri(session, dashboard.environmentUri) + DashboardService._set_dashboard_resource_policy( + session, environment, dashboard, dashboard.SamlGroupName + ) + + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) + return dashboard + + @staticmethod + def delete_dashboard(uri) -> bool: + # TODO THERE WAS NO PERMISSION CHECK + with get_context().db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + DashboardRepository.delete_dashboard(session, dashboard.dashboardUri) + + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dashboard.SamlGroupName + ) + Glossary.delete_glossary_terms_links( + session, target_uri=dashboard.dashboardUri, target_type='Dashboard' + ) + Vote.delete_votes(session, dashboard.dashboardUri, 'dashboard') + + DashboardIndexer.delete_doc(doc_id=uri) + return True + + @staticmethod + def _set_dashboard_resource_policy(session, environment, dashboard, group): + DashboardService._attach_dashboard_policy(session, group, dashboard) + if environment.SamlGroupName != dashboard.SamlGroupName: + DashboardService._attach_dashboard_policy(session, environment.SamlGroupName, dashboard) + + @staticmethod + def 
_attach_dashboard_policy(session, group: str, dashboard: Dashboard): + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=DASHBOARD_ALL, + resource_uri=dashboard.dashboardUri, + resource_type=Dashboard.__name__, + ) + + @staticmethod + def _update_glossary(session, dashboard, data): + context = get_context() + if 'terms' in data: + Glossary.set_glossary_terms_links( + session, + context.username, + dashboard.dashboardUri, + 'Dashboard', + data['terms'], + ) + From bfca075099864e09f68253910cbc9ef40a7126d9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 15:07:45 +0200 Subject: [PATCH 247/346] Renamed policy --- .../modules/dashboards/cdk/dashboard_quicksight_policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py b/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py index 36f22748d..fb237a22d 100644 --- a/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py +++ b/backend/dataall/modules/dashboards/cdk/dashboard_quicksight_policy.py @@ -4,7 +4,7 @@ from dataall.modules.dashboards.services.dashboard_permissions import CREATE_DASHBOARD -class QuickSight(ServicePolicy): +class QuickSightPolicy(ServicePolicy): def get_statements(self, group_permissions, **kwargs): if CREATE_DASHBOARD not in group_permissions: return [] From 130898273a5b64ceaa798c976b85df8f286b1f7c Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 16:10:43 +0200 Subject: [PATCH 248/346] Introduced DashboardQuicksightService --- .../modules/dashboards/api/resolvers.py | 199 ++--------------- .../services/dashboard_quicksight_service.py | 206 ++++++++++++++++++ .../dashboards/services/dashboard_service.py | 3 +- 3 files changed, 224 insertions(+), 184 deletions(-) create mode 100644 backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py diff --git 
a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index f9f9ef0b9..fbca97a71 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -1,113 +1,13 @@ -import os -from dataall import db from dataall.api.constants import DashboardRole from dataall.api.context import Context -from dataall.aws.handlers.quicksight import Quicksight -from dataall.aws.handlers.parameter_store import ParameterStoreManager -from dataall.aws.handlers.sts import SessionHelper from dataall.db import models -from dataall.db.api import ResourcePolicy, Glossary, Vote, TenantPolicy -from dataall.db.exceptions import RequiredParameter, AWSResourceNotFound, TenantUnauthorized +from dataall.db.api import Glossary, Vote +from dataall.db.exceptions import RequiredParameter from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard -from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD +from dataall.modules.dashboards.services.dashboard_quicksight_service import DashboardQuicksightService from dataall.modules.dashboards.services.dashboard_service import DashboardService from dataall.modules.dashboards.services.dashboard_share_service import DashboardShareService -from dataall.utils import Parameter -from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer - -param_store = Parameter() -ENVNAME = os.getenv("envname", "local") -DOMAIN_NAME = param_store.get_parameter(env=ENVNAME, path="frontend/custom_domain_name") if ENVNAME not in ["local", "dkrcompose"] else None -DOMAIN_URL = f"https://{DOMAIN_NAME}" if DOMAIN_NAME else "http://localhost:8080" - - -def get_quicksight_reader_url(context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - dash: Dashboard = 
session.query(Dashboard).get(dashboardUri) - env: models.Environment = session.query(models.Environment).get( - dash.environmentUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=dash.dashboardUri, - permission_name=GET_DASHBOARD, - ) - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=GET_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - if dash.SamlGroupName in context.groups: - url = Quicksight.get_reader_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=dash.DashboardId, - domain_name=DOMAIN_URL, - ) - else: - shared_groups = DashboardRepository.query_all_user_groups_shareddashboard( - session=session, - groups=context.groups, - uri=dashboardUri - ) - if not shared_groups: - raise db.exceptions.UnauthorizedOperation( - action=GET_DASHBOARD, - message='Dashboard has not been shared with your Teams', - ) - - session_type = ParameterStoreManager.get_parameter_value( - parameter_path=f"/dataall/{os.getenv('envname', 'local')}/quicksight/sharedDashboardsSessions" - ) - - if session_type == 'reader': - url = Quicksight.get_shared_reader_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - GroupName=shared_groups[0], - DashboardId=dash.DashboardId, - ) - else: - url = Quicksight.get_anonymous_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=dash.DashboardId, - ) - return url - - -def get_quicksight_designer_url( - context, source, environmentUri: str = None, dashboardUri: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=CREATE_DASHBOARD, - ) - env: 
models.Environment = session.query(models.Environment).get(environmentUri) - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=CREATE_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - - url = Quicksight.get_author_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - UserRole='AUTHOR', - ) - - return url def import_dashboard(context: Context, source, input: dict = None): @@ -214,97 +114,30 @@ def resolve_upvotes(context: Context, source: Dashboard, **kwargs): def get_monitoring_dashboard_id(context, source): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - dashboard_id = ParameterStoreManager.get_parameter_value( - AwsAccountId=current_account, - region=region, - parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId' - ) - - if not dashboard_id: - raise AWSResourceNotFound( - action='GET_DASHBOARD_ID', - message='Dashboard Id could not be found on AWS Parameter Store', - ) - return dashboard_id + return DashboardQuicksightService.get_monitoring_dashboard_id() def get_monitoring_vpc_connection_id(context, source): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - vpc_connection_id = ParameterStoreManager.get_parameter_value( - AwsAccountId=current_account, - region=region, - parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId' - ) - - if not vpc_connection_id: - raise AWSResourceNotFound( - action='GET_VPC_CONNECTION_ID', - message='VPC Connection Id could not be found on AWS Parameter Store', - ) - return vpc_connection_id + return DashboardQuicksightService.get_monitoring_vpc_connection_id() def create_quicksight_data_source_set(context, source, vpcConnectionId: str = None): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - 
user = Quicksight.register_user_in_group( - AwsAccountId=current_account, - UserName=context.username, - GroupName='dataall', - UserRole='AUTHOR') + return DashboardQuicksightService.create_quicksight_data_source_set(vpcConnectionId) - datasourceId = Quicksight.create_data_source_vpc(AwsAccountId=current_account, region=region, UserName=context.username, vpcConnectionId=vpcConnectionId) - # Data sets are not created programmatically. Too much overhead for the value added. However, an example API is provided: - # datasets = Quicksight.create_data_set_from_source(AwsAccountId=current_account, region=region, UserName='dataallTenantUser', dataSourceId=datasourceId, tablesToImport=['organization', 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object']) - return datasourceId +def get_quicksight_author_session(context, source, awsAccount: str = None): + return DashboardQuicksightService.get_quicksight_author_session(awsAccount) -def get_quicksight_author_session(context, source, awsAccount: str = None): - with context.engine.scoped_session() as session: - admin = TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - raise TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - region = os.getenv('AWS_REGION', 'eu-west-1') - - url = Quicksight.get_author_session( - AwsAccountId=awsAccount, - region=region, - UserName=context.username, - UserRole='AUTHOR', - ) +def get_quicksight_reader_session(context, source, dashboardId: str = None): + return DashboardQuicksightService.get_quicksight_reader_session(dashboardId) - return url +def get_quicksight_reader_url(context, source, dashboardUri: str = None): + return DashboardQuicksightService.get_quicksight_reader_url(uri=dashboardUri) -def get_quicksight_reader_session(context, source, dashboardId: str = None): - with context.engine.scoped_session() as session: - admin = TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - 
raise TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - - region = os.getenv('AWS_REGION', 'eu-west-1') - current_account = SessionHelper.get_account() - - url = Quicksight.get_reader_session( - AwsAccountId=current_account, - region=region, - UserName=context.username, - UserRole='READER', - DashboardId=dashboardId - ) - return url +def get_quicksight_designer_url( + context, source, environmentUri: str = None, dashboardUri: str = None +): + return DashboardQuicksightService.get_quicksight_designer_url(environmentUri) diff --git a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py new file mode 100644 index 000000000..f271f48d9 --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py @@ -0,0 +1,206 @@ +import os + +from dataall.aws.handlers.parameter_store import ParameterStoreManager +from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.sts import SessionHelper +from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission +from dataall.db.api import Environment, TenantPolicy +from dataall.db.exceptions import UnauthorizedOperation, TenantUnauthorized, AWSResourceNotFound +from dataall.db.permissions import TENANT_ALL +from dataall.modules.dashboards import DashboardRepository, Dashboard +from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD +from dataall.utils import Parameter + + +class DashboardQuicksightService: + _PARAM_STORE = Parameter() + _REGION = os.getenv('AWS_REGION', 'eu-west-1') + + @staticmethod + def _get_env_uri(session, uri): + dashboard: Dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + return dashboard.environmentUri + + @staticmethod + @has_resource_permission(GET_DASHBOARD, 
parent_resource=_get_env_uri) + def get_quicksight_reader_url(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dash: Dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + env = Environment.get_environment_by_uri(session, dash.environmentUri) + if not env.dashboardsEnabled: + raise UnauthorizedOperation( + action=GET_DASHBOARD, + message=f'Dashboards feature is disabled for the environment {env.label}', + ) + + if dash.SamlGroupName in context.groups: + url = Quicksight.get_reader_session( + AwsAccountId=env.AwsAccountId, + region=env.region, + UserName=context.username, + DashboardId=dash.DashboardId, + domain_name=DashboardQuicksightService._get_domain_url(), + ) + + else: + shared_groups = DashboardRepository.query_all_user_groups_shareddashboard( + session=session, + groups=context.groups, + uri=uri + ) + if not shared_groups: + raise UnauthorizedOperation( + action=GET_DASHBOARD, + message='Dashboard has not been shared with your Teams', + ) + + session_type = ParameterStoreManager.get_parameter_value( + parameter_path=f"/dataall/{os.getenv('envname', 'local')}/quicksight/sharedDashboardsSessions" + ) + + if session_type == 'reader': + url = Quicksight.get_shared_reader_session( + AwsAccountId=env.AwsAccountId, + region=env.region, + UserName=context.username, + GroupName=shared_groups[0], + DashboardId=dash.DashboardId, + ) + else: + url = Quicksight.get_anonymous_session( + AwsAccountId=env.AwsAccountId, + region=env.region, + UserName=context.username, + DashboardId=dash.DashboardId, + ) + return url + + @staticmethod + @has_resource_permission(CREATE_DASHBOARD) + def get_quicksight_designer_url(uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + env = Environment.get_environment_by_uri(session, uri) + if not env.dashboardsEnabled: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'Dashboards feature is disabled for the environment {env.label}', 
+ ) + + url = Quicksight.get_author_session( + AwsAccountId=env.AwsAccountId, + region=env.region, + UserName=context.username, + UserRole='AUTHOR', + ) + + return url + + @staticmethod + def get_monitoring_dashboard_id(): + current_account = SessionHelper.get_account() + dashboard_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId' + ) + + if not dashboard_id: + raise AWSResourceNotFound( + action='GET_DASHBOARD_ID', + message='Dashboard Id could not be found on AWS Parameter Store', + ) + return dashboard_id + + @staticmethod + def get_monitoring_vpc_connection_id(): + current_account = SessionHelper.get_account() + vpc_connection_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId' + ) + + if not vpc_connection_id: + raise AWSResourceNotFound( + action='GET_VPC_CONNECTION_ID', + message='VPC Connection Id could not be found on AWS Parameter Store', + ) + return vpc_connection_id + + @staticmethod + def create_quicksight_data_source_set(vpc_connection_id): + context = get_context() + current_account = SessionHelper.get_account() + Quicksight.register_user_in_group( + AwsAccountId=current_account, + UserName=context.username, + GroupName='dataall', + UserRole='AUTHOR') + + datasource_id = Quicksight.create_data_source_vpc( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + UserName=context.username, + vpcConnectionId=vpc_connection_id + ) + # Data sets are not created programmatically. Too much overhead for the value added. 
+ # However, an example API is provided: datasets = Quicksight.create_data_set_from_source( + # AwsAccountId=current_account, region=region, UserName='dataallTenantUser', + # dataSourceId=datasourceId, tablesToImport=['organization', + # 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object'] + # ) + + return datasource_id + + @staticmethod + def get_quicksight_author_session(aws_account): + DashboardQuicksightService._check_user_must_be_admin() + + return Quicksight.get_author_session( + AwsAccountId=aws_account, + region=DashboardQuicksightService._REGION, + UserName=get_context().username, + UserRole='AUTHOR', + ) + + @staticmethod + def get_quicksight_reader_session(dashboard_uri): + DashboardQuicksightService._check_user_must_be_admin() + current_account = SessionHelper.get_account() + + return Quicksight.get_reader_session( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + UserName=get_context().username, + UserRole='READER', + DashboardId=dashboard_uri + ) + + @staticmethod + def _check_user_must_be_admin(): + context = get_context() + admin = TenantPolicy.is_tenant_admin(context.groups) + + if not admin: + raise TenantUnauthorized( + username=context.username, + action=TENANT_ALL, + tenant_name=context.username, + ) + + @staticmethod + def _get_domain_url(): + envname = os.getenv("envname", "local") + if envname in ["local", "dkrcompose"]: + return "http://localhost:8080" + + domain_name = DashboardQuicksightService._PARAM_STORE.get_parameter( + env=envname, + path="frontend/custom_domain_name" + ) + + return f"https://{domain_name}" + diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py index 10416b75b..83dc21e00 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_service.py @@ -1,6 +1,6 @@ from dataall.aws.handlers.quicksight import 
Quicksight from dataall.core.context import get_context -from dataall.core.permission_checker import has_tenant_permission, has_resource_permission, has_group_permission +from dataall.core.permission_checker import has_tenant_permission, has_resource_permission from dataall.db.api import ResourcePolicy, Glossary, Vote, Environment from dataall.db.exceptions import UnauthorizedOperation from dataall.db.models import Activity @@ -11,6 +11,7 @@ class DashboardService: + """Service that serves request related to dashboard""" @staticmethod @has_tenant_permission(MANAGE_DASHBOARDS) @has_resource_permission(GET_DASHBOARD) From 67ce7873d9453f7037178f9375e658de1ef21783 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 16:22:10 +0200 Subject: [PATCH 249/346] Moved dashboard enums --- backend/dataall/api/constants.py | 7 ------- backend/dataall/db/models/Enums.py | 7 ------- backend/dataall/modules/dashboards/api/enums.py | 8 ++++++++ backend/dataall/modules/dashboards/api/resolvers.py | 2 +- 4 files changed, 9 insertions(+), 15 deletions(-) create mode 100644 backend/dataall/modules/dashboards/api/enums.py diff --git a/backend/dataall/api/constants.py b/backend/dataall/api/constants.py index 914663f38..eb5daf3b6 100644 --- a/backend/dataall/api/constants.py +++ b/backend/dataall/api/constants.py @@ -67,13 +67,6 @@ class ProjectMemberRole(GraphQLEnumMapper): NotContributor = '000' -class DashboardRole(GraphQLEnumMapper): - Creator = '999' - Admin = '900' - Shared = '800' - NoPermission = '000' - - class DataPipelineRole(GraphQLEnumMapper): Creator = '999' Admin = '900' diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py index 9e1674dbf..615188147 100644 --- a/backend/dataall/db/models/Enums.py +++ b/backend/dataall/db/models/Enums.py @@ -36,13 +36,6 @@ class ProjectMemberRole(Enum): NotContributor = '000' -class DashboardRole(Enum): - Creator = '999' - Admin = '900' - Shared = '800' - NoPermission = '000' - - class 
DataPipelineRole(Enum): Creator = '999' Admin = '900' diff --git a/backend/dataall/modules/dashboards/api/enums.py b/backend/dataall/modules/dashboards/api/enums.py new file mode 100644 index 000000000..00b72d961 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/enums.py @@ -0,0 +1,8 @@ +from dataall.api.constants import GraphQLEnumMapper + + +class DashboardRole(GraphQLEnumMapper): + Creator = '999' + Admin = '900' + Shared = '800' + NoPermission = '000' diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index fbca97a71..735b94685 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -1,8 +1,8 @@ -from dataall.api.constants import DashboardRole from dataall.api.context import Context from dataall.db import models from dataall.db.api import Glossary, Vote from dataall.db.exceptions import RequiredParameter +from dataall.modules.dashboards.api.enums import DashboardRole from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.dashboards.services.dashboard_quicksight_service import DashboardQuicksightService From bf3d739480cf1b2cecdad82481aa4dde5c78d6c9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 30 May 2023 16:24:31 +0200 Subject: [PATCH 250/346] Removed missed dashboard code --- backend/dataall/tasks/catalog_indexer.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py index 4f291941e..e20697bcd 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer.py @@ -5,9 +5,7 @@ from typing import List from dataall.db import get_engine -from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.loader import load_modules, ImportMode -from 
dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer from dataall.utils.alarm_service import AlarmService root = logging.getLogger() @@ -38,13 +36,6 @@ def index_objects(engine): for indexer in _indexers: indexed_objects_counter += indexer.index(session) - all_dashboards: [Dashboard] = session.query(Dashboard).all() - log.info(f'Found {len(all_dashboards)} dashboards') - dashboard: Dashboard - for dashboard in all_dashboards: - DashboardIndexer.upsert(session=session, dashboard_uri=dashboard.dashboardUri) - indexed_objects_counter = indexed_objects_counter + 1 - log.info(f'Successfully indexed {indexed_objects_counter} objects') return indexed_objects_counter except Exception as e: From e83833bb04afaf74d58022a299c4f01d625a52bd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 31 May 2023 11:02:42 +0200 Subject: [PATCH 251/346] dashboardsEnabled has become a parameter --- .../api/Objects/Environment/input_types.py | 2 - .../dataall/api/Objects/Environment/schema.py | 1 - .../cdkproxy/stacks/policies/__init__.py | 5 +- backend/dataall/db/api/environment.py | 8 +- backend/dataall/db/models/Environment.py | 1 - .../dataall/modules/dashboards/api/schema.py | 1 - .../dashboards/db/dashboard_repository.py | 4 +- .../services/dashboard_quicksight_service.py | 21 ++--- .../dashboards/services/dashboard_service.py | 15 ++-- .../services/dashboard_share_service.py | 2 +- .../share_managers/lf_share_manager.py | 5 +- .../modules/datasets/cdk/dataset_stack.py | 6 +- .../datasets/services/dataset_service.py | 9 +- .../notebooks/services/notebook_service.py | 4 +- ...fc49baecea4_add_enviromental_parameters.py | 10 +++ .../src/api/Environment/createEnvironment.js | 1 - .../src/api/Environment/getEnvironment.js | 2 - .../listOrganizationEnvironments.js | 1 - .../src/api/Environment/updateEnvironment.js | 1 - .../Environments/EnvironmentCreateForm.js | 6 +- .../views/Environments/EnvironmentEditForm.js | 9 +- 
.../views/Environments/EnvironmentFeatures.js | 10 +-- tests/api/conftest.py | 9 +- tests/api/test_environment.py | 82 ++++++++++--------- tests/api/test_organization.py | 7 ++ tests/modules/datasets/tasks/conftest.py | 2 - .../datasets/test_dataset_resource_found.py | 1 - tests/modules/datasets/test_share.py | 2 - 28 files changed, 121 insertions(+), 106 deletions(-) diff --git a/backend/dataall/api/Objects/Environment/input_types.py b/backend/dataall/api/Objects/Environment/input_types.py index 05f890e74..9a1086174 100644 --- a/backend/dataall/api/Objects/Environment/input_types.py +++ b/backend/dataall/api/Objects/Environment/input_types.py @@ -28,7 +28,6 @@ gql.Argument('description', gql.String), gql.Argument('AwsAccountId', gql.NonNullableType(gql.String)), gql.Argument('region', gql.NonNullableType(gql.String)), - gql.Argument('dashboardsEnabled', type=gql.Boolean), gql.Argument('mlStudiosEnabled', type=gql.Boolean), gql.Argument('pipelinesEnabled', type=gql.Boolean), gql.Argument('warehousesEnabled', type=gql.Boolean), @@ -52,7 +51,6 @@ gql.Argument('vpcId', gql.String), gql.Argument('privateSubnetIds', gql.ArrayType(gql.String)), gql.Argument('publicSubnetIds', gql.ArrayType(gql.String)), - gql.Argument('dashboardsEnabled', type=gql.Boolean), gql.Argument('mlStudiosEnabled', type=gql.Boolean), gql.Argument('pipelinesEnabled', type=gql.Boolean), gql.Argument('warehousesEnabled', type=gql.Boolean), diff --git a/backend/dataall/api/Objects/Environment/schema.py b/backend/dataall/api/Objects/Environment/schema.py index bb5749065..dba9d20ad 100644 --- a/backend/dataall/api/Objects/Environment/schema.py +++ b/backend/dataall/api/Objects/Environment/schema.py @@ -83,7 +83,6 @@ resolver=resolve_user_role, ), gql.Field('validated', type=gql.Boolean), - gql.Field('dashboardsEnabled', type=gql.Boolean), gql.Field('mlStudiosEnabled', type=gql.Boolean), gql.Field('pipelinesEnabled', type=gql.Boolean), gql.Field('warehousesEnabled', type=gql.Boolean), diff --git 
a/backend/dataall/cdkproxy/stacks/policies/__init__.py b/backend/dataall/cdkproxy/stacks/policies/__init__.py index 4391d0287..dd47a49dd 100644 --- a/backend/dataall/cdkproxy/stacks/policies/__init__.py +++ b/backend/dataall/cdkproxy/stacks/policies/__init__.py @@ -1,8 +1,7 @@ """Contains the code for creating environment policies""" from dataall.cdkproxy.stacks.policies import ( - _lambda, cloudformation, codestar, quicksight, redshift, stepfunctions, data_policy, service_policy + _lambda, cloudformation, codestar, redshift, stepfunctions, data_policy, service_policy ) -__all__ = ["_lambda", "cloudformation", "codestar", "quicksight", - "redshift", "stepfunctions", "data_policy", "service_policy", "mlstudio"] +__all__ = ["_lambda", "cloudformation", "codestar", "redshift", "stepfunctions", "data_policy", "service_policy", "mlstudio"] diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index be33a07c2..464415776 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -57,7 +57,6 @@ def create_environment(session, username, groups, uri, data=None, check_perm=Non ), EnvironmentDefaultIAMRoleArn=f'arn:aws:iam::{data.get("AwsAccountId")}:role/{data.get("EnvironmentDefaultIAMRoleName")}', CDKRoleArn=f"arn:aws:iam::{data.get('AwsAccountId')}:role/{data['cdk_role_name']}", - dashboardsEnabled=data.get('dashboardsEnabled', False), mlStudiosEnabled=data.get('mlStudiosEnabled', True), pipelinesEnabled=data.get('pipelinesEnabled', True), warehousesEnabled=data.get('warehousesEnabled', True), @@ -187,8 +186,6 @@ def update_environment(session, username, groups, uri, data=None, check_perm=Non environment.description = data.get('description', 'No description provided') if data.get('tags'): environment.tags = data.get('tags') - if 'dashboardsEnabled' in data.keys(): - environment.dashboardsEnabled = data.get('dashboardsEnabled') if 'mlStudiosEnabled' in data.keys(): environment.mlStudiosEnabled = 
data.get('mlStudiosEnabled') if 'pipelinesEnabled' in data.keys(): @@ -1024,3 +1021,8 @@ def check_group_environment_permission( @staticmethod def get_environment_parameters(session, env_uri): return EnvironmentParameterRepository(session).get_params(env_uri) + + @staticmethod + def get_boolean_env_param(session, env: models.Environment, param: str) -> bool: + param = EnvironmentParameterRepository(session).get_param(env.environmentUri, param) + return param and param.value.lower() == "true" diff --git a/backend/dataall/db/models/Environment.py b/backend/dataall/db/models/Environment.py index 55246aabd..127e41e6c 100644 --- a/backend/dataall/db/models/Environment.py +++ b/backend/dataall/db/models/Environment.py @@ -24,7 +24,6 @@ class Environment(Resource, Base): EnvironmentDefaultAthenaWorkGroup = Column(String) roleCreated = Column(Boolean, nullable=False, default=False) - dashboardsEnabled = Column(Boolean, default=False) mlStudiosEnabled = Column(Boolean, default=True) pipelinesEnabled = Column(Boolean, default=True) warehousesEnabled = Column(Boolean, default=True) diff --git a/backend/dataall/modules/dashboards/api/schema.py b/backend/dataall/modules/dashboards/api/schema.py index 9ef1682cb..429696ab8 100644 --- a/backend/dataall/modules/dashboards/api/schema.py +++ b/backend/dataall/modules/dashboards/api/schema.py @@ -1,6 +1,5 @@ from dataall.api import gql from dataall.modules.dashboards.api.resolvers import * -from dataall.api.constants import DashboardRole from dataall.api.Objects.Environment.resolvers import resolve_environment diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index f59235cde..9fe040ff6 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -145,8 +145,8 @@ def paginated_dashboard_shares( ).to_dict() @staticmethod - def delete_dashboard(session, 
dashboard_uri) -> bool: - session.delete(dashboard_uri) + def delete_dashboard(session, dashboard) -> bool: + session.delete(dashboard) return True @staticmethod diff --git a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py index f271f48d9..8c3b43c3c 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py @@ -29,11 +29,7 @@ def get_quicksight_reader_url(uri): with context.db_engine.scoped_session() as session: dash: Dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) env = Environment.get_environment_by_uri(session, dash.environmentUri) - if not env.dashboardsEnabled: - raise UnauthorizedOperation( - action=GET_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) + DashboardQuicksightService._check_dashboards_enabled(session, env, GET_DASHBOARD) if dash.SamlGroupName in context.groups: url = Quicksight.get_reader_session( @@ -83,11 +79,7 @@ def get_quicksight_designer_url(uri: str): context = get_context() with context.db_engine.scoped_session() as session: env = Environment.get_environment_by_uri(session, uri) - if not env.dashboardsEnabled: - raise UnauthorizedOperation( - action=CREATE_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) + DashboardQuicksightService._check_dashboards_enabled(session, env, CREATE_DASHBOARD) url = Quicksight.get_author_session( AwsAccountId=env.AwsAccountId, @@ -204,3 +196,12 @@ def _get_domain_url(): return f"https://{domain_name}" + @staticmethod + def _check_dashboards_enabled(session, env, action): + enabled = Environment.get_boolean_env_param(session, env, "dashboardsEnabled") + if not enabled: + raise UnauthorizedOperation( + action=action, + message=f'Dashboards feature is disabled for the environment {env.label}', + 
) + diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py index 83dc21e00..3638355f5 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_service.py @@ -26,8 +26,9 @@ def import_dashboard(uri: str, data: dict = None) -> Dashboard: context = get_context() with context.db_engine.scoped_session() as session: env = Environment.get_environment_by_uri(session, data['environmentUri']) + enabled = Environment.get_boolean_env_param(session, env, "dashboardsEnabled") - if not env.dashboardsEnabled: + if not enabled: raise UnauthorizedOperation( action=CREATE_DASHBOARD, message=f'Dashboards feature is disabled for the environment {env.label}', @@ -41,14 +42,14 @@ def import_dashboard(uri: str, data: dict = None) -> Dashboard: ) if not can_import: - raise db.exceptions.UnauthorizedOperation( + raise UnauthorizedOperation( action=CREATE_DASHBOARD, message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', ) Environment.check_group_environment_permission( session=session, - username=username, - groups=groups, + username=context.username, + groups=context.groups, uri=uri, group=data['SamlGroupName'], permission_name=CREATE_DASHBOARD, @@ -63,8 +64,8 @@ def import_dashboard(uri: str, data: dict = None) -> Dashboard: activity = Activity( action='DASHBOARD:CREATE', label='DASHBOARD:CREATE', - owner=username, - summary=f'{username} created dashboard {dashboard.label} in {env.label}', + owner=context.username, + summary=f'{context.username} created dashboard {dashboard.label} in {env.label}', targetUri=dashboard.dashboardUri, targetType='dashboard', ) @@ -101,7 +102,7 @@ def delete_dashboard(uri) -> bool: # TODO THERE WAS NO PERMISSION CHECK with get_context().db_engine.scoped_session() as session: dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) - 
DashboardRepository.delete_dashboard(session, dashboard.dashboardUri) + DashboardRepository.delete_dashboard(session, dashboard) ResourcePolicy.delete_resource_policy( session=session, resource_uri=uri, group=dashboard.SamlGroupName diff --git a/backend/dataall/modules/dashboards/services/dashboard_share_service.py b/backend/dataall/modules/dashboards/services/dashboard_share_service.py index 445a28285..7bfafc126 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_share_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_share_service.py @@ -29,7 +29,7 @@ def request_dashboard_share(uri: str, principal_id: str): share = DashboardRepository.find_share_for_group(session, dashboard.dashboardUri, principal_id) if not share: - DashboardRepository.create_share(session, context.username, dashboard, principal_id) + share = DashboardRepository.create_share(session, context.username, dashboard, principal_id) else: DashboardShareService._check_stare_status(share) diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 8321021e2..53c80c1ca 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -5,6 +5,7 @@ from botocore.exceptions import ClientError +from dataall.db.api import Environment from dataall.modules.dataset_sharing.aws.glue_client import GlueClient from dataall.modules.dataset_sharing.aws.lakeformation_client import LakeFormationClient from dataall.aws.handlers.quicksight import Quicksight @@ -61,7 +62,9 @@ def get_share_principals(self) -> [str]: List of principals """ principals = [f"arn:aws:iam::{self.target_environment.AwsAccountId}:role/{self.share.principalIAMRoleName}"] - if self.target_environment.dashboardsEnabled: + dashboard_enabled = 
Environment.get_boolean_env_param(self.session, self.target_environment, "dashboardsEnabled") + + if dashboard_enabled: group = Quicksight.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) if group and group.get('Group'): group_arn = group.get('Group').get('Arn') diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 34df41906..0c66a15c0 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -78,6 +78,10 @@ def get_target(self) -> Dataset: raise Exception('ObjectNotFound') return dataset + def has_quicksight_enabled(self, env) -> bool: + with self.get_engine().scoped_session() as session: + return Environment.get_boolean_env_param(session, env, "dashboardsEnabled") + def __init__(self, scope, id, target_uri: str = None, **kwargs): super().__init__( scope, @@ -97,7 +101,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): env_group = self.get_env_group(dataset) quicksight_default_group_arn = None - if env.dashboardsEnabled: + if self.has_quicksight_enabled(env): quicksight_default_group = Quicksight.create_quicksight_group(AwsAccountId=env.AwsAccountId) quicksight_default_group_arn = quicksight_default_group['Group']['Arn'] diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 891890100..6d164d09f 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -33,8 +33,9 @@ class DatasetService: @staticmethod - def check_dataset_account(environment): - if environment.dashboardsEnabled: + def check_dataset_account(session, environment): + dashboards_enabled = Environment.get_boolean_env_param(session, environment, "dashboardsEnabled") + if dashboards_enabled: quicksight_subscription = 
Quicksight.check_quicksight_enterprise_subscription( AwsAccountId=environment.AwsAccountId) if quicksight_subscription: @@ -50,7 +51,7 @@ def create_dataset(uri, admin_group, data: dict): context = get_context() with context.db_engine.scoped_session() as session: environment = Environment.get_environment_by_uri(session, uri) - DatasetService.check_dataset_account(environment=environment) + DatasetService.check_dataset_account(session=session, environment=environment) dataset = DatasetRepository.create_dataset( session=session, @@ -150,7 +151,7 @@ def update_dataset(uri: str, data: dict): with get_context().db_engine.scoped_session() as session: dataset = DatasetRepository.get_dataset_by_uri(session, uri) environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - DatasetService.check_dataset_account(environment=environment) + DatasetService.check_dataset_account(session=session, environment=environment) username = get_context().username dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) diff --git a/backend/dataall/modules/notebooks/services/notebook_service.py b/backend/dataall/modules/notebooks/services/notebook_service.py index 041f2d044..668cb9c38 100644 --- a/backend/dataall/modules/notebooks/services/notebook_service.py +++ b/backend/dataall/modules/notebooks/services/notebook_service.py @@ -73,9 +73,9 @@ def create_notebook(*, uri: str, admin_group: str, request: NotebookCreationRequ with _session() as session: env = Environment.get_environment_by_uri(session, uri) - enabled = EnvironmentParameterRepository(session).get_param(uri, "notebooksEnabled") + enabled = Environment.get_boolean_env_param(session, env, "notebooksEnabled") - if not enabled and enabled.lower() != "true": + if not enabled: raise exceptions.UnauthorizedOperation( action=CREATE_NOTEBOOK, message=f'Notebooks feature is disabled for the environment {env.label}', diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py 
b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 0e1aef85e..b17fe47da 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -33,6 +33,7 @@ class Environment(Resource, Base): __tablename__ = "environment" environmentUri = Column(String, primary_key=True) notebooksEnabled = Column(Boolean) + dashboardsEnabled = Column(Boolean) class EnvironmentParameter(Resource, Base): @@ -76,11 +77,15 @@ def upgrade(): _add_param_if_exists( params, env, "notebooksEnabled", str(env.notebooksEnabled).lower() # for frontend ) + _add_param_if_exists( + params, env, "dashboardsEnabled", str(env.dashboardsEnabled).lower() # for frontend + ) session.add_all(params) print("Migration of the environmental parameters has been complete") op.drop_column("environment", "notebooksEnabled") + op.drop_column("environment", "dashbaordsEnabled") print("Dropped the columns from the environment table ") create_foreign_key_to_env(op, 'sagemaker_notebook') @@ -129,6 +134,7 @@ def downgrade(): op.drop_constraint("fk_notebook_env_uri", "sagemaker_notebook") op.add_column("environment", Column("notebooksEnabled", Boolean, default=True)) + op.add_column("environment", Column("dashbaordsEnabled", Boolean, default=True)) params = session.query(EnvironmentParameter).all() envs = [] @@ -137,6 +143,10 @@ def downgrade(): environmentUri=param.environmentUri, notebooksEnabled=params["notebooksEnabled"] == "true" )) + envs.append(Environment( + environmentUri=param.environmentUri, + dashboardsEnabled=params["dashboardsEnabled"] == "true" + )) session.add_all(envs) op.drop_table("environment_parameters") diff --git a/frontend/src/api/Environment/createEnvironment.js b/frontend/src/api/Environment/createEnvironment.js index af4972d84..7917fabdf 100644 --- a/frontend/src/api/Environment/createEnvironment.js +++ b/frontend/src/api/Environment/createEnvironment.js @@ -13,7 +13,6 
@@ const createEnvironment = (input) => ({ SamlGroupName AwsAccountId created - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled diff --git a/frontend/src/api/Environment/getEnvironment.js b/frontend/src/api/Environment/getEnvironment.js index 70ccc54a5..5da41f4d5 100644 --- a/frontend/src/api/Environment/getEnvironment.js +++ b/frontend/src/api/Environment/getEnvironment.js @@ -14,7 +14,6 @@ const getEnvironment = ({ environmentUri }) => ({ name label AwsAccountId - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled @@ -49,7 +48,6 @@ const getEnvironment = ({ environmentUri }) => ({ outputs resources } - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled diff --git a/frontend/src/api/Environment/listOrganizationEnvironments.js b/frontend/src/api/Environment/listOrganizationEnvironments.js index c5244c34b..3cd46e35f 100644 --- a/frontend/src/api/Environment/listOrganizationEnvironments.js +++ b/frontend/src/api/Environment/listOrganizationEnvironments.js @@ -32,7 +32,6 @@ const listOrganizationEnvironments = ({ organizationUri, filter }) => ({ tags environmentType AwsAccountId - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled diff --git a/frontend/src/api/Environment/updateEnvironment.js b/frontend/src/api/Environment/updateEnvironment.js index ddd851637..41883f950 100644 --- a/frontend/src/api/Environment/updateEnvironment.js +++ b/frontend/src/api/Environment/updateEnvironment.js @@ -16,7 +16,6 @@ const updateEnvironment = ({ environmentUri, input }) => ({ userRoleInEnvironment SamlGroupName AwsAccountId - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled diff --git a/frontend/src/views/Environments/EnvironmentCreateForm.js b/frontend/src/views/Environments/EnvironmentCreateForm.js index e955b556a..28ba90644 100644 --- a/frontend/src/views/Environments/EnvironmentCreateForm.js +++ b/frontend/src/views/Environments/EnvironmentCreateForm.js @@ -153,7 +153,6 @@ const 
EnvironmentCreateForm = (props) => { tags: values.tags, description: values.description, region: values.region, - dashboardsEnabled: values.dashboardsEnabled, mlStudiosEnabled: values.mlStudiosEnabled, pipelinesEnabled: values.pipelinesEnabled, warehousesEnabled: values.warehousesEnabled, @@ -161,8 +160,11 @@ const EnvironmentCreateForm = (props) => { resourcePrefix: values.resourcePrefix, parameters: [ { - key: "notebooksEnabled", + key: 'notebooksEnabled', value: String(values.notebooksEnabled) + }, { + key: 'dashboardsEnabled', + value: String(values.dashboardsEnabled) } ] }) diff --git a/frontend/src/views/Environments/EnvironmentEditForm.js b/frontend/src/views/Environments/EnvironmentEditForm.js index 174a598a7..b1fd2d1c1 100644 --- a/frontend/src/views/Environments/EnvironmentEditForm.js +++ b/frontend/src/views/Environments/EnvironmentEditForm.js @@ -73,7 +73,6 @@ const EnvironmentEditForm = (props) => { label: values.label, tags: values.tags, description: values.description, - dashboardsEnabled: values.dashboardsEnabled, mlStudiosEnabled: values.mlStudiosEnabled, pipelinesEnabled: values.pipelinesEnabled, warehousesEnabled: values.warehousesEnabled, @@ -82,6 +81,10 @@ const EnvironmentEditForm = (props) => { { key: "notebooksEnabled", value: String(values.notebooksEnabled) + }, + { + key: "dashboardsEnabled", + value: String(values.dashboardsEnabled) } ] } @@ -198,8 +201,8 @@ const EnvironmentEditForm = (props) => { label: env.label, description: env.description, tags: env.tags || [], - dashboardsEnabled: env.dashboardsEnabled, - notebooksEnabled: env.parameters["notebooksEnabled"] === 'true', + dashboardsEnabled: env.parameters['dashboardsEnabled'] === 'true', + notebooksEnabled: env.parameters['notebooksEnabled'] === 'true', mlStudiosEnabled: env.mlStudiosEnabled, pipelinesEnabled: env.pipelinesEnabled, warehousesEnabled: env.warehousesEnabled, diff --git a/frontend/src/views/Environments/EnvironmentFeatures.js 
b/frontend/src/views/Environments/EnvironmentFeatures.js index ac753df71..1ede63ef7 100644 --- a/frontend/src/views/Environments/EnvironmentFeatures.js +++ b/frontend/src/views/Environments/EnvironmentFeatures.js @@ -32,10 +32,8 @@ const EnvironmentFeatures = (props) => { Dashboards - @@ -51,8 +49,8 @@ const EnvironmentFeatures = (props) => { Notebooks - diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 6d5e019d2..3c2507da0 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,4 +1,3 @@ -import dataall.searchproxy.indexers from .client import * from dataall.db import models @@ -24,7 +23,6 @@ def patch_check_env(module_mocker): def patch_es(module_mocker): module_mocker.patch('dataall.searchproxy.connect', return_value={}) module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.DashboardIndexer.upsert', return_value={}) module_mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer.delete_doc', return_value={}) @@ -170,8 +168,8 @@ def env(client): cache = {} def factory(org, envname, owner, group, account, region, desc='test', parameters=None): - if parameters == None: - parameters = {} + if not parameters: + parameters = {"dashboardsEnabled": "true"} key = f"{org.organizationUri}{envname}{owner}{''.join(group or '-')}{account}{region}" if cache.get(key): @@ -205,7 +203,6 @@ def factory(org, envname, owner, group, account, region, desc='test', parameters 'tags': ['a', 'b', 'c'], 'region': f'{region}', 'SamlGroupName': f'{group}', - 'dashboardsEnabled': True, 'vpcId': 'vpc-123456', 'parameters': [{'key': k, 'value': v} for k, v in parameters.items()] }, @@ -225,7 +222,6 @@ def factory( owner: str, samlGroupName: str, environmentDefaultIAMRoleName: str, - dashboardsEnabled: bool = False, ) -> models.Environment: with db.scoped_session() as session: env = models.Environment( @@ -240,7 +236,6 @@ def factory( EnvironmentDefaultIAMRoleName=environmentDefaultIAMRoleName, 
EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{awsAccountId}:role/{environmentDefaultIAMRoleName}", CDKRoleArn=f"arn:aws::{awsAccountId}:role/EnvRole", - dashboardsEnabled=dashboardsEnabled, ) session.add(env) session.commit() diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py index 01770b8d9..377b14a9b 100644 --- a/tests/api/test_environment.py +++ b/tests/api/test_environment.py @@ -29,7 +29,6 @@ def get_env(client, env1, group): region SamlGroupName owner - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled @@ -56,12 +55,15 @@ def test_get_environment(client, org1, env1, group): response.data.getEnvironment.organization.organizationUri == org1.organizationUri ) - assert response.data.getEnvironment.owner == 'alice' - assert response.data.getEnvironment.AwsAccountId == env1.AwsAccountId - assert response.data.getEnvironment.dashboardsEnabled - assert response.data.getEnvironment.mlStudiosEnabled - assert response.data.getEnvironment.pipelinesEnabled - assert response.data.getEnvironment.warehousesEnabled + body = response.data.getEnvironment + assert body.owner == 'alice' + assert body.AwsAccountId == env1.AwsAccountId + assert body.mlStudiosEnabled + assert body.pipelinesEnabled + assert body.warehousesEnabled + + params = {p.key: p.value for p in body.parameters} + assert params["dashboardsEnabled"] == "true" def test_get_environment_object_not_found(client, org1, env1, group): @@ -102,7 +104,6 @@ def test_update_env(client, org1, env1, group): owner tags resourcePrefix - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled @@ -120,7 +121,6 @@ def test_update_env(client, org1, env1, group): input={ 'label': 'DEV', 'tags': ['test', 'env'], - 'dashboardsEnabled': False, 'mlStudiosEnabled': False, 'pipelinesEnabled': False, 'warehousesEnabled': False, @@ -128,6 +128,10 @@ def test_update_env(client, org1, env1, group): { 'key': 'notebooksEnabled', 'value': 'True' + }, + { + 'key': 'dashboardsEnabled', + 
'value': 'False' } ], 'resourcePrefix': 'customer-prefix_AZ390 ', @@ -142,38 +146,40 @@ def test_update_env(client, org1, env1, group): input={ 'label': 'DEV', 'tags': ['test', 'env'], - 'dashboardsEnabled': False, 'mlStudiosEnabled': False, 'pipelinesEnabled': False, 'warehousesEnabled': False, 'parameters': [ { 'key': 'notebooksEnabled', - 'value': 'True' - } + 'value': 'true' + }, + { + 'key': 'dashboardsEnabled', + 'value': 'false' + }, ], 'resourcePrefix': 'customer-prefix', }, groups=[group.name], ) print(response) - assert ( - response.data.updateEnvironment.organization.organizationUri - == org1.organizationUri - ) - assert response.data.updateEnvironment.owner == 'alice' - assert response.data.updateEnvironment.AwsAccountId == env1.AwsAccountId - assert response.data.updateEnvironment.label == 'DEV' - assert str(response.data.updateEnvironment.tags) == str(['test', 'env']) - assert not response.data.updateEnvironment.dashboardsEnabled - assert not response.data.updateEnvironment.notebooksEnabled - assert not response.data.updateEnvironment.mlStudiosEnabled - assert not response.data.updateEnvironment.pipelinesEnabled - assert not response.data.updateEnvironment.warehousesEnabled - assert response.data.updateEnvironment.parameters - assert response.data.updateEnvironment.parameters[0]["key"] == "notebooksEnabled" - assert response.data.updateEnvironment.parameters[0]["value"] == "True" - assert response.data.updateEnvironment.resourcePrefix == 'customer-prefix' + + body = response.data.updateEnvironment + assert body.organization.organizationUri == org1.organizationUri + assert body.owner == 'alice' + assert body.AwsAccountId == env1.AwsAccountId + assert body.label == 'DEV' + assert str(body.tags) == str(['test', 'env']) + assert not body.mlStudiosEnabled + assert not body.pipelinesEnabled + assert not body.warehousesEnabled + + params = {p.key: p.value for p in body.parameters} + assert params["notebooksEnabled"] == "true" + assert 
params["dashboardsEnabled"] == "false" + + assert body.resourcePrefix == 'customer-prefix' def test_update_params(client, org1, env1, group): @@ -701,7 +707,6 @@ def test_create_environment(db, client, org1, env1, user, group): owner EnvironmentDefaultIAMRoleName EnvironmentDefaultIAMRoleImported - dashboardsEnabled resourcePrefix networks{ VpcId @@ -726,18 +731,17 @@ def test_create_environment(db, client, org1, env1, user, group): 'vpcId': 'vpc-1234567', 'privateSubnetIds': 'subnet-1', 'publicSubnetIds': 'subnet-21', - 'dashboardsEnabled': True, 'resourcePrefix': 'customer-prefix', }, ) - assert response.data.createEnvironment.dashboardsEnabled - assert response.data.createEnvironment.networks - assert ( - response.data.createEnvironment.EnvironmentDefaultIAMRoleName == 'myOwnIamRole' - ) - assert response.data.createEnvironment.EnvironmentDefaultIAMRoleImported - assert response.data.createEnvironment.resourcePrefix == 'customer-prefix' - for vpc in response.data.createEnvironment.networks: + + body = response.data.createEnvironment + + assert body.networks + assert body.EnvironmentDefaultIAMRoleName == 'myOwnIamRole' + assert body.EnvironmentDefaultIAMRoleImported + assert body.resourcePrefix == 'customer-prefix' + for vpc in body.networks: assert vpc.privateSubnetIds assert vpc.publicSubnetIds assert vpc.default diff --git a/tests/api/test_organization.py b/tests/api/test_organization.py index 9b74e52a2..4d9c01787 100644 --- a/tests/api/test_organization.py +++ b/tests/api/test_organization.py @@ -1,6 +1,8 @@ import dataall import pytest +from dataall.core.environment.models import EnvironmentParameter + @pytest.fixture(scope='module', autouse=True) def org1(org, user, group, tenant): @@ -281,6 +283,11 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, env): assert 'OrganizationResourcesFound' in response.errors[0].message with db.scoped_session() as session: env = session.query(dataall.db.models.Environment).get(env2.environmentUri) + 
( + session.query(EnvironmentParameter) + .filter(EnvironmentParameter.environmentUri == env2.environmentUri) + .delete() + ) session.delete(env) session.commit() diff --git a/tests/modules/datasets/tasks/conftest.py b/tests/modules/datasets/tasks/conftest.py index e3556fed6..8b4e241ac 100644 --- a/tests/modules/datasets/tasks/conftest.py +++ b/tests/modules/datasets/tasks/conftest.py @@ -50,7 +50,6 @@ def factory( owner: str, samlGroupName: str, environmentDefaultIAMRoleName: str, - dashboardsEnabled: bool = False, ) -> models.Environment: with db.scoped_session() as session: env = models.Environment( @@ -65,7 +64,6 @@ def factory( EnvironmentDefaultIAMRoleName=environmentDefaultIAMRoleName, EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{awsAccountId}:role/{environmentDefaultIAMRoleName}", CDKRoleArn=f"arn:aws::{awsAccountId}:role/EnvRole", - dashboardsEnabled=dashboardsEnabled, ) session.add(env) session.commit() diff --git a/tests/modules/datasets/test_dataset_resource_found.py b/tests/modules/datasets/test_dataset_resource_found.py index e8f056a61..984480c88 100644 --- a/tests/modules/datasets/test_dataset_resource_found.py +++ b/tests/modules/datasets/test_dataset_resource_found.py @@ -30,7 +30,6 @@ def get_env(client, env1, group): region SamlGroupName owner - dashboardsEnabled mlStudiosEnabled pipelinesEnabled warehousesEnabled diff --git a/tests/modules/datasets/test_share.py b/tests/modules/datasets/test_share.py index e1f6983c0..80d1fcf2b 100644 --- a/tests/modules/datasets/test_share.py +++ b/tests/modules/datasets/test_share.py @@ -40,7 +40,6 @@ def env1(environment: typing.Callable, org1: dataall.db.models.Organization, use owner=user.userName, samlGroupName=group.name, environmentDefaultIAMRoleName=f"source-{group.name}", - dashboardsEnabled=False, ) @@ -96,7 +95,6 @@ def env2( owner=user2.userName, samlGroupName=group2.name, environmentDefaultIAMRoleName=f"source-{group2.name}", - dashboardsEnabled=False, ) From 19ee2ff4d4bd78254d5ac73ea5c2f7bd9c1754d1 
Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 31 May 2023 12:55:33 +0200 Subject: [PATCH 252/346] Renamed Quicksight to QuicksightClient and extracted DashboardQuicksightClient --- .../dataall/api/Objects/Tenant/resolvers.py | 2 - backend/dataall/aws/handlers/quicksight.py | 377 +----------------- .../modules/dashboards/aws/__init__.py | 0 .../aws/dashboard_quicksight_client.py | 270 +++++++++++++ .../services/dashboard_quicksight_service.py | 106 ++--- .../dashboards/services/dashboard_service.py | 10 +- .../share_managers/lf_share_manager.py | 4 +- .../modules/datasets/cdk/dataset_stack.py | 4 +- .../datasets/services/dataset_service.py | 6 +- 9 files changed, 331 insertions(+), 448 deletions(-) create mode 100644 backend/dataall/modules/dashboards/aws/__init__.py create mode 100644 backend/dataall/modules/dashboards/aws/dashboard_quicksight_client.py diff --git a/backend/dataall/api/Objects/Tenant/resolvers.py b/backend/dataall/api/Objects/Tenant/resolvers.py index 5cdba0dee..7a46239b1 100644 --- a/backend/dataall/api/Objects/Tenant/resolvers.py +++ b/backend/dataall/api/Objects/Tenant/resolvers.py @@ -3,8 +3,6 @@ from .... 
import db from ....aws.handlers.sts import SessionHelper from ....aws.handlers.parameter_store import ParameterStoreManager -from ....aws.handlers.quicksight import Quicksight -from ....db import exceptions def update_group_permissions(context, source, input=None): diff --git a/backend/dataall/aws/handlers/quicksight.py b/backend/dataall/aws/handlers/quicksight.py index 886482f3f..703e3a09e 100644 --- a/backend/dataall/aws/handlers/quicksight.py +++ b/backend/dataall/aws/handlers/quicksight.py @@ -1,20 +1,15 @@ import logging import re -import os -import ast -from botocore.exceptions import ClientError from .sts import SessionHelper -from .secrets_manager import SecretsManager -from .parameter_store import ParameterStoreManager logger = logging.getLogger('QuicksightHandler') logger.setLevel(logging.DEBUG) -class Quicksight: +class QuicksightClient: - _DEFAULT_GROUP_NAME = 'dataall' + DEFAULT_GROUP_NAME = 'dataall' def __init__(self): pass @@ -43,10 +38,10 @@ def get_identity_region(AwsAccountId): """ identity_region_rex = re.compile('Please use the (?P.*) endpoint.') identity_region = 'us-east-1' - client = Quicksight.get_quicksight_client(AwsAccountId=AwsAccountId, region=identity_region) + client = QuicksightClient.get_quicksight_client(AwsAccountId=AwsAccountId, region=identity_region) try: response = client.describe_group( - AwsAccountId=AwsAccountId, GroupName=Quicksight._DEFAULT_GROUP_NAME, Namespace='default' + AwsAccountId=AwsAccountId, GroupName=QuicksightClient.DEFAULT_GROUP_NAME, Namespace='default' ) except client.exceptions.AccessDeniedException as e: match = identity_region_rex.findall(str(e)) @@ -66,7 +61,7 @@ def get_quicksight_client_in_identity_region(AwsAccountId): Returns : boto3.client ("quicksight") """ - identity_region = Quicksight.get_identity_region(AwsAccountId) + identity_region = QuicksightClient.get_identity_region(AwsAccountId) session = SessionHelper.remote_session(accountid=AwsAccountId) return session.client('quicksight',
region_name=identity_region) @@ -80,7 +75,7 @@ def check_quicksight_enterprise_subscription(AwsAccountId, region=None): True if Quicksight Enterprise Edition is enabled in the AWS Account """ logger.info(f'Checking Quicksight subscription in AWS account = {AwsAccountId}') - client = Quicksight.get_quicksight_client(AwsAccountId=AwsAccountId, region=region) + client = QuicksightClient.get_quicksight_client(AwsAccountId=AwsAccountId, region=region) try: response = client.describe_account_subscription(AwsAccountId=AwsAccountId) if not response['AccountInfo']: @@ -104,7 +99,7 @@ def check_quicksight_enterprise_subscription(AwsAccountId, region=None): return False @staticmethod - def create_quicksight_group(AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): + def create_quicksight_group(AwsAccountId, GroupName=DEFAULT_GROUP_NAME): """Creates a Quicksight group called GroupName Args: AwsAccountId(str): aws account @@ -113,12 +108,12 @@ def create_quicksight_group(AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): Returns:dict quicksight.describe_group response """ - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - group = Quicksight.describe_group(client, AwsAccountId, GroupName) + client = QuicksightClient.get_quicksight_client_in_identity_region(AwsAccountId) + group = QuicksightClient.describe_group(client, AwsAccountId, GroupName) if not group: - if GroupName == Quicksight._DEFAULT_GROUP_NAME: + if GroupName == QuicksightClient.DEFAULT_GROUP_NAME: logger.info(f'Initializing data.all default group = {GroupName}') - Quicksight.check_quicksight_enterprise_subscription(AwsAccountId) + QuicksightClient.check_quicksight_enterprise_subscription(AwsAccountId) logger.info(f'Attempting to create Quicksight group `{GroupName}...') response = client.create_group( @@ -135,362 +130,18 @@ def create_quicksight_group(AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): return group @staticmethod - def describe_group(client, AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): + 
def describe_group(client, AwsAccountId, GroupName=DEFAULT_GROUP_NAME): try: response = client.describe_group( AwsAccountId=AwsAccountId, GroupName=GroupName, Namespace='default' ) logger.info( f'Quicksight {GroupName} group already exists in {AwsAccountId} ' - f'(using identity region {Quicksight.get_identity_region(AwsAccountId)}): ' + f'(using identity region {QuicksightClient.get_identity_region(AwsAccountId)}): ' f'{response}' ) return response except client.exceptions.ResourceNotFoundException: logger.info( - f'Creating Quicksight group in {AwsAccountId} (using identity region {Quicksight.get_identity_region(AwsAccountId)})' + f'Creating Quicksight group in {AwsAccountId} (using identity region {QuicksightClient.get_identity_region(AwsAccountId)})' ) - - @staticmethod - def describe_user(AwsAccountId, UserName): - """Describes a QS user, returns None if not found - Args: - AwsAccountId(str) : aws account - UserName(str) : name of the QS user - """ - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - try: - response = client.describe_user( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - exists = True - except ClientError: - return None - return response.get('User') - - @staticmethod - def get_quicksight_group_arn(AwsAccountId): - default_group_arn = None - group = Quicksight.describe_group( - client=Quicksight.get_quicksight_client_in_identity_region( - AwsAccountId=AwsAccountId - ), - AwsAccountId=AwsAccountId, - ) - if group and group.get('Group', {}).get('Arn'): - default_group_arn = group.get('Group', {}).get('Arn') - - return default_group_arn - - @staticmethod - def list_user_groups(AwsAccountId, UserName): - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return [] - response = client.list_user_groups( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - return 
response['GroupList'] - - @staticmethod - def register_user_in_group(AwsAccountId, UserName, GroupName, UserRole='READER'): - client = Quicksight.get_quicksight_client_in_identity_region( - AwsAccountId=AwsAccountId - ) - - Quicksight.create_quicksight_group(AwsAccountId, GroupName) - - exists = False - user = Quicksight.describe_user(AwsAccountId, UserName=UserName) - - if user is not None: - exists = True - - if exists: - response = client.update_user( - UserName=UserName, - AwsAccountId=AwsAccountId, - Namespace='default', - Email=UserName, - Role=UserRole, - ) - else: - response = client.register_user( - UserName=UserName, - Email=UserName, - AwsAccountId=AwsAccountId, - Namespace='default', - IdentityType='QUICKSIGHT', - UserRole=UserRole, - ) - member = False - - response = client.list_user_groups( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - logger.info( - f'list_user_groups for {UserName}: {response})' - ) - if GroupName not in [g['GroupName'] for g in response['GroupList']]: - logger.warning(f'Adding {UserName} to Quicksight group {GroupName} on {AwsAccountId}') - response = client.create_group_membership( - MemberName=UserName, - GroupName=GroupName, - AwsAccountId=AwsAccountId, - Namespace='default', - ) - return Quicksight.describe_user(AwsAccountId, UserName) - - @staticmethod - def get_reader_session(AwsAccountId, region, UserName, UserRole="READER", DashboardId=None, domain_name: str = None): - - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if user is None: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - - response = client.generate_embed_url_for_registered_user( - AwsAccountId=AwsAccountId, - SessionLifetimeInMinutes=120, - UserArn=user.get("Arn"), - ExperienceConfiguration={ - "Dashboard": { - "InitialDashboardId": DashboardId, - }, - 
}, - AllowedDomains=[domain_name], - ) - return response.get('EmbedUrl') - - @staticmethod - def check_dashboard_permissions(AwsAccountId, region, DashboardId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - response = client.describe_dashboard_permissions( - AwsAccountId=AwsAccountId, - DashboardId=DashboardId - )['Permissions'] - logger.info(f"Dashboard initial permissions: {response}") - read_principals = [] - write_principals = [] - - for a, p in zip([p["Actions"] for p in response], [p["Principal"] for p in response]): - write_principals.append(p) if "Update" in str(a) else read_principals.append(p) - - logger.info(f"Dashboard updated permissions, Read principals: {read_principals}") - logger.info(f"Dashboard updated permissions, Write principals: {write_principals}") - - return read_principals, write_principals - - @staticmethod - def get_shared_reader_session( - AwsAccountId, region, UserName, GroupName, UserRole='READER', DashboardId=None - ): - - client = Quicksight.get_quicksight_client(AwsAccountId, region) - identity_region = Quicksight.get_identity_region(AwsAccountId) - groupPrincipal = f"arn:aws:quicksight:{identity_region}:{AwsAccountId}:group/default/{GroupName}" - - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=GroupName, UserRole=UserRole - ) - - read_principals, write_principals = Quicksight.check_dashboard_permissions( - AwsAccountId=AwsAccountId, - region=region, - DashboardId=DashboardId - ) - - if groupPrincipal not in read_principals: - permissions = client.update_dashboard_permissions( - AwsAccountId=AwsAccountId, - DashboardId=DashboardId, - GrantPermissions=[ - { - 'Principal': groupPrincipal, - 'Actions': [ - "quicksight:DescribeDashboard", - "quicksight:ListDashboardVersions", - "quicksight:QueryDashboard", - ] - }, - ] - ) - logger.info(f"Permissions granted: {permissions}") - - response = client.get_dashboard_embed_url( - AwsAccountId=AwsAccountId, - 
DashboardId=DashboardId, - IdentityType='QUICKSIGHT', - SessionLifetimeInMinutes=120, - UserArn=user.get('Arn'), - ) - return response.get('EmbedUrl') - - @staticmethod - def get_anonymous_session(AwsAccountId, region, UserName, DashboardId=None): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - response = client.generate_embed_url_for_anonymous_user( - AwsAccountId=AwsAccountId, - SessionLifetimeInMinutes=120, - Namespace='default', - SessionTags=[ - {'Key': Quicksight._DEFAULT_GROUP_NAME, 'Value': UserName}, - ], - AuthorizedResourceArns=[ - f'arn:aws:quicksight:{region}:{AwsAccountId}:dashboard/{DashboardId}', - ], - ExperienceConfiguration={'Dashboard': {'InitialDashboardId': DashboardId}}, - ) - return response.get('EmbedUrl') - - @staticmethod - def get_author_session(AwsAccountId, region, UserName, UserRole='AUTHOR'): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName=UserName) - if user is None: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - elif user.get("Role", None) not in ["AUTHOR", "ADMIN"]: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - else: - pass - response = client.get_session_embed_url( - AwsAccountId=AwsAccountId, - EntryPoint='/start/dashboards', - SessionLifetimeInMinutes=120, - UserArn=user['Arn'], - ) - return response['EmbedUrl'] - - @staticmethod - def can_import_dashboard(AwsAccountId, region, UserName, DashboardId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - groups = Quicksight.list_user_groups(AwsAccountId, UserName) - grouparns = [g['Arn'] for g in groups] - try: - response = client.describe_dashboard_permissions( - 
AwsAccountId=AwsAccountId, DashboardId=DashboardId - ) - except ClientError as e: - raise e - - permissions = response.get('Permissions', []) - for p in permissions: - if p['Principal'] == user.get('Arn') or p['Principal'] in grouparns: - for a in p['Actions']: - if a in [ - 'quicksight:UpdateDashboard', - 'UpdateDashboardPermissions', - ]: - return True - - return False - - @staticmethod - def create_data_source_vpc(AwsAccountId, region, UserName, vpcConnectionId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - identity_region = 'us-east-1' - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole='AUTHOR' - ) - try: - response = client.describe_data_source( - AwsAccountId=AwsAccountId, DataSourceId="dataall-metadata-db" - ) - - except client.exceptions.ResourceNotFoundException: - aurora_secret_arn = ParameterStoreManager.get_parameter_value(AwsAccountId=AwsAccountId, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/aurora/secret_arn') - aurora_params = SecretsManager.get_secret_value( - AwsAccountId=AwsAccountId, region=region, secretId=aurora_secret_arn - ) - aurora_params_dict = ast.literal_eval(aurora_params) - response = client.create_data_source( - AwsAccountId=AwsAccountId, - DataSourceId="dataall-metadata-db", - Name="dataall-metadata-db", - Type="AURORA_POSTGRESQL", - DataSourceParameters={ - 'AuroraPostgreSqlParameters': { - 'Host': aurora_params_dict["host"], - 'Port': aurora_params_dict["port"], - 'Database': aurora_params_dict["dbname"] - } - }, - Credentials={ - "CredentialPair": { - "Username": aurora_params_dict["username"], - "Password": aurora_params_dict["password"], - } - }, - Permissions=[ - { - "Principal": f"arn:aws:quicksight:{region}:{AwsAccountId}:group/default/dataall", - "Actions": [ - "quicksight:UpdateDataSourcePermissions", - "quicksight:DescribeDataSource", - "quicksight:DescribeDataSourcePermissions", - 
"quicksight:PassDataSource", - "quicksight:UpdateDataSource", - "quicksight:DeleteDataSource" - ] - } - ], - VpcConnectionProperties={ - 'VpcConnectionArn': f"arn:aws:quicksight:{region}:{AwsAccountId}:vpcConnection/{vpcConnectionId}" - } - ) - - return "dataall-metadata-db" - - @staticmethod - def create_analysis(AwsAccountId, region, UserName): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - response = client.create_analysis( - AwsAccountId=AwsAccountId, - AnalysisId='dataallMonitoringAnalysis', - Name='dataallMonitoringAnalysis', - Permissions=[ - { - 'Principal': user.get('Arn'), - 'Actions': [ - 'quicksight:DescribeAnalysis', - 'quicksight:DescribeAnalysisPermissions', - 'quicksight:UpdateAnalysisPermissions', - 'quicksight:UpdateAnalysis' - ] - }, - ], - SourceEntity={ - 'SourceTemplate': { - 'DataSetReferences': [ - { - 'DataSetPlaceholder': 'environment', - 'DataSetArn': f"arn:aws:quicksight:{region}:{AwsAccountId}:dataset/" - }, - ], - 'Arn': ' Dashboard: message=f'Dashboards feature is disabled for the environment {env.label}', ) - can_import = Quicksight.can_import_dashboard( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=data.get('dashboardId'), - ) + aws_client = DashboardQuicksightClient(context.username, env.AwsAccountId, env.region) + can_import = aws_client.can_import_dashboard(data.get('dashboardId')) if not can_import: raise UnauthorizedOperation( diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 53c80c1ca..22fb9dac5 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -8,7 +8,7 @@ from dataall.db.api import 
Environment from dataall.modules.dataset_sharing.aws.glue_client import GlueClient from dataall.modules.dataset_sharing.aws.lakeformation_client import LakeFormationClient -from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.quicksight import QuicksightClient from dataall.aws.handlers.sts import SessionHelper from dataall.aws.handlers.ram import Ram from dataall.db import exceptions, models @@ -65,7 +65,7 @@ def get_share_principals(self) -> [str]: dashboard_enabled = Environment.get_boolean_env_param(self.session, self.target_environment, "dashboardsEnabled") if dashboard_enabled: - group = Quicksight.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) + group = QuicksightClient.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) if group and group.get('Group'): group_arn = group.get('Group').get('Arn') if group_arn: diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py index 0c66a15c0..9637f69fb 100644 --- a/backend/dataall/modules/datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -19,7 +19,7 @@ from dataall.cdkproxy.stacks.manager import stack from dataall import db -from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.quicksight import QuicksightClient from dataall.aws.handlers.sts import SessionHelper from dataall.db import models from dataall.db.api import Environment @@ -102,7 +102,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): quicksight_default_group_arn = None if self.has_quicksight_enabled(env): - quicksight_default_group = Quicksight.create_quicksight_group(AwsAccountId=env.AwsAccountId) + quicksight_default_group = QuicksightClient.create_quicksight_group(AwsAccountId=env.AwsAccountId) quicksight_default_group_arn = quicksight_default_group['Group']['Arn'] # Dataset S3 Bucket and KMS key diff --git 
a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 6d164d09f..05049806a 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -2,7 +2,7 @@ import logging from dataall.api.Objects.Stack import stack_helper -from dataall.aws.handlers.quicksight import Quicksight +from dataall.aws.handlers.quicksight import QuicksightClient from dataall.aws.handlers.service_handlers import Worker from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context @@ -36,10 +36,10 @@ class DatasetService: def check_dataset_account(session, environment): dashboards_enabled = Environment.get_boolean_env_param(session, environment, "dashboardsEnabled") if dashboards_enabled: - quicksight_subscription = Quicksight.check_quicksight_enterprise_subscription( + quicksight_subscription = QuicksightClient.check_quicksight_enterprise_subscription( AwsAccountId=environment.AwsAccountId) if quicksight_subscription: - group = Quicksight.create_quicksight_group(AwsAccountId=environment.AwsAccountId) + group = QuicksightClient.create_quicksight_group(AwsAccountId=environment.AwsAccountId) return True if group else False return True From e9ee05624e1a0e5fb0a7d425a7369d56abb83e7d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 31 May 2023 14:12:14 +0200 Subject: [PATCH 253/346] Moved dashboard tests into the module --- tests/api/test_vote.py | 87 ------------------- tests/modules/dashboards/__init__.py | 0 tests/modules/dashboards/conftest.py | 63 ++++++++++++++ .../dashboards/test_dashboard_votes.py | 40 +++++++++ .../dashboards}/test_dashboards.py | 62 ------------- .../datasets/test_dataset_count_votes.py | 18 +++- 6 files changed, 120 insertions(+), 150 deletions(-) create mode 100644 tests/modules/dashboards/__init__.py create mode 100644 tests/modules/dashboards/conftest.py create mode 
100644 tests/modules/dashboards/test_dashboard_votes.py rename tests/{api => modules/dashboards}/test_dashboards.py (82%) diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index 513f0b903..c67ee6935 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -1,58 +1,3 @@ -import pytest - -from dataall.db import models - - -@pytest.fixture(scope='module') -def org1(db, org, tenant, user, group) -> models.Organization: - org = org('testorg', user.userName, group.name) - yield org - - -@pytest.fixture(scope='module') -def env1( - db, org1: models.Organization, user, group, env -) -> models.Environment: - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dashboard(client, env1, org1, group, module_mocker, patch_es): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) - response = client.query( - """ - mutation importDashboard( - $input:ImportDashboardInput, - ){ - importDashboard(input:$input){ - dashboardUri - name - label - DashboardId - created - owner - SamlGroupName - } - } - """, - input={ - 'dashboardId': f'1234', - 'label': f'1234', - 'environmentUri': env1.environmentUri, - 'SamlGroupName': group.name, - 'terms': ['term'], - }, - username='alice', - groups=[group.name], - ) - assert response.data.importDashboard.owner == 'alice' - assert response.data.importDashboard.SamlGroupName == group.name - yield response.data.importDashboard - def test_count_votes(client, dashboard, env1): response = count_votes_query( @@ -93,38 +38,6 @@ def get_vote_query(client, target_uri, target_type, group): return response -def test_upvote(patch_es, client, env1, dashboard): - - response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', True, env1.SamlGroupName - ) - assert response.data.upVote.upvote - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName 
- ) - assert response.data.countUpVotes == 1 - response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName - ) - assert response.data.getVote.upvote - - response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', False, env1.SamlGroupName - ) - - assert not response.data.upVote.upvote - - response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName - ) - assert not response.data.getVote.upvote - - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName - ) - assert response.data.countUpVotes == 0 - - def upvote_mutation(client, target_uri, target_type, upvote, group): response = client.query( """ diff --git a/tests/modules/dashboards/__init__.py b/tests/modules/dashboards/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/dashboards/conftest.py b/tests/modules/dashboards/conftest.py new file mode 100644 index 000000000..6d9b0de97 --- /dev/null +++ b/tests/modules/dashboards/conftest.py @@ -0,0 +1,63 @@ +from unittest.mock import MagicMock + +import pytest + +from tests.api.conftest import * + +@pytest.fixture(scope='module', autouse=True) +def org1(org, user, group, tenant): + org1 = org('testorg', user.userName, group.name) + yield org1 + + +@pytest.fixture(scope='module', autouse=True) +def env1(env, org1, user, group, tenant, module_mocker): + module_mocker.patch('requests.post', return_value=True) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True + ) + module_mocker.patch( + 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False + ) + env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') + yield env1 + + +@pytest.fixture(scope='module') +def dashboard(client, env1, org1, group, module_mocker, patch_es): + mock_client = MagicMock() + module_mocker.patch( + 
'dataall.modules.dashboards.services.dashboard_service.DashboardQuicksightClient', + mock_client + ) + response = client.query( + """ + mutation importDashboard( + $input:ImportDashboardInput, + ){ + importDashboard(input:$input){ + dashboardUri + name + label + DashboardId + created + owner + SamlGroupName + upvotes + userRoleForDashboard + } + } + """, + input={ + 'dashboardId': f'1234', + 'label': f'1234', + 'environmentUri': env1.environmentUri, + 'SamlGroupName': group.name, + 'terms': ['term'], + }, + username='alice', + groups=[group.name], + ) + assert response.data.importDashboard.owner == 'alice' + assert response.data.importDashboard.SamlGroupName == group.name + yield response.data.importDashboard \ No newline at end of file diff --git a/tests/modules/dashboards/test_dashboard_votes.py b/tests/modules/dashboards/test_dashboard_votes.py new file mode 100644 index 000000000..e8842fce9 --- /dev/null +++ b/tests/modules/dashboards/test_dashboard_votes.py @@ -0,0 +1,40 @@ +from tests.api.test_vote import upvote_mutation, count_votes_query, get_vote_query + + +def test_dashboard_count_votes(client, dashboard, env1): + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName + ) + assert response.data.countUpVotes == 0 + + +def test_dashboard_upvote(patch_es, client, env1, dashboard): + + response = upvote_mutation( + client, dashboard.dashboardUri, 'dashboard', True, env1.SamlGroupName + ) + assert response.data.upVote.upvote + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName + ) + assert response.data.countUpVotes == 1 + response = get_vote_query( + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName + ) + assert response.data.getVote.upvote + + response = upvote_mutation( + client, dashboard.dashboardUri, 'dashboard', False, env1.SamlGroupName + ) + + assert not response.data.upVote.upvote + + response = get_vote_query( + client, dashboard.dashboardUri, 
'dashboard', env1.SamlGroupName + ) + assert not response.data.getVote.upvote + + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName + ) + assert response.data.countUpVotes == 0 \ No newline at end of file diff --git a/tests/api/test_dashboards.py b/tests/modules/dashboards/test_dashboards.py similarity index 82% rename from tests/api/test_dashboards.py rename to tests/modules/dashboards/test_dashboards.py index cd0da17bd..aa4629fcb 100644 --- a/tests/api/test_dashboards.py +++ b/tests/modules/dashboards/test_dashboards.py @@ -4,71 +4,9 @@ import dataall -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False - ) - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dashboard(client, env1, org1, group, module_mocker, patch_es): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) - response = client.query( - """ - mutation importDashboard( - $input:ImportDashboardInput, - ){ - importDashboard(input:$input){ - dashboardUri - name - label - DashboardId - created - owner - SamlGroupName - upvotes - userRoleForDashboard - } - } - """, - input={ - 'dashboardId': f'1234', - 'label': f'1234', - 'environmentUri': env1.environmentUri, - 'SamlGroupName': group.name, - 'terms': ['term'], - }, - username='alice', - groups=[group.name], - ) - assert response.data.importDashboard.owner == 'alice' - 
assert response.data.importDashboard.SamlGroupName == group.name - yield response.data.importDashboard - - def test_update_dashboard( client, env1, org1, group, module_mocker, patch_es, dashboard ): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) response = client.query( """ mutation updateDashboard( diff --git a/tests/modules/datasets/test_dataset_count_votes.py b/tests/modules/datasets/test_dataset_count_votes.py index 2212d8ad9..d35be46ed 100644 --- a/tests/modules/datasets/test_dataset_count_votes.py +++ b/tests/modules/datasets/test_dataset_count_votes.py @@ -3,6 +3,22 @@ from dataall.modules.datasets import Dataset from tests.api.test_vote import * +from dataall.db import models + + +@pytest.fixture(scope='module') +def org1(db, org, tenant, user, group) -> models.Organization: + org = org('testorg', user.userName, group.name) + yield org + + +@pytest.fixture(scope='module') +def env1( + db, org1: models.Organization, user, group, env +) -> models.Environment: + env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') + yield env1 + @pytest.fixture(scope='module', autouse=True) def dataset1(db, env1, org1, group, user, dataset) -> Dataset: @@ -11,7 +27,7 @@ def dataset1(db, env1, org1, group, user, dataset) -> Dataset: ) -def test_count_votes(client, dataset1, dashboard): +def test_count_votes(client, dataset1): response = count_votes_query( client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName ) From 238234ca20996ea763911e625891d707b60731f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 31 May 2023 14:13:02 +0200 Subject: [PATCH 254/346] Moved dashboard tests into the module --- tests/api/test_vote.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py index c67ee6935..69183f150 100644 --- a/tests/api/test_vote.py +++ b/tests/api/test_vote.py @@ -1,11 +1,4 @@ -def test_count_votes(client, 
dashboard, env1): - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', env1.SamlGroupName - ) - assert response.data.countUpVotes == 0 - - def count_votes_query(client, target_uri, target_type, group): response = client.query( """ From 071094dbe743471a281f0504face27d07dd03c90 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 31 May 2023 17:25:01 +0200 Subject: [PATCH 255/346] Fixed issue staticmethod is not callable for python3.8 --- backend/dataall/core/permission_checker.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/backend/dataall/core/permission_checker.py b/backend/dataall/core/permission_checker.py index 4101a9b67..b5e1ab593 100644 --- a/backend/dataall/core/permission_checker.py +++ b/backend/dataall/core/permission_checker.py @@ -84,7 +84,11 @@ def decorated(*args, **kwargs): with get_context().db_engine.scoped_session() as session: if parent_resource: - uri = parent_resource(session, uri) + try: + uri = parent_resource(session, uri) + except TypeError: + uri = parent_resource.__func__(session, uri) + _check_resource_permission(session, uri, permission) return fn(*args, **kwargs) From 96f770d158282fe0db9d33fb951fbfc6edac08f4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 09:43:21 +0200 Subject: [PATCH 256/346] Returned missed return statement --- backend/dataall/modules/datasets/api/table_column/resolvers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index b4e6ca0df..2e8235a52 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -14,7 +14,7 @@ def list_table_columns( tableUri = source.tableUri if not filter: filter = {} - DatasetColumnService.paginate_active_columns_for_table(tableUri, filter) + return 
DatasetColumnService.paginate_active_columns_for_table(tableUri, filter) def sync_table_columns(context: Context, source, tableUri: str = None): From 33d3d7f55a6c98c8940258d8d5ffd378330961ac Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 09:51:26 +0200 Subject: [PATCH 257/346] Column name should be used instead of instance uri --- .../dataall/api/Objects/Glossary/registry.py | 3 ++- backend/dataall/db/models/Dashboard.py | 5 +++-- .../modules/datasets_base/db/models.py | 20 +++++++++++-------- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/backend/dataall/api/Objects/Glossary/registry.py b/backend/dataall/api/Objects/Glossary/registry.py index 27c534368..3c27f3796 100644 --- a/backend/dataall/api/Objects/Glossary/registry.py +++ b/backend/dataall/api/Objects/Glossary/registry.py @@ -9,7 +9,8 @@ class Identifiable(Protocol): - def uri(self): + @classmethod + def uri(cls): ... diff --git a/backend/dataall/db/models/Dashboard.py b/backend/dataall/db/models/Dashboard.py index 0b12ecd96..61c4d1400 100644 --- a/backend/dataall/db/models/Dashboard.py +++ b/backend/dataall/db/models/Dashboard.py @@ -19,5 +19,6 @@ class Dashboard(Resource, Base): userRoleForDashboard = query_expression() - def uri(self): - return self.dashboardUri + @classmethod + def uri(cls): + return cls.dashboardUri diff --git a/backend/dataall/modules/datasets_base/db/models.py b/backend/dataall/modules/datasets_base/db/models.py index 45f7fe858..4f41f4919 100644 --- a/backend/dataall/modules/datasets_base/db/models.py +++ b/backend/dataall/modules/datasets_base/db/models.py @@ -19,8 +19,9 @@ class DatasetTableColumn(Resource, Base): String, default='column' ) # can be either "column" or "partition" - def uri(self): - return self.columnUri + @classmethod + def uri(cls): + return cls.columnUri class DatasetProfilingRun(Resource, Base): @@ -53,8 +54,9 @@ class DatasetStorageLocation(Resource, Base): projectPermission = query_expression() environmentEndPoint = 
query_expression() - def uri(self): - return self.locationUri + @classmethod + def uri(cls): + return cls.locationUri class DatasetTable(Resource, Base): @@ -79,8 +81,9 @@ class DatasetTable(Resource, Base): topics = Column(ARRAY(String), nullable=True) confidentiality = Column(String, nullable=False, default='C1') - def uri(self): - return self.tableUri + @classmethod + def uri(cls): + return cls.tableUri class Dataset(Resource, Base): @@ -138,5 +141,6 @@ class Dataset(Resource, Base): importedAdminRole = Column(Boolean, default=False) imported = Column(Boolean, default=False) - def uri(self): - return self.datasetUri + @classmethod + def uri(cls): + return cls.datasetUri From 8af93a3a4e1e2eec5c69e054cdb76e53d199df76 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 10:00:50 +0200 Subject: [PATCH 258/346] Fixed description of the permissions --- .../modules/dataset_sharing/services/share_permissions.py | 2 +- .../modules/worksheets/services/worksheet_permissions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_permissions.py b/backend/dataall/modules/dataset_sharing/services/share_permissions.py index 5ae4071da..fd7620ab3 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_permissions.py +++ b/backend/dataall/modules/dataset_sharing/services/share_permissions.py @@ -56,4 +56,4 @@ RESOURCES_ALL_WITH_DESC[perm] = perm RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Request datasets access for this environment' -RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SHARED_WITH_OBJECTS] = LIST_ENVIRONMENT_SHARED_WITH_OBJECTS +RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SHARED_WITH_OBJECTS] = "List datasets shared with this environments" diff --git a/backend/dataall/modules/worksheets/services/worksheet_permissions.py b/backend/dataall/modules/worksheets/services/worksheet_permissions.py index cad183f42..581ddfc36 100644 --- 
a/backend/dataall/modules/worksheets/services/worksheet_permissions.py +++ b/backend/dataall/modules/worksheets/services/worksheet_permissions.py @@ -37,4 +37,4 @@ ENVIRONMENT_ALL.append(RUN_ATHENA_QUERY) RESOURCES_ALL.append(RUN_ATHENA_QUERY) -RESOURCES_ALL_WITH_DESC[RUN_ATHENA_QUERY] = RUN_ATHENA_QUERY +RESOURCES_ALL_WITH_DESC[RUN_ATHENA_QUERY] = "Run Athena queries on this environment" From c33b248429d8445ddd68b149e63f24380a4247c1 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 11:02:48 +0200 Subject: [PATCH 259/346] Added missed depends on statements --- backend/dataall/modules/dataset_sharing/__init__.py | 4 ++++ backend/dataall/modules/datasets/__init__.py | 12 ++++++++++++ 2 files changed, 16 insertions(+) diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 450d66047..491a9bf66 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -33,6 +33,10 @@ class SharingAsyncHandlersModuleInterface(ModuleInterface): def is_supported(modes: List[ImportMode]): return ImportMode.HANDLERS in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + def __init__(self): import dataall.modules.dataset_sharing.handlers log.info("Sharing handlers have been imported") diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 61161caf1..d7a7e6cae 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -97,6 +97,10 @@ class DatasetCdkModuleInterface(ModuleInterface): def is_supported(modes: Set[ImportMode]): return ImportMode.CDK in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + def __init__(self): import dataall.modules.datasets.cdk from dataall.cdkproxy.stacks.environment import 
EnvironmentSetup @@ -117,6 +121,10 @@ class DatasetStackUpdaterModuleInterface(ModuleInterface): def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.STACK_UPDATER_TASK in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + def __init__(self): from dataall.tasks.stacks_updater import StackFinder from dataall.tasks.stacks_updater import register_stack_finder @@ -136,6 +144,10 @@ class DatasetCatalogIndexerModuleInterface(ModuleInterface): def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.CATALOG_INDEXER_TASK in modes + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + def __init__(self): from dataall.tasks.catalog_indexer import register_catalog_indexer from dataall.modules.datasets.indexers.catalog_indexer import DatasetCatalogIndexer From f23eeca80eaf13ff9ba61ce8307d8a8bd2e96036 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 11:08:56 +0200 Subject: [PATCH 260/346] Added missed return statement --- .../modules/dataset_sharing/services/share_object_service.py | 2 -- .../dataall/modules/datasets/services/dataset_column_service.py | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index b75b87f02..d90eed5d1 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -1,5 +1,3 @@ -from sqlalchemy import and_ - from dataall.core.context import get_context from dataall.core.permission_checker import has_resource_permission from dataall.db import utils diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index f50c5bdef..9e28878a1 
100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -14,7 +14,7 @@ class DatasetColumnService: def paginate_active_columns_for_table(table_uri: str, filter=None): # TODO THERE WAS NO PERMISSION CHECK!!! with get_context().db_engine.scoped_session() as session: - DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, filter) + return DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, filter) @staticmethod def sync_table_columns(table_uri: str): From e991e70aefd650d21c461091e66e3f403e6a3a5d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 11:32:39 +0200 Subject: [PATCH 261/346] Removed unused queries --- .../datasets/api/profiling/mutations.py | 17 ++----------- .../modules/datasets/api/profiling/queries.py | 10 -------- .../datasets/api/profiling/resolvers.py | 9 ------- .../db/dataset_profiling_repository.py | 12 +-------- .../services/dataset_profiling_service.py | 17 ------------- .../datasets/test_dataset_profiling.py | 25 ------------------- 6 files changed, 3 insertions(+), 87 deletions(-) diff --git a/backend/dataall/modules/datasets/api/profiling/mutations.py b/backend/dataall/modules/datasets/api/profiling/mutations.py index e4bcd62cc..dcc5a0bb7 100644 --- a/backend/dataall/modules/datasets/api/profiling/mutations.py +++ b/backend/dataall/modules/datasets/api/profiling/mutations.py @@ -1,22 +1,9 @@ from dataall.api import gql -from dataall.modules.datasets.api.profiling.resolvers import ( - start_profiling_run, - update_profiling_run_results -) +from dataall.modules.datasets.api.profiling.resolvers import start_profiling_run startDatasetProfilingRun = gql.MutationField( name='startDatasetProfilingRun', args=[gql.Argument(name='input', type=gql.Ref('StartDatasetProfilingRunInput'))], type=gql.Ref('DatasetProfilingRun'), resolver=start_profiling_run, -) - 
-updateDatasetProfilingRunResults = gql.MutationField( - name='updateDatasetProfilingRunResults', - args=[ - gql.Argument(name='profilingRunUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='results', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('DatasetProfilingRun'), - resolver=update_profiling_run_results, -) +) \ No newline at end of file diff --git a/backend/dataall/modules/datasets/api/profiling/queries.py b/backend/dataall/modules/datasets/api/profiling/queries.py index 8d2fbb25c..0481867ad 100644 --- a/backend/dataall/modules/datasets/api/profiling/queries.py +++ b/backend/dataall/modules/datasets/api/profiling/queries.py @@ -1,20 +1,10 @@ from dataall.api import gql from dataall.modules.datasets.api.profiling.resolvers import ( - get_profiling_run, list_profiling_runs, list_table_profiling_runs, get_last_table_profiling_run ) - -getDatasetProfilingRun = gql.QueryField( - name='getDatasetProfilingRun', - args=[gql.Argument(name='profilingRunUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetProfilingRun'), - resolver=get_profiling_run, -) - - listDatasetProfilingRuns = gql.QueryField( name='listDatasetProfilingRuns', args=[ diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 13cb232d9..8db0581b5 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -40,19 +40,10 @@ def get_profiling_results(context: Context, source: DatasetProfilingRun): else: return json.dumps(source.results) - -def update_profiling_run_results(context: Context, source, profilingRunUri, results): - return DatasetProfilingService.update_profiling_run_results(profilingRunUri, results) - - def list_profiling_runs(context: Context, source, datasetUri=None): return DatasetProfilingService.list_profiling_runs(datasetUri) -def get_profiling_run(context: Context, source, 
profilingRunUri=None): - return DatasetProfilingService.get_profiling_run(profilingRunUri) - - def get_last_table_profiling_run(context: Context, source, tableUri=None): return DatasetProfilingService.get_last_table_profiling_run(tableUri) diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py index 71a7c3016..02058b296 100644 --- a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -29,22 +29,12 @@ def save_profiling(session, dataset, env, glue_table_name): return run @staticmethod - def update_run( - session, - run_uri=None, - glue_job_run_id=None, - glue_job_state=None, - results=None, - ): + def update_run(session, run_uri, glue_job_run_id): run = DatasetProfilingRepository.get_profiling_run( session, profilingRunUri=run_uri, GlueJobRunId=glue_job_run_id ) if glue_job_run_id: run.GlueJobRunId = glue_job_run_id - if glue_job_state: - run.status = glue_job_state - if results: - run.results = results session.commit() return run diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 109bbcafd..87cd37d0f 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -59,29 +59,12 @@ def queue_profiling_run(run_uri): session.add(task) Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) - @staticmethod - def update_profiling_run_results(run_uri, results): - # TODO NO PERMISSION CHECK - with get_context().db_engine.scoped_session() as session: - run = DatasetProfilingRepository.update_run( - session=session, run_uri=run_uri, results=results - ) - return run - @staticmethod def list_profiling_runs(dataset_uri): # TODO NO PERMISSION CHECK with 
get_context().db_engine.scoped_session() as session: return DatasetProfilingRepository.list_profiling_runs(session, dataset_uri) - @staticmethod - def get_profiling_run(run_uri): - # TODO NO PERMISSION CHECK - with get_context().db_engine.scoped_session() as session: - return DatasetProfilingRepository.get_profiling_run( - session=session, profilingRunUri=run_uri - ) - @staticmethod def get_last_table_profiling_run(table_uri: str): # TODO NO PERMISSION CHECK diff --git a/tests/modules/datasets/test_dataset_profiling.py b/tests/modules/datasets/test_dataset_profiling.py index 852b4ea0b..d670e41c8 100644 --- a/tests/modules/datasets/test_dataset_profiling.py +++ b/tests/modules/datasets/test_dataset_profiling.py @@ -105,31 +105,6 @@ def list_profiling_runs(client, dataset1, group): return response.data.listDatasetProfilingRuns['nodes'] -def test_get_profiling_run(client, dataset1, env1, module_mocker, db, group): - runs = list_profiling_runs(client, dataset1, group) - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', - return_value=update_runs(db, runs), - ) - response = client.query( - """ - query getDatasetProfilingRun($profilingRunUri:String!){ - getDatasetProfilingRun(profilingRunUri:$profilingRunUri){ - profilingRunUri - status - } - } - """, - profilingRunUri=runs[0]['profilingRunUri'], - groups=[group.name], - ) - assert ( - response.data.getDatasetProfilingRun['profilingRunUri'] - == runs[0]['profilingRunUri'] - ) - assert response.data.getDatasetProfilingRun['status'] == 'SUCCEEDED' - - def test_get_table_profiling_run( client, dataset1, env1, module_mocker, table, db, group ): From 28a2ef73c36dd9d6715acc542a07243d9d68f3bd Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 11:46:46 +0200 Subject: [PATCH 262/346] Removed unused queries --- .../dataall/modules/datasets/api/dataset/queries.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/queries.py 
b/backend/dataall/modules/datasets/api/dataset/queries.py index 17295505a..2a202bcf8 100644 --- a/backend/dataall/modules/datasets/api/dataset/queries.py +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -58,18 +58,6 @@ resolver=get_file_upload_presigned_url, ) - -getGlueCrawlerStatus = gql.MutationField( - name='getGlueCrawlerStatus', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='name', type=gql.NonNullableType(gql.String)), - ], - resolver=lambda *_, **__: None, - type=gql.Ref('GlueCrawler'), -) - - listShareObjects = gql.QueryField( name='listDatasetShareObjects', resolver=list_dataset_share_objects, From a38be2e029ee8a2dcd3abcfab8e09fcb37fa69b6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 12:16:44 +0200 Subject: [PATCH 263/346] Renaming of the camelcase variables --- .../db/share_object_repository.py | 18 +++++------- .../services/data_sharing_service.py | 28 +++++++++---------- .../services/share_notification_service.py | 20 ++++++------- .../datasets/api/profiling/mutations.py | 2 +- .../db/dataset_profiling_repository.py | 12 ++++---- .../handlers/glue_profiling_handler.py | 2 +- .../datasets/services/dataset_service.py | 4 +-- .../datasets_base/db/dataset_repository.py | 10 +++---- 8 files changed, 45 insertions(+), 51 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index ef7044283..f0aad8da7 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -30,7 +30,8 @@ def validate_transition(self, prev_state): elif prev_state not in self._all_source_states: raise exceptions.UnauthorizedOperation( action=self._name, - message=f'This transition is not possible, {prev_state} cannot go to {self._all_target_states}. 
If there is a sharing or revoking in progress wait until it is complete and try again.', + message=f'This transition is not possible, {prev_state} cannot go to {self._all_target_states}. ' + f'If there is a sharing or revoking in progress wait until it is complete and try again.', ) else: return True @@ -146,7 +147,7 @@ def update_state(self, session, share, new_state): logger.info(f"Updating share object {share.shareUri} in DB from {self._state} to state {new_state}") ShareObjectRepository.update_share_object_status( session=session, - shareUri=share.shareUri, + share_uri=share.shareUri, status=new_state ) self._state = new_state @@ -532,8 +533,8 @@ def list_shareable_items(session, share, states, data): ) ) if 'isShared' in data.keys(): - isShared = data.get('isShared') - query = query.filter(shareable_objects.c.isShared == isShared) + is_shared = data.get('isShared') + query = query.filter(shareable_objects.c.isShared == is_shared) return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() @@ -595,13 +596,8 @@ def get_share_by_dataset_and_environment(session, dataset_uri, environment_uri): return share @staticmethod - def update_share_object_status( - session, - shareUri: str, - status: str, - ) -> ShareObject: - - share = ShareObjectRepository.get_share_by_uri(session, shareUri) + def update_share_object_status(session, share_uri: str, status: str) -> ShareObject: + share = ShareObjectRepository.get_share_by_uri(session, share_uri) share.status = status session.commit() return share diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py index 53ab11c98..fdf9d95e2 100644 --- a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -44,9 +44,9 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: target_environment, ) = 
ShareObjectRepository.get_share_data(session, share_uri) - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) - Share_SM.update_state(session, share, new_share_state) + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareObjectActions.Start.value) + share_sm.update_state(session, share, new_share_state) ( shared_tables, @@ -94,8 +94,8 @@ def approve_share(cls, engine: Engine, share_uri: str) -> bool: approved_tables_succeed = processor.process_approved_shares() log.info(f'sharing tables succeeded = {approved_tables_succeed}') - new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) - Share_SM.update_state(session, share, new_share_state) + new_share_state = share_sm.run_transition(ShareObjectActions.Finish.value) + share_sm.update_state(session, share, new_share_state) return approved_tables_succeed if approved_folders_succeed else False @@ -131,19 +131,19 @@ def revoke_share(cls, engine: Engine, share_uri: str): target_environment, ) = ShareObjectRepository.get_share_data(session, share_uri) - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) - Share_SM.update_state(session, share, new_share_state) + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareObjectActions.Start.value) + share_sm.update_state(session, share, new_share_state) - revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + revoked_item_sm = ShareItemSM(ShareItemStatus.Revoke_Approved.value) ( revoked_tables, revoked_folders ) = ShareObjectRepository.get_share_data_items(session, share_uri, ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) - revoked_item_SM.update_state(session, share_uri, new_state) + new_state = revoked_item_sm.run_transition(ShareObjectActions.Start.value) + 
revoked_item_sm.update_state(session, share_uri, new_state) log.info(f'Revoking permissions to folders: {revoked_folders}') @@ -212,9 +212,9 @@ def revoke_share(cls, engine: Engine, share_uri: str): existing_pending_items = ShareObjectRepository.check_pending_share_items(session, share_uri) if existing_pending_items: - new_share_state = Share_SM.run_transition(ShareObjectActions.FinishPending.value) + new_share_state = share_sm.run_transition(ShareObjectActions.FinishPending.value) else: - new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) - Share_SM.update_state(session, share, new_share_state) + new_share_state = share_sm.run_transition(ShareObjectActions.Finish.value) + share_sm.update_state(session, share, new_share_state) return revoked_tables_succeed and revoked_folders_succeed diff --git a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py index 4f1bb20df..b6297720b 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py @@ -9,18 +9,15 @@ class ShareNotificationService: def notify_share_object_submission( session, username: str, dataset: Dataset, share: ShareObject ): - notifications = [] + notifications = [Notification.create( + session=session, + username=dataset.owner, + notification_type=models.NotificationType.SHARE_OBJECT_SUBMITTED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} submitted share request for dataset {dataset.label}', + )] # stewards = Notification.get_dataset_stewards(session, dataset) # for steward in stewards: - notifications.append( - Notification.create( - session=session, - username=dataset.owner, - notification_type=models.NotificationType.SHARE_OBJECT_SUBMITTED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} 
submitted share request for dataset {dataset.label}', - ) - ) session.add_all(notifications) return notifications @@ -81,7 +78,8 @@ def notify_new_data_available_from_owners( username=user, notification_type=models.NotificationType.DATASET_VERSION, target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} shared by owner {dataset.owner}', + message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} ' + f'shared by owner {dataset.owner}', ) ) session.add_all(notifications) diff --git a/backend/dataall/modules/datasets/api/profiling/mutations.py b/backend/dataall/modules/datasets/api/profiling/mutations.py index dcc5a0bb7..c0597a5c4 100644 --- a/backend/dataall/modules/datasets/api/profiling/mutations.py +++ b/backend/dataall/modules/datasets/api/profiling/mutations.py @@ -6,4 +6,4 @@ args=[gql.Argument(name='input', type=gql.Ref('StartDatasetProfilingRunInput'))], type=gql.Ref('DatasetProfilingRun'), resolver=start_profiling_run, -) \ No newline at end of file +) diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py index 02058b296..c0f3d07eb 100644 --- a/backend/dataall/modules/datasets/db/dataset_profiling_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repository.py @@ -31,7 +31,7 @@ def save_profiling(session, dataset, env, glue_table_name): @staticmethod def update_run(session, run_uri, glue_job_run_id): run = DatasetProfilingRepository.get_profiling_run( - session, profilingRunUri=run_uri, GlueJobRunId=glue_job_run_id + session, profiling_run_uri=run_uri, glue_job_run_id=glue_job_run_id ) if glue_job_run_id: run.GlueJobRunId = glue_job_run_id @@ -40,17 +40,17 @@ def update_run(session, run_uri, glue_job_run_id): @staticmethod def get_profiling_run( - session, profilingRunUri=None, GlueJobRunId=None, GlueTableName=None + session, 
profiling_run_uri=None, glue_job_run_id=None, glue_table_name=None ): - if profilingRunUri: + if profiling_run_uri: run: DatasetProfilingRun = session.query( DatasetProfilingRun - ).get(profilingRunUri) + ).get(profiling_run_uri) else: run: DatasetProfilingRun = ( session.query(DatasetProfilingRun) - .filter(DatasetProfilingRun.GlueJobRunId == GlueJobRunId) - .filter(DatasetProfilingRun.GlueTableName == GlueTableName) + .filter(DatasetProfilingRun.GlueJobRunId == glue_job_run_id) + .filter(DatasetProfilingRun.GlueTableName == glue_table_name) .first() ) return run diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index 81d37d36d..d001311b7 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -41,7 +41,7 @@ def start_profiling_run(engine, task: models.Task): def _get_job_data(session, task): profiling: DatasetProfilingRun = ( DatasetProfilingRepository.get_profiling_run( - session, profilingRunUri=task.targetUri + session, profiling_run_uri=task.targetUri ) ) dataset: Dataset = session.query(Dataset).get( diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index f89f98b7c..2f12d3ca0 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -496,8 +496,8 @@ def list_datasets_owned_by_env_group(env_uri: str, group_uri: str, data: dict): with get_context().db_engine.scoped_session() as session: return DatasetRepository.paginated_environment_group_datasets( session=session, - envUri=env_uri, - groupUri=group_uri, + env_uri=env_uri, + group_uri=group_uri, data=data, ) diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py 
b/backend/dataall/modules/datasets_base/db/dataset_repository.py index d3697ca3f..dbd60f476 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -318,11 +318,11 @@ def count_dataset_tables(session, dataset_uri): ) @staticmethod - def query_environment_group_datasets(session, envUri, groupUri, filter) -> Query: + def query_environment_group_datasets(session, env_uri, group_uri, filter) -> Query: query = session.query(Dataset).filter( and_( - Dataset.environmentUri == envUri, - Dataset.SamlAdminGroupName == groupUri, + Dataset.environmentUri == env_uri, + Dataset.SamlAdminGroupName == group_uri, Dataset.deleted.is_(None), ) ) @@ -372,11 +372,11 @@ def paginated_environment_datasets( @staticmethod def paginated_environment_group_datasets( - session, envUri, groupUri, data=None + session, env_uri, group_uri, data=None ) -> dict: return paginate( query=DatasetRepository.query_environment_group_datasets( - session, envUri, groupUri, data + session, env_uri, group_uri, data ), page=data.get('page', 1), page_size=data.get('pageSize', 10), From e0b67b173cfbd01234bd109dc46a8f107a8e67a4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 12:47:49 +0200 Subject: [PATCH 264/346] Indexers don't have queries. 
Removed big join queries --- .../datasets/db/dataset_table_repository.py | 26 ++++++ .../datasets/indexers/dataset_indexer.py | 57 +++--------- .../datasets/indexers/location_indexer.py | 71 ++++----------- .../datasets/indexers/table_indexer.py | 88 +++++-------------- 4 files changed, 77 insertions(+), 165 deletions(-) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 9fb6eeec7..76e7f2f7e 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -140,6 +140,32 @@ def update_existing_tables_status(existing_tables, glue_tables): f'Table {existing_table.GlueTableName} status set to Deleted from Glue.' ) + @staticmethod + def find_all_active_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus != 'Deleted', + ) + ) + .all() + ) + + @staticmethod + def find_all_deleted_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus == 'Deleted', + ) + ) + .all() + ) + @staticmethod def sync_table_columns(session, dataset_table, glue_table): diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py index 4036136ba..062964767 100644 --- a/backend/dataall/modules/datasets/indexers/dataset_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -1,9 +1,7 @@ """Indexes Datasets in OpenSearch""" -from dataall.db import models -from dataall.db.api import Vote +from dataall.db.api import Vote, Environment, Organization from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import Dataset from 
dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.searchproxy.base_indexer import BaseIndexer @@ -12,39 +10,10 @@ class DatasetIndexer(BaseIndexer): @classmethod def upsert(cls, session, dataset_uri: str): - dataset = ( - session.query( - Dataset.datasetUri.label('datasetUri'), - Dataset.name.label('name'), - Dataset.owner.label('owner'), - Dataset.label.label('label'), - Dataset.description.label('description'), - Dataset.confidentiality.label('classification'), - Dataset.tags.label('tags'), - Dataset.topics.label('topics'), - Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - Dataset.SamlAdminGroupName.label('admins'), - Dataset.GlueDatabaseName.label('database'), - Dataset.S3BucketName.label('source'), - Dataset.created, - Dataset.updated, - Dataset.deleted, - ) - .join( - models.Organization, - Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(Dataset.datasetUri == dataset_uri) - .first() - ) + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.organizationUri) + count_tables = DatasetRepository.count_dataset_tables(session, dataset_uri) count_folders = DatasetLocationRepository.count_dataset_locations(session, dataset_uri) count_upvotes = Vote.count_upvotes( @@ -59,19 +28,19 @@ def upsert(cls, session, dataset_uri: str): 'name': dataset.name, 'owner': dataset.owner, 'label': dataset.label, - 'admins': dataset.admins, - 'database': dataset.database, - 'source': dataset.source, + 'admins': dataset.SamlAdminGroupName, + 'database': dataset.GlueDatabaseName, + 
'source': dataset.S3BucketName, 'resourceKind': 'dataset', 'description': dataset.description, - 'classification': dataset.classification, + 'classification': dataset.confidentiality, 'tags': [t.replace('-', '') for t in dataset.tags or []], 'topics': dataset.topics, 'region': dataset.region.replace('-', ''), - 'environmentUri': dataset.envUri, - 'environmentName': dataset.envName, - 'organizationUri': dataset.orgUri, - 'organizationName': dataset.orgName, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, 'created': dataset.created, 'updated': dataset.updated, 'deleted': dataset.deleted, diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 9a6694d66..7856e4bea 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -1,7 +1,7 @@ """Indexes DatasetStorageLocation in OpenSearch""" -from dataall.modules.datasets_base.db.models import DatasetStorageLocation, Dataset - -from dataall.db import models +from dataall.db.api import Environment, Organization +from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.base_indexer import BaseIndexer @@ -10,64 +10,33 @@ class DatasetLocationIndexer(BaseIndexer): @classmethod def upsert(cls, session, folder_uri: str): - folder = ( - session.query( - DatasetStorageLocation.datasetUri.label('datasetUri'), - DatasetStorageLocation.locationUri.label('uri'), - DatasetStorageLocation.name.label('name'), - DatasetStorageLocation.owner.label('owner'), - DatasetStorageLocation.label.label('label'), - 
DatasetStorageLocation.description.label('description'), - DatasetStorageLocation.tags.label('tags'), - DatasetStorageLocation.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - Dataset.SamlAdminGroupName.label('admins'), - Dataset.S3BucketName.label('source'), - Dataset.topics.label('topics'), - Dataset.confidentiality.label('classification'), - DatasetStorageLocation.created, - DatasetStorageLocation.updated, - DatasetStorageLocation.deleted, - ) - .join( - Dataset, - Dataset.datasetUri == DatasetStorageLocation.datasetUri, - ) - .join( - models.Organization, - Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(DatasetStorageLocation.locationUri == folder_uri) - .first() - ) + folder = DatasetLocationRepository.get_location_by_uri(session, folder_uri) + if folder: + dataset = DatasetRepository.get_dataset_by_uri(session, folder.datasetUri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.environmentUri) glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) + BaseIndexer._index( doc_id=folder_uri, doc={ 'name': folder.name, - 'admins': folder.admins, + 'admins': dataset.SamlAdminGroupName, 'owner': folder.owner, 'label': folder.label, 'resourceKind': 'folder', 'description': folder.description, - 'source': folder.source, - 'classification': folder.classification, + 'source': dataset.S3BucketName, + 'classification': dataset.confidentiality, 'tags': [f.replace('-', '') for f in folder.tags or []], - 'topics': folder.topics, + 'topics': dataset.topics, 'region': folder.region.replace('-', ''), 'datasetUri': folder.datasetUri, - 'environmentUri': folder.envUri, - 
'environmentName': folder.envName, - 'organizationUri': folder.orgUri, - 'organizationName': folder.orgName, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, 'created': folder.created, 'updated': folder.updated, 'deleted': folder.deleted, @@ -79,11 +48,7 @@ def upsert(cls, session, folder_uri: str): @classmethod def upsert_all(cls, session, dataset_uri: str): - folders = ( - session.query(DatasetStorageLocation) - .filter(DatasetStorageLocation.datasetUri == dataset_uri) - .all() - ) + folders = DatasetLocationRepository.get_dataset_folders(session, dataset_uri) for folder in folders: DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) return folders diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index edbf262b5..ca0b0b88d 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -2,6 +2,9 @@ from operator import and_ from dataall.db import models +from dataall.db.api import Environment, Organization +from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.searchproxy.base_indexer import BaseIndexer @@ -11,68 +14,35 @@ class DatasetTableIndexer(BaseIndexer): @classmethod def upsert(cls, session, table_uri: str): - table = ( - session.query( - DatasetTable.datasetUri.label('datasetUri'), - DatasetTable.tableUri.label('uri'), - DatasetTable.name.label('name'), - DatasetTable.owner.label('owner'), - DatasetTable.label.label('label'), - DatasetTable.description.label('description'), - 
Dataset.confidentiality.label('classification'), - DatasetTable.tags.label('tags'), - Dataset.topics.label('topics'), - Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - Dataset.SamlAdminGroupName.label('admins'), - Dataset.GlueDatabaseName.label('database'), - Dataset.S3BucketName.label('source'), - DatasetTable.created, - DatasetTable.updated, - DatasetTable.deleted, - ) - .join( - Dataset, - Dataset.datasetUri == DatasetTable.datasetUri, - ) - .join( - models.Organization, - Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(DatasetTable.tableUri == table_uri) - .first() - ) + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) if table: + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + env = Environment.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.environmentUri) glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) + tags = table.tags if table.tags else [] BaseIndexer._index( doc_id=table_uri, doc={ 'name': table.name, - 'admins': table.admins, + 'admins': dataset.SamlAdminGroupName, 'owner': table.owner, 'label': table.label, 'resourceKind': 'table', 'description': table.description, - 'database': table.database, - 'source': table.source, - 'classification': table.classification, + 'database': table.GlueDatabaseName, + 'source': table.S3BucketName, + 'classification': dataset.confidentiality, 'tags': [t.replace('-', '') for t in tags or []], - 'topics': table.topics, - 'region': table.region.replace('-', ''), + 'topics': dataset.topics, + 'region': dataset.region.replace('-', ''), 'datasetUri': table.datasetUri, - 
'environmentUri': table.envUri, - 'environmentName': table.envName, - 'organizationUri': table.orgUri, - 'organizationName': table.orgName, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, 'created': table.created, 'updated': table.updated, 'deleted': table.deleted, @@ -84,32 +54,14 @@ def upsert(cls, session, table_uri: str): @classmethod def upsert_all(cls, session, dataset_uri: str): - tables = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == dataset_uri, - DatasetTable.LastGlueTableStatus != 'Deleted', - ) - ) - .all() - ) + tables = DatasetTableRepository.find_all_active_tables(session, dataset_uri) for table in tables: DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) return tables @classmethod def remove_all_deleted(cls, session, dataset_uri: str): - tables = ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == dataset_uri, - DatasetTable.LastGlueTableStatus == 'Deleted', - ) - ) - .all() - ) + tables = DatasetTableRepository.find_all_deleted_tables(session, dataset_uri) for table in tables: cls.delete_doc(doc_id=table.tableUri) return tables From 0eee55684c0afe3059a08e5d3e6ee7b12b42d8c5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 14:30:35 +0200 Subject: [PATCH 265/346] Added default filter --- .../dataall/modules/datasets/services/dataset_column_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index 9e28878a1..b3bf0b839 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -24,7 +24,7 @@ def sync_table_columns(table_uri: str): task = models.Task(action='glue.table.columns', targetUri=table_uri) 
session.add(task) Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) - return DatasetColumnService.paginate_active_columns_for_table(table_uri) + return DatasetColumnService.paginate_active_columns_for_table(table_uri, {}) @staticmethod def update_table_column_description(column_uri: str, description) -> DatasetTableColumn: From bca586dec3f8db0f2c25eab771478645c712f4c8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 1 Jun 2023 15:50:33 +0200 Subject: [PATCH 266/346] The method is under permission --- .../modules/datasets/services/dataset_profiling_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 87cd37d0f..69a9f83ea 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -50,7 +50,6 @@ def start_profiling_run(uri, table_uri, glue_table_name): @staticmethod def queue_profiling_run(run_uri): - # TODO NO PERMISSION CHECK context = get_context() with context.db_engine.scoped_session() as session: task = Task( From cf5e4c83b3335bc1435dba803d846826252f18a0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 09:38:05 +0200 Subject: [PATCH 267/346] Removed addDatasetTable since it's not used --- .../modules/datasets/api/table/input_types.py | 13 ------- .../modules/datasets/api/table/mutations.py | 16 +-------- .../modules/datasets/api/table/resolvers.py | 8 ----- .../datasets/db/dataset_table_repository.py | 34 ------------------- .../services/dataset_table_service.py | 30 ++-------------- frontend/src/api/Dataset/addDatasetTable.js | 18 ---------- 6 files changed, 3 insertions(+), 116 deletions(-) delete mode 100644 frontend/src/api/Dataset/addDatasetTable.js diff --git a/backend/dataall/modules/datasets/api/table/input_types.py 
b/backend/dataall/modules/datasets/api/table/input_types.py index 2e6649515..c8c36a5bc 100644 --- a/backend/dataall/modules/datasets/api/table/input_types.py +++ b/backend/dataall/modules/datasets/api/table/input_types.py @@ -1,19 +1,6 @@ from dataall.api import gql from dataall.api.constants import SortDirection, GraphQLEnumMapper - -NewDatasetTableInput = gql.InputType( - name='NewDatasetTableInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('name', gql.NonNullableType(gql.String)), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('config', gql.String), - gql.Argument('region', gql.String), - ], -) - ModifyDatasetTableInput = gql.InputType( name='ModifyDatasetTableInput', arguments=[ diff --git a/backend/dataall/modules/datasets/api/table/mutations.py b/backend/dataall/modules/datasets/api/table/mutations.py index 7a26a6c15..bc2531ebf 100644 --- a/backend/dataall/modules/datasets/api/table/mutations.py +++ b/backend/dataall/modules/datasets/api/table/mutations.py @@ -1,25 +1,11 @@ from dataall.api import gql -from dataall.modules.datasets.api.table.input_types import ( - ModifyDatasetTableInput, - NewDatasetTableInput, -) +from dataall.modules.datasets.api.table.input_types import ModifyDatasetTableInput from dataall.modules.datasets.api.table.resolvers import ( - create_table, update_table, delete_table, publish_table_update ) -createDatasetTable = gql.MutationField( - name='createDatasetTable', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=NewDatasetTableInput), - ], - type=gql.Ref('DatasetTable'), - resolver=create_table, -) - updateDatasetTable = gql.MutationField( name='updateDatasetTable', args=[ diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index c6cd6fda5..bdd9f1bfd 100644 --- 
a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -1,7 +1,6 @@ import logging from dataall import db -from dataall.db.exceptions import RequiredParameter from dataall.modules.datasets.api.dataset.resolvers import get_dataset from dataall.api.context import Context from dataall.db.api import Glossary @@ -11,13 +10,6 @@ log = logging.getLogger(__name__) -def create_table(context, source, datasetUri: str = None, input: dict = None): - if "name" not in input: - raise RequiredParameter("name") - - return DatasetTableService.create_table(dataset_uri=datasetUri, table_data=input) - - def list_dataset_tables(context, source, filter: dict = None): if not source: return None diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index 76e7f2f7e..ac87aef65 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -18,40 +18,6 @@ class DatasetTableRepository: def save(session, table: DatasetTable): session.add(table) - @staticmethod - def exists(session, dataset_uri, glue_table_name): - return ( - session.query(DatasetTable) - .filter( - and_( - DatasetTable.datasetUri == dataset_uri, - DatasetTable.GlueTableName == glue_table_name, - ) - ) - .count() - ) - - @staticmethod - def create_dataset_table(session, dataset: Dataset, data: dict = None) -> DatasetTable: - table = DatasetTable( - datasetUri=dataset.datasetUri, - label=data['name'], - name=data['name'], - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - S3BucketName=dataset.S3BucketName, - S3Prefix=data.get('S3Prefix', 'unknown'), - AWSAccountId=dataset.AwsAccountId, - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableConfig=data.get('config'), - GlueTableName=data['name'], - owner=dataset.owner, - region=dataset.region, - 
) - session.add(table) - session.commit() - return table - @staticmethod def create_synced_table(session, dataset: Dataset, table: dict): updated_table = DatasetTable( diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 521eb182a..866afaa71 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -6,14 +6,14 @@ from dataall.core.permission_checker import has_resource_permission, has_tenant_permission from dataall.db import models from dataall.db.api import ResourcePolicy, Environment, Glossary -from dataall.db.exceptions import ResourceShared, ResourceAlreadyExists +from dataall.db.exceptions import ResourceShared from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets_base.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ - DELETE_DATASET_TABLE, CREATE_DATASET_TABLE + DELETE_DATASET_TABLE from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ @@ -28,32 +28,6 @@ def _get_dataset_uri(session, table_uri): table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) return table.datasetUri - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - @has_resource_permission(CREATE_DATASET_TABLE) - def create_table(uri: str, table_data: dict): - 
with get_context().db_engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - glue_table = table_data['name'] - exists = DatasetTableRepository.exists(session, dataset_uri=uri, glue_table_name=glue_table) - - if exists: - raise ResourceAlreadyExists( - action='Create Table', - message=f'table: {glue_table} already exist on dataset {uri}', - ) - - table = DatasetTableRepository.create_dataset_table(session, dataset, table_data) - - if 'terms' in table_data: - Glossary.set_glossary_terms_links( - session, get_context().username, table.tableUri, 'DatasetTable', table_data['terms'] - ) - - DatasetTableService._attach_dataset_table_permission(session, dataset, table.tableUri) - DatasetTableIndexer.upsert(session, table_uri=table.tableUri) - return table - @staticmethod @has_tenant_permission(MANAGE_DATASETS) def list_dataset_tables(dataset_uri: str, filter): diff --git a/frontend/src/api/Dataset/addDatasetTable.js b/frontend/src/api/Dataset/addDatasetTable.js deleted file mode 100644 index faf6541d9..000000000 --- a/frontend/src/api/Dataset/addDatasetTable.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetTable = ({ datasetUri, input }) => ({ - variables: { datasetUri, input }, - mutation: gql` - mutation CreateDatasetTable( - $datasetUri: String - $input: NewDatasetTableInput - ) { - createDatasetTable(datasetUri: $datasetUri, input: $input) { - tableUri - name - } - } - ` -}); - -export default createDatasetTable; From 3de2e09653ca4215cbc1bd8f1fbc2c20f547e2b2 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 09:40:24 +0200 Subject: [PATCH 268/346] list_dataset_tables is not used --- .../modules/datasets/api/table/resolvers.py | 8 -------- .../datasets/db/dataset_table_repository.py | 15 --------------- .../datasets/services/dataset_table_service.py | 6 ------ 3 files changed, 29 deletions(-) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py 
b/backend/dataall/modules/datasets/api/table/resolvers.py index bdd9f1bfd..5fb5e99c5 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -10,14 +10,6 @@ log = logging.getLogger(__name__) -def list_dataset_tables(context, source, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - return DatasetTableService.list_dataset_tables(dataset_uri=source.datasetUri, filter=filter) - - def get_table(context, source: Dataset, tableUri: str = None): return DatasetTableService.get_table(uri=tableUri) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repository.py b/backend/dataall/modules/datasets/db/dataset_table_repository.py index ac87aef65..a045fce08 100644 --- a/backend/dataall/modules/datasets/db/dataset_table_repository.py +++ b/backend/dataall/modules/datasets/db/dataset_table_repository.py @@ -40,21 +40,6 @@ def create_synced_table(session, dataset: Dataset, table: dict): session.commit() return updated_table - @staticmethod - def paginate_dataset_tables(session, dataset_uri, filter: dict) -> dict: - query = ( - session.query(DatasetTable) - .filter(DatasetTable.datasetUri == dataset_uri) - .order_by(DatasetTable.created.desc()) - ) - if 'term' in filter: - query = query.filter(DatasetTable.label.ilike('%' + filter['term'] + '%')) - return paginate( - query=query, - page=filter.get('page', 1), - page_size=filter.get('pageSize', 10) - ).to_dict() - @staticmethod def delete(session, table: DatasetTable): session.delete(table) diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index 866afaa71..a8ee44153 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -28,12 +28,6 @@ def _get_dataset_uri(session, table_uri): table = 
DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) return table.datasetUri - @staticmethod - @has_tenant_permission(MANAGE_DATASETS) - def list_dataset_tables(dataset_uri: str, filter): - with get_context().db_engine.scoped_session() as session: - return DatasetTableRepository.paginate_dataset_tables(session, dataset_uri, filter) - @staticmethod @has_tenant_permission(MANAGE_DATASETS) def get_table(uri: str): From 345db03b6c469879ad4e7623e770e56d6097f835 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 09:54:55 +0200 Subject: [PATCH 269/346] Removed unused API --- .../modules/datasets/api/table/mutations.py | 10 ------ .../modules/datasets/api/table/resolvers.py | 4 --- .../services/dataset_table_service.py | 24 -------------- .../DatasetTable/getDatasetProfilingReport.js | 14 -------- .../listDatasetTableProfilingJobs.js | 33 ------------------- frontend/src/api/DatasetTable/previewTable.js | 22 ------------- .../DatasetTable/publishDatasetTableUpdate.js | 14 -------- .../src/api/DatasetTable/startProfilingJob.js | 16 --------- 8 files changed, 137 deletions(-) delete mode 100644 frontend/src/api/DatasetTable/getDatasetProfilingReport.js delete mode 100644 frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js delete mode 100644 frontend/src/api/DatasetTable/previewTable.js delete mode 100644 frontend/src/api/DatasetTable/publishDatasetTableUpdate.js delete mode 100644 frontend/src/api/DatasetTable/startProfilingJob.js diff --git a/backend/dataall/modules/datasets/api/table/mutations.py b/backend/dataall/modules/datasets/api/table/mutations.py index bc2531ebf..61b805cdd 100644 --- a/backend/dataall/modules/datasets/api/table/mutations.py +++ b/backend/dataall/modules/datasets/api/table/mutations.py @@ -3,7 +3,6 @@ from dataall.modules.datasets.api.table.resolvers import ( update_table, delete_table, - publish_table_update ) updateDatasetTable = gql.MutationField( @@ -22,12 +21,3 @@ type=gql.Boolean, 
resolver=delete_table, ) - -publishDatasetTableUpdate = gql.MutationField( - name='publishDatasetTableUpdate', - args=[ - gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), - ], - resolver=publish_table_update, - type=gql.Boolean, -) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index 5fb5e99c5..dbae3b818 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -55,10 +55,6 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): ) -def publish_table_update(context: Context, source, tableUri: str = None): - return DatasetTableService.publish_table_update(uri=tableUri) - - def resolve_redshift_copy_schema(context, source: DatasetTable, clusterUri: str): if not source: return None diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index a8ee44153..d53236ce8 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -103,30 +103,6 @@ def get_glue_table_properties(table_uri: str): table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') - @staticmethod - @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) - def publish_table_update(uri: str): - context = get_context() - with context.db_engine.scoped_session() as session: - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise 
Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." - ) - - task = models.Task( - targetUri=table.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': table.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) - return True - @staticmethod def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): # TODO THERE WAS NO PERMISSION CHECK diff --git a/frontend/src/api/DatasetTable/getDatasetProfilingReport.js b/frontend/src/api/DatasetTable/getDatasetProfilingReport.js deleted file mode 100644 index eb2ca82b7..000000000 --- a/frontend/src/api/DatasetTable/getDatasetProfilingReport.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetTableProfilingReport = (jobUri) => ({ - variables: { - jobUri - }, - query: gql` - query getDatasetTableProfilingReport($jobUri: String!) { - getDatasetTableProfilingReport(jobUri: $jobUri) - } - ` -}); - -export default getDatasetTableProfilingReport; diff --git a/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js b/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js deleted file mode 100644 index 3fcd4fbad..000000000 --- a/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js +++ /dev/null @@ -1,33 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetTableProfilingJobs = (tableUri) => ({ - variables: { - tableUri - }, - query: gql` - query GetDatasetTable($tableUri: String!) 
{ - getDatasetTable(tableUri: $tableUri) { - datasetUri - owner - created - tableUri - AwsAccountId - GlueTableName - profilingJobs { - count - page - pages - hasNext - hasPrevious - nodes { - jobUri - created - status - } - } - } - } - ` -}); - -export default listDatasetTableProfilingJobs; diff --git a/frontend/src/api/DatasetTable/previewTable.js b/frontend/src/api/DatasetTable/previewTable.js deleted file mode 100644 index e9f442249..000000000 --- a/frontend/src/api/DatasetTable/previewTable.js +++ /dev/null @@ -1,22 +0,0 @@ -import { gql } from 'apollo-boost'; - -const previewTable = ({ tableUri, queryExecutionId }) => ({ - variables: { - tableUri, - queryExecutionId - }, - query: gql` - query PreviewTable($tableUri: String!, $queryExecutionId: String) { - previewTable(tableUri: $tableUri, queryExecutionId: $queryExecutionId) { - count - status - queryExecutionId - nodes { - data - } - } - } - ` -}); - -export default previewTable; diff --git a/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js b/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js deleted file mode 100644 index b8a44ff39..000000000 --- a/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetTableUpdate = ({ tableUri }) => ({ - variables: { - tableUri - }, - mutation: gql` - mutation publishDatasetTableUpdate($tableUri: String!) { - publishDatasetTableUpdate(tableUri: $tableUri) - } - ` -}); - -export default publishDatasetTableUpdate; diff --git a/frontend/src/api/DatasetTable/startProfilingJob.js b/frontend/src/api/DatasetTable/startProfilingJob.js deleted file mode 100644 index 8e61cfa4c..000000000 --- a/frontend/src/api/DatasetTable/startProfilingJob.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const startProfilingJob = (tableUri) => ({ - variables: { - tableUri - }, - mutation: gql` - mutation StartProfilingJob($tableUri: String!) 
{ - startProfilingJob(tableUri: $tableUri) { - jobUri - } - } - ` -}); - -export default startProfilingJob; From a25c9be44437e7f5ea8214de23df1e14548fad1e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 10:12:42 +0200 Subject: [PATCH 270/346] Removed unused API for datasets --- .../modules/datasets/api/dataset/mutations.py | 11 ----- .../modules/datasets/api/dataset/queries.py | 10 ---- .../modules/datasets/api/dataset/resolvers.py | 10 ---- .../api/storage_location/mutations.py | 10 ---- .../api/storage_location/resolvers.py | 4 -- .../modules/datasets/handlers/__init__.py | 3 +- .../datasets/handlers/sns_dataset_handler.py | 30 ------------ .../services/dataset_location_service.py | 23 ---------- .../datasets/services/dataset_service.py | 38 --------------- .../src/api/Dataset/addDatasetContributor.js | 24 ---------- frontend/src/api/Dataset/addDatasetLoader.js | 15 ------ .../src/api/Dataset/addTablePermission.js | 26 ----------- frontend/src/api/Dataset/archiveDataset.js | 14 ------ frontend/src/api/Dataset/getCrawlerStatus.js | 18 -------- .../api/Dataset/getDatasetETLCredentials.js | 14 ------ .../api/Dataset/listDatasetContributors.js | 31 ------------- .../src/api/Dataset/listDatasetLoaders.js | 34 -------------- .../src/api/Dataset/listDatasetObjects.js | 46 ------------------- .../api/Dataset/listDeltaLakeCrawlerRuns.js | 23 ---------- .../src/api/Dataset/listTablePermissions.js | 25 ---------- .../Dataset/publishDatasetLocationUpdate.js | 14 ------ .../src/api/Dataset/publishDatasetUpdate.js | 15 ------ .../api/Dataset/removeDatasetContributor.js | 16 ------- .../src/api/Dataset/removeDatasetLoader.js | 12 ----- .../src/api/Dataset/removeTablePermission.js | 16 ------- .../api/Dataset/updateDatasetContributor.js | 24 ---------- .../src/api/Dataset/updateDatasetStack.js | 12 ----- .../createDatasetQualityRule.js | 25 ---------- .../deleteDatasetqualityRule.js | 14 ------ .../getDatasetQualityRule.js | 21 --------- 
.../listDatasetQualityRules.js | 32 ------------- .../updateDatasetQualityRule.js | 25 ---------- 32 files changed, 1 insertion(+), 634 deletions(-) delete mode 100644 backend/dataall/modules/datasets/handlers/sns_dataset_handler.py delete mode 100644 frontend/src/api/Dataset/addDatasetContributor.js delete mode 100644 frontend/src/api/Dataset/addDatasetLoader.js delete mode 100644 frontend/src/api/Dataset/addTablePermission.js delete mode 100644 frontend/src/api/Dataset/archiveDataset.js delete mode 100644 frontend/src/api/Dataset/getCrawlerStatus.js delete mode 100644 frontend/src/api/Dataset/getDatasetETLCredentials.js delete mode 100644 frontend/src/api/Dataset/listDatasetContributors.js delete mode 100644 frontend/src/api/Dataset/listDatasetLoaders.js delete mode 100644 frontend/src/api/Dataset/listDatasetObjects.js delete mode 100644 frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js delete mode 100644 frontend/src/api/Dataset/listTablePermissions.js delete mode 100644 frontend/src/api/Dataset/publishDatasetLocationUpdate.js delete mode 100644 frontend/src/api/Dataset/publishDatasetUpdate.js delete mode 100644 frontend/src/api/Dataset/removeDatasetContributor.js delete mode 100644 frontend/src/api/Dataset/removeDatasetLoader.js delete mode 100644 frontend/src/api/Dataset/removeTablePermission.js delete mode 100644 frontend/src/api/Dataset/updateDatasetContributor.js delete mode 100644 frontend/src/api/Dataset/updateDatasetStack.js delete mode 100644 frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js delete mode 100644 frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js delete mode 100644 frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js delete mode 100644 frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js delete mode 100644 frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js diff --git a/backend/dataall/modules/datasets/api/dataset/mutations.py 
b/backend/dataall/modules/datasets/api/dataset/mutations.py index c2e5364f4..bc63c6f88 100644 --- a/backend/dataall/modules/datasets/api/dataset/mutations.py +++ b/backend/dataall/modules/datasets/api/dataset/mutations.py @@ -11,7 +11,6 @@ generate_dataset_access_token, delete_dataset, import_dataset, - publish_dataset_update, start_crawler ) @@ -69,16 +68,6 @@ test_scope='Dataset', ) -publishDatasetUpdate = gql.MutationField( - name='publishDatasetUpdate', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='s3Prefix', type=gql.NonNullableType(gql.String)), - ], - resolver=publish_dataset_update, - type=gql.Boolean, -) - StartGlueCrawler = gql.MutationField( name='startGlueCrawler', args=[ diff --git a/backend/dataall/modules/datasets/api/dataset/queries.py b/backend/dataall/modules/datasets/api/dataset/queries.py index 2a202bcf8..184b1fc6f 100644 --- a/backend/dataall/modules/datasets/api/dataset/queries.py +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -4,7 +4,6 @@ get_dataset, list_datasets, get_dataset_assume_role_url, - get_dataset_etl_credentials, get_file_upload_presigned_url, list_dataset_share_objects, list_datasets_owned_by_env_group, @@ -39,15 +38,6 @@ ) -getDatasetETLCredentials = gql.QueryField( - name='getDatasetETLCredentials', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_dataset_etl_credentials, - test_scope='Dataset', -) - - getDatasetPresignedUrl = gql.QueryField( name='getDatasetPresignedUrl', args=[ diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 6af8c67e5..e42967485 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -122,10 +122,6 @@ def get_dataset_statistics(context: Context, source: Dataset, **kwargs): return 
DatasetService.get_dataset_statistics(source) -def get_dataset_etl_credentials(context: Context, source, datasetUri: str = None): - return DatasetService.get_dataset_etl_credentials(uri=datasetUri) - - def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None): return DatasetService.get_dataset_assume_role_url(uri=datasetUri) @@ -184,12 +180,6 @@ def get_dataset_glossary_terms(context: Context, source: Dataset, **kwargs): return paginate(terms, page_size=100, page=1).to_dict() -def publish_dataset_update( - context: Context, source, datasetUri: str = None, s3Prefix: str = None -): - return DatasetService.publish_dataset_update(uri=datasetUri, s3_prefix=s3Prefix) - - def resolve_redshift_copy_enabled(context, source: Dataset, clusterUri: str): if not source: return None diff --git a/backend/dataall/modules/datasets/api/storage_location/mutations.py b/backend/dataall/modules/datasets/api/storage_location/mutations.py index 14aafddc7..0a01d14e3 100644 --- a/backend/dataall/modules/datasets/api/storage_location/mutations.py +++ b/backend/dataall/modules/datasets/api/storage_location/mutations.py @@ -7,7 +7,6 @@ create_storage_location, update_storage_location, remove_storage_location, - publish_location_update ) from dataall.modules.datasets.api.storage_location.schema import DatasetStorageLocation @@ -38,12 +37,3 @@ resolver=remove_storage_location, type=gql.Boolean, ) - -publishDatasetStorageLocationUpdate = gql.MutationField( - name='publishDatasetStorageLocationUpdate', - args=[ - gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String)), - ], - resolver=publish_location_update, - type=gql.Boolean, -) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py index ed29097e3..78ff2f601 100644 --- a/backend/dataall/modules/datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ 
-46,10 +46,6 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): return d -def publish_location_update(context: Context, source, locationUri: str = None): - return DatasetLocationService.publish_location_update(uri=locationUri) - - def resolve_glossary_terms( context: Context, source: DatasetStorageLocation, **kwargs ): diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 688f78fc3..6e5d2867c 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -7,9 +7,8 @@ glue_table_handler, glue_profiling_handler, s3_folder_creator_handler, - sns_dataset_handler, glue_dataset_handler ) __all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "s3_folder_creator_handler", - "sns_dataset_handler", "glue_dataset_handler"] + "glue_dataset_handler"] diff --git a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py b/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py deleted file mode 100644 index f2c3b8dd5..000000000 --- a/backend/dataall/modules/datasets/handlers/sns_dataset_handler.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging - -from dataall.aws.handlers.service_handlers import Worker -from dataall.db import models -from dataall.db.api import Environment -from dataall.modules.datasets.aws.sns_dataset_client import SnsDatasetClient -from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository - -logger = logging.getLogger(__name__) - - -class SnsDatasetHandler: - def __init__(self): - pass - - @staticmethod - @Worker.handler(path='sns.dataset.publish_update') - def publish_update(engine, task: models.Task): - with engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, task.targetUri) - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - - message = { - 'prefix': 
task.payload['s3Prefix'], - 'accountid': environment.AwsAccountId, - 'region': environment.region, - 'bucket_name': dataset.S3BucketName, - } - - SnsDatasetClient(environment, dataset).publish_dataset_message(message) diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py index 00cac9bb3..a801730dd 100644 --- a/backend/dataall/modules/datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -98,29 +98,6 @@ def remove_storage_location(uri: str = None): DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) return True - @staticmethod - @has_resource_permission(UPDATE_DATASET_FOLDER, parent_resource=_get_dataset_uri) - def publish_location_update(uri: str): - context = get_context() - with context.db_engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri(session, uri) - dataset = DatasetRepository.get_dataset_by_uri(session, location.datasetUri) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." 
- ) - task = Task( - targetUri=location.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': location.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) - return True - @staticmethod def _create_glossary_links(session, location, terms): Glossary.set_glossary_terms_links( diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 2f12d3ca0..f3b0914d9 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -200,18 +200,6 @@ def get_dataset_statistics(dataset: Dataset): 'upvotes': count_upvotes or 0, } - @staticmethod - @has_resource_permission(CREDENTIALS_DATASET) - def get_dataset_etl_credentials(uri): - context = get_context() - with context.db_engine.scoped_session() as session: - task = Task(targetUri=uri, action='iam.dataset.user.credentials') - session.add(task) - response = Worker.process( - engine=context.db_engine, task_ids=[task.taskUri], save_response=False - )[0] - return json.dumps(response['response']) - @staticmethod @has_resource_permission(CREDENTIALS_DATASET) def get_dataset_assume_role_url(uri): @@ -430,32 +418,6 @@ def delete_dataset(uri: str, delete_from_aws: bool = False): stack_helper.deploy_stack(dataset.environmentUri) return True - @staticmethod - @has_resource_permission(SUBSCRIPTIONS_DATASET) - def publish_dataset_update(uri: str, s3_prefix: str): - engine = get_context().db_engine - with engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." 
- ) - - task = Task( - targetUri=uri, - action='sns.dataset.publish_update', - payload={'s3Prefix': s3_prefix}, - ) - session.add(task) - - response = Worker.process( - engine=engine, task_ids=[task.taskUri], save_response=False - )[0] - log.info(f'Dataset update publish response: {response}') - return True - @staticmethod def _deploy_dataset_stack(dataset: Dataset): """ diff --git a/frontend/src/api/Dataset/addDatasetContributor.js b/frontend/src/api/Dataset/addDatasetContributor.js deleted file mode 100644 index 5e104b3cc..000000000 --- a/frontend/src/api/Dataset/addDatasetContributor.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addDatasetContributor = ({ userName, datasetUri, role }) => ({ - variables: { userName, datasetUri, role }, - mutation: gql` - mutation AddDatasetContributor( - $datasetUri: String - $userName: String - $role: DatasetRole - ) { - addDatasetContributor( - datasetUri: $datasetUri - userName: $userName - role: $role - ) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default addDatasetContributor; diff --git a/frontend/src/api/Dataset/addDatasetLoader.js b/frontend/src/api/Dataset/addDatasetLoader.js deleted file mode 100644 index 22241074d..000000000 --- a/frontend/src/api/Dataset/addDatasetLoader.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetLoader = ({ datasetUri, input }) => ({ - variables: { input, datasetUri }, - mutation: gql` - mutation createDatasetLoader( - $datasetUri: String - $input: NewDatasetLoaderInput - ) { - createDatasetLoader(datasetUri: $datasetUri, input: $input) - } - ` -}); - -export default createDatasetLoader; diff --git a/frontend/src/api/Dataset/addTablePermission.js b/frontend/src/api/Dataset/addTablePermission.js deleted file mode 100644 index 857b2fc57..000000000 --- a/frontend/src/api/Dataset/addTablePermission.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const 
addTablePermissions = ({ tableUri, role, userName }) => ({ - variables: { - tableUri, - role, - userName - }, - mutation: gql` - mutation AddTablePermission( - $tableUri: String! - $userName: String! - $role: DatasetRole! - ) { - addTablePermission( - tableUri: $tableUri - userName: $userName - role: $role - ) { - tableUri - } - } - ` -}); - -export default addTablePermissions; diff --git a/frontend/src/api/Dataset/archiveDataset.js b/frontend/src/api/Dataset/archiveDataset.js deleted file mode 100644 index 686d13238..000000000 --- a/frontend/src/api/Dataset/archiveDataset.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const archiveDataset = (datasetUri) => ({ - variables: { - datasetUri - }, - mutation: gql` - mutation archiveDataset($datasetUri: String!) { - archiveDataset(datasetUri: $datasetUri) - } - ` -}); - -export default archiveDataset; diff --git a/frontend/src/api/Dataset/getCrawlerStatus.js b/frontend/src/api/Dataset/getCrawlerStatus.js deleted file mode 100644 index 47d1c2258..000000000 --- a/frontend/src/api/Dataset/getCrawlerStatus.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getCrawlerStatus = ({ datasetUri, name }) => ({ - variables: { - datasetUri, - input: name - }, - query: gql`query GetCrawlerStatus($datasetUri:String, name:String){ - getCrawlerStatus(datasetUri:$datasetUri,name:$name){ - Name - AwsAccountId - region - status - } - }` -}); - -export default getCrawlerStatus; diff --git a/frontend/src/api/Dataset/getDatasetETLCredentials.js b/frontend/src/api/Dataset/getDatasetETLCredentials.js deleted file mode 100644 index 616e579dd..000000000 --- a/frontend/src/api/Dataset/getDatasetETLCredentials.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetETLCredentials = (datasetUri) => ({ - variables: { - datasetUri - }, - query: gql` - query GetDatasetETLCredentials($datasetUri: String!) 
{ - getDatasetETLCredentials(datasetUri: $datasetUri) - } - ` -}); - -export default getDatasetETLCredentials; diff --git a/frontend/src/api/Dataset/listDatasetContributors.js b/frontend/src/api/Dataset/listDatasetContributors.js deleted file mode 100644 index 12498db8f..000000000 --- a/frontend/src/api/Dataset/listDatasetContributors.js +++ /dev/null @@ -1,31 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetContributors = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($filter: DatasetContributorFilter, $datasetUri: String!) { - getDataset(datasetUri: $datasetUri) { - datasetUri - contributors(filter: $filter) { - count - page - pageSize - hasNext - hasPrevious - pages - nodes { - userName - userRoleForDataset - userRoleInEnvironment - created - } - } - } - } - ` -}); - -export default listDatasetContributors; diff --git a/frontend/src/api/Dataset/listDatasetLoaders.js b/frontend/src/api/Dataset/listDatasetLoaders.js deleted file mode 100644 index 963e19a2d..000000000 --- a/frontend/src/api/Dataset/listDatasetLoaders.js +++ /dev/null @@ -1,34 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetLoaders = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($filter: DatasetLoaderFilter, $datasetUri: String!) 
{ - getDataset(datasetUri: $datasetUri) { - datasetUri - loaders(filter: $filter) { - count - page - pageSize - hasNext - hasPrevious - pages - nodes { - loaderUri - description - label - IAMPrincipalArn - description - label - tags - } - } - } - } - ` -}); - -export default listDatasetLoaders; diff --git a/frontend/src/api/Dataset/listDatasetObjects.js b/frontend/src/api/Dataset/listDatasetObjects.js deleted file mode 100644 index 2cda3f56d..000000000 --- a/frontend/src/api/Dataset/listDatasetObjects.js +++ /dev/null @@ -1,46 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetObjects = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($datasetUri:String!,$filter:DatasetTableFilter){ - getDataset(datasetUri:$datasetUri){ - datasetUri - locations(filter:$filer){ - count - page - pages - hasNext - hasPrevious - nodes{ - locationUri - created - label - } - } - - } - tables(filter:$filter){ - count - page - pages - hasNext - hasPrevious - nodes{ - datasetUri - tableUri - created - GlueTableName - label - } - } - - } - } - ` -}); - -export default listDatasetObjects; diff --git a/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js b/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js deleted file mode 100644 index d610e297a..000000000 --- a/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js +++ /dev/null @@ -1,23 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDeltaLakeCrawlerRuns = ({ datasetUri }) => ({ - variables: { - datasetUri - }, - query: gql` - query listDeltaLakeCrawlerRuns($datasetUri: String!) 
{ - listDeltaLakeCrawlerRuns(datasetUri: $datasetUri) { - datasetUri - GlueJobName - GlueJobRunId - AwsAccountId - GlueTriggerName - created - status - owner - } - } - ` -}); - -export default listDeltaLakeCrawlerRuns; diff --git a/frontend/src/api/Dataset/listTablePermissions.js b/frontend/src/api/Dataset/listTablePermissions.js deleted file mode 100644 index 446a63804..000000000 --- a/frontend/src/api/Dataset/listTablePermissions.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listTablePermissions = ({ tableUri }) => ({ - variables: { - tableUri - }, - query: gql` - query GetDatasetTable($tableUri: String!) { - getDatasetTable(tableUri: $tableUri) { - tableUri - userRoleForTable - permissions { - count - nodes { - userName - userRoleForTable - created - } - } - } - } - ` -}); - -export default listTablePermissions; diff --git a/frontend/src/api/Dataset/publishDatasetLocationUpdate.js b/frontend/src/api/Dataset/publishDatasetLocationUpdate.js deleted file mode 100644 index 25bfbdc39..000000000 --- a/frontend/src/api/Dataset/publishDatasetLocationUpdate.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetStorageLocationUpdate = ({ locationUri }) => ({ - variables: { - locationUri - }, - mutation: gql` - mutation publishDatasetStorageLocationUpdate($locationUri: String!) { - publishDatasetStorageLocationUpdate(locationUri: $locationUri) - } - ` -}); - -export default publishDatasetStorageLocationUpdate; diff --git a/frontend/src/api/Dataset/publishDatasetUpdate.js b/frontend/src/api/Dataset/publishDatasetUpdate.js deleted file mode 100644 index 2d542c9b2..000000000 --- a/frontend/src/api/Dataset/publishDatasetUpdate.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetUpdate = ({ datasetUri, s3Prefix }) => ({ - variables: { - datasetUri, - s3Prefix - }, - mutation: gql` - mutation publishDatasetUpdate($datasetUri: String!, $s3Prefix: String!) 
{ - publishDatasetUpdate(datasetUri: $datasetUri, s3Prefix: $s3Prefix) - } - ` -}); - -export default publishDatasetUpdate; diff --git a/frontend/src/api/Dataset/removeDatasetContributor.js b/frontend/src/api/Dataset/removeDatasetContributor.js deleted file mode 100644 index b44ce9f0f..000000000 --- a/frontend/src/api/Dataset/removeDatasetContributor.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeDatasetContributor = ({ userName, datasetUri }) => ({ - variables: { userName, datasetUri }, - mutation: gql` - mutation RemoveDatasetContributor($datasetUri: String, $userName: String) { - removeDatasetContributor(datasetUri: $datasetUri, userName: $userName) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default removeDatasetContributor; diff --git a/frontend/src/api/Dataset/removeDatasetLoader.js b/frontend/src/api/Dataset/removeDatasetLoader.js deleted file mode 100644 index d99352f75..000000000 --- a/frontend/src/api/Dataset/removeDatasetLoader.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeDatasetLoader = ({ loaderUri }) => ({ - variables: { loaderUri }, - mutation: gql` - mutation RemoveDatasetLoader($loaderUri: String) { - removeDatasetLoader(loaderUri: $loaderUri) - } - ` -}); - -export default removeDatasetLoader; diff --git a/frontend/src/api/Dataset/removeTablePermission.js b/frontend/src/api/Dataset/removeTablePermission.js deleted file mode 100644 index 45df8204f..000000000 --- a/frontend/src/api/Dataset/removeTablePermission.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeTablePermissions = ({ tableUri, role, userName }) => ({ - variables: { - tableUri, - role, - userName - }, - mutation: gql` - mutation RemoveTablePermission($tableUri: String!, $userName: String!) 
{ - removeTablePermission(tableUri: $tableUri, userName: $userName) - } - ` -}); - -export default removeTablePermissions; diff --git a/frontend/src/api/Dataset/updateDatasetContributor.js b/frontend/src/api/Dataset/updateDatasetContributor.js deleted file mode 100644 index 83a0ee6d7..000000000 --- a/frontend/src/api/Dataset/updateDatasetContributor.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetContributor = ({ userName, datasetUri, role }) => ({ - variables: { userName, datasetUri, role }, - mutation: gql` - mutation UpdateDatasetContributor( - $datasetUri: String - $userName: String - $role: DatasetRole - ) { - updateDatasetContributor( - datasetUri: $datasetUri - userName: $userName - role: $role - ) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default updateDatasetContributor; diff --git a/frontend/src/api/Dataset/updateDatasetStack.js b/frontend/src/api/Dataset/updateDatasetStack.js deleted file mode 100644 index 66778b572..000000000 --- a/frontend/src/api/Dataset/updateDatasetStack.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetStack = (datasetUri) => ({ - variables: { datasetUri }, - mutation: gql` - mutation updateDatasetStack($datasetUri: String!) { - updateDatasetStack(datasetUri: $datasetUri) - } - ` -}); - -export default updateDatasetStack; diff --git a/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js deleted file mode 100644 index 5143c173d..000000000 --- a/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetQualityRule = ({ datasetUri, input }) => ({ - variables: { - datasetUri, - input - }, - mutation: gql` - mutation CreateDatasetQualityRule( - $datasetUri: String! 
- $input: NewDatasetQualityRuleInput - ) { - createDatasetQualityRule(datasetUri: $datasetUri, input: $input) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default createDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js b/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js deleted file mode 100644 index 2464047a8..000000000 --- a/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDatasetQualityRule = (ruleUri) => ({ - variables: { - ruleUri - }, - mutation: gql` - mutation DeleteDatasetQualityRule($ruleUri: String!) { - deleteDatasetQualityRule(ruleUri: $ruleUri) - } - ` -}); - -export default deleteDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js deleted file mode 100644 index 038d229c5..000000000 --- a/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetQualityRule = (ruleUri) => ({ - variables: { - ruleUri - }, - query: gql` - query GetDatasetQualityRule($ruleUri: String!) { - getDatasetQualityRule(ruleUri: $ruleUri) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default getDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js b/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js deleted file mode 100644 index 7e7aecd91..000000000 --- a/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js +++ /dev/null @@ -1,32 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetQualityRules = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query ListDatasetQualityRules( - $datasetUri: String! 
- $filter: DatasetQualityRuleFilter - ) { - listDatasetQualityRules(datasetUri: $datasetUri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - ruleUri - name - label - description - created - query - } - } - } - ` -}); - -export default listDatasetQualityRules; diff --git a/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js deleted file mode 100644 index e03fd63ed..000000000 --- a/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetQualityRule = ({ ruleUri, input }) => ({ - variables: { - ruleUri, - input - }, - mutation: gql` - mutation UpdateDatasetQualityRule( - $ruleUri: String! - $input: ModifyDatasetQualityRuleInput - ) { - updateDatasetQualityRule(ruleUri: $ruleUri, input: $input) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default updateDatasetQualityRule; From 972d5339d5b00446ede8910865f8418105527469 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 10:18:05 +0200 Subject: [PATCH 271/346] Fixed linting --- backend/dataall/modules/datasets/__init__.py | 4 ---- backend/dataall/modules/datasets/api/profiling/resolvers.py | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index d7a7e6cae..921dee297 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -110,10 +110,6 @@ def __init__(self): log.info("Dataset stacks have been imported") - @staticmethod - def depends_on() -> List[Type['ModuleInterface']]: - return [DatasetBaseModuleInterface] - class DatasetStackUpdaterModuleInterface(ModuleInterface): diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py 
b/backend/dataall/modules/datasets/api/profiling/resolvers.py index 8db0581b5..e579627a9 100644 --- a/backend/dataall/modules/datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -40,6 +40,7 @@ def get_profiling_results(context: Context, source: DatasetProfilingRun): else: return json.dumps(source.results) + def list_profiling_runs(context: Context, source, datasetUri=None): return DatasetProfilingService.list_profiling_runs(datasetUri) From 4cc4e0f16ccab1ac18b06e1b05bd0060f6ae51b5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 10:20:54 +0200 Subject: [PATCH 272/346] Handler was not used --- .../modules/datasets/handlers/__init__.py | 4 +--- .../handlers/s3_folder_creator_handler.py | 23 ------------------- 2 files changed, 1 insertion(+), 26 deletions(-) delete mode 100644 backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index 6e5d2867c..ff59697b1 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -6,9 +6,7 @@ glue_table_sync_handler, glue_table_handler, glue_profiling_handler, - s3_folder_creator_handler, glue_dataset_handler ) -__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "s3_folder_creator_handler", - "glue_dataset_handler"] +__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "glue_dataset_handler"] diff --git a/backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py b/backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py deleted file mode 100644 index 86bc21d2b..000000000 --- a/backend/dataall/modules/datasets/handlers/s3_folder_creator_handler.py +++ /dev/null @@ -1,23 +0,0 @@ -import logging - -from dataall.aws.handlers.service_handlers import Worker -from 
dataall.db import models -from dataall.modules.datasets.aws.s3_location_client import S3LocationClient -from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository - -log = logging.getLogger(__name__) - - -class S3FolderCreatorHandler: - """Handles async requests related to s3 for dataset folders""" - - @staticmethod - @Worker.handler(path='s3.prefix.create') - def create_dataset_location(engine, task: models.Task): - with engine.scoped_session() as session: - location = DatasetLocationRepository.get_location_by_uri( - session, task.targetUri - ) - S3LocationClient(location).create_bucket_prefix() - location.locationCreated = True - return location From fa165fbc0a0a37611703acd92aa97978a46c180d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 13:06:11 +0200 Subject: [PATCH 273/346] Fixed tests --- .../services/share_object_service.py | 43 ++++++++++--------- .../datasets/indexers/location_indexer.py | 2 +- .../datasets/indexers/table_indexer.py | 2 +- .../datasets/services/dataset_service.py | 2 +- .../modules/datasets/test_dataset_indexers.py | 21 ++++----- 5 files changed, 36 insertions(+), 34 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index d90eed5d1..df38acc6a 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -26,9 +26,10 @@ def get_share_object(uri): with get_context().db_engine.scoped_session() as session: return ShareObjectRepository.get_share_by_uri(session, uri) - @staticmethod + @classmethod @has_resource_permission(CREATE_SHARE_OBJECT) def create_share_object( + cls, uri: str, dataset_uri: str, item_uri: str, @@ -72,7 +73,7 @@ def create_share_object( message=f'Team: {group_uri} is managing the dataset {dataset.name}', ) - 
ShareObjectService._validate_group_membership(session, group_uri, environment.environmentUri) + cls._validate_group_membership(session, group_uri, environment.environmentUri) share = ShareObjectRepository.find_share(session, dataset, environment, principal_id, group_uri) if not share: @@ -131,10 +132,10 @@ def create_share_object( # requester group (groupUri) # dataset.SamlAdminGroupName # environment.SamlGroupName - ShareObjectService._attach_share_resource_policy(session, share, group_uri) - ShareObjectService._attach_share_resource_policy(session, share, dataset.SamlAdminGroupName) + cls._attach_share_resource_policy(session, share, group_uri) + cls._attach_share_resource_policy(session, share, dataset.SamlAdminGroupName) if dataset.SamlAdminGroupName != environment.SamlGroupName: - ShareObjectService._attach_share_resource_policy(session, share, environment.SamlGroupName) + cls._attach_share_resource_policy(session, share, environment.SamlGroupName) # Attaching REQUESTER permissions to: # dataset.stewards (includes the dataset Admins) @@ -147,12 +148,12 @@ def create_share_object( ) return share - @staticmethod + @classmethod @has_resource_permission(SUBMIT_SHARE_OBJECT) - def submit_share_object(uri: str): + def submit_share_object(cls, uri: str): context = get_context() with context.db_engine.scoped_session() as session: - share, dataset, states = ShareObjectService._get_share_data(session, uri) + share, dataset, states = cls._get_share_data(session, uri) valid_states = [ShareItemStatus.PendingApproval.value] valid_share_items_states = [x for x in valid_states if x in states] @@ -163,19 +164,19 @@ def submit_share_object(uri: str): message='The request is empty of pending items. 
Add items to share request.', ) - ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Submit) + cls._run_transitions(session, share, states, ShareObjectActions.Submit) ShareNotificationService.notify_share_object_submission( session, context.username, dataset, share ) return share - @staticmethod + @classmethod @has_resource_permission(APPROVE_SHARE_OBJECT) - def approve_share_object(uri: str): + def approve_share_object(cls, uri: str): context = get_context() with context.db_engine.scoped_session() as session: - share, dataset, states = ShareObjectService._get_share_data(session, uri) - ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Approve) + share, dataset, states = cls._get_share_data(session, uri) + cls._run_transitions(session, share, states, ShareObjectActions.Approve) # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE share_table_items = ShareObjectRepository.find_all_share_items(session, uri, ShareableType.Table.value) @@ -202,13 +203,13 @@ def approve_share_object(uri: str): return share - @staticmethod + @classmethod @has_resource_permission(REJECT_SHARE_OBJECT) - def reject_share_object(uri: str): + def reject_share_object(cls, uri: str): context = get_context() with context.db_engine.scoped_session() as session: - share, dataset, states = ShareObjectService._get_share_data(session, uri) - ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Reject) + share, dataset, states = cls._get_share_data(session, uri) + cls._run_transitions(session, share, states, ShareObjectActions.Reject) ResourcePolicy.delete_resource_policy( session=session, group=share.groupUri, @@ -218,14 +219,14 @@ def reject_share_object(uri: str): ShareNotificationService.notify_share_object_rejection(session, context.username, dataset, share) return share - @staticmethod + @classmethod @has_resource_permission(DELETE_SHARE_OBJECT) - def delete_share_object(uri: str): + def delete_share_object(cls, 
uri: str): with get_context().db_engine.scoped_session() as session: - share, dataset, states = ShareObjectService._get_share_data(session, uri) + share, dataset, states = cls._get_share_data(session, uri) shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in states] - new_state = ShareObjectService._run_transitions(session, share, states, ShareObjectActions.Delete) + new_state = cls._run_transitions(session, share, states, ShareObjectActions.Delete) if shared_share_items_states: raise ShareItemsFound( action='Delete share object', diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py index 7856e4bea..419990dc0 100644 --- a/backend/dataall/modules/datasets/indexers/location_indexer.py +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -15,7 +15,7 @@ def upsert(cls, session, folder_uri: str): if folder: dataset = DatasetRepository.get_dataset_by_uri(session, folder.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) - org = Organization.get_organization_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.organizationUri) glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) BaseIndexer._index( diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py index ca0b0b88d..a5dd1fb0e 100644 --- a/backend/dataall/modules/datasets/indexers/table_indexer.py +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -19,7 +19,7 @@ def upsert(cls, session, table_uri: str): if table: dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) env = Environment.get_environment_by_uri(session, dataset.environmentUri) - org = Organization.get_organization_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, 
dataset.organizationUri) glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) tags = table.tags if table.tags else [] diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index f3b0914d9..62701e667 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -20,7 +20,7 @@ from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ - DELETE_DATASET, SUBSCRIPTIONS_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ + DELETE_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ CREATE_DATASET, DATASET_ALL, DATASET_READ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.enums import DatasetRole diff --git a/tests/modules/datasets/test_dataset_indexers.py b/tests/modules/datasets/test_dataset_indexers.py index 4dc979681..10c61e92f 100644 --- a/tests/modules/datasets/test_dataset_indexers.py +++ b/tests/modules/datasets/test_dataset_indexers.py @@ -104,6 +104,11 @@ def folder(org, env, db, dataset): yield location +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker): + mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) + + def test_es_request(): body = '{"preference":"SearchResult"}\n{"query":{"match_all":{}},"size":8,"_source":{"includes":["*"],"excludes":[]},"from":0}\n' body = body.split('\n') @@ -121,8 +126,7 @@ def test_es_request(): } -def test_upsert_dataset(db, dataset, env, mocker): - mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) +def test_upsert_dataset(db, dataset, env): 
with db.scoped_session() as session: dataset_indexed = DatasetIndexer.upsert( session, dataset_uri=dataset.datasetUri @@ -130,24 +134,21 @@ def test_upsert_dataset(db, dataset, env, mocker): assert dataset_indexed.datasetUri == dataset.datasetUri -def test_upsert_table(db, dataset, env, mocker, table): - mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) +def test_upsert_table(db, dataset, env, table): with db.scoped_session() as session: table_indexed = DatasetTableIndexer.upsert(session, table_uri=table.tableUri) - assert table_indexed.uri == table.tableUri + assert table_indexed.tableUri == table.tableUri -def test_upsert_folder(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) +def test_upsert_folder(db, dataset, env, folder): with db.scoped_session() as session: folder_indexed = DatasetLocationIndexer.upsert( session=session, folder_uri=folder.locationUri ) - assert folder_indexed.uri == folder.locationUri + assert folder_indexed.locationUri == folder.locationUri -def test_upsert_tables(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.base_indexer.BaseIndexer._index', return_value={}) +def test_upsert_tables(db, dataset, env, folder): with db.scoped_session() as session: tables = DatasetTableIndexer.upsert_all( session, dataset_uri=dataset.datasetUri From d493a7024cbe3ae0e6f33edbef07005e13603643 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 14:55:49 +0200 Subject: [PATCH 274/346] Added more logging --- backend/dataall/modules/datasets/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 921dee297..642649dc8 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -132,6 +132,7 @@ def find_stack_uris(self, session) -> List[str]: return 
[dataset.datasetUri for dataset in all_datasets] register_stack_finder(DatasetStackFinder()) + log.info("Dataset stack updater task has been loaded") class DatasetCatalogIndexerModuleInterface(ModuleInterface): @@ -149,3 +150,4 @@ def __init__(self): from dataall.modules.datasets.indexers.catalog_indexer import DatasetCatalogIndexer register_catalog_indexer(DatasetCatalogIndexer()) + log.info("Dataset catalog indexer task has been loaded") From 684eb49d4901b5f57236e84ca3dd19b98ff2a789 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 2 Jun 2023 17:04:42 +0200 Subject: [PATCH 275/346] Removed unused method --- .../modules/datasets/api/dataset/resolvers.py | 4 ---- .../modules/datasets/services/dataset_service.py | 15 --------------- 2 files changed, 19 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index e42967485..98555e0c5 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -155,10 +155,6 @@ def get_dataset_stack(context: Context, source: Dataset, **kwargs): ) -def get_crawler(context, source, datasetUri: str = None, name: str = None): - return DatasetService.get_crawler(uri=datasetUri, name=name) - - def delete_dataset( context: Context, source, datasetUri: str = None, deleteFromAWS: bool = False ): diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 62701e667..5e5a062ea 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -334,21 +334,6 @@ def get_dataset_stack(dataset: Dataset): environmentUri=dataset.environmentUri, ) - @staticmethod - @has_resource_permission(CRAWL_DATASET) - def get_crawler(uri: str, name: str): - context = get_context() - with context.db_engine.scoped_session() as 
session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - - response = DatasetCrawler(dataset).get_crawler(crawler_name=name) - return { - 'Name': name, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, - 'status': response.get('LastCrawl', {}).get('Status', 'N/A'), - } - @staticmethod @has_resource_permission(DELETE_DATASET) def delete_dataset(uri: str, delete_from_aws: bool = False): From be05244cb70392d9776a79ce23bea7face233f31 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 5 Jun 2023 10:51:05 +0200 Subject: [PATCH 276/346] Replaced process with queue --- .../dataall/modules/datasets/services/dataset_column_service.py | 2 +- .../modules/datasets/services/dataset_profiling_service.py | 2 +- backend/dataall/modules/datasets/services/dataset_service.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index b3bf0b839..ff6f425de 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -23,7 +23,7 @@ def sync_table_columns(table_uri: str): DatasetColumnService._check_resource_permission(session, table_uri, UPDATE_DATASET_TABLE) task = models.Task(action='glue.table.columns', targetUri=table_uri) session.add(task) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) return DatasetColumnService.paginate_active_columns_for_table(table_uri, {}) @staticmethod diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 69a9f83ea..51a88918d 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ 
b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -44,7 +44,7 @@ def start_profiling_run(uri, table_uri, glue_table_name): ) session.add(task) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) return run diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 5e5a062ea..effee1786 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -250,7 +250,7 @@ def sync_tables(uri): targetUri=dataset.datasetUri, ) session.add(task) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) with context.db_engine.scoped_session() as session: DatasetTableIndexer.upsert_all( session=session, dataset_uri=dataset.datasetUri From f9cf61abb5081f9964100b86a09c10042e02a615 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 5 Jun 2023 14:52:01 +0200 Subject: [PATCH 277/346] There was a nasty bug. 
It turns out that __main__.ClassName is not the same as real_path.ClassName Fixed by extracting common code in the different locations --- backend/dataall/core/catalog/__init__.py | 0 .../dataall/core/catalog/catalog_indexer.py | 16 +++++++++++++ backend/dataall/core/stack_finder.py | 17 ++++++++++++++ backend/dataall/modules/datasets/__init__.py | 17 ++++---------- ..._indexer.py => dataset_catalog_indexer.py} | 11 ++++++--- .../datasets/tasks/dataset_stack_finder.py | 19 +++++++++++++++ ...log_indexer.py => catalog_indexer_task.py} | 21 ++++------------- backend/dataall/tasks/stacks_updater.py | 23 ++++--------------- deploy/stacks/container.py | 2 +- 9 files changed, 74 insertions(+), 52 deletions(-) create mode 100644 backend/dataall/core/catalog/__init__.py create mode 100644 backend/dataall/core/catalog/catalog_indexer.py create mode 100644 backend/dataall/core/stack_finder.py rename backend/dataall/modules/datasets/indexers/{catalog_indexer.py => dataset_catalog_indexer.py} (70%) create mode 100644 backend/dataall/modules/datasets/tasks/dataset_stack_finder.py rename backend/dataall/tasks/{catalog_indexer.py => catalog_indexer_task.py} (78%) diff --git a/backend/dataall/core/catalog/__init__.py b/backend/dataall/core/catalog/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/core/catalog/catalog_indexer.py b/backend/dataall/core/catalog/catalog_indexer.py new file mode 100644 index 000000000..1273cc414 --- /dev/null +++ b/backend/dataall/core/catalog/catalog_indexer.py @@ -0,0 +1,16 @@ +from abc import ABC +from typing import List + + +class CatalogIndexer(ABC): + _INDEXERS: List['CatalogIndexer'] = [] + + def __init__(self): + CatalogIndexer._INDEXERS.append(self) + + @staticmethod + def all(): + return CatalogIndexer._INDEXERS + + def index(self, session) -> int: + raise NotImplementedError("index is not implemented") \ No newline at end of file diff --git a/backend/dataall/core/stack_finder.py 
b/backend/dataall/core/stack_finder.py new file mode 100644 index 000000000..f6ee07db2 --- /dev/null +++ b/backend/dataall/core/stack_finder.py @@ -0,0 +1,17 @@ +from abc import ABC +from typing import List + + +class StackFinder(ABC): + _FINDERS: List['StackFinder'] = [] + + @staticmethod + def all(): + return StackFinder._FINDERS + + def __init__(self): + StackFinder._FINDERS.append(self) + + def find_stack_uris(self, session) -> List[str]: + """Finds stacks to update""" + raise NotImplementedError("retrieve_stack_uris is not implemented") diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 642649dc8..f5fa35b4a 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,6 +2,7 @@ import logging from typing import List, Type, Set +from dataall.core.catalog.catalog_indexer import CatalogIndexer from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset @@ -122,16 +123,9 @@ def depends_on() -> List[Type['ModuleInterface']]: return [DatasetBaseModuleInterface] def __init__(self): - from dataall.tasks.stacks_updater import StackFinder - from dataall.tasks.stacks_updater import register_stack_finder + from dataall.modules.datasets.tasks.dataset_stack_finder import DatasetStackFinder - class DatasetStackFinder(StackFinder): - def find_stack_uris(self, session) -> List[str]: - all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) - log.info(f'Found {len(all_datasets)} datasets') - return [dataset.datasetUri for dataset in all_datasets] - - register_stack_finder(DatasetStackFinder()) + DatasetStackFinder() log.info("Dataset stack updater task has been loaded") @@ -146,8 +140,7 @@ def depends_on() -> List[Type['ModuleInterface']]: 
return [DatasetBaseModuleInterface] def __init__(self): - from dataall.tasks.catalog_indexer import register_catalog_indexer - from dataall.modules.datasets.indexers.catalog_indexer import DatasetCatalogIndexer + from dataall.modules.datasets.indexers.dataset_catalog_indexer import DatasetCatalogIndexer - register_catalog_indexer(DatasetCatalogIndexer()) + DatasetCatalogIndexer() log.info("Dataset catalog indexer task has been loaded") diff --git a/backend/dataall/modules/datasets/indexers/catalog_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py similarity index 70% rename from backend/dataall/modules/datasets/indexers/catalog_indexer.py rename to backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py index cf573548b..00d1fe8a4 100644 --- a/backend/dataall/modules/datasets/indexers/catalog_indexer.py +++ b/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py @@ -4,18 +4,23 @@ from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import Dataset -from dataall.tasks.catalog_indexer import CatalogIndexer +from dataall.tasks.catalog_indexer_task import CatalogIndexer log = logging.getLogger(__name__) class DatasetCatalogIndexer(CatalogIndexer): + """ + Dataset indexer for the catalog. 
Indexes all tables and folders of datasets + Register automatically itself when CatalogIndexer instance is created + """ def index(self, session) -> int: all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) log.info(f'Found {len(all_datasets)} datasets') - dataset: Dataset + indexed = 0 for dataset in all_datasets: tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) - return len(tables) + len(folders) + 1 + indexed += len(tables) + len(folders) + 1 + return indexed diff --git a/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py b/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py new file mode 100644 index 000000000..8d74b5fd2 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py @@ -0,0 +1,19 @@ +import logging +from typing import List + +from dataall.core.stack_finder import StackFinder +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository +from dataall.modules.datasets_base.db.models import Dataset + +log = logging.getLogger(__name__) + + +class DatasetStackFinder(StackFinder): + """ + Dataset stack finder. 
Looks for datasets stack to update + Register automatically itself when StackFinder instance is created + """ + def find_stack_uris(self, session) -> List[str]: + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) + log.info(f'Found {len(all_datasets)} datasets') + return [dataset.datasetUri for dataset in all_datasets] diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer_task.py similarity index 78% rename from backend/dataall/tasks/catalog_indexer.py rename to backend/dataall/tasks/catalog_indexer_task.py index 99313d5a8..e25f0902d 100644 --- a/backend/dataall/tasks/catalog_indexer.py +++ b/backend/dataall/tasks/catalog_indexer_task.py @@ -1,9 +1,8 @@ import logging import os import sys -from abc import ABC -from typing import List +from dataall.core.catalog.catalog_indexer import CatalogIndexer from dataall.db import get_engine, models from dataall.modules.loader import load_modules, ImportMode from dataall.searchproxy.indexers import DashboardIndexer @@ -15,26 +14,12 @@ root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -load_modules({ImportMode.CATALOG_INDEXER_TASK}) - - -class CatalogIndexer(ABC): - def index(self, session) -> int: - raise NotImplementedError("index is not implemented") - - -_indexers: List[CatalogIndexer] = [] - - -def register_catalog_indexer(indexer: CatalogIndexer): - _indexers.append(indexer) - def index_objects(engine): try: indexed_objects_counter = 0 with engine.scoped_session() as session: - for indexer in _indexers: + for indexer in CatalogIndexer.all(): indexed_objects_counter += indexer.index(session) all_dashboards: [models.Dashboard] = session.query(models.Dashboard).all() @@ -54,4 +39,6 @@ def index_objects(engine): if __name__ == '__main__': ENVNAME = os.environ.get('envname', 'local') ENGINE = get_engine(envname=ENVNAME) + + load_modules({ImportMode.CATALOG_INDEXER_TASK}) index_objects(engine=ENGINE) diff --git 
a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py index 535e42d30..646263441 100644 --- a/backend/dataall/tasks/stacks_updater.py +++ b/backend/dataall/tasks/stacks_updater.py @@ -2,9 +2,8 @@ import os import sys import time -from abc import ABC -from typing import List +from dataall.core.stack_finder import StackFinder from dataall.modules.loader import ImportMode, load_modules from dataall import db from dataall.db import models @@ -22,27 +21,11 @@ SLEEP_TIME = 30 -load_modules({ImportMode.STACK_UPDATER_TASK}) - - -class StackFinder(ABC): - def find_stack_uris(self, session) -> List[str]: - """Finds stacks to update""" - raise NotImplementedError("retrieve_stack_uris is not implemented") - - -_finders: List[StackFinder] = [] - - -def register_stack_finder(finder: StackFinder): - _finders.append(finder) - - def update_stacks(engine, envname): with engine.scoped_session() as session: all_environments: [models.Environment] = db.api.Environment.list_all_active_environments(session) additional_stacks = [] - for finder in _finders: + for finder in StackFinder.all(): additional_stacks.extend(finder.find_stack_uris(session)) log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') @@ -82,4 +65,6 @@ def update_stack(session, envname, target_uri, wait=False): if __name__ == '__main__': envname = os.environ.get('envname', 'local') engine = get_engine(envname=envname) + + load_modules({ImportMode.STACK_UPDATER_TASK}) update_stacks(engine=engine, envname=envname) diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index c6f8ace1b..845a3429d 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -118,7 +118,7 @@ def __init__( catalog_indexer_task, catalog_indexer_task_def = self.set_scheduled_task( cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.catalog_indexer'], + command=['python3.8', '-m', 'dataall.tasks.catalog_indexer_task'], container_id=f'container', 
ecr_repository=ecr_repository, environment=self._create_env('INFO'), From c5180c27e4a28a6b36ffa1b38e45f3a065393ca5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 5 Jun 2023 15:01:21 +0200 Subject: [PATCH 278/346] Removed used arguments --- .../dataall/modules/datasets/services/dataset_column_service.py | 2 +- backend/dataall/modules/datasets/services/dataset_service.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index ff6f425de..955d60b95 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -23,7 +23,7 @@ def sync_table_columns(table_uri: str): DatasetColumnService._check_resource_permission(session, table_uri, UPDATE_DATASET_TABLE) task = models.Task(action='glue.table.columns', targetUri=table_uri) session.add(task) - Worker.queue(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) return DatasetColumnService.paginate_active_columns_for_table(table_uri, {}) @staticmethod diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index effee1786..bc6ed4ef4 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -250,7 +250,7 @@ def sync_tables(uri): targetUri=dataset.datasetUri, ) session.add(task) - Worker.queue(engine=context.db_engine, task_ids=[task.taskUri], save_response=False) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) with context.db_engine.scoped_session() as session: DatasetTableIndexer.upsert_all( session=session, dataset_uri=dataset.datasetUri From 1d6a7b069bfed6016e35340f99d6c6b5ee74c2db Mon Sep 17 
00:00:00 2001 From: Nikita Podshivalov Date: Mon, 5 Jun 2023 15:46:16 +0200 Subject: [PATCH 279/346] Added missed dataset feed target --- backend/dataall/modules/datasets/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index f5fa35b4a..055c4f7ca 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -41,6 +41,7 @@ def __init__(self): FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) FeedRegistry.register(FeedDefinition("DatasetStorageLocation", DatasetStorageLocation)) FeedRegistry.register(FeedDefinition("DatasetTable", DatasetTable)) + FeedRegistry.register(FeedDefinition("Dataset", Dataset)) GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) GlossaryRegistry.register(GlossaryDefinition( From 0ab3cf32d0b173a241d40d4f99f4aecec45deb5e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 9 Jun 2023 15:11:13 +0200 Subject: [PATCH 280/346] Returned synchronous code back --- .../dataall/modules/datasets/services/dataset_column_service.py | 2 +- .../modules/datasets/services/dataset_profiling_service.py | 2 +- backend/dataall/modules/datasets/services/dataset_service.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index 955d60b95..0daaca247 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -23,7 +23,7 @@ def sync_table_columns(table_uri: str): DatasetColumnService._check_resource_permission(session, table_uri, UPDATE_DATASET_TABLE) task = models.Task(action='glue.table.columns', targetUri=table_uri) session.add(task) - Worker.queue(engine=context.db_engine, 
task_ids=[task.taskUri]) + Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) return DatasetColumnService.paginate_active_columns_for_table(table_uri, {}) @staticmethod diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 51a88918d..69a9f83ea 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -44,7 +44,7 @@ def start_profiling_run(uri, table_uri, glue_table_name): ) session.add(task) - Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) + Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) return run diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index bc6ed4ef4..4d496cf47 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -250,7 +250,7 @@ def sync_tables(uri): targetUri=dataset.datasetUri, ) session.add(task) - Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) + Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) with context.db_engine.scoped_session() as session: DatasetTableIndexer.upsert_all( session=session, dataset_uri=dataset.datasetUri From d19761cb4c132fe05ba642f57f5a1146c2aa498a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 9 Jun 2023 15:17:46 +0200 Subject: [PATCH 281/346] Moved sync method to table API --- .../modules/datasets/api/dataset/mutations.py | 9 ------- .../modules/datasets/api/dataset/resolvers.py | 4 ---- .../modules/datasets/api/table/mutations.py | 11 +++++++-- .../modules/datasets/api/table/resolvers.py | 4 ++++ .../datasets/services/dataset_service.py | 24 ------------------- .../services/dataset_table_service.py | 24 ++++++++++++++++--- 6 files changed, 34 
insertions(+), 42 deletions(-) diff --git a/backend/dataall/modules/datasets/api/dataset/mutations.py b/backend/dataall/modules/datasets/api/dataset/mutations.py index bc63c6f88..075ec4d01 100644 --- a/backend/dataall/modules/datasets/api/dataset/mutations.py +++ b/backend/dataall/modules/datasets/api/dataset/mutations.py @@ -7,7 +7,6 @@ from dataall.modules.datasets.api.dataset.resolvers import ( create_dataset, update_dataset, - sync_tables, generate_dataset_access_token, delete_dataset, import_dataset, @@ -33,14 +32,6 @@ test_scope='Dataset', ) -syncTables = gql.MutationField( - name='syncTables', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetTableSearchResult'), - resolver=sync_tables, -) - - generateDatasetAccessToken = gql.MutationField( name='generateDatasetAccessToken', args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py index 98555e0c5..93f394468 100644 --- a/backend/dataall/modules/datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -126,10 +126,6 @@ def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None return DatasetService.get_dataset_assume_role_url(uri=datasetUri) -def sync_tables(context: Context, source, datasetUri: str = None): - return DatasetService.sync_tables(uri=datasetUri) - - def start_crawler(context: Context, source, datasetUri: str, input: dict = None): return DatasetService.start_crawler(uri=datasetUri, data=input) diff --git a/backend/dataall/modules/datasets/api/table/mutations.py b/backend/dataall/modules/datasets/api/table/mutations.py index 61b805cdd..16c4e09ac 100644 --- a/backend/dataall/modules/datasets/api/table/mutations.py +++ b/backend/dataall/modules/datasets/api/table/mutations.py @@ -1,8 +1,7 @@ from dataall.api import gql from 
dataall.modules.datasets.api.table.input_types import ModifyDatasetTableInput from dataall.modules.datasets.api.table.resolvers import ( - update_table, - delete_table, + update_table, delete_table, sync_tables, ) updateDatasetTable = gql.MutationField( @@ -21,3 +20,11 @@ type=gql.Boolean, resolver=delete_table, ) + +syncTables = gql.MutationField( + name='syncTables', + args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DatasetTableSearchResult'), + resolver=sync_tables, +) + diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py index dbae3b818..c69a13c72 100644 --- a/backend/dataall/modules/datasets/api/table/resolvers.py +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -36,6 +36,10 @@ def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): return DatasetTableService.get_glue_table_properties(source.tableUri) +def sync_tables(context: Context, source, datasetUri: str = None): + return DatasetTableService.sync_tables_for_dataset(uri=datasetUri) + + def resolve_dataset(context, source: DatasetTable, **kwargs): if not source: return None diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 4d496cf47..549bc8f9a 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -238,30 +238,6 @@ def get_dataset_assume_role_url(uri): ) return url - @staticmethod - @has_resource_permission(SYNC_DATASET) - def sync_tables(uri): - context = get_context() - with context.db_engine.scoped_session() as session: - dataset = DatasetRepository.get_dataset_by_uri(session, uri) - - task = Task( - action='glue.dataset.database.tables', - targetUri=dataset.datasetUri, - ) - session.add(task) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) - with 
context.db_engine.scoped_session() as session: - DatasetTableIndexer.upsert_all( - session=session, dataset_uri=dataset.datasetUri - ) - DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) - return DatasetRepository.paginated_dataset_tables( - session=session, - uri=uri, - data={'page': 1, 'pageSize': 10}, - ) - @staticmethod @has_resource_permission(CRAWL_DATASET) def start_crawler(uri: str, data: dict = None): diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py index d53236ce8..403247e7f 100644 --- a/backend/dataall/modules/datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -1,19 +1,18 @@ import logging -from dataall.aws.handlers.service_handlers import Worker from dataall.core.context import get_context from dataall.core.permission_checker import has_resource_permission, has_tenant_permission -from dataall.db import models from dataall.db.api import ResourcePolicy, Environment, Glossary from dataall.db.exceptions import ResourceShared from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer from dataall.modules.datasets_base.db.enums import ConfidentialityClassification from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ - DELETE_DATASET_TABLE + DELETE_DATASET_TABLE, SYNC_DATASET from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from 
dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ @@ -114,6 +113,25 @@ def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): ) ] + @classmethod + @has_resource_permission(SYNC_DATASET) + def sync_tables_for_dataset(cls, uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + tables = DatasetCrawler(dataset).list_glue_database_tables() + cls.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) + DatasetTableIndexer.upsert_all( + session=session, dataset_uri=dataset.datasetUri + ) + DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) + return DatasetRepository.paginated_dataset_tables( + session=session, + uri=uri, + data={'page': 1, 'pageSize': 10}, + ) + @staticmethod def sync_existing_tables(session, dataset_uri, glue_tables=None): dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) From d9c287209f003aad57cc9dbc95167efcdf565f93 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 9 Jun 2023 15:29:39 +0200 Subject: [PATCH 282/346] Replaced creation of the task with direct invokation of code --- .../modules/datasets/handlers/__init__.py | 7 +-- .../datasets/handlers/glue_table_handler.py | 26 ----------- .../handlers/glue_table_sync_handler.py | 15 ------- .../services/dataset_column_service.py | 45 ++++++++++--------- 4 files changed, 27 insertions(+), 66 deletions(-) delete mode 100644 backend/dataall/modules/datasets/handlers/glue_table_handler.py diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py index ff59697b1..ce055bc1b 100644 --- a/backend/dataall/modules/datasets/handlers/__init__.py +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -3,10 +3,7 @@ processing in a separate lambda function """ from 
dataall.modules.datasets.handlers import ( - glue_table_sync_handler, - glue_table_handler, - glue_profiling_handler, - glue_dataset_handler + glue_table_sync_handler, glue_profiling_handler, glue_dataset_handler ) -__all__ = ["glue_table_sync_handler", "glue_table_handler", "glue_profiling_handler", "glue_dataset_handler"] +__all__ = ["glue_table_sync_handler", "glue_profiling_handler", "glue_dataset_handler"] diff --git a/backend/dataall/modules/datasets/handlers/glue_table_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_handler.py deleted file mode 100644 index c67b8423d..000000000 --- a/backend/dataall/modules/datasets/handlers/glue_table_handler.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from dataall.aws.handlers.glue import Glue -from dataall.aws.handlers.service_handlers import Worker -from dataall.db import models -from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler -from dataall.modules.datasets.services.dataset_table_service import DatasetTableService -from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository -from dataall.modules.datasets_base.db.models import Dataset - -log = logging.getLogger(__name__) - - -class DatasetTableSyncHandler: - """A handler for dataset table""" - - @staticmethod - @Worker.handler(path='glue.dataset.database.tables') - def sync_existing_tables(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: Dataset = DatasetRepository.get_dataset_by_uri( - session, task.targetUri - ) - tables = DatasetCrawler(dataset).list_glue_database_tables() - DatasetTableService.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) - return tables diff --git a/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py index fa7441840..d0a283251 100644 --- a/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py +++ 
b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py @@ -14,21 +14,6 @@ class DatasetColumnGlueHandler: """A handler for dataset table columns""" - @staticmethod - @Worker.handler('glue.table.columns') - def get_table_columns(engine, task: models.Task): - with engine.scoped_session() as session: - dataset_table: DatasetTable = session.query(DatasetTable).get( - task.targetUri - ) - aws = SessionHelper.remote_session(dataset_table.AWSAccountId) - glue_table = GlueTableClient(aws, dataset_table).get_table() - - DatasetTableRepository.sync_table_columns( - session, dataset_table, glue_table['Table'] - ) - return True - @staticmethod @Worker.handler('glue.table.update_column') def update_table_columns(engine, task: models.Task): diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index 0daaca247..a02a54a7c 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -1,7 +1,10 @@ from dataall.aws.handlers.service_handlers import Worker +from dataall.aws.handlers.sts import SessionHelper from dataall.core.context import get_context +from dataall.core.permission_checker import has_resource_permission from dataall.db import models from dataall.db.api import ResourcePolicy +from dataall.modules.datasets.aws.glue_table_client import GlueTableClient from dataall.modules.datasets.db.dataset_column_repository import DatasetColumnRepository from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE @@ -10,28 +13,41 @@ class DatasetColumnService: + @staticmethod + def _get_table_uri(session, column_uri): + column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) + return column.tableUri + + @staticmethod + def 
_get_dataset_uri(session, table_uri): + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return table.datasetUri + @staticmethod def paginate_active_columns_for_table(table_uri: str, filter=None): # TODO THERE WAS NO PERMISSION CHECK!!! with get_context().db_engine.scoped_session() as session: return DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, filter) - @staticmethod - def sync_table_columns(table_uri: str): + @classmethod + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def sync_table_columns(cls, table_uri: str): context = get_context() with context.db_engine.scoped_session() as session: - DatasetColumnService._check_resource_permission(session, table_uri, UPDATE_DATASET_TABLE) - task = models.Task(action='glue.table.columns', targetUri=table_uri) - session.add(task) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) - return DatasetColumnService.paginate_active_columns_for_table(table_uri, {}) + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + aws = SessionHelper.remote_session(table.AWSAccountId) + glue_table = GlueTableClient(aws, table).get_table() + + DatasetTableRepository.sync_table_columns( + session, table, glue_table['Table'] + ) + return cls.paginate_active_columns_for_table(table_uri, {}) @staticmethod + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_table_uri) def update_table_column_description(column_uri: str, description) -> DatasetTableColumn: with get_context().db_engine.scoped_session() as session: column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) - DatasetColumnService._check_resource_permission(session, column.tableUri, UPDATE_DATASET_TABLE) - column.description = description task = models.Task( @@ -43,14 +59,3 @@ def update_table_column_description(column_uri: str, description) -> DatasetTabl 
Worker.queue(engine=get_context().db_engine, task_ids=[task.taskUri]) return column - @staticmethod - def _check_resource_permission(session, table_uri: str, permission): - context = get_context() - table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=permission, - ) From 9064636bb3e0df23b9c41320580bbf449525643e Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 9 Jun 2023 15:32:57 +0200 Subject: [PATCH 283/346] Replaced creation of the task with direct invokation of code --- .../handlers/glue_profiling_handler.py | 35 ++++--------------- .../services/dataset_profiling_service.py | 12 ++++--- 2 files changed, 14 insertions(+), 33 deletions(-) diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py index d001311b7..df3f90d69 100644 --- a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -3,6 +3,7 @@ from dataall.aws.handlers.service_handlers import Worker from dataall.db import models from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient +from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base.db.models import DatasetProfilingRun, Dataset from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository @@ -16,36 +17,14 @@ class DatasetProfilingGlueHandler: @Worker.handler('glue.job.profiling_run_status') def get_profiling_run(engine, task: models.Task): with engine.scoped_session() as session: - dataset, profiling = DatasetProfilingGlueHandler._get_job_data(session, task) + profiling: DatasetProfilingRun = ( + 
DatasetProfilingRepository.get_profiling_run( + session, profiling_run_uri=task.targetUri + ) + ) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, profiling.datasetUri) status = GlueDatasetProfilerClient(dataset).get_job_status(profiling) profiling.status = status session.commit() return profiling.status - - @staticmethod - @Worker.handler('glue.job.start_profiling_run') - def start_profiling_run(engine, task: models.Task): - with engine.scoped_session() as session: - dataset, profiling = DatasetProfilingGlueHandler._get_job_data(session, task) - run_id = GlueDatasetProfilerClient(dataset).run_job(profiling) - - DatasetProfilingRepository.update_run( - session, - run_uri=profiling.profilingRunUri, - glue_job_run_id=run_id, - ) - return run_id - - @staticmethod - def _get_job_data(session, task): - profiling: DatasetProfilingRun = ( - DatasetProfilingRepository.get_profiling_run( - session, profiling_run_uri=task.targetUri - ) - ) - dataset: Dataset = session.query(Dataset).get( - profiling.datasetUri - ) - - return dataset, profiling diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py index 69a9f83ea..7048f4049 100644 --- a/backend/dataall/modules/datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -6,6 +6,7 @@ from dataall.db.api import Environment from dataall.db.exceptions import ObjectNotFound from dataall.db.models import Task +from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient from dataall.modules.datasets.aws.s3_profiler_client import S3ProfilerClient from dataall.modules.datasets.db.dataset_profiling_repository import DatasetProfilingRepository from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository @@ -39,12 +40,13 @@ def start_profiling_run(uri, table_uri, glue_table_name): 
glue_table_name=glue_table_name, ) - task = Task( - targetUri=run.profilingRunUri, action='glue.job.start_profiling_run' - ) - session.add(task) + run_id = GlueDatasetProfilerClient(dataset).run_job(run) - Worker.process(engine=context.db_engine, task_ids=[task.taskUri]) + DatasetProfilingRepository.update_run( + session, + run_uri=run.profilingRunUri, + glue_job_run_id=run_id, + ) return run From 05414381c1033d434584c6af2c529dd391f23c9a Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 9 Jun 2023 15:58:26 +0200 Subject: [PATCH 284/346] Fixed tests --- backend/dataall/core/permission_checker.py | 14 ++++++++++++-- .../modules/datasets/api/table_column/resolvers.py | 4 ++-- .../datasets/services/dataset_column_service.py | 8 ++++---- .../datasets/tasks/test_dataset_catalog_indexer.py | 2 +- tests/modules/datasets/test_dataset_profiling.py | 12 +++++++++--- 5 files changed, 28 insertions(+), 12 deletions(-) diff --git a/backend/dataall/core/permission_checker.py b/backend/dataall/core/permission_checker.py index b5e1ab593..415f25835 100644 --- a/backend/dataall/core/permission_checker.py +++ b/backend/dataall/core/permission_checker.py @@ -64,13 +64,21 @@ def no_decorated(f): return fn, staticmethod if static_func else no_decorated -def has_resource_permission(permission: str, resource_name: str = None, parent_resource: Callable = None): +def has_resource_permission( + permission: str, + param_name: str = None, + resource_name: str = None, + parent_resource: Callable = None +): """ Decorator that check if a user has access to the resource. 
The method or function decorated with this decorator must have a URI of accessing resource Good rule of thumb: if there is a URI that accesses a specific resource, hence it has URI - it must be decorated with this decorator """ + if not param_name: + param_name = "uri" + def decorator(f): fn, fn_decorator = _process_func(f) @@ -80,7 +88,9 @@ def decorated(*args, **kwargs): resource: Identifiable = kwargs[resource_name] uri = resource.get_resource_uri() else: - uri = kwargs["uri"] + if param_name not in kwargs: + raise KeyError(f"{f.__name__} doesn't have parameter {param_name}") + uri = kwargs[param_name] with get_context().db_engine.scoped_session() as session: if parent_resource: diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py index 2e8235a52..034220eb0 100644 --- a/backend/dataall/modules/datasets/api/table_column/resolvers.py +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -20,7 +20,7 @@ def list_table_columns( def sync_table_columns(context: Context, source, tableUri: str = None): if tableUri is None: return None - return DatasetColumnService.sync_table_columns(tableUri) + return DatasetColumnService.sync_table_columns(table_uri=tableUri) def resolve_terms(context, source: DatasetTableColumn, **kwargs): @@ -43,4 +43,4 @@ def update_table_column( input = {} description = input.get('description', 'No description provided') - return DatasetColumnService.update_table_column_description(columnUri, description) + return DatasetColumnService.update_table_column_description(column_uri=columnUri, description=description) diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py index a02a54a7c..e6b16a790 100644 --- a/backend/dataall/modules/datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ 
-14,9 +14,9 @@ class DatasetColumnService: @staticmethod - def _get_table_uri(session, column_uri): + def _get_dataset_uri_for_column(session, column_uri): column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) - return column.tableUri + return DatasetColumnService._get_dataset_uri(session, column.tableUri) @staticmethod def _get_dataset_uri(session, table_uri): @@ -30,7 +30,7 @@ def paginate_active_columns_for_table(table_uri: str, filter=None): return DatasetColumnRepository.paginate_active_columns_for_table(session, table_uri, filter) @classmethod - @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri, param_name="table_uri") def sync_table_columns(cls, table_uri: str): context = get_context() with context.db_engine.scoped_session() as session: @@ -44,7 +44,7 @@ def sync_table_columns(cls, table_uri: str): return cls.paginate_active_columns_for_table(table_uri, {}) @staticmethod - @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_table_uri) + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri_for_column, param_name="column_uri") def update_table_column_description(column_uri: str, description) -> DatasetTableColumn: with get_context().db_engine.scoped_session() as session: column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) diff --git a/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py b/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py index 712f4a6f9..db4ebc6e3 100644 --- a/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py +++ b/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py @@ -90,7 +90,7 @@ def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): mocker.patch( 'dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value=sync_dataset ) - 
indexed_objects_counter = dataall.tasks.catalog_indexer.index_objects( + indexed_objects_counter = dataall.tasks.catalog_indexer_task.index_objects( engine=db ) assert indexed_objects_counter == 2 diff --git a/tests/modules/datasets/test_dataset_profiling.py b/tests/modules/datasets/test_dataset_profiling.py index d670e41c8..d74993377 100644 --- a/tests/modules/datasets/test_dataset_profiling.py +++ b/tests/modules/datasets/test_dataset_profiling.py @@ -23,13 +23,19 @@ def dataset1(env1, org1, dataset, group, user) -> Dataset: org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name ) + @pytest.fixture(scope='module', autouse=True) def patch_methods(module_mocker): - mock_client = MagicMock() + s3_mock_client = MagicMock() + glue_mock_client = MagicMock() + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_profiling_service.S3ProfilerClient', s3_mock_client + ) module_mocker.patch( - 'dataall.modules.datasets.services.dataset_profiling_service.S3ProfilerClient', mock_client + 'dataall.modules.datasets.services.dataset_profiling_service.GlueDatasetProfilerClient', glue_mock_client ) - mock_client().get_profiling_results_from_s3.return_value = '{"results": "yes"}' + s3_mock_client().get_profiling_results_from_s3.return_value = '{"results": "yes"}' + glue_mock_client().run_job.return_value = True def test_add_tables(table, dataset1, db): From 1ec37ba356019e64a56e0e7151322df51daccdbe Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 15 Jun 2023 10:38:21 +0200 Subject: [PATCH 285/346] Removed unused imports --- .../dataall/modules/datasets/tasks/bucket_policy_updater.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py index 2df2e0500..8071cb4bd 100644 --- a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py +++ 
b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -2,18 +2,14 @@ import logging import os import sys -import typing from botocore.exceptions import ClientError from sqlalchemy import and_ from dataall.aws.handlers.sts import SessionHelper from dataall.db import get_engine -from dataall.db import models -from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus -from dataall.modules.dataset_sharing.db.models import ShareObjectItem, ShareObject from dataall.modules.dataset_sharing.db.share_object_repository import ShareObjectRepository -from dataall.modules.datasets_base.db.models import DatasetStorageLocation, DatasetTable, Dataset +from dataall.modules.datasets_base.db.models import Dataset root = logging.getLogger() root.setLevel(logging.INFO) From 43a630597c602ad967af05c975ea1ad62cf3e841 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 15 Jun 2023 10:48:18 +0200 Subject: [PATCH 286/346] Fixed tests --- backend/dataall/modules/mlstudio/db/mlstudio_repository.py | 6 +++--- backend/dataall/modules/notebooks/db/notebook_repository.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/dataall/modules/mlstudio/db/mlstudio_repository.py b/backend/dataall/modules/mlstudio/db/mlstudio_repository.py index 3a5d81071..8cdff3da0 100644 --- a/backend/dataall/modules/mlstudio/db/mlstudio_repository.py +++ b/backend/dataall/modules/mlstudio/db/mlstudio_repository.py @@ -6,12 +6,12 @@ from sqlalchemy.sql import and_ from sqlalchemy.orm import Query -from dataall.db import paginate, exceptions +from dataall.db import paginate from dataall.modules.mlstudio.db.models import SagemakerStudioUser -from dataall.core.group.services.group_resource_manager import GroupResource +from dataall.core.group.services.group_resource_manager import EnvironmentResource -class SageMakerStudioRepository(GroupResource): +class SageMakerStudioRepository(EnvironmentResource): """DAO layer for ML Studio""" _DEFAULT_PAGE = 1 
_DEFAULT_PAGE_SIZE = 10 diff --git a/backend/dataall/modules/notebooks/db/notebook_repository.py b/backend/dataall/modules/notebooks/db/notebook_repository.py index 608763b1b..8be5193f2 100644 --- a/backend/dataall/modules/notebooks/db/notebook_repository.py +++ b/backend/dataall/modules/notebooks/db/notebook_repository.py @@ -8,10 +8,10 @@ from dataall.db import paginate from dataall.modules.notebooks.db.models import SagemakerNotebook -from dataall.core.group.services.group_resource_manager import GroupResource +from dataall.core.group.services.group_resource_manager import EnvironmentResource -class NotebookRepository(GroupResource): +class NotebookRepository(EnvironmentResource): """DAO layer for notebooks""" _DEFAULT_PAGE = 1 _DEFAULT_PAGE_SIZE = 10 From f45eeed63aec52cc5899871f9f573de1a8f4f110 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 15 Jun 2023 12:30:55 +0200 Subject: [PATCH 287/346] Fix profiling --- .../modules/datasets/aws/glue_profiler_client.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py b/backend/dataall/modules/datasets/aws/glue_profiler_client.py index 523dc2c20..2bb54f760 100644 --- a/backend/dataall/modules/datasets/aws/glue_profiler_client.py +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -28,13 +28,11 @@ def get_job_status(self, profiling: DatasetProfilingRun): def run_job(self, profiling: DatasetProfilingRun): """Run glue job. 
Returns id of the job""" - args = { - 'arguments': ( - {'--table': profiling.GlueTableName} - if profiling.GlueTableName - else {} - ), - } + args = ( + {'--table': profiling.GlueTableName} + if profiling.GlueTableName + else {} + ), try: response = self._client.start_job_run( JobName=self._name, Arguments=args From 7e259c3f38a14d3a99525935c7635e6ed2949ebe Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 15 Jun 2023 14:52:33 +0200 Subject: [PATCH 288/346] Fix ML studio migration script --- .../versions/4a0618805341_rename_sgm_studio_permissions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py b/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py index 40a8324db..0033e970c 100644 --- a/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py +++ b/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py @@ -111,7 +111,7 @@ def upgrade(): .first() ) - if existing_tenant_permissions.permissionUri: + if existing_tenant_permissions: print(f"Permission already exists {existing_tenant_permissions.permissionUri}, skipping...") else: print("Permission does not exist, adding it...") From d66799628d1edafd941f42cbc0e06c437aea9947 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Thu, 15 Jun 2023 15:00:08 +0200 Subject: [PATCH 289/346] Removed return statement --- .../modules/dataset_sharing/services/share_object_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py index df38acc6a..271a03052 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_object_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -190,7 +190,6 @@ def approve_share_object(cls, uri: str): ) 
ShareNotificationService.notify_share_object_approval(session, context.username, dataset, share) - return share approve_share_task: Task = Task( action='ecs.share.approve', From f477f37eecfc4707c727e71d0425c73a58889188 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 16 Jun 2023 11:21:28 +0200 Subject: [PATCH 290/346] Fixed wrong order of params --- .../services/share_managers/lf_share_manager.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 8321021e2..b4eaab65b 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -180,9 +180,9 @@ def create_shared_database( ) database = GlueClient( - target_environment.AwsAccountId, - shared_db_name, - target_environment.region + account_id=target_environment.AwsAccountId, + database=shared_db_name, + region=target_environment.region ).create_database(f's3://{dataset.S3BucketName}') LakeFormationClient.grant_pivot_role_all_database_permissions( @@ -530,7 +530,7 @@ def handle_revoke_failure( def _create_glue_client(self): return GlueClient( - self.target_environment.AwsAccountId, - self.target_environment.region, - self.shared_db_name, + account_id=self.target_environment.AwsAccountId, + region=self.target_environment.region, + database=self.shared_db_name, ) From 3a0a521268db3ba6b0e7677eb7abc00c1b5d27b7 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 16 Jun 2023 15:45:17 +0200 Subject: [PATCH 291/346] Disable and skip module test directories for modules that are inactive --- backend/dataall/modules/datasets/aws/glue_profiler_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py 
b/backend/dataall/modules/datasets/aws/glue_profiler_client.py index 2bb54f760..c2c2412a5 100644 --- a/backend/dataall/modules/datasets/aws/glue_profiler_client.py +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -28,11 +28,11 @@ def get_job_status(self, profiling: DatasetProfilingRun): def run_job(self, profiling: DatasetProfilingRun): """Run glue job. Returns id of the job""" - args = ( + args = { {'--table': profiling.GlueTableName} if profiling.GlueTableName else {} - ), + }, try: response = self._client.start_job_run( JobName=self._name, Arguments=args From 96cdeada390e0ad91917875e71b267587b23a821 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 19 Jun 2023 10:39:04 +0200 Subject: [PATCH 292/346] Fixed dict --- .../dataall/modules/datasets/aws/glue_profiler_client.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py b/backend/dataall/modules/datasets/aws/glue_profiler_client.py index c2c2412a5..fb6a4bfb1 100644 --- a/backend/dataall/modules/datasets/aws/glue_profiler_client.py +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -28,11 +28,7 @@ def get_job_status(self, profiling: DatasetProfilingRun): def run_job(self, profiling: DatasetProfilingRun): """Run glue job. 
Returns id of the job""" - args = { - {'--table': profiling.GlueTableName} - if profiling.GlueTableName - else {} - }, + args = {'--table': profiling.GlueTableName} if profiling.GlueTableName else {} try: response = self._client.start_job_run( JobName=self._name, Arguments=args From bb7a1abf260fe2959cba9a6805e70f9c7adca98c Mon Sep 17 00:00:00 2001 From: nikpodsh <124577300+nikpodsh@users.noreply.github.com> Date: Fri, 23 Jun 2023 12:58:23 +0200 Subject: [PATCH 293/346] Update backend/dataall/core/stack_finder.py Co-authored-by: dbalintx <132444646+dbalintx@users.noreply.github.com> --- backend/dataall/core/stack_finder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/core/stack_finder.py b/backend/dataall/core/stack_finder.py index f6ee07db2..106fec5b6 100644 --- a/backend/dataall/core/stack_finder.py +++ b/backend/dataall/core/stack_finder.py @@ -14,4 +14,4 @@ def __init__(self): def find_stack_uris(self, session) -> List[str]: """Finds stacks to update""" - raise NotImplementedError("retrieve_stack_uris is not implemented") + raise NotImplementedError("find_stack_uris is not implemented") From f052a4f4a54577daee60a1a8581106657073f5ab Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 23 Jun 2023 13:13:35 +0200 Subject: [PATCH 294/346] Renamed the file --- ...roup_resource_manager.py => environment_resource_manager.py} | 0 backend/dataall/db/api/environment.py | 2 +- backend/dataall/modules/dataset_sharing/__init__.py | 2 +- .../modules/dataset_sharing/db/share_object_repository.py | 2 +- backend/dataall/modules/datasets/__init__.py | 2 +- backend/dataall/modules/datasets_base/db/dataset_repository.py | 2 +- backend/dataall/modules/mlstudio/db/mlstudio_repository.py | 2 +- backend/dataall/modules/notebooks/db/notebook_repository.py | 2 +- backend/dataall/modules/worksheets/__init__.py | 2 +- backend/dataall/modules/worksheets/db/repositories.py | 2 +- 10 files changed, 9 insertions(+), 9 deletions(-) rename 
backend/dataall/core/group/services/{group_resource_manager.py => environment_resource_manager.py} (100%) diff --git a/backend/dataall/core/group/services/group_resource_manager.py b/backend/dataall/core/group/services/environment_resource_manager.py similarity index 100% rename from backend/dataall/core/group/services/group_resource_manager.py rename to backend/dataall/core/group/services/environment_resource_manager.py diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index a7a005ff7..04af5522e 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -29,7 +29,7 @@ NamingConventionService, NamingConventionPattern, ) -from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager +from dataall.core.group.services.environment_resource_manager import EnvironmentResourceManager log = logging.getLogger(__name__) diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py index 491a9bf66..616a129f2 100644 --- a/backend/dataall/modules/dataset_sharing/__init__.py +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -1,7 +1,7 @@ import logging from typing import List, Type, Set -from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager +from dataall.core.group.services.environment_resource_manager import EnvironmentResourceManager from dataall.modules.dataset_sharing.db.share_object_repository import ShareEnvironmentResource from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.loader import ModuleInterface, ImportMode diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py index f0aad8da7..bdabbb837 100644 --- a/backend/dataall/modules/dataset_sharing/db/share_object_repository.py +++ 
b/backend/dataall/modules/dataset_sharing/db/share_object_repository.py @@ -4,7 +4,7 @@ from sqlalchemy import and_, or_, func, case from sqlalchemy.orm import Query -from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource from dataall.db import models, exceptions, paginate from dataall.db.models.Enums import PrincipalType from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 055c4f7ca..0cbec5ed1 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -31,7 +31,7 @@ def __init__(self): from dataall.api.Objects.Vote.resolvers import add_vote_type from dataall.api.Objects.Feed.registry import FeedRegistry, FeedDefinition from dataall.api.Objects.Glossary.registry import GlossaryRegistry, GlossaryDefinition - from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager + from dataall.core.group.services.environment_resource_manager import EnvironmentResourceManager from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer diff --git a/backend/dataall/modules/datasets_base/db/dataset_repository.py b/backend/dataall/modules/datasets_base/db/dataset_repository.py index dbd60f476..878ac9a48 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repository.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repository.py @@ -11,7 +11,7 @@ from dataall.db.exceptions import ObjectNotFound from dataall.db.models.Enums import Language from dataall.modules.datasets_base.db.enums import ConfidentialityClassification 
-from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource from dataall.modules.datasets_base.db.models import DatasetTable, Dataset from dataall.utils.naming_convention import ( NamingConventionService, diff --git a/backend/dataall/modules/mlstudio/db/mlstudio_repository.py b/backend/dataall/modules/mlstudio/db/mlstudio_repository.py index 8cdff3da0..55717aec1 100644 --- a/backend/dataall/modules/mlstudio/db/mlstudio_repository.py +++ b/backend/dataall/modules/mlstudio/db/mlstudio_repository.py @@ -8,7 +8,7 @@ from dataall.db import paginate from dataall.modules.mlstudio.db.models import SagemakerStudioUser -from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource class SageMakerStudioRepository(EnvironmentResource): diff --git a/backend/dataall/modules/notebooks/db/notebook_repository.py b/backend/dataall/modules/notebooks/db/notebook_repository.py index 8be5193f2..f7d1f50f7 100644 --- a/backend/dataall/modules/notebooks/db/notebook_repository.py +++ b/backend/dataall/modules/notebooks/db/notebook_repository.py @@ -8,7 +8,7 @@ from dataall.db import paginate from dataall.modules.notebooks.db.models import SagemakerNotebook -from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource class NotebookRepository(EnvironmentResource): diff --git a/backend/dataall/modules/worksheets/__init__.py b/backend/dataall/modules/worksheets/__init__.py index f3208fe3d..ebff948cf 100644 --- a/backend/dataall/modules/worksheets/__init__.py +++ b/backend/dataall/modules/worksheets/__init__.py @@ -1,7 +1,7 @@ """Contains the code related to worksheets""" import logging -from dataall.core.group.services.group_resource_manager import 
EnvironmentResourceManager +from dataall.core.group.services.environment_resource_manager import EnvironmentResourceManager from dataall.modules.loader import ImportMode, ModuleInterface from dataall.modules.worksheets.db.models import Worksheet from dataall.modules.worksheets.db.repositories import WorksheetRepository diff --git a/backend/dataall/modules/worksheets/db/repositories.py b/backend/dataall/modules/worksheets/db/repositories.py index ff4267eb8..e291f8061 100644 --- a/backend/dataall/modules/worksheets/db/repositories.py +++ b/backend/dataall/modules/worksheets/db/repositories.py @@ -4,7 +4,7 @@ from sqlalchemy import or_ from sqlalchemy.orm import Query -from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource from dataall.db import paginate from dataall.modules.worksheets.db.models import Worksheet, WorksheetQueryResult From a540db30f4dd5c762d9b5f4f1c98a1d6161016b6 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Fri, 23 Jun 2023 13:32:46 +0200 Subject: [PATCH 295/346] Renamed the previewTable --- backend/dataall/modules/datasets/__init__.py | 1 - backend/dataall/modules/datasets/api/table/queries.py | 4 ++-- frontend/src/api/DatasetTable/previewTable2.js | 8 ++++---- frontend/src/views/Tables/TablePreview.js | 6 +++--- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py index 0cbec5ed1..d0baa03db 100644 --- a/backend/dataall/modules/datasets/__init__.py +++ b/backend/dataall/modules/datasets/__init__.py @@ -2,7 +2,6 @@ import logging from typing import List, Type, Set -from dataall.core.catalog.catalog_indexer import CatalogIndexer from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository from dataall.modules.datasets_base import DatasetBaseModuleInterface from dataall.modules.datasets_base.db.models 
import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset diff --git a/backend/dataall/modules/datasets/api/table/queries.py b/backend/dataall/modules/datasets/api/table/queries.py index a6d8d48cf..fbdbd9d6e 100644 --- a/backend/dataall/modules/datasets/api/table/queries.py +++ b/backend/dataall/modules/datasets/api/table/queries.py @@ -34,8 +34,8 @@ ], ) -previewTable2 = gql.QueryField( - name='previewTable2', +previewTable = gql.QueryField( + name='previewTable', args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], resolver=preview, type=gql.Ref('QueryPreviewResult'), diff --git a/frontend/src/api/DatasetTable/previewTable2.js b/frontend/src/api/DatasetTable/previewTable2.js index ffeef03a3..2c3dc0a4c 100644 --- a/frontend/src/api/DatasetTable/previewTable2.js +++ b/frontend/src/api/DatasetTable/previewTable2.js @@ -1,12 +1,12 @@ import { gql } from 'apollo-boost'; -const previewTable2 = (tableUri) => ({ +const previewTable = (tableUri) => ({ variables: { tableUri }, query: gql` - query PreviewTable2($tableUri: String!) { - previewTable2(tableUri: $tableUri) { + query PreviewTable($tableUri: String!) 
{ + previewTable(tableUri: $tableUri) { rows fields } @@ -14,4 +14,4 @@ const previewTable2 = (tableUri) => ({ ` }); -export default previewTable2; +export default previewTable; diff --git a/frontend/src/views/Tables/TablePreview.js b/frontend/src/views/Tables/TablePreview.js index 73878f678..f2ce1ea39 100644 --- a/frontend/src/views/Tables/TablePreview.js +++ b/frontend/src/views/Tables/TablePreview.js @@ -4,7 +4,7 @@ import { Card, CircularProgress } from '@mui/material'; import * as PropTypes from 'prop-types'; import { DataGrid } from '@mui/x-data-grid'; import { styled } from '@mui/styles'; -import previewTable2 from '../../api/DatasetTable/previewTable2'; +import previewTable from '../../api/DatasetTable/previewTable'; import { SET_ERROR } from '../../store/errorReducer'; import { useDispatch } from '../../store'; import useClient from '../../hooks/useClient'; @@ -25,9 +25,9 @@ const TablePreview = (props) => { const [result, setResult] = useState({ rows: [], fields: [] }); const fetchData = useCallback(async () => { setRunning(true); - const response = await client.query(previewTable2(table.tableUri)); + const response = await client.query(previewTable(table.tableUri)); if (!response.errors) { - setResult(response.data.previewTable2); + setResult(response.data.previewTable); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } From 7e1aed49e6258e7fa263088ff100e45dbca615ae Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 26 Jun 2023 10:41:50 +0200 Subject: [PATCH 296/346] Removed in_config union --- backend/dataall/modules/loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/loader.py b/backend/dataall/modules/loader.py index 20f698cef..36d4f5c54 100644 --- a/backend/dataall/modules/loader.py +++ b/backend/dataall/modules/loader.py @@ -226,7 +226,7 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): ) # 4) Checks all references for modules (when 
ModuleInterfaces don't exist or not supported) - checked_module_names = {module.name() for module in expected_load} | in_config + checked_module_names = {module.name() for module in expected_load} for module in sys.modules.keys(): if module.startswith(_MODULE_PREFIX) and module != __name__: # skip loader name = _get_module_name(module) From 6e5869078e7490645140595beed5c189fc8a1c94 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 26 Jun 2023 10:44:41 +0200 Subject: [PATCH 297/346] LIST_ENVIRONMENT_NOTEBOOKS is not used --- .../modules/notebooks/services/notebook_permissions.py | 7 ------- .../versions/5fc49baecea4_add_enviromental_parameters.py | 6 ++---- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/backend/dataall/modules/notebooks/services/notebook_permissions.py b/backend/dataall/modules/notebooks/services/notebook_permissions.py index 073286e9f..cb20a2d4e 100644 --- a/backend/dataall/modules/notebooks/services/notebook_permissions.py +++ b/backend/dataall/modules/notebooks/services/notebook_permissions.py @@ -25,13 +25,8 @@ UPDATE_NOTEBOOK, ] -LIST_ENVIRONMENT_NOTEBOOKS = 'LIST_ENVIRONMENT_NOTEBOOKS' - -ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_NOTEBOOKS) ENVIRONMENT_ALL.append(CREATE_NOTEBOOK) -ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_NOTEBOOKS) ENVIRONMENT_INVITED.append(CREATE_NOTEBOOK) -ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_NOTEBOOKS) ENVIRONMENT_INVITATION_REQUEST.append(CREATE_NOTEBOOK) TENANT_ALL.append(MANAGE_NOTEBOOKS) @@ -40,10 +35,8 @@ RESOURCES_ALL.append(CREATE_NOTEBOOK) RESOURCES_ALL.extend(NOTEBOOK_ALL) -RESOURCES_ALL.append(LIST_ENVIRONMENT_NOTEBOOKS) RESOURCES_ALL_WITH_DESC[CREATE_NOTEBOOK] = "Create notebooks on this environment" -RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_NOTEBOOKS] = "List notebooks on this environment" RESOURCES_ALL_WITH_DESC[GET_NOTEBOOK] = "General permission to get a notebook" RESOURCES_ALL_WITH_DESC[DELETE_NOTEBOOK] = "Permission to delete a notebook" 
RESOURCES_ALL_WITH_DESC[UPDATE_NOTEBOOK] = "Permission to edit a notebook" diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 940b7db40..d7cea6746 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -14,9 +14,8 @@ from sqlalchemy.ext.declarative import declarative_base from dataall.db import Resource, models from dataall.db.api import ResourcePolicy -from dataall.db.models import TenantPolicyPermission, PermissionType, EnvironmentGroup +from dataall.db.models import EnvironmentGroup from dataall.modules.datasets.services.dataset_permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET -from dataall.modules.notebooks.services.notebook_permissions import MANAGE_NOTEBOOKS, LIST_ENVIRONMENT_NOTEBOOKS, CREATE_NOTEBOOK # revision identifiers, used by Alembic. revision = "5fc49baecea4" @@ -159,8 +158,7 @@ def migrate_groups_permissions(session): """ Adds new permission if the old exist. 
needed to get rid of old hacks in the code """ - permissions = [(CREATE_DATASET, LIST_ENVIRONMENT_DATASETS), - (CREATE_NOTEBOOK, LIST_ENVIRONMENT_NOTEBOOKS)] + permissions = [CREATE_DATASET, LIST_ENVIRONMENT_DATASETS] groups = find_all_groups(session) for group in groups: From 314a6aab3d89dbda5ea1f6ee35da3dbdade0d4b0 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 26 Jun 2023 10:55:23 +0200 Subject: [PATCH 298/346] Delete unused permissions --- .../datasets/services/dataset_permissions.py | 22 ----------------- .../datasets/services/dataset_service.py | 3 +-- ...fc49baecea4_add_enviromental_parameters.py | 24 +++++++++++++++++-- 3 files changed, 23 insertions(+), 26 deletions(-) diff --git a/backend/dataall/modules/datasets/services/dataset_permissions.py b/backend/dataall/modules/datasets/services/dataset_permissions.py index c17f43e9d..5c81f120c 100644 --- a/backend/dataall/modules/datasets/services/dataset_permissions.py +++ b/backend/dataall/modules/datasets/services/dataset_permissions.py @@ -14,17 +14,11 @@ """ GET_DATASET = 'GET_DATASET' -LIST_DATASETS = 'LIST_DATASETS' -LIST_DATASET_TABLES = 'LIST_DATASET_TABLES' -LIST_DATASET_SHARES = 'LIST_DATASET_SHARES' LIST_DATASET_FOLDERS = 'LIST_DATASET_FOLDERS' CREDENTIALS_DATASET = 'CREDENTIALS_DATASET' DATASET_READ = [ GET_DATASET, - LIST_DATASETS, - LIST_DATASET_TABLES, - LIST_DATASET_SHARES, LIST_DATASET_FOLDERS, CREDENTIALS_DATASET, ] @@ -32,40 +26,24 @@ UPDATE_DATASET = 'UPDATE_DATASET' SYNC_DATASET = 'SYNC_DATASET' -SUMMARY_DATASET = 'SUMMARY_DATASET' -IMPORT_DATASET = 'IMPORT_DATASET' -UPLOAD_DATASET = 'UPLOAD_DATASET' -URL_DATASET = 'URL_DATASET' CRAWL_DATASET = 'CRAWL_DATASET' DELETE_DATASET = 'DELETE_DATASET' -STACK_DATASET = 'STACK_DATASET' -SUBSCRIPTIONS_DATASET = 'SUBSCRIPTIONS_DATASET' -CREATE_DATASET_TABLE = 'CREATE_DATASET_TABLE' DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' 
CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' -GET_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' DATASET_WRITE = [ UPDATE_DATASET, SYNC_DATASET, - SUMMARY_DATASET, - IMPORT_DATASET, - UPLOAD_DATASET, CREDENTIALS_DATASET, - URL_DATASET, CRAWL_DATASET, DELETE_DATASET, - STACK_DATASET, - SUBSCRIPTIONS_DATASET, UPDATE_DATASET_TABLE, DELETE_DATASET_TABLE, - CREATE_DATASET_TABLE, PROFILE_DATASET_TABLE, - LIST_DATASET_SHARES, CREATE_DATASET_FOLDER, DELETE_DATASET_FOLDER, UPDATE_DATASET_FOLDER, diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py index 549bc8f9a..b0193ec77 100644 --- a/backend/dataall/modules/datasets/services/dataset_service.py +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -18,8 +18,7 @@ from dataall.modules.datasets.db.dataset_location_repository import DatasetLocationRepository from dataall.modules.datasets.db.dataset_table_repository import DatasetTableRepository from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer -from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer -from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, SYNC_DATASET, CRAWL_DATASET, \ +from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, CRAWL_DATASET, \ DELETE_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ CREATE_DATASET, DATASET_ALL, DATASET_READ from dataall.modules.datasets_base.db.dataset_repository import DatasetRepository diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index d7cea6746..2c68b2a66 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ 
b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -13,8 +13,8 @@ from sqlalchemy import Boolean, Column, String, orm from sqlalchemy.ext.declarative import declarative_base from dataall.db import Resource, models -from dataall.db.api import ResourcePolicy -from dataall.db.models import EnvironmentGroup +from dataall.db.api import ResourcePolicy, Permission +from dataall.db.models import EnvironmentGroup, PermissionType, ResourcePolicyPermission from dataall.modules.datasets.services.dataset_permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET # revision identifiers, used by Alembic. @@ -25,6 +25,10 @@ Base = declarative_base() +UNUSED_PERMISSIONS = ['LIST_DATASETS', 'LIST_DATASET_TABLES', 'LIST_DATASET_SHARES', 'SUMMARY_DATASET', + 'IMPORT_DATASET', 'UPLOAD_DATASET', 'URL_DATASET', 'STACK_DATASET', 'SUBSCRIPTIONS_DATASET', + 'CREATE_DATASET_TABLE'] + class Environment(Resource, Base): __tablename__ = "environment" @@ -93,6 +97,7 @@ def upgrade(): session.commit() migrate_groups_permissions(session) + delete_unused_resource_permissions(session) except Exception as ex: print(f"Failed to execute the migration script due to: {ex}") @@ -125,10 +130,14 @@ def downgrade(): mlStudiosEnabled=params["mlStudiosEnabled"] == "true" )) + for name in UNUSED_PERMISSIONS: + Permission.save_permission(session, name, name, PermissionType.RESOURCE.value) + session.add_all(envs) print("Dropping environment_parameter table...") op.drop_table("environment_parameters") + except Exception as ex: print(f"Failed to execute the rollback script due to: {ex}") @@ -180,3 +189,14 @@ def migrate_groups_permissions(session): resource_uri=group.environmentUri, resource_type=models.Environment.__name__ ) + + +def delete_unused_resource_permissions(session): + for name in UNUSED_PERMISSIONS: + perm = Permission.get_permission_by_name(session, name, PermissionType.RESOURCE.value) + ( + session.query(ResourcePolicyPermission) + 
.filter(ResourcePolicyPermission.permissionUri == perm.permissionUri) + .delete() + ) + session.delete(perm) From 13188c8c54a0f14c192d0a0a3358941421812739 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 26 Jun 2023 11:43:25 +0200 Subject: [PATCH 299/346] Removed the commented line --- .../dataset_sharing/services/share_notification_service.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py index b6297720b..7e35cbf8c 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py +++ b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py @@ -16,8 +16,6 @@ def notify_share_object_submission( target_uri=f'{share.shareUri}|{dataset.datasetUri}', message=f'User {username} submitted share request for dataset {dataset.label}', )] - # stewards = Notification.get_dataset_stewards(session, dataset) - # for steward in stewards: session.add_all(notifications) return notifications From 4417ec95b764f3cb4c7b0df776534b095fc1ce72 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Mon, 26 Jun 2023 11:46:25 +0200 Subject: [PATCH 300/346] Moved client to a constructor --- .../share_managers/lf_share_manager.py | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index b4eaab65b..cdf164cd9 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -41,6 +41,12 @@ def __init__( self.shared_db_name = self.build_shared_db_name() self.principals = self.get_share_principals() + self.glue_client = GlueClient( + 
account_id=self.target_environment.AwsAccountId, + region=self.target_environment.region, + database=self.shared_db_name, + ) + @abc.abstractmethod def process_approved_shares(self) -> [str]: return NotImplementedError @@ -74,10 +80,6 @@ def build_shared_db_name(self) -> str: """ Build Glue shared database name. Unique per share Uri. - Parameters - ---------- - dataset : Dataset - share : ShareObject Returns ------- @@ -128,7 +130,7 @@ def check_share_item_exists_on_glue_catalog( ------- exceptions.AWSResourceNotFound """ - if not self._create_glue_client().table_exists(table.GlueTableName): + if not self.glue_client.table_exists(table.GlueTableName): raise exceptions.AWSResourceNotFound( action='ProcessShare', message=( @@ -209,7 +211,7 @@ def delete_shared_database(self) -> bool: bool """ logger.info(f'Deleting shared database {self.shared_db_name}') - return self._create_glue_client().delete_database() + return self.glue_client.delete_database() @classmethod def create_resource_link(cls, **data) -> dict: @@ -274,7 +276,7 @@ def revoke_table_resource_link_access(self, table: DatasetTable, principals: [st ------- True if revoke is successful """ - glue_client = self._create_glue_client() + glue_client = self.glue_client if not glue_client.table_exists(table.GlueTableName): logger.info( f'Resource link could not be found ' @@ -325,7 +327,7 @@ def revoke_source_table_access(self, table, principals: [str]): ------- True if revoke is successful """ - glue_client = self._create_glue_client() + glue_client = self.glue_client if not glue_client.table_exists(table.GlueTableName): logger.info( f'Source table could not be found ' @@ -351,7 +353,7 @@ def revoke_source_table_access(self, table, principals: [str]): def delete_resource_link_table(self, table: DatasetTable): logger.info(f'Deleting shared table {table.GlueTableName}') - glue_client = self._create_glue_client() + glue_client = self.glue_client if not glue_client.table_exists(table.GlueTableName): return True @@ 
-527,10 +529,3 @@ def handle_revoke_failure( table, self.share, self.target_environment ) return True - - def _create_glue_client(self): - return GlueClient( - account_id=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - ) From daa7046f389ea5865114ea449c1d98e63636c764 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 27 Jun 2023 10:40:24 +0200 Subject: [PATCH 301/346] Resolve merge conflict --- backend/dataall/db/api/environment.py | 2 -- backend/dataall/modules/dashboards/__init__.py | 8 +++++--- .../dataall/modules/dashboards/db/dashboard_repository.py | 2 +- .../dashboards/indexers/dashboard_catalog_indexer.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py index a53e9dcaa..c08dcc425 100644 --- a/backend/dataall/db/api/environment.py +++ b/backend/dataall/db/api/environment.py @@ -59,8 +59,6 @@ def create_environment(session, username, groups, uri, data=None, check_perm=Non ), EnvironmentDefaultIAMRoleArn=f'arn:aws:iam::{data.get("AwsAccountId")}:role/{data.get("EnvironmentDefaultIAMRoleName")}', CDKRoleArn=f"arn:aws:iam::{data.get('AwsAccountId')}:role/{data['cdk_role_name']}", - dashboardsEnabled=data.get('dashboardsEnabled', False), - mlStudiosEnabled=data.get('mlStudiosEnabled', True), pipelinesEnabled=data.get('pipelinesEnabled', True), warehousesEnabled=data.get('warehousesEnabled', True), resourcePrefix=data.get('resourcePrefix'), diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index 3b4b150c3..9c2f4f10e 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -2,7 +2,7 @@ import logging from typing import Set -from dataall.core.group.services.group_resource_manager import EnvironmentResourceManager +from dataall.core.group.services.environment_resource_manager import 
EnvironmentResourceManager from dataall.modules.dashboards.db.dashboard_repository import DashboardRepository from dataall.modules.dashboards.db.models import Dashboard from dataall.modules.loader import ImportMode, ModuleInterface @@ -36,6 +36,7 @@ def __init__(self): add_vote_type("dashboard", DashboardIndexer) EnvironmentResourceManager.register(DashboardRepository()) + log.info("Dashboard API has been loaded") class DashboardCdkModuleInterface(ModuleInterface): @@ -46,6 +47,7 @@ def is_supported(modes: Set[ImportMode]) -> bool: def __init__(self): import dataall.modules.dashboards.cdk + log.info("Dashboard CDK code has been loaded") class DashboardCatalogIndexerModuleInterface(ModuleInterface): @@ -55,7 +57,7 @@ def is_supported(modes: Set[ImportMode]) -> bool: return ImportMode.CATALOG_INDEXER_TASK in modes def __init__(self): - from dataall.tasks.catalog_indexer import register_catalog_indexer from dataall.modules.dashboards.indexers.dashboard_catalog_indexer import DashboardCatalogIndexer - register_catalog_indexer(DashboardCatalogIndexer()) + DashboardCatalogIndexer() + log.info("Dashboard catalog indexer task has been loaded") diff --git a/backend/dataall/modules/dashboards/db/dashboard_repository.py b/backend/dataall/modules/dashboards/db/dashboard_repository.py index 9fe040ff6..7fa1febeb 100644 --- a/backend/dataall/modules/dashboards/db/dashboard_repository.py +++ b/backend/dataall/modules/dashboards/db/dashboard_repository.py @@ -3,7 +3,7 @@ from sqlalchemy import or_, and_ from sqlalchemy.orm import Query -from dataall.core.group.services.group_resource_manager import EnvironmentResource +from dataall.core.group.services.environment_resource_manager import EnvironmentResource from dataall.db import exceptions, paginate from dataall.db.models import Environment from dataall.modules.dashboards.db.models import DashboardShare, DashboardShareStatus, Dashboard diff --git a/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py 
b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py index 74fa18fa0..44633e592 100644 --- a/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py +++ b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py @@ -1,8 +1,8 @@ import logging +from dataall.core.catalog.catalog_indexer import CatalogIndexer from dataall.modules.dashboards import Dashboard from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer -from dataall.tasks.catalog_indexer import CatalogIndexer log = logging.getLogger(__name__) From ab5960361650fbe176cffcccec845932486f917d Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Tue, 27 Jun 2023 11:35:33 +0200 Subject: [PATCH 302/346] Fix merge conflict --- backend/dataall/modules/dashboards/api/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/dashboards/api/__init__.py b/backend/dataall/modules/dashboards/api/__init__.py index 9a57d36d3..e92cd6d66 100644 --- a/backend/dataall/modules/dashboards/api/__init__.py +++ b/backend/dataall/modules/dashboards/api/__init__.py @@ -1,4 +1,4 @@ -from dataall.modules.datapipelines.api import ( +from dataall.modules.dashboards.api import ( input_types, mutations, queries, From f12e2c05c9cf325808c071eacda091355405f910 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 10:21:17 +0200 Subject: [PATCH 303/346] Fix tests --- .../share_managers/lf_share_manager.py | 23 +++++++-------- tests/api/test_organization.py | 1 + .../datasets/test_dataset_count_votes.py | 28 +++++++------------ 3 files changed, 23 insertions(+), 29 deletions(-) diff --git a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index c72a6b7a7..96940f518 100644 --- a/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py +++ 
b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -42,12 +42,6 @@ def __init__( self.shared_db_name = self.build_shared_db_name() self.principals = self.get_share_principals() - self.glue_client = GlueClient( - account_id=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - ) - @abc.abstractmethod def process_approved_shares(self) -> [str]: return NotImplementedError @@ -133,7 +127,7 @@ def check_share_item_exists_on_glue_catalog( ------- exceptions.AWSResourceNotFound """ - if not self.glue_client.table_exists(table.GlueTableName): + if not self.glue_client().table_exists(table.GlueTableName): raise exceptions.AWSResourceNotFound( action='ProcessShare', message=( @@ -214,7 +208,7 @@ def delete_shared_database(self) -> bool: bool """ logger.info(f'Deleting shared database {self.shared_db_name}') - return self.glue_client.delete_database() + return self.glue_client().delete_database() @classmethod def create_resource_link(cls, **data) -> dict: @@ -279,7 +273,7 @@ def revoke_table_resource_link_access(self, table: DatasetTable, principals: [st ------- True if revoke is successful """ - glue_client = self.glue_client + glue_client = self.glue_client() if not glue_client.table_exists(table.GlueTableName): logger.info( f'Resource link could not be found ' @@ -330,7 +324,7 @@ def revoke_source_table_access(self, table, principals: [str]): ------- True if revoke is successful """ - glue_client = self.glue_client + glue_client = self.glue_client() if not glue_client.table_exists(table.GlueTableName): logger.info( f'Source table could not be found ' @@ -356,7 +350,7 @@ def revoke_source_table_access(self, table, principals: [str]): def delete_resource_link_table(self, table: DatasetTable): logger.info(f'Deleting shared table {table.GlueTableName}') - glue_client = self.glue_client + glue_client = self.glue_client() if not glue_client.table_exists(table.GlueTableName): return 
True @@ -532,3 +526,10 @@ def handle_revoke_failure( table, self.share, self.target_environment ) return True + + def glue_client(self): + return GlueClient( + account_id=self.target_environment.AwsAccountId, + region=self.target_environment.region, + database=self.shared_db_name, + ) diff --git a/tests/api/test_organization.py b/tests/api/test_organization.py index 6d6d353cc..8930f014b 100644 --- a/tests/api/test_organization.py +++ b/tests/api/test_organization.py @@ -282,6 +282,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, env): assert 'OrganizationResourcesFound' in response.errors[0].message with db.scoped_session() as session: + session.query(EnvironmentParameter).filter(EnvironmentParameter.environmentUri == env2.environmentUri).delete() env = session.query(dataall.db.models.Environment).get(env2.environmentUri) session.delete(env) session.commit() diff --git a/tests/modules/datasets/test_dataset_count_votes.py b/tests/modules/datasets/test_dataset_count_votes.py index 5bf85f333..60af2e524 100644 --- a/tests/modules/datasets/test_dataset_count_votes.py +++ b/tests/modules/datasets/test_dataset_count_votes.py @@ -1,48 +1,40 @@ import pytest -from dataall.modules.datasets import Dataset from tests.api.test_vote import * -@pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - - -def test_count_votes(client, dataset1): +def test_count_votes(client, dataset_fixture): response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName ) assert response.data.countUpVotes == 0 -def test_upvote(patch_es, client, dataset1): +def test_upvote(patch_es, client, dataset_fixture): response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', True, 
dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', True, dataset_fixture.SamlAdminGroupName ) assert response.data.upVote.upvote response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName ) assert response.data.countUpVotes == 1 response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName ) assert response.data.getVote.upvote response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', False, dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', False, dataset_fixture.SamlAdminGroupName ) assert not response.data.upVote.upvote response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName ) assert not response.data.getVote.upvote response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName ) - assert response.data.countUpVotes == 0 \ No newline at end of file + assert response.data.countUpVotes == 0 From dff822869f8136d7381c95e54aaae2e0c53b50d8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 10:26:22 +0200 Subject: [PATCH 304/346] Simplified the indexer. 
SQL Alchemy caches the responses and it's likely that the requests are cached --- .../dashboards/indexers/dashboard_indexer.py | 47 +++++-------------- 1 file changed, 12 insertions(+), 35 deletions(-) diff --git a/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py index 8c17ca75d..6988696ae 100644 --- a/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py +++ b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py @@ -1,7 +1,8 @@ import logging from dataall import db -from dataall.db import models +from dataall.db.api import Environment, Organization +from dataall.modules.dashboards import DashboardRepository from dataall.searchproxy.base_indexer import BaseIndexer from dataall.modules.dashboards.db.models import Dashboard @@ -11,36 +12,12 @@ class DashboardIndexer(BaseIndexer): @classmethod def upsert(cls, session, dashboard_uri: str): - dashboard = ( - session.query( - Dashboard.dashboardUri.label('uri'), - Dashboard.name.label('name'), - Dashboard.owner.label('owner'), - Dashboard.label.label('label'), - Dashboard.description.label('description'), - Dashboard.tags.label('tags'), - Dashboard.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - Dashboard.SamlGroupName.label('admins'), - Dashboard.created, - Dashboard.updated, - Dashboard.deleted, - ) - .join( - models.Organization, - Dashboard.organizationUri == Dashboard.organizationUri, - ) - .join( - models.Environment, - Dashboard.environmentUri == models.Environment.environmentUri, - ) - .filter(Dashboard.dashboardUri == dashboard_uri) - .first() - ) + dashboard: Dashboard = DashboardRepository.get_dashboard_by_uri(session, dashboard_uri) + if dashboard: + env = Environment.get_environment_by_uri(session, dashboard.environmentUri) + org = 
Organization.get_organization_by_uri(session, env.organizationUri) + glossary = BaseIndexer._get_target_glossary_terms(session, dashboard_uri) count_upvotes = db.api.Vote.count_upvotes( session, None, None, dashboard_uri, {'targetType': 'dashboard'} @@ -49,7 +26,7 @@ def upsert(cls, session, dashboard_uri: str): doc_id=dashboard_uri, doc={ 'name': dashboard.name, - 'admins': dashboard.admins, + 'admins': dashboard.SamlGroupName, 'owner': dashboard.owner, 'label': dashboard.label, 'resourceKind': 'dashboard', @@ -57,10 +34,10 @@ def upsert(cls, session, dashboard_uri: str): 'tags': [f.replace('-', '') for f in dashboard.tags or []], 'topics': [], 'region': dashboard.region.replace('-', ''), - 'environmentUri': dashboard.envUri, - 'environmentName': dashboard.envName, - 'organizationUri': dashboard.orgUri, - 'organizationName': dashboard.orgName, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, 'created': dashboard.created, 'updated': dashboard.updated, 'deleted': dashboard.deleted, From 4e88625da018521297c1bd0c63ad0962650d04f8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 10:58:42 +0200 Subject: [PATCH 305/346] Fixed a merge error --- .../versions/5fc49baecea4_add_enviromental_parameters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 8ac591d35..d1640246a 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -141,8 +141,8 @@ def downgrade(): environmentUri=param.environmentUri, notebooksEnabled=params["notebooksEnabled"] == "true", mlStudiosEnabled=params["mlStudiosEnabled"] == "true", - pipelinesEnabled=params["pipelinesEnabled"] == "true" - 
mlStudiosEnabled=params["dashboardsEnabled"] == "true" + pipelinesEnabled=params["pipelinesEnabled"] == "true", + dashboardsEnabled=params["dashboardsEnabled"] == "true" )) for name in UNUSED_PERMISSIONS: From d93bd37893992c994b51b45c8e7cb204f56daaf9 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 11:35:46 +0200 Subject: [PATCH 306/346] Fixed a typo --- .../versions/5fc49baecea4_add_enviromental_parameters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index d1640246a..9242026cd 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -130,7 +130,7 @@ def downgrade(): op.add_column("environment", Column("notebooksEnabled", Boolean, default=True)) op.add_column("environment", Column("mlStudiosEnabled", Boolean, default=True)) op.add_column("environment", Column("pipelinesEnabled", Boolean, default=True)) - op.add_column("environment", Column("dashbaordsEnabled", Boolean, default=True)) + op.add_column("environment", Column("dashboardsEnabled", Boolean, default=True)) print("Filling environment table with parameters rows...") params = session.query(EnvironmentParameter).all() From 7e4c209a579fb3e280aa2cba463200ba251fcef8 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 12:09:11 +0200 Subject: [PATCH 307/346] Fixed a typo --- .../versions/5fc49baecea4_add_enviromental_parameters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py index 9242026cd..9150ce99b 100644 --- a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py +++ 
b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -95,7 +95,7 @@ def upgrade(): op.drop_column("environment", "notebooksEnabled") op.drop_column("environment", "mlStudiosEnabled") op.drop_column("environment", "pipelinesEnabled") - op.drop_column("environment", "dashbaordsEnabled") + op.drop_column("environment", "dashboardsEnabled") print("Dropped the columns from the environment table ") create_foreign_key_to_env(op, 'sagemaker_notebook') From b0444a78c57ffd76d1620d6df56eadfddd1412b4 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 12:51:20 +0200 Subject: [PATCH 308/346] Renamed a file name --- .../src/api/DatasetTable/{previewTable2.js => previewTable.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename frontend/src/api/DatasetTable/{previewTable2.js => previewTable.js} (100%) diff --git a/frontend/src/api/DatasetTable/previewTable2.js b/frontend/src/api/DatasetTable/previewTable.js similarity index 100% rename from frontend/src/api/DatasetTable/previewTable2.js rename to frontend/src/api/DatasetTable/previewTable.js From f25ffab9e5c8f014668d36fc5d91109019dcb9c5 Mon Sep 17 00:00:00 2001 From: Nikita Podshivalov Date: Wed, 28 Jun 2023 13:46:02 +0200 Subject: [PATCH 309/346] Fixed JS linting --- frontend/src/views/Environments/EnvironmentEditForm.js | 5 +++-- frontend/src/views/Environments/EnvironmentFeatures.js | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/frontend/src/views/Environments/EnvironmentEditForm.js b/frontend/src/views/Environments/EnvironmentEditForm.js index dc4351b7a..9a3789985 100644 --- a/frontend/src/views/Environments/EnvironmentEditForm.js +++ b/frontend/src/views/Environments/EnvironmentEditForm.js @@ -91,7 +91,7 @@ const EnvironmentEditForm = (props) => { value: String(values.pipelinesEnabled) }, { - key: "dashboardsEnabled", + key: 'dashboardsEnabled', value: String(values.dashboardsEnabled) } ] @@ -213,7 +213,8 @@ const EnvironmentEditForm = (props) => { 
notebooksEnabled: env.parameters['notebooksEnabled'] === 'true', mlStudiosEnabled: env.parameters['mlStudiosEnabled'] === 'true', pipelinesEnabled: env.parameters['pipelinesEnabled'] === 'true', - dashboardsEnabled: env.parameters['dashboardsEnabled'] === 'true', + dashboardsEnabled: + env.parameters['dashboardsEnabled'] === 'true', warehousesEnabled: env.warehousesEnabled, resourcePrefix: env.resourcePrefix }} diff --git a/frontend/src/views/Environments/EnvironmentFeatures.js b/frontend/src/views/Environments/EnvironmentFeatures.js index 782ab89dd..d8d8ebf66 100644 --- a/frontend/src/views/Environments/EnvironmentFeatures.js +++ b/frontend/src/views/Environments/EnvironmentFeatures.js @@ -32,7 +32,7 @@ const EnvironmentFeatures = (props) => { Dashboards -