From b33afd598b9c48ea7f8d0ee9385c648e0a625056 Mon Sep 17 00:00:00 2001
From: Chris Cummins
Date: Wed, 17 Nov 2021 15:14:23 +0000
Subject: [PATCH 001/142] [llvm] Don't use fully-qualified module imports for codegen scripts.

The files compiler_gym/envs/llvm/service/passes/*.py are used to generate
files that are used during the compiler_gym build. Don't use fully-qualified
module paths to emphasize this.

Issue #488.
---
 compiler_gym/envs/llvm/service/passes/BUILD | 3 +++
 compiler_gym/envs/llvm/service/passes/config.py | 2 +-
 .../service/passes/extract_passes_from_llvm_source_tree.py | 4 ++--
 compiler_gym/envs/llvm/service/passes/filter_action_space.py | 4 ++--
 .../envs/llvm/service/passes/make_action_space_genfiles.py | 4 ++--
 5 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/compiler_gym/envs/llvm/service/passes/BUILD b/compiler_gym/envs/llvm/service/passes/BUILD
index 1421a22aa..157d0f44b 100644
--- a/compiler_gym/envs/llvm/service/passes/BUILD
+++ b/compiler_gym/envs/llvm/service/passes/BUILD
@@ -1,5 +1,8 @@
 # This package contains scripts for extracting passes from the LLVM source tree
 # and converting them to an action space for reinforcement learning.
+#
+# These scripts are used to programmatically generate C++ headers and sources
+# that are then used to compile the C++ LLVM compiler service.
 load("@rules_python//python:defs.bzl", "py_binary", "py_library")

 genrule(
diff --git a/compiler_gym/envs/llvm/service/passes/config.py b/compiler_gym/envs/llvm/service/passes/config.py
index cb7a4db87..45e6e7f64 100644
--- a/compiler_gym/envs/llvm/service/passes/config.py
+++ b/compiler_gym/envs/llvm/service/passes/config.py
@@ -3,7 +3,7 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 """Configuration for building an action space from a list of LLVM passes."""
-from compiler_gym.envs.llvm.service.passes.common import Pass
+from common import Pass

 # A set of headers that must be included.
EXTRA_LLVM_HEADERS = { diff --git a/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py b/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py index f75136528..d3d23e71f 100644 --- a/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py +++ b/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py @@ -33,8 +33,8 @@ from pathlib import Path from typing import Dict, Iterable, List, Optional, Tuple -from compiler_gym.envs.llvm.service.passes.common import Pass -from compiler_gym.envs.llvm.service.passes.config import CREATE_PASS_NAME_MAP +from common import Pass +from config import CREATE_PASS_NAME_MAP logger = logging.getLogger(__name__) diff --git a/compiler_gym/envs/llvm/service/passes/filter_action_space.py b/compiler_gym/envs/llvm/service/passes/filter_action_space.py index 9204e6ad4..0c755e1dc 100644 --- a/compiler_gym/envs/llvm/service/passes/filter_action_space.py +++ b/compiler_gym/envs/llvm/service/passes/filter_action_space.py @@ -12,8 +12,8 @@ import sys from typing import Iterable -from compiler_gym.envs.llvm.service.passes import config -from compiler_gym.envs.llvm.service.passes.common import Pass +import config +from common import Pass logger = logging.getLogger(__name__) diff --git a/compiler_gym/envs/llvm/service/passes/make_action_space_genfiles.py b/compiler_gym/envs/llvm/service/passes/make_action_space_genfiles.py index 58d5e800e..3265525d7 100644 --- a/compiler_gym/envs/llvm/service/passes/make_action_space_genfiles.py +++ b/compiler_gym/envs/llvm/service/passes/make_action_space_genfiles.py @@ -81,8 +81,8 @@ import sys from pathlib import Path -from compiler_gym.envs.llvm.service.passes.common import Pass -from compiler_gym.envs.llvm.service.passes.config import EXTRA_LLVM_HEADERS +from common import Pass +from config import EXTRA_LLVM_HEADERS logger = logging.getLogger(__name__) From bbfb14b8930edde4da0305b6179f37de6882b150 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 16:33:55 +0000 Subject: [PATCH 002/142] [docs] Add a favicon for CompilerGym documentation site. 
--- docs/source/_static/img/favicon.png | Bin 0 -> 5400 bytes docs/source/conf.py | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 docs/source/_static/img/favicon.png diff --git a/docs/source/_static/img/favicon.png b/docs/source/_static/img/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..2468df08132fd78c20f3acbb0ede1cda102cc0f8 GIT binary patch literal 5400 zcmV+z73b=SP)cC4ca2*=-6ZA!S<<%@6R@ z7D0=)@wtr~7!AN<;-o){Ry3ltL4sIR&`3?nMnr=KMS@mT(fsO zkrpM2lqr(3{3WF^nKW(MqW2c>?(FW&?Cjpm-t6A)@m`7_H`<%MoqhIspLyqA{MQzNaqo-YSvRe- zC{}pJSnVG!)j4FO0Z}Tmg=geaY_L&otrc8JJzzi*Q7UN*m9?}>X$4^D-fUA*8pRgE zF&d0jNZT%q!CPHX%G^#ITX@#8E@e9cgKTuQe|R<~CSl?tTGFMo0x)!*?5OvJL@9F( zqlR@Qaw#hZho(&5la&=37#70cDp9JVEo3uxDJuttCJ^!P@_|9yQCcfjbjGkD{CrWG zkS#pZb}7pT2Kx~3Cz z1CB23AIM$LrVR|yI!)ieo_eSu0wQ)|ow1Y}!UO{LA~?sh_a<^*sao5WBsBMlXE)dR z@NCIW&3NWNI3u)s(oAxC8x#SX}o140dax8Rity%f6f2-xG*69B}odvxKk{@k!e zkr^XZU~o5#-FOKk-WPHp@wmW!-Fxwg0fz+^AAhn!T~m%7+;jN#`*r80hf>&K_tpMk zYV@Nv>qPDchRT9cuZ(*Ih}`ks(e8^wOpta00|N-);OcOP^Go~A|EqC|fNd)>V^|9e z$*w}h4mE)tP-X{7HwdoctkYWwvjYQ(;F>xSjl%J%S&@O!kUusMl7g#14h%6u(mjU+ zhFY9f3BJZ(>OIi(ywV~AqsR0O4G78L#>cY*SB9Z7P@J#EK1B^V)&OieYZMX~OkZf5 z1tG~j^?K|e0+446o*jbofyg{f2S#DUCmYA02CM2sC~XKyu0$a&!$OiTQ`IdBPVsXD z$9eC=v4a<0YFe&G?t#GoF`+xSg(@(B*YF}3#3SXNMLZbza;aw_-Q;lnez^<=v23IN z!b?qyYvdLfCV&WTM)+xk>Qs5=#>A-Jb0cnyk1m2P~vOnu||@;G)t?!=Mo z5br94x?`?qZ0O>5GiQx=W_?1HF}E=tm8RDMLQ<&P(DjWsBjLE|jKOA*PpeoWZFa~| zNHz-YLnBM)XI1T&mNk_zR{MviWG!L%G{u5JmmM+^l8XAH!7X01qq#E84Q&czFo8MC z=To#$H#05Qcj(aN`OH`&bH-qK6m)@|aqQ5dLNXR05C#KZf2ORFDPs^QkU)W?C$^=K zjB*vhS>vx~-!CpNG?cKcYnhFeywX2hMa1)N!fYYNE8KxJp)9IyMR~+fy?2e^e|Dq6 zxXi{-GGUA>2ZyRGJ>Qre44x+LeG+5jOGpZzTladnV;=m#%FC&*%aOXqxN>l)j0_a@ zd@ZONH3kfzXh6R1;0A{4_sg3jB(+_i0^Ad9ATd=-l`&Qh4)pSAh1!>RO25Mio@&>N`m=(IQl2)+%| z;=C^|EsiV0(D^{>$pq(8^pgCPKC$jvs7$|?tVg5&a`_d))`}I z{{Vg9lMrHuwvdc;6$Y-4J8TO9N*}y>F%{OZsxg-K5A-22Q23>}5t7u9^Nx#ii)TZ6 z!u_*^P6j&CZxYYFLS4zbnPJx#?JBssB3&P8xABDQdDJzljB)9Sfq8VsKXM}^&oBWz zJFs{zQN(ce=k|30U>VT~M^})`PPZf^)z=t0PGk&Lm!mZ*>zAyvhBaW&bUZ}#XNH-w zgGC{^0f18oFg>t%&U%CW)6e#nS&v{ek{!&1q;NeUmsQ{OjXk}-WIbQQ3NY9d{Rwt2z7(dsK*dsYA^+tVQPQ>@cStAKkdzsYbb^i%??p9l#OSuUAD>^4Flul z$NJYjL6%`RlUSlUn33DiwQoQCUJC3@AqG<(41{EGyjL!{90?5U82|XO-+Xo18f!EJ zjD^SgtB5o}Ci#4hCL-A(c|>v!5GsQgo6iza_N~3|tpebw1VS?8Dj?K$>}Uxf)Gi-> zb=g`XPeZ_%-*=D(a(a}0uQ4GRHC}Og@Zz~#E`KQ7Hy?VmkHt(nQ(y<_0rK1M(6iT9 ztgvX}koft12c15BhKVnQ0R`EwUA;J8LyCVmfwiDb6m3-#!tKe14_LU}TCgiIFla`0 zhT|2dhc14%Fre`4skKY<6c|IK@HKKbKRn(OfJOv^DyvKRK)FC0`Np0{tSmv52pH%3 z4$#WBBlXw;8=j0zte;?`b+Se5rV#@T{_aX&xzZ3gUyK8jO?MigzIfgK<^{ z*AfB4WebiS!lQp2y45V8xt!4gxnQuU;qX4)Eet#HV6#B ziVEFoS7E-K5d+8;tztKF>3nW`A13x28<%OB=+nVdZ-r5M_gI4Vb^f9Owd&{_ zt5zN)W5z&dgfaERm$~xN`P}a1k<^~Kw!BG!IZYucv`bB92hR<;2qAVYkG`>L?LjgJ z24bZFA<5BSw68_89fk^JAAWUt77@?LLQ<(6q_z?O5`cud!{zFm_CNuqz}$&tINzDHkFe0Y4P8;e=#pB|niJ_%_@^MC+gcQ(zEX)ePAoLC_|5 zTQu0VXRoi&_|oY*LNbCe{PtTw#M0PXS5tY2%z)wb$Ln`9lLl>0|HAS@%kF;68`8cd zssN#i9YSqInQ2h|=v!CwktL1@BO2YzMuIjiz56k5A9?-CI#P0x20KVBee9!eU9F@E z3NY!e{>!6}IU{#sSBd@P`d)sQ3i;RUZ>d`*m2G@r<>g!rqo-{(ck2pgc`30C6(IoX zjv`h*c4IA*HqvGcMCeN(B-wklf;KJJJ*{g9Gcvo@r1oP0!ZV~NvH}Gd0RymPVe~I9 zq`R%yf^787)lCHFrU%`3F$6$^Z%;pWW3BSsjkQc8z($N=C?rjSHXFZm;k}@tbS8&{|r+_~-ZZ`5UW5v4aQ@|1T9g zgj6>+xDjsA4H2Iv*8IfH_1ugkYXJihova{B?7#!U9f;fJ!1%&_qab$A#EIT~FJr+T`tyyC5)B{KfHtd#EM!4H!dDNFw(`#SR#7idR=Ad$hN@ zF?Oi1?6+P}+%MMTT7k3Vj1fMez!)kpfB-W8f9mdt*tTe!ynRoFt@JYrdJn5z#<0W= z!AIxn`I`sc)Ah`?%k3_!ra3(eS4dWZjB_nrxAB=Dy{XmCx&f|vT_GuhA-W8Bp!IF% z@e&=I+_|Sru7Y0&%v9Hzj9_a4<5#a<-2A(T9@P?(GCPF900Q%{Uu|GW_9r{#>YSg(vGiT8%uj{VftbrL(sx^Cff|8+w<(b^y% 
z1J_+(#0yCcc2Je|eB<6nO275mm6pm7pX$1gL}Hh;plk{G?j-Q->Ag(sD&mB{k*hXg zjCIusK@$vodBk8`g>A-LSaX2Dw)PTORHesM>|iD&6~}S^6V`P`Be*6xpR+dBbch*@ zEBtZ?n>BY@oHje?Zoj}aU`tG9{^So{qlHq+Y!-TO<0GaA#6L_tW2oCNaOLuS@XOKz z2C(jq%nnIbe3A1u0+#~SQ{8*J>VJ~_+mRo>PC-G}twiLZpdmYW?5bwza2Y$-#^?X% zd+7qh-L*(-@=)kv;u*uI*VH#?$9hHkeNGC+Mp3I zYKXISg(L(M>bj&(;t}l&%89?H9!k zDp#S>b-{Y8PD$d`1B{o9{_8ytQzY+gPz7y(541EKwArF4;ctI0KXWbW*@=#BifU4B zA&a#B{g=C{a8*13x z&9_k5iH`2R;~m|!T;m$;^5pDtXs4T5&sD_fx`x&oNxIJGw5E`3C}=|qwS4u?p4{q< z(p27xF3G^?N^%yj^iEOC7^Y)+~k=cX(j^PreNU9Bf+53b#mSA z$~z6SY+}av%X=Oo5xVYh7V8}J`n41%QkGK zE;O{m(bso55z2K^;Xy-Y;%nysu=4!YYngOq|L~{pDX|_>ZO)C#h!<*%OZC8nAJe@QUlE%I^1T?pPXHoS}{@}IDy0oAFsk>>T-^1RA%1B5C zm`}4a>>j2opwLzW6vg&K+D5&(QAHJSRTHwSjyGVRa^An|-#169U zo5aoLEi$h%M65=}G-7;h;=wr4w^O)z?Mt`qp8n!(yHii}nfULUGnBiDh;vcbqsl`8 z#b*y)k1DH64;062f&y&r?f%s}AD}fW9p!H7YwN3Sy{Cy)dl~?zKmDUOQ<h?-6s7&_ zf4 zp?qV4o5E`JkR3jExX*MvC*0MQ*tK(MeIbKo{-eF=O=M9l_3MG zz##k6JMO34gTo1hWaw!MHL5J?Kmm}Tf?zxbMz3uMChHFd$ed&KZH#VTTL{n;l{;lP+nTWeyB*6JPq+ z3UZzD6TD4?WSlv{t7d`%ngtASlU};Ooju>{mTVg#8E0o0D4IUWs%gORY~q$(GzfIq z20IkURmkRSdzIFJSQ2YGFo2C{vR#^kg-u&eNJiNlV~JcXOqUB_kbTZgfSGXEqqT%& zl;w^kc*_kiJp1fV-bvX=Q_eo}%nB?dqwF+gf#pF2`EClOPXIzsoT~s1 zB>BffKHY}OatjR4KK&B}5Oe?n#bgIyy^X0XMn!E|?t$Ui@4w@Ys+)j|BL!gx(hxID zM-*{NN zgCYsYMyzFlp<_pObjg!|=UegVPh#5|7;TF%0Qi4@h1=1!ETV@10000 Date: Mon, 22 Nov 2021 17:02:45 +0000 Subject: [PATCH 003/142] [www] Update the splash logo. --- .../compiler_gym/src/compilerGym-logo.png | Bin 0 -> 36715 bytes www/frontends/compiler_gym/src/logo.svg | 1 - 2 files changed, 1 deletion(-) create mode 100644 www/frontends/compiler_gym/src/compilerGym-logo.png delete mode 100644 www/frontends/compiler_gym/src/logo.svg diff --git a/www/frontends/compiler_gym/src/compilerGym-logo.png b/www/frontends/compiler_gym/src/compilerGym-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..6dbd26488677a4a3c2de02c4f80133c7b9deb4bc GIT binary patch literal 36715 zcmeFYc{tQ>*f%U$lcbUwYm|P7vF}S$wqzT|l0pa>*<$QW(LzEcWhd***i!aoq+;4o znW2a&%4Eb4V;S3f4K2U>e(w8zpW`{6_rJH}=xCa`zSnYI=khr}=lM;rvM}b|2HD2K z!oqvP#K4+`g%iQT!iwhN0N?Z!eaQv?a0i&23SwcAmt_9k5>~7q!otE%^+q@ZJD8hk zx%>OdyPorR^NR=wTCKfpCxTao0R2p|2;d6Q%_w@bw6G6%X^h;1{G7rYo^AuNL^s{2D4DzA+@& zM^^&ITu|J>+)Dho|3wdRb$LZOcLfCnadl1kbFOZ#?rv^J#FZ436rl<#P(>9v1qCf- zO)V7_@n3%=z;YMQd1_f382wrd{7+Znd~k4p78Dv98Y&;EEbo8O3#zE8sR>n3f+{J= zfe~^+;eNrcVRC*!lD}s#@Cb6h=p7L3?e8bfoYB?I9~G=C0k*n%2H$``)A|Mdk`s^^ zG|V*sswl6(Jk!R&bMAk}1)wfo*jW6WJJjQXhp&fUa1a=)_-AaudH-Plp!5F!&D4K> z{tpWP*_xaGS>wOli?8pWC4zztL%=b93FN;V9fSxE@PJx-1o@*bx_cOgfNe@LPZOYZ z{Gx|zu>VDbzyF2bH)ZvE%i>B(@~Yzhn7g`r`!Nq8{jWzn3|xafbS0SgCa0(%r=*Bb zRMt{d(^A!xRZ!GYP}m%5?tjkPGyH!Ys;;G~uBD*yABF<0Ip-Se`u{rkoV%8%|3zO{ zaAa>^S1%7}fS;Fy_#Z)P9rwTBe-SJUcBlOB?@t^*ZgtV$)B6JWA;{YJnD`09<7$eU zYHD&y@`@YVH8gVop!a!F7h)3Sr`<#}Nx{8vDlE*nYPX$*MITbZU4LJ=} zcQrXhS9JvsB_(xtO$CqN&l~u=qnJwY`}uR=c{Me6Wi=&NcR3ArWkvA3+BrE_O?P!U z4^4LZThQesM zx-$(ySHhjC0UqZhetqrzUr6A8?*yCEhMxBTll~8Su{kWr-!nMW^`ZyN3rOpKL-U~j z-S&fAL;mN@|LH=snT8NBbrD92wl34Z5vfzrJC!{WS->7Zqz20`d~S(QrE9{cXI!S>P_9=iNK=wovC&384N71)bt!uC6rP2ZT`fCBOcx% z>UXxfyN(v-r0JQeoScSr-kcGZ2D#GSeGP{4{+wQsV^PZx-1z>eRk2av!+H-Zr+up7 ztg|=A5oPGj$=Ga>dpJ3@*+|b;@MRSoqE%CS2`{KFzh`^C^-CBbziIn5kzTBidA8T@ zeUEcI{U{1U?YCVnvFX8IsU&#ccnMzbYUtNw#$8XFwX|C zlKCfA#@p~|Ogjwa>PWye5rT1~w%3AHwWvS!p=KX@P zCjb24*pB1rB^Dr0^kVJSSvMx(!X4bn!^g@=`gYE<^vi$x{<;7g$pbN|9;HS4E1nl| zUy2Iyn^ev(22;~)UvwL9?h=-^82hDVBtIhE__f{fQk&F-*T1)Ko`|jBa-*hkcYP>8 zJ{c$4icq9Begk?U-2PAqZsjz5vS#0;RnPugTsLB=Dqnz9)>kT6QwaGd+BR1PsxYy2#Fh6=V)`euvsx(c z^!J+cEV_Ge>dO_Go)Mik> 
zZEi)PNOa8X-{S(!!8n!P%JE?2t}-+_W_)Sf)J*ryC{B7a@*K4~l##Dh5x5~Uo8ECE z#(rJC1a9w6?Wt5MW+qrHEI_MvBlv_0T)@x&*srgxyr0dPI7U^rf1|UoF@Ob@w#N-w z4};N(-_sX^k$1sLAz&r$1N0l~QDaMZiulGjam=nrt4Z}E7}2qXo4%wATJ%>`@_O=F zI?b(^Ce5$9@G181wix+Z=g(T{zM%;XR-Qzx(QJF{ni&gTJvwAv%>rj`Y}wy)!Z}GN zj)QaR^qvUULLDlo44XJsUywfF_(_cJH6clrvB7Pu4>XhrL-l>mcf9URB*jN8CYE_- z*nOWJ*Ut-&X*glBno?TjYPuPTcOnwhFgNbLv?#>Wi*7X`g*8e->ff_mMljkGXx8S$mns0p-S4lbe`_!kcba%@2I*u=svkZy0v+GTW zHE{5#vBsGl{HTu8e^2s`o%G;R#+nKho%8j*YUd#Waai;K{e>kt`1xI#-?Q0T<#1)x zW*X^=TBodl%iw9f8?6=~q4+Ea5w@A}&4 zIHgnDqA>US;lIO7+>j?fm`N7+L98v~G5w1lizx(l0x^c4UzHDgQnJT#V`-o(*YXDh zG<|9}j!aTEj$rQ~&R- zYCEt!BK?DkB_T?)cO?FOyTvfu?Q-4_x*+=%qOKi7G|x!4YhaCs&98z9)4Udj)fmZdNTW48L$$VkdAjh?HOwY zm*N?-tP)i211M7aHO1P;J|<3ra7!TeSuxb`r=P!^2%GIOPSRzore#xdelMoE3akxh zYDIWSE6ghAo=#FkM~YTiuIit?u<_v}-_?BZCOX(L{P?ZyTRbirnTf0wtuR%!37M<- zcO)-v_e}6(Z=ws)2ZB;9aK3GsE1Df#x$@QAf1D%RNH^)P6`i*duy#K+-b_%THpNHS zndp9r!OWU1JDB-;cp4fLReQm0f3XefIsN;F5oVL|n8B*KRl_a(bPB0rPFJqLdGxS< zQwW`AY2iRW{l=7>bew8ysW-T=vgt3uQQ1hjY4{nlLXI=zk&XtsQ{pMup-US|VZSn% z+S9G`D_FzQcEV6+A*xAf_(PV3xt_V>N7mLm%nJ3IgG^TICo-&m&uGerbMM7$0bb^W zS>fIYf1*SQe0|d3{=L5nQOrs5RxW{)Gl|TzHT9?|5WE~!v6_~|d! zqepI3Qj~ugpS2NiV7`#r_;@6Q{)4$Hl}`=W$nKs^WC3u>&3r9|q^4&oRON4Gg=>+| zdoco)OeurwXjQ{W+SDh;_5=CovRouTZl>BmE?XKfh(++Qd{Zri`DCBjo0K{n1w3 zEyoUUveZoGPRnwQa`q}DF`fZ&$pYiFG>N`I%G&#{!H|Aa^2U`gX5)d+QTc94zE}cB zTiKbkI(%e$bKRdquRWdhe{H#T_(1J$rJ}V^plm3Y@zcX9)Z(yGc>PU`msz2_BsDO) z>d-GeaBRCZ(0$V_&qz+90E7_j0z2KLCi#LG{ja8Ef_gH>e&z9>TdA-$@%7+H$~1~D z2?w`PH3yVN^|L9tSArV&=R#^Z6zSs1RkOe`Z%b#M4%~)$DA4703FKREA|WKxsI{-q zKK-|8ldc>`sw*ccGJ{Rxsqy8g8rf(e&7=s$kHFF0is%Rxjehkf)=BNP7F;+X(jQYy zE50Z}MH{rduiw#&ANzr>)LlkPYWK|WRrho*sxJ5$q(0S2GK6OUO2 zmY9F(76d3WfyZII`ILzhiO$%xd@fwzb)v=oiRi)%ql&Y|{-j>a%gVWEiBG0v!Dr=j zx}^H5zewozHKLv8&ey=#0FAdBFeOv&fUsT&E{P+@D7yTTH&ITKs(OI_RQ1noBF_QK z%eb0u1YGKY+VnJh2|P!qPJlmUy;-_mH z8W774pkDsDLynu+);BFLHtats20|xGv7>)N?at@q-WEZ-FOg-g)`0 zh-zTxf~HwUy6vg6X0IpqZyZJ(vscVU-En4_3{2K>ym{h}#n@UF#x%!!6OBW*VU6)K z)3-18Z|1>At>n~zuz+uZ_F~vA3iX60nSqC~3NHVbE+hCiVqV$U{y+ZP@hGsN&UcoD zdhU?g@9~aw6=a1SL(j3>l>9O?KEl+bbtLbPs!)d&Yl8+>t;@2FR!@Oz2imK*;Relt zF}sW}Uucny2F@e8rreuof!UCQ_nPG&Q$$hH2XtfFh)$B!k^{9} zn9}jc_RV~gjWp?w)Ne||-+u*+Pz(r7&L2Xz2JvG<*XB*h{`c$N*FW|Lz;QNq(}{B7 z20MtZB-!W*5a584HC);ZfFLx1oh}>)R?~7R4WE(*03z1fj{`>t|MHOX;Gh1&KU zJn{{kw4;%UXq*1}9cMwCyw$XBwElA6sn-9>7@3{~_@8ww0BpOX=?@M!Ij{V}zI0Qw z=n09E?)qG#Rs^t;pm!F9m5CUAd7ol`uk~NM@Ye)hEGZ0vx&!tBzEX>+-AoBHsH9Bh9}F47vT53HYG=wz{L_mA#@+1>%>IDZ@`o(fYVL zVvGtfd^fNN`Jz7wpb8XI*-3OLf%h8{0Tw7_2bT2^n4zg1!}$nE-Fn+4!1EQ7R9il0 z;qcv?-U1?kxUYyJ1F-y`BiM&0-b5a-r)^k1=lK--+E)I}Cz7~u>+~O|wbxNPC3`hM zgr-$)M(Ajr3U8vnpkflk1c(iwf>s~|9mOK~-^ZtlA*vFv0_AfR0Ovm5ls6fAwwTs- zZx0;6K4we=H(_4~fYZ#PfJ@AD;4cF`8XZSXOKZ@dJ`Q1dZ@`x!>59M z-&0SDF#ijCFkV)xRVV#@J;n}{GKB#y<8?A7%c9V)?ft^XFc1W?nQ^txbpu;PoZ?44 z^Cm*`zFwAf9NxKc$V-mwB%KnAo*Nkb`uFNx-o!-SY@=4IZWXFJ@bymh?-$BC0OnRL zT6<=@)t_tQnWMgdxhRU9nATQse)Ks8X0;~7!X z8-igP>boatco@J#DoIVJ00=4O6g{2(2jz@&kn&t2@NF+m$$4pY?-v@2>=G5R}2yat*NfA0lqD#xmVp-wtn`(522dCdW7j;;f4}`LNMF2MK zn^B=I0B{mr4U{(6o)HpjNUSn$dB5-)07#vpN^0aUM(+;y#-kH}WzZiy^a~oTHI%2} zb5=KeDu~`7ti-4Tx0T>c90HR6C+crHdkIiX$~~6HDmH-9$27d<>K3W<1>l6QMWaF@moWFy#2~V%qU9 zAi3FRO(P#EikTU>74dP&_%|&S$VuAZTD*y#0EC_dNRTXZ7Q&1IZI8eOQ%m6Y`RO;H zXZq+;n?imLvC&>%dJ{A|0+Of=v~e=iNKU)J4=9=&K;Q4m=UR8tOvzeiO!wt7pjI%9 zb^_60=GXHE`vBeHQDV61-Q>hUJYWinXvgRS*Bt9jbOQcS5l9N^rlbc>sXhX{z}SBR9ZZ!d zIlxS0ES`gd@&^~wb_4gWt5`HFb`FBt53fRsjfXwnR1BEvwe*3dl+wH`b<`#dsQ{qI z`1PrAZQvXI0mMxqXeUL)lrl5!%;tv*KRtn@uFRD|8Usq4ke=Lu&7&;kT5+Q`m{8Jb(72i0&_EbQfBp`j_TCILY{F;@J)j<)yx>r6pbI 
zMy>lcK|`?uKnh$~V}5!-R7IWob`};BHH$iB&WsbDcqtY6wOQAa!x(__6gfz){QUG% z81O{^z{Nc}M(y}s3il#A8T1l{RbI>~51GjlH5V&*a{Nlv8{h6fXOwEJ)$1gI1Tc2O zjuBIPX#;bdkd3Y~Anra!y~9NJCeWggS(B!d!Py=hv4B_FX=nwwue^y){QLSAZ{o=r z;L-NO>$8oVp7+AX#COxhE`nKe%=DF4#w2sb<$E#Xm2<88HUhA39sut-Jx-oXoqo>@ z*A4lL4P0d+%=!cyNyZ&HLC%Zu6J7u7nfXAtovGzV@mpQbaF$y7hu&qJ`@@ri9Jct< zyBuhNG|+;}tjDOU05(+`5uf!8g%u$)ap5C5|sIyZ~5d zS82Kv_FRxJGQ2$7XqTnBVo&KQKC^087WOEqOt!@z@44rixo}5&S*hX$VNKXmdR+tR z9oxh)_x9M3-@khE+l#`@3zM@%;r-7^ex7r5bv;EWU>)pVg9IP|a2jcNmw()f{g>Y` z`KFmI8R-(pwkOMR9cMF*U3<$|p48UOGiKvpLa~06-Te)x@ zi{n|tN@__=XbbSF%O`&bDpT_X*GVA;Ln!aNmsEAt9Iti~1mtpI_3;te(zZCfgF}6M zlYFJTlzl58Dzc*9T@alPG6k2o6W*hs*a!|VC1exXe2tk{5P-7=0T*p`)FcVOxY5Hv zGop&Cun%;jCX*~{0Ljqu)0@co9su+xkiaZVidySOl0G$>u?un!X23u5TvgoPIL*mo z0E#;*yAfN|k};D}Ika-XbJavvC7V^S0({#E(mT&$f5l#mAs}03ex%_OBMWx`3QE-O zbjF&!ScID$iAgjufkF>}q>PWY3(kc!9jUh+?S7XcFBhL1VHIZyce#RE@ylMtkVPF zMfK{?OqPZ|JP|$iC?iPlfUN^NOTpqNBKG_23s8d8P%W}}-mBcdB~p%KC&F?*RlT(s zPf_QeIxv$Y$#TamHkPin zm0&9pWhYGFsEOQJHxjvqC6r4{L^`~qq%`Jl3-~6Abq@@NCPuztWbHj_ zrF#R*IcPu>yI%{k479`PTd_k#txtz@?J-DHe9W;g{|k(M*gxW= zta6HLF^x~sd3?DQ@V1Fr6?0CiyQ~XSa24pN=^5Bv+32kng)xEb#H^=FGVAUS@fxsT%&k7#no_Uy%vX|n z{}_mxZB7OvoPf@Ip|+96$__Vtj%CX_vuC2o9(l;fj%Dkmd>PC(+!JR-Qmb{Day9F| z{4p4vVA8LQd!f$9EZdN~OhcK+Rox~W? z=__v*g*A+~Pz4ZP4>m#wq}`M}>bbu1J zISdoPNR%W$H-!saVu8_(bJXjVX1|uep;FpOP1~_N$9C99&8!(OHotgCvlGU6o?I98$}W(_lYK~@8B; z(p7?M46S;2gmqAHGL2jt{S-aiwO${$l9x78?Qlkz&VGnxy75TnPH7CQ8`W|4&B660 z&Wt#-g$DOFAIg<=3+OtT?l96=z5Um7^4<0!A1(C1IsL ze|Sad(bv-QhM+D$P_$bN!Tu~w3Iz4VFWX8ZyYfn}-Ah7oR@Jc3;fpRG!#@((0H(xs zA16zJV8QyF2s>6Cz^`>ZoW++@LoIP}peJtf$b(U0sNYrtQ3U!!cpdpU2p!z&NlnNj zC69s-zApO?0cUg#5XtK!+tD!#$Mv48jm8csT()Z9%3Q^{e{o|~(RY6QBJS~m0}I9& zA+mRIDc`1@CXQkC;CL3p8W@nho?XF}Y_FXcYwj9Lnm=^uMI80WJ>cS*k&~HYL>d4x zGWA|`(x4y*a1|iWKnu>8lH24%ifP`UG#Xcem_R?M6kC!Z$2p9i)oazdG4~<7iv1EJ z%6Vcc(1p}N!NRWUH|KA=s~Nq2_8@W(KmBHdR(#k+l<4ft*7Lzut6x%k<`)wFO@jUe z%2MBfxRHcg1tisQKS=6I4Vl*0w=i<%@O#y|)VuPxzAc{Vm;cb*IYXb%`)#^ z++N>O9|eugJ7vZG>c-_+lTvsyw`h-Z42|^tY0a#N_|trbzonjbVqh_C0pzJryT|WC zRSV=3uzRM0CV8R(rT`HblmYdlNzWVY1_drEAE=#oC1oCD0XIb5V{@9nvRve9!(rql z_rWBE@*Oz~Ed1;wbmY&Eejqurq=5>91IRf}8xehuQLjq6$$T>;Ez8e^0-~v3g_?P* z=~c%KhSMjAaos?@OQHMVM|ZirWzY@43^duIt491C_!BpFV}*oAW6qUbY8~GCd^ZAW z?v}nm$A0!EdYA~ot?^T|k5x<6&StE9LzV(Zt$@Bxe*a4kQ9~_~F(-Tdox)xTiNaQf zqEl{C8#Y{&rT<<~E&qW_o-N-iSnGJ(0Do$?eC)SgTrXF0Jx?;rrTlG0!i#Q={?vvO zfl!yN!p~}`A7)wdNAekQhoMQ^c(H;)8c7k+2D&SK9&R#eh9p!kCQllaYm!hI%o2GJ zU%FZq!LVYV-D+}H-8Hq?$g=Ko&|kx5EB@u7x?-B+cnxxU*}ceI9u@(1l9*07HZ8** zq`+a0AcG!0Qsk*Mdy}UgG4dE-aB~3EK&W{ANP;>97*Du~?$6TuwK?u5T;ASrCDCAU zDd@Hlcl~J{konk(x95ji1-{7g)Dpd37+D6Iciz6uA0Vp~0WJSGITZ=&(n*+;iRlA@ zZq}!)S{H8Hc{&33NjA_)Y683{v%+P6jQakq=Di;s<#u|-4z_Pjm+zm76!x34Yq|LG zz61;C1v8Nzv2CS>N{l$oh6T&S=$yIFijlyqkq^t819EgD(FKGR(3?AS9>kRLg5LX= zZxVrr4Cn$bG|bVU^+&hltbXI{7PLuGxT23$pc&mTt1QK^^eIMb24ubf##%io1GWAa z?g2yWA6i(}wj6a(4lD-(7kT>V}fd zwh*uxy@^YFGNmg7K-J}R_o1U^zEKF(v~5->tJSsLJ#);GN7G&4%l7WvsACbw`RN0< zniP5(&Re{J)uv_S2rvtM73Yd+PF_Yt$wF;fp4Y&}{N^$@u+aqlrti9_FZ|5u4J3wI zV$-!kRtm#f%!*kJ_8<-qnicke8YM_X2#x10=GMk|`9ebgi(dw+ku({ghx@JXM|4X* z@Z;G^Ku-{GEq&y`$l&g%a9Lh@lXC5Kl)HYla1x6JB|Gra3mfiR-d2 z0Zk)RddUdnTfpce%A83Vo1~mt3g+Zv|0aK#gr$l8wm@s!jl{*Wjt|QtNilyUSfwTK zoY)vvzc8vMCv&0sx*XGOa2qwk*93K))xVWHghXt zkB$DJk`mEx#|Q`-Pls*{r?v$rpBdLyp#E@)b*EwCHCwGNI$~*5XL$6}DP0$ad1F=JT8oB5_>_5Qc2nM00{WJ|C|=`Er_a^i{vjbf}qQ_Zv~;pGt00KI zZGEDM8foCnQ*l1?^ z?yIe5`MvyP1Y|N6UD%r8m-_p{MY;KK+pn;TwONSj9^NvX0w&!v=l|((9Cg%^Zys>e z!H%-giCIQYs_TH~nd=HbeuL7&&ojMyDe%g2F~wEsjx_xe!ewZ=hX1Q*ofMZn`!nNP 
zE-KFCKfToCtuhhWQn1eY3NsSqac4yE;7`hDg|lYbli_L?UG>8ZM2vqzWXpMQN!&Qu z>I;yMULrlus4n;UOkxd&PA@lPzA~I$sZKZC_l2`MPcr;U@=W6PUV~@Bb3GX= zwV{aiNw&J^shZZ3&iyCYNUgb%aa`moi%8KO6Z1MZ>7TmR$As7-oObL-9Ot-%ZBylL zaf=vB=Zwtl;aSUgy>hCVJ+J5SjpNi4fdM<01!aD6=_dU6ATs82_2jZp=p=e^WYn^r zNpskMn4Q9`1Kz}(bY8?(v^7v31U}%WPpty_6trA@O}*cxA5I8Ytkpx$iLv3s+Ajd6VL|c1r52;F+e&@}Bh}E%#l%@yC7L>hfZ?vUl2j$aH4-MRnU=Fq7r>GSD{g zA!W$*lx%O$HXNs6#T=kJm@ps#G)kNR#cYTmRTwdt1#s6asA>$`9yMqUDvQt5*6-qS zqc^cAtLB(?l>{AD<;K>wO^YoZG*)kaIrh)fvv|*ocfPZ)-lM$fPPLS*s_N!zGd^^n z)*>(R4!LqyZ(|nvW$}$!NrV-zrnJUP{ayicBR-*+@@4636Ap1mylG_S(d|ccw@(H9 z(7J6=l-?(-lo{@B}#zq*pt zJg@%=#caIF{0@{756a45Ut3`Gb~{gJ(7U29>r+hg1C_C%7z48knu^QAmtutMJNP|s zi*7}H`fR_ujetCtDx70LtB)L5g`M6aM(~rEh*ppB3-4B8&_icd#>3175Ry?*6Vn}O z15xdV`hwJBgNlT{v(_&=SbKcuY5e(k=&idjH9W?OlAnp*ayBz;=b2Xh^Rk~cKbW6u zrIpOD357bDP1{aP2Z2) z0c*U-kWcsK8A1zl`HT;}&-Q`xKywW!*(iY~%3iEWXDKb_VOj=FDTz@E@WF8Pf(liF znZD3>ShvTfjGDLmOqCo%W-g#rUc&P*nUga?EW3ydp3y<)Wv%9K#4#ZM*uAUHO)AvCax-1Nq zxm!84gphQ&?(MSmsS|Cy3q@w{wHdSpd{tSEH}UXmkW`KM+|RHm4u{bzIZtPVLwYfk ze)B^4F6F+*j`tml>f9~1^zL^T+^rrWj`34@sp$RT>UWFFf{R&oAs4@D_AYWahHpDy zVO3#iRWUBZkeJiuEJR07-4k|f+5*)GU6C_7SXRm>JQzHca>mxm_eHRWG+Z?WJIf)` zoP$>hcC?zT;>D(b4zz)U5N6r|I(vCaY`n|$n3Yt3A>;=mz_D;+*I>GQ5EOQBj1;(8H-$vLw4D}>t`rid=FB8A5{D! zAA(FO9_-uMTQgVVN>F`1M|)@i1Doi*#@=4oPmQ*Jtgccb&wGOlSIF$Dx(iBBUCbUL z7>f8-yg`BA=21UT5gl0*LIEw%sAj0gj1-Arz*UKjLQBgR>p32DG`Z9zDE6)GjWpx4&RgUa zK?sT3gl5+4v8g(p^JzdDvj(_Qr>7}y44ltdI!UZdB}l+z;0dwGIlkxejL09oz}vEq#^ zvla*TO|M~t(5clO;hK-<)f=VfVw_@;`7Aj%lGB?W&-lUU@6_od%pNPinWWu{2z1S@ zmfEuyPVeR{`BDP!p=l*G#bbCLTV@Bf5wCUPsv|WdN^anIo2UQTF&TiK;2`^}4UpOn zfKD*g7V2b!>J(7;2oZ%sa??I@wz_If^K z`lJ*#-EVPGg&y-bvT99N8nXkWj~fW|?&J3hLI93d`3u7B8>?8pu6^z1WOx9>dBg-| zC1|#X<#f^oe*^$i4OK~vH0E@w5x=HRoCca?$;u@l3U0pt(CPW zUUT%Xis5I!+oVbWj<3uaP}bY9*IgRltj>H!T7eb~re?}~+HhgKI(A7Zc2jvb(veEl zCt$Y117BkDxy_y&422FD$oHL2YHdx(r!IV$S(u9|`*^>TwZ+I%NEAUSVM$P7ORiWo z_Z;%Cmx9^u`2?QZqAo)G>0EP|o#X~k1t7I80S613EZ-A_4xjM}rmPl%Zo9o|Ntr%9 z%1$Zp(L!5H?ft2|S85aw+0S29_$x+t1bl(-`E+2LK^20%`X@3@6cfXzJ~pAF=oDe)!c1}cH}M18fhiK9-0_ycaSDlltLg@g1imn z^s4&;qOB(PC?!Qi9_Ym+f;4ogFvwRnhI`3;v3j&?FQu6K=`@44VUZToqHO?M=((v~ z!0zBO5ooSK+TtwJ3%U-&8ATHN<)+v1dB;`*AxPv~bv zjJ_>Q6}+30FkO4$j(*RMWc~GIb5f<&i*@zX_~9UmtsY-1^d1DksaURkMV0dX?Ev|E z|7xAjPK05*i1X{MmES7qcd4~7`cs=iP;85l1Uk={cfK$-d3wu{z_jylV)99Kj zJK+7V6F-WbnmLIQ49Jcw!cX~y2L`6Q9MOs4uYRo56}qB7L0Qj&e_d$8EqAQn63kx~ z$`xBpOYC{KA}X6=TvhO}Z%CnB#9*;SYdZVmd2^4p7yVf)pETm9RY7QA=3JEx z);xcM%V+7;n>a3KW6xaW(iqo5v5}KPON|Qg=Z{rNf@Act5nj=hAtSa2B}?s!|c66z-q@MB=fU9oKO zLs$299J3Lk0kqC@iwgRjjYj0KgHD4?uZU!Q+x6QthsW)W8aKZetixWM=vo;BrbS=HK#L|;!PeP|I@$Pofz+=x*b=j z)Mjvp&@S_gTJ#(>^Vs8|+{_|MHrH`77lLR*J?O;_f{YXt$%mi?Oj&y^eid2Z+X^6o zxf0&|#CG&oOJSc~4s`8=vy(2eZHr_Kck^?39gbF(9LqT~AEgJobjrU8hWfNREBzGs z5N9&%?#N$mJ4u>m?s=B(z3ZiJZ=^uf#|N~au7QQ`uBP?g+^Cr$GV(`wZXN6J=p(08 z_dag^9X^W@0=_1?-xPEEzOEHKzSY~k<#2;~#BezWX}DyAwPmwsI15_0MWvzI0z9pB z$saWfhVg+0P8P+Y8e;}~s#gw}zS#|#l_@Qiz%S}!zI3cd+b@U6=!h955Qs3_1xi<8 z@b|>|*=zb3vJ#=`VCEnz=dd*X5fYqfERQ zAXg;!KlEELN{*`aFD)Q63cQ4Tj^2LELMv})F~xT1`g~Kq7HCmHv~DoJReM0IGP9oh zInXBH;bndN>Pl3-H*sF-0DXFtGLV{*WqJsouNBau=`P}))_biYKE=HGY2y0q4ZAHy z`?pGJg56pahd>x-sH$_C%JD0@rLUm>5Ph{QfCi{>&ZZZh`b;@Treuamr7fI^-1pV3 zv8~_a+{hy}AN1Dp*<9OcZNvRrh32ou6_i?Q^9whJu&3f%Q{$+TIWuDi=r5%}W79R_ zRjj(n2MtW}wlmCn5JgNq&9vpfYq5gz=7+kJo>=N{FUt#&*(;y<`+H$-!ur`VJx z_e{KS`@9miXSZ#AF4fm?K0Cv{c0N=UV{v?=!)mixE)?*neD`@kFQOwoUmdR_*VK?V>PJGS96`G_t)%(r7nLX6lPtr0c zaN}msic&ae-GzV)ov7u(=Ylz=&6jjAZo4?=HN1`w%?6||215E~oFCurAX*fqL~mO_ 
zh!{rvXgW|DWXXf`k^IwN1bV*sKvBaUwYjHSVAccxTxW0cEf(#3++rCp5KL`}w3zNXynO+LDYJb&i$D4e*zS{;jVpY?V=jim1w*lGWGD|Ug zuUPxEmYS2<0@Qpa*XO04`qmh4%+ReK)!IaCEY+Fdd*^fOw&R(1qgIUj*6i?8^)Byd zQYD?LFP+NLA=5nE{kkdlV6}sbpa(^b2iusC`1GNNm2SW2Bxqb#>58}6L+4y8N|rr- zVr(U=@q6AI1JXk&!7d+EE+TmS3#yY_q}k6&-YpV;upX&${?ksx=#nn0|Je7X?A-(} zA;sF7#Lm&THX49~%Y5tD?OH(b`Zy6o5m@tcLq@?BsMxU=F78HzSZXR~30!@a*_gSn z;h*h;00&I2-2+u|I-WCGOq#=%kZEJrf2}2*Sn)+sm>B4LwnI>4Szbe(g1>Nicx$O@ zOm{GHK67B(;-RZeOj!i_?cLY+RrA#Rec>_Q#WTvd3G%K7PxD8mFjA1ju>?@HZ?f2L z7v;RLTbqx;-zs%vYGUB07J4 zodImPZrZ3=*RH-@^Ms+ zTHw7ug4*H~sA)B=$G_OxY)D!n^z6W<-n7Mn*U~3jd^lUuRj^Cd+Z*))9ODwlYPBs{ zEoJ)sYR~0Ac~|i<$P+Q!PlD&zo#Vj;u81Zk_z7tiM5UBXIL;qz`+g8~NAPmvd>5k1 z&kfPaN61z8C_xDXpPlU!FzA<2K?##k`#9>?Jmz~758rQAMHUwH)q-62h0M+bO}>=o zN4aK>YgvG;%(Z9-C`x~-QwDSWmTzgusadF8e&tUsXSPxZa2GK5P>zL5S zyr$N^o4R77r5U~;E5ZdRE zb>@-NiBXThI(Y+a+?zHi_SoC7YU|^>?!iw42I5Hhj^~-hg4!#f=j`)Sg&)F*$Z!JzYkq|H zdrd*)x1l$I=0THHwrz0B=zgV2b}o_BOdYRlybNCsD%leyC|dDE*2QTap0t;FYBrM9Li~k80nXhMdRU=F9l!psbev7Q&yxnD zs)a%;SnX-LDY@!7^Hthgnc=|?Mn@Ew-8H*DBHy1np5oiQLvVr>m;v#}OkUzUZL#K< zSF|{M(VZ0^PK;e{^%{A#jH7q9MC;gE}X9qI<9;<$M|RDHLdHM zs*zopn~%pYytp5=nq(QgzyrE;06G+ex79d-vfoOf{4LPE7b~M!alz-u)%7PmW64^ERxj~-+zlTs?*`VZ@RS+o`(i4N3}c3tOa`m zwt(K*uat8iZGxYf#PsZ>&~)gLXuFk6RousFpX=Ljt+#qH>&$L!L6ECasu)j~jJ@P{ zY;9nHVs^a?hDr8Y+XdbP2%b|l>IY|uO`#r5i+lZt`Pv;*{Ubx68Ce9gKeeb#0Go!ex#P_=kWehY^3&n@WM_s^PM8_E`{@M z>$ZT3gv4jeiukL&l)<-O&-mK4l?Y!)?jPFPF*l$`U~Lf#*&W^e2C^10@bi!WO!a4v zULl9BSfq4Jb$c+=uD0R4bfg}aT&rV&g+t!az)Dk4k(({{Q$@>ns&aNA07Vp240>-k z!CNs4f}jEH&I7QZyf_ceAOuerrdS`=Fl|f0i8ss!PGnjnF~U=kR+EwDypQJVbG|Pf zz~rt5Sw>Ht7~$D5dquzh4PXg`6}CI(Nn`q4S^f8pFFDLNk>lcH>|p`*qkXD55PHxL zM!q9py+%RTZmtO2p7)F}$Ib?~=Fdij_Q-A#%GuN#g;#n~lPM0_WhZ>_N{#owj}Fr_ zBW7W*TsU?id{(bsf5xFF=7o)}KJaA1Sn|3E>$Ubu;NS19q}Oa+^~>SvA%xrDcfNa0 zP=DaAj-9ENUFH#LHMA0;3B$z(07HBqykX}e14>m`J@hHE;Yk)7LHQStzBKkJhC7ti zw)Gl7^(=kX8cs_$EQ4qDuE!aOg~C+dEZ?6kyw+xXscXw_bo0SL(GSA6$B0Ql#Mh$E zbeduekeB;EfxpW{bi#chKkV@(ImsY7~vZ zM?dcfWZA7L|6=RUK(3oMQ}PmfVv*+dQam_4C@GNYVpXB`ygmw_fWG>u z|NDFSU1GqbkzKLKcqsQxNlm`j&d0TGPB$I~>Wk7@umF1i@9;#|Zgk6C)i2)?^}3nr zA0;`bvZUuG;PT`oO4WG?yqOOqqU&|X=`{Ib3>OY>VmIEXfSSr|N|1iYjZ5OeRUB<| z*N&YHII}>8TglCR;AJNvVyU_O^w*$Yo!P&tiw-dWtCw>@U9Yl8%8ty-Elx{oVrhqz65;pAOg{(a2rld9xcHEta}BrRSd~v@oj~nnQ?2 zt`XzYZiHmc@Lt3MFOJVSn+9s5?KL31cYO?eIpa*XSl`j21GUSyv!*i7?Ff>1&Cs9L zzASI`M%W)l(k(J$2izEtL9TK_u8h?k=Am@u-n3gyUyq$VC>SX`VU*`SiGUiM zUT69U;9#h9mA8~){A{6Kn{j@R-kz#a&F>O{+a`{)lNc5Uz4V_Qc|!Y$onDZQ9P%%_ zTxAa)fgHDT0u<}U-9yhZ-(Rp#S7CLF$5en`_K;&Hx+~Kjz69zU{ht(YriOofY~8E{ zpFFQcO7*nv;0B9Q%#QbC__D=nx8a-ul}(CV+6n@3wH6m#GYEJ9@gXz1FLvLZuk80C}k!GE(c!d!s zL7%}!J1hdcU6It*OuNm_Y(6ZK|9{Fm^KdBp_Tjgo5>m9%cR9E3d4A5ZNL35)0Yi!Alv%@k(HNN8PH>z{uRocA4Cp2#c>pvhIgvz+%_0F|Bj!BPDdWu;L7|SZH#`kebg7$SaKSj3fritoO-fcK@=V3ngzdw)L|< zY5ns2^;cRGzE9doyv@OmBRS_COai?(shwg0e0)~UZPL9{K5`56%5AuhnK=8 z+{&fA?PJ0nc@3i5+U_Tpz+$kdPqBb{7g2tw9KNB9BoR!@(Cb!ISq-ao!C}(Q#UI~B zBIgaO*t?GP9az{$^byzqi@AOa^CEotZc5vyx0_{%62I@gpqkjai?7%5y=@K{%ixV# zKsf9sh#1*RC>H)Cg)QUKcl4GpIp)ZxnrVp6MA(bfO3tF?_k7=>1h+$$bdq9MHGEeD zO~&53%FqPyVsIPp^uoEgkM3y}O(4G^jpX$W~+9Fcl&9R{!Ma z1zjEp6n?**Pc=Dr$0ka)3Bc#@zq5 zs8isl@57=oPvJyMSmTrw3u{c`COBr}WI{WyhZw>c49|3&?x;g1Qj8~z%`YCWd-|P1 zlbn5jQ&h$xqA2VOyv3h+@bZVfA`?hz{SkV+)=h&M5{U1n7l?P4O6<}+rK#7~flRay zFHJ$?=hq8t$L1AKbVZj>jW`W-3qNhjR^!;x`ztMjo5$c3CcjH6y{pip@)+HX&JbGg zf6FhjxSk+feA%S(>_4xzfj+PuMsPLBCas_h)3N3++=(uTz^(NkJoT-q^`fB{Z)ka^ zHh1{kFa}TrmIBG>i_y;Rvc{!ZYq971+5fS|e;a=4)lClAHlLW!M=Uz2%?F$W5!&A)E+ z+3G1O0XLz!>tf`VwhH59`9a_qV^}Ehk@MqU+icO~Jhbu>#bMV{SJQ@oY1pQM8&o;l 
zGzkqLk=3LI{j@G~2wMU=CZ9jsr7%Ls`7K_1v`^M=P((jXLS_2#$W-Oxp6-upW#fW0 zo@Vjplv*o+KY^%ML}7w@3-5joP0)*26pfbE{ zWT%G~Gu}Qq0%iYDdSVT*i&t6L{K-Wz=3ibndhaP*q)Xo$;HBwTz7V83yAGrlafb1b zH%IP(01D6hb>2LZf|a(StgO zTUazT?1H=QFV;h3Ul8;#SF45{WoZh$Ye-~`X=I`1Q_iWc`wKT1b_ZQ)4I;P1b7eyN z!is>dh`|&@V1lI@;ISU6f1wI|)>B8j*|#8q7^fnEWPmjk>k)dz+d5r2;RSf#s3Nxn#(^l1UBMkpi9=w^*@r0#nm6Km90yS0(s;M6)(hxk&GkdvjJ*I-1B zGrl_Dt$ZlkF1{-01hPaE#yi%|&2PX#irEt{|K6>{(KcJQu?Q|lm3Q(%z(k3xu92iNcpX*XXV=$2CHRPH>SItGV)w050lp%GCiZ;=?@*}5qIT6}Js zXkeD9wb$oG3T4?~!>vlSuO9=NaNNO+wlNvpB{}$FGVKwXwT(8(TaR`McyBPdo#^bn zuI&X~s<3a+POX8nJBP(^ke<&i14x(kYyG=<7d`avLMb4F%!6T7TNgnT$)BRQ4FaT# znlpdEq(gGC=`EVZ^K?R@I(lsG&{_)q2!`k>5me;m5U+<}l?vnh8fw6OGAr{B7*mW) zVfKn(JS!| zZVJsc7lbiTvksNl)UumVv(#6|8G$ZAnOiPKyn0hqy!WHqH}*?8wdsc%N8-NNYZ-a( zsQB|RNy_uA*DK&8{4aTh3DUBpl?cR;E^S@Dm|W*}ZU-1sR=n za=&yhj|a>b$gU>sUKi#&a{Uf|@`1uEdqv{$TwbkdKhY(Q;!w_rZw3d=37>l#uhqZC zL7i_S^`?6EHlqCQ$tzYHaK|Mg&wj2hSr+qtsJ7zof=%+gw9)d6F zRR#+2K#G5K5>r&2emdbp31utuv!QP0s+i3x(_|-^qMIGLm-PrEzxYW88WLgIEib9h zw&6iyA>JXCzcV!pt7X!r#1K0U0IdtX2IA9eiJdju6hGG}hEz&1PC%WT*mV>OGADl} zCAeq5=3mx%Hxrku5N1!d>T$ZclBX)t%uDP}`qj%u{@Q$bArsfh?_N;J zNJ+obgYMHx3tO^F&bf-16`~X|QQV+`WtPMbDjgQ66Iyvbw)4i=c-IbMg;qu}i-cQ6 z(}?&??6W_3w|nq1t(&uEW_fAqq(`9e5iM{Rbi8(RwS?8gcEP;JtWm(eebPTT>9m5e zcRNc83hcNG8DJMkZv8FyyhkRajQn>w-@fCfHHz1sUAfh@*P$gnY-xFNmdWpj+JX}! zntq)0<_Pr`HR2-F zJLnexX60KWlyz>^(WV?q*Yrv>qA#TQUjH)X)n=`qbSckK?pg~m=!*E_&olp(@p-u%A9Sso$ga_IWmV?!Aq;KDiu;nMD-~~q@XI9!h62V z)$Ai~XnjZ`f(hLpwsK)DPTLw; zemAX;5jWx(gvyK*i@Bktn@!mv+aoekPzD%rp4l5M6EkNpyMF26u;3reiSk_%(Wo3y z=Guc@Xckghu^Nh7DsZ4#?T_7?)oyC->a1ROFs-J~KEKE}>B#)oHRG?LG97A8A<^0y zqNc!Pyl_mjhsO|#Erw-@MV*2zN&G~W7`yeCAM~HROugVilj`By&ql`Nh~o+(*?B|3`Ipr zy>EB|v|AKAlqPmKA3H!Wdt0w{U7_n(-EGHCYI8S7W$Q}foaT0Ay=8~9;c&R_Yn5CX z5lE6*7^`K!R*aU)*Em?ab+PWwnC45!mR3sl-wEmN>=WwYs$ch|ODqm3&<}ZzwGN+O z*v$TUMS0%sFSGf-JotZ!``ii`NSZuXrW`1*?h}5!0f?2ECa)^XMD}~}Sk}rAbzyb; zML3t^#|&kf3N>4`30Q9w8fV=x+=w6Zy8UbCIP9C9*VVd<>Dk0xU>2|8p#_m3JzZwN zu^AAY2ZVlMj*_t~GJ6V&u@J`69^giq^Ki;VchP@7Thi`u%p{aVm3;izb;>&P@0B)% zHa4YW%|EPGYdlqlgc-!@r^T1$MGP0Z9mj-2l(4k_U13E&Eioqd$nD^=0;t|LIxL}k z&W63rxZA!(U~9Q_dDE=dwGn!&%s8FEm7&+rte#W%g~@`zLed6(JC(=xY++Y0jk@q% znEcbR`3ZsvAwcYnDGMSi#uq9W(+3msM|M=$?G!|Lbd(4`vy%5n%?#XgCM;}8B-huO z8&HDWR$KEBLgJVRE^SUjcX|CQJA);`xUAQK9fPQxfYwgQ#@CGONRyHepDR`QXi%x= zu-UcN^)zQrrve$S(VQX`TO8z!-XrF~Gd&>scxpjh_iU&*mpVeFDB z1PVeRZ^n}eB)*c#wYP5XxiLcaWEUhNWn;_`pgvJv+zj(984N z7g8s(o6qj6EYQ8R|1%7hcuq=FHdpODK+}ftA8wt-gfl5m1KU-m#3mM7T)KV3Qi|so zbZ>1e!P|xA{9ZB|yDsv$L8`}4C9%)(YZ7N=?d7GZsCPI5d(9}B(#UR(nw2)6xg;{K zO#>N52j7|2L{zDOZ0|gP5`8ai`A~FCHkiNocJq6@{}pLr@m9yU%Gc$WJB)rL{6*WF z&YeFpKdGACoitc&p_rfDB{rc@Lt9)ZO|hqSO(?n**fHe-8DpZ4=kD{+bc1R0ZjV2B z%sXAPs@x@+Wq7ClX&mRlfEcV!+6@U|=R|-2xSin0c)#Vq%qTon{(=%7(u`IbrP&Nx z=NHnHZoHBnz;7{|;Pg>A`;|G}7NLtU1V=dcK<^3bi_Eq{Cr@8wY zPk5hg(JGPSm^p1|Lg5ex=9%xaIipKrH{6ezAgiSqdeR?hL`$dcF51*J7P7u4jx-KmH_tEtJMnV+nzrkm`|Xu=YIZSJZa^cYj3_rsB0u2!PU zS=*A&CrDqrF}xW!w?>?u!4-6$`DiNBch7JDQ%F$9+r^EM@z9#fp1qvl$t2lYO52z$NC^A;NAzyqGNa8XlrNC~S+x7x&}Z+! 
zamNQC!8WEQe%?`I1LGcleOthYiQ^_K9UgrhXNDopY`hm{0o?Q#rZB>MRS@oL5s&qQ z5+#EuVFE6kz+n)%XK?Cq%Zhxp$zv*u{0)hpvgGO$K2_TfZfmVp`1uSkS+crC;~jdj zOtOEe)H}5@mG+iDEGsuxeUE+Bb}idrVM8w=Yp+d6u!j8XIjVPI*V2xrZK>?;h*n%# zWqkdL12-C6H%+M5Ca3W&*JK+=t*+2)4JAF58a^zvzWQsM#@JV8m*!B}tKM_{$tqP# z;!UWnIlEWUnlo{hsYvL0w*+pIsM@|8DWwQFYo<-{U5cS17*HrsXlxESc%B8v0E`=b zXK;ucaT}^{&7L>1DdPaxYEV;{yeNQ;LD_oUy})9gy+wFzOVpBRQmd4Y%46jJJ)K=i z=eRIn`cp3PA8T+^4p=(;6FCtivx7qqc|7z9yEi}iB_$Bd&*ZCqd8YEw6FZ}aCCicjN_Z{wzw^Q zJ<IHwwdB4}yxHt9NTg3BMAedzcTtSLr-LmrbRr4#ZGD8$mGcTGwuOY_Wp= zt}P~N!imQ>MxJtA;N@79A~?qn3q&4TpMo`~C znq#g?gC*(k9Lag8O)MLko)nk-6sk(1&ej=Tq5>0E+!4@b2LV&!nbf1=Kh%=seyE7Z zD35wfwh3xj%%()TS|Be+JL7 z?QnXMtw8pP#b48tyQ@OGLTvmT29G9vg?uu zl2&r{qE-DCtTJT|@;MyKRQlNBEGyFIL5XB1yl`q}ul=Zg{QB{T#Oky&l3~GH0}#j5 z3Nxw$1oAvra6-wuks4M&Wmva^68QtG2wX8v9lSSZAx$}Ei!o3j&URR()oWYynf;n z@*NV@O)|tTa4>T2uCfE|6-N@IY%y2+qhoEfHu{DOw)?Tf6YT8>3KE?=NR+?MK@(*4IW#LT6x zs1v5L1)iSWM?yv3hTAaJE?2GU+dRnJnOaGdC8|b%ae14OOkcpvrAN&Ufy??G!J{n- zv!A*$t zkA1H1BTA5AP$1ewkc153I3nCWgPFDV%#d#dG8ht`-2(S;rArlRZOpQsh*L$7hK$z? zFXmLPoE9HEKdNxf(;(-O&w*_7rpe40x={7^gin8-B6+Z;m-6w=O|QW4l(u_p-h$e% zU(cr&j2;Pfr0K>hIWi86@1-G!c93DUnv_hPdxr)X)M*$9s9o=ce9_5!1pW`=#x8|v zPqu_u4Os%>p8$+r`;yRNC4%N5yal0q1?ELlC?Q^t;O6G=s@Y6~Yx+5=JD{k68gZ>= zXtE7cU`*P!(<#?IRv>fnQeb8jUXNfudYoeugTJpS{$Z`r zd4cSOnk(%P8T*$B^(46Yf%$CScrkVIrJEgjR1+Iy)i-)*q;%-a@*Amccd?xu1!FLH zr0Gj5{ghsQE)?i=iCq~w%<}#=zIEpV{-u^>A=abA-cel-8R@qwEnyct*@CD%OTbOO zqj11J$#=a_tiMBj!k66J{kndJ?n;$!nH4-Zi2;Y<00KmnucePqw8mX>nMR$nBh1lm z^9g9V=pV5SXJ~LV6c25EeVqE(J^eDo7{a~Gc+a@XJ}}L;PjI>!M4Q+bG5kftn5}8# zE20^^z$@W`NF7%UW%ylBOA|2$kel%PeUQBMNcc4IVEXlgX^Bz@_qjbDxFvn!<{;!& z=ZM*!L@C)(^7jtb%1>wc%NKHVnWLry-rttyh#zj^p)ZH&9u}dU~y{)?dUao)a^?dIm$;YAX2{s`?=Uu^H@$>Cqhw$x#Ww zvxS%~ZP=aq24a$P2)7A3LCrct80s~YVYqd@yfo|j^(X667$MJZJ0a&z_MNc1@{3Rn z3DLE&BPP*>CSk4SX!2R6fU;mtwZeus8hkvo4+>X2C+Ga{W`OIwBgG>@k(^*Wsx);N zDWZhx;^Z#Y+jFsw&1A48K=LmY0=jK*`YtR+ANs!UsrxSmHGf>dhQQ0QsuZK> zoU9*Rq#htdkaF3sFd4G_N$^Xz+nLxC^6?{nx7=`PlXhncqPQodEzBd)mpig)qNb~8 zm#8YumrJUZSPF1{cUFU84)(G6ey-M&dXeTJS68IFo?F##0i0? z1IzLV#pm>ex093O5M1y@AgM6#!I~Y-8di(_Hkqm; zUA8B_xA@hm#aAVVHyp|{+!4VD4<#N|lyAwuu?EiwJB|75(0ArLcnqT z4DF-J8Tb`j7!Em06UiMjM+3mUGSUG6)i~%KN$0J`OlY^^Wp^<)6v?DG_oAf$O5}c9 z)ZlF``oO`s7!bD_mb}h7jU{BJhn?V?(P=t$K{TRbrha!y#et!YoWm9M6Yj@we0Z$| zk1Ov#?CTbd4Ysi#FLs;Cc-cScVqtf5w3W4)7jtfe(f7{jamZ)yz8;SRzMiDHlz3z7 z1J@JKn5!|MjurtWI0|4R4Pb|b)64L)a|Ip4PxP*!jMd^AT7+4_qh_;I0M4hH&q_oR zb9n2nuCUQ&3>h*ymNXv7%QGRqhny#0~n^!A25>WlvSR?GJ$CoK+GgUmfhy}_tXIb|^99JN_z{Z!1A8H2WX7irOMNmnt!icrnDbunHWc$oYSoL*snln_(bZ2bGr8&S(>w0^l}h3ZjbA6>2#gDksL#1%F3lCwF5jAWlTTYsKl5J z^{MU>Cfttl=?jW|(^lt=P#%C%f+kp<$BZ7X*haJ&&sH1L3MFu;Qtd&_9byAp6bfqv zvODR;J=0iI*ssxmptl4SH|O*bf-Lf>1KSC9eWiSWUGr+rP=t{tjG9ub-zcmU0(m`nL4AktC-YaVCF!@U=rmG9!E zQIUI{!V;#kA>m=s4N&UQP6L$uUzFfU)843@9WFVm zojb^>sDUkk9PpHm9&E}+P&3PxvEmGsk1h{n#1nnyWmmz5S*FXI zMo(*Pjk+*bzhjKvMPjtI`7AXbIzBiv5x#aV;34JupG|dYq+fELF6I6Gk|t^}U_2`~ zZ6hX)MY?Fw72Hv*@3jC4YFNTk{Lwc~sieM{m`kezhuF^uHt0A2aLEs2sAXn=NhEo(_7`FQ{{2rMm%YmY^Y+3n7z*d@_*H1iB2c^g;oMm$1!u&jxF>8T! 
zpbU6ghuLKFJ0!Q@=EdTX*WjmGUW7Ig3X zTzvthR@JHCTsU{ntVC!r%hvD9ls8*2JS|A#i-0_5%Q+})KwkkVk;0+S<6j^XlxLw$ zdlP;5vi_*D8;X^P9P?1%yzT=g_Lk!fDwYJ-P56 zHo6O-w8?!?W{%{Dl$h5lic4SnNmO0R-LZoq!QlnD0E5sI%6~l(KGS=hBI=+ZcP5|1 zOdiRUU$+&x?=+&#nvrWO3eg zNT)3jKw;U5H+-5sWY)W3D7i3Fo)HK;n;r#cek>bb8nbjd>FGvQ*E4>!(c54GLoWvC z0>q4=%6|f>05`x%nuxLu5_ujQDtE5}s2!Go8Btm-RUQ@c`$N{FpdVuyKUq z!R-W`w*45j7fz6T9}OKYI<89tG_n^v6a{S^5@EyUMG^}}#1R6!0+Rb@)jqirL7gp2 zIdp*LA+wqkbb^NNk@Y(b$W{t)jaqe^ea)ocE#28~*GRPZ+IWv4Rt|w9h&8NKXxG zU^ps3GYRrM_yg6i)d9AT>>NGdl-)c)c;}$A9v8+ADd=pa13Ui_!n|`xfE6svD)b(B z`&pu{J7&-#&aI<~SkQa}rThsFU#8TdE5h%4bk?Y!(-%K72EbiWQx}Sh1tj-_pAb+0 z710D?f&qZSAaqGq`ux~T0A=Pf>@WZ#4LkGqDvw-a9Udj7Q^`ndJH z>Hl>*#Ufs-kVSO{0rv=$9$OP&qCw5o&Z-CN;Z{FX0%qPAUMe;~j(UJL$9@E<44E*} z2?}!D)%0+q$=t0h<5(*bJ{#lNvTh+~ilfR#2r`VQPc=I@krTnXd)iDhq+aRMqj%i%r1ZAf zHGqGZ28U@1pxD))@=|oSAxYztm=&n}2#4c9Z zdSxTB3&{X_h7K%V&~KvhzL3?SDi3wH>9yINHAG@)G4xIVU zUEZ(?&NU42z_N-5@u1}ebldM*yIZ{=`_jhho2o{r!;pG15D+4qPK04By&cegn*lw+)5s6Y@fB;<4UFe9YfyP7k%L7A z_{PI1pD|VDSyq*MQ_W$;PgA`4>Y`J}D$X;Mnm7mXJKcZfDK94^hsHy5sagYsX;uBY z)Q&r}J0)=r=VhX)>&=hN!g8J&*YxhuEmbpaxPp$P&u4IMGQ;eV8STN3#iK< zg#*2QqECGawTalnZG4hMImY-V%u<0g9zc;U`kV!Yp~(FrqE-X(%QMa9SHMA`H!g8R z3~6)qzn_0bagt?0BZIF10axcO09$vy!2MycXg~AP0E_%$8D0z*z#j0mh?^OZ^<(Y@ zv&jiUXqN5ZA3|R@u0gG>7%r2VWH-inX!!0a`0gU}(7xr(YKTZG0otb%>TtLPK!uU+ z4B+F7T?BA#Q4IC|us8IwKpcdVGQ_&g8?%qed?BXzY%;a=tw(PGO|veW(`{i9{M5O- zk98qf_}guPu8yrr_>(JLpclciWl`D~YyGr{4k-~}s9Gbr4vK9aoO2;SXy@2NH#pzD zoc=rwd=BADf9LL!n}as3kFoq{@bxd*!0 zYA<4i;lZDV_zQrLnW0`N)2*FtR{GrDfe!>oC$Nwr=4k@w=I4Z=`yPPY^JAzbFhGca z3XGv%{D`AFbDMuP3W5$=h%cEF3xdvFNKrFWVOu&iA1oU*EMjH^H0^-k3Ybk`exU2c zxUDlVVJYz^Xm${67D7n2w_uaf?NQ3J(77s3SKukr0C}rr%ZR2hbU&QNP$Iw+Wxx_; zo&i1IR?bcFm=+5N0kZks2i-N+()6-pWgY5x$+&KzJ-Lck4XE-f@W?vbb;=YAvvtlxKor@xjT_qT2T7X; zLLe5p=*gu1>q|@lnWW1W?^Y?SEYc(K2gbu{B2Lo<) z8bk%OYkC6BI=#nvIk#9vdq80Zx1aFUY1ZI$g`(es=2Cw)W0Zc|zNYbVf!~kw6sFo) z>nm_j0pZmN3by*aPO~Tkj!PUf`*pZo7uVxf`Z9BznHLAz4bz}uPOB13iopI2;NK!2 zG`W%JlNTKaoduz{C9K0IeQ%vMasCATWV$u-Ra>j{-$W7tiu7NjFHKRifsQz6?pSQ| zgmdvSwDOzhvsLcz;TC{10+BMzk$UgPV%w&DR&lObO}dXNov_kUjD&M=@BgAt?-Ij{ zY)P%#I9DBoTsqSAgH4A)DnX}EBZxRzb z4@p=p^))ISCN&;tf5vrswoSwl=O4=4(Or)l7B+2XF7~%)nnE|y--<}@qO5<34YM7OD#bP|&;)SwdB7D&iisZshz8n)+oU=B@K3GdQG@1HX?d)$52uzi2SY*2v$^ zx#lLEu@Kv;)`day!au6xu^xAGuE)tHnE4ILIwyWCZdAE)5}w}V&t9xLz`XG>H@D_F zblQlG)BT9!iWbjjey}Rho^4qipJevk8w1(xZ2EAepHs(I#D%yvI$XIf5+$jjG1mXV1abFHimat=GSKi)FyU-nqovQPYZ+9!kNw%6RHBQLu5MA>hCM{&u8Ko5MN;QSf2Z=fYV z-lBhkf(^;U{(cKn!^oUbbyDrB3fRyZ|DV?t&qnD#nF?!Td9e0W-%sSmEd*x?1q8p`#N zP204bcVqrMBx>{7_L)t?J5UjmB5w?=JJsqUkJnG_<$M_;l*7lIRBW9}*5yXW|K0Ri z@38f}zjY*60mEN)DqH#)-L!7iJKG-R@iz67Y#O+7<|roUN4b;3E^3}(=@=j5o!V{bi66)Z5{-aBfXvcr!?f(6t{-dY+_uKv3F#JFN0!J|fZudT} z%g33F|Dj3wzZlN{?2><*o&W51|27Q&&A0pawfR5&0_$<&Ttk54dj5a2@luSJwC-wj UR|IwDa+KC79YgKBW9I_@7s+0FO8@`> literal 0 HcmV?d00001 diff --git a/www/frontends/compiler_gym/src/logo.svg b/www/frontends/compiler_gym/src/logo.svg deleted file mode 100644 index 716947603..000000000 --- a/www/frontends/compiler_gym/src/logo.svg +++ /dev/null @@ -1 +0,0 @@ - From a79ea0c7a061cff52d7f749d92d7734e517b0661 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:03:04 +0000 Subject: [PATCH 004/142] [www] Make set_api_ip.sh run from root. --- www/set_api_ip.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/www/set_api_ip.sh b/www/set_api_ip.sh index 8e0ce25ee..91fa76b1f 100644 --- a/www/set_api_ip.sh +++ b/www/set_api_ip.sh @@ -5,13 +5,13 @@ # # Update the hardocded IP endpoints for the backend API. Defaults to localhost. 
# -# Usage: bash ./set_api_ip.sh +# Usage: bash www/set_api_ip.sh set -euo pipefail main() { local ip="$1" - for file in $(grep 127.0.0.1 --files-with-matches -R frontends/compiler_gym/src frontends/compiler_gym/package.json); do + for file in $(grep 127.0.0.1 --files-with-matches -R www/frontends/compiler_gym/src www/frontends/compiler_gym/package.json); do sed -i 's/127.0.0.1/'"$ip"'/' "$file" done } From 4658225054715ff0124872d846cb5e82932bb166 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:03:44 +0000 Subject: [PATCH 005/142] [Makefile] Clean up www build files. --- Makefile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8672909e7..5cf94f187 100644 --- a/Makefile +++ b/Makefile @@ -230,6 +230,12 @@ all: docs bdist_wheel bdist_wheel-linux # Web interface # ################# +# A list of in-tree files generated by the www project build. +WWW_OUTS = \ + www/frontends/compiler_gym/build \ + www/frontends/compiler_gym/node_modules \ + $(NULL) + www: www-build cd www && $(PYTHON) www.py @@ -384,7 +390,7 @@ COMPILER_GYM_DATA_FILE_LOCATIONS = \ .PHONY: clean distclean uninstall purge clean: - rm -rf $(GENERATED_DOCS) $(DISTTOOLS_OUTS) + rm -rf $(GENERATED_DOCS) $(DISTTOOLS_OUTS) $(WWW_OUTS) find . -type d -name __pycache__ -o -name .benchmarks -print0 | xargs -0 -I {} /bin/rm -rf "{}" find . -type f -name '.coverage*' -delete From 288873a76d4b310475925e96cb95685cdcdfd77a Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:04:27 +0000 Subject: [PATCH 006/142] [Makefile] Make docker image tag customizable. --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 5cf94f187..46f0202db 100644 --- a/Makefile +++ b/Makefile @@ -236,6 +236,9 @@ WWW_OUTS = \ www/frontends/compiler_gym/node_modules \ $(NULL) +# The name of the docker image built by the "www-image" target. +WWW_IMAGE_TAG ?= chriscummins/compiler_gym-www + www: www-build cd www && $(PYTHON) www.py @@ -243,8 +246,8 @@ www-build: cd www/frontends/compiler_gym && npm ci && npm run build www-image: www-build - cd www && docker build -t chriscummins/compiler_gym-www . - docker run -p 5000:5000 chriscummins/compiler_gym-www + cd www && docker build -t "$(WWW_IMAGE_TAG)" . + docker run -p 5000:5000 "$(WWW_IMAGE_TAG)" .PHONY: www www-build From 0743ce725931f787b8c3f746ac2cf01812091d0e Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:05:40 +0000 Subject: [PATCH 007/142] [envs] Add benchmark URI to error message if reset() fails. --- compiler_gym/envs/compiler_env.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index bb4040f5c..1eb00e774 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -746,7 +746,7 @@ def _retry(error) -> Optional[ObservationType]: if retry_count >= self._connection_settings.init_max_attempts: raise OSError( - f"Failed to reset environment after {retry_count - 1} attempts.\n" + f"Failed to reset environment using benchmark {self.benchmark} after {retry_count - 1} attempts.\n" f"Last error ({type(error).__name__}): {error}" ) from error else: From ec72427409d4d12d76efc9b6cc851e7b64bd7e9c Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:15:38 +0000 Subject: [PATCH 008/142] [llvm] Remove benchmark scratch directory upon deletion. 
--- compiler_gym/envs/llvm/service/Benchmark.cc | 8 +++++++- compiler_gym/envs/llvm/service/Benchmark.h | 2 ++ compiler_gym/envs/llvm/service/BenchmarkFactory.cc | 1 + 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/compiler_gym/envs/llvm/service/Benchmark.cc b/compiler_gym/envs/llvm/service/Benchmark.cc index cde3746ce..813dd8270 100644 --- a/compiler_gym/envs/llvm/service/Benchmark.cc +++ b/compiler_gym/envs/llvm/service/Benchmark.cc @@ -163,7 +163,13 @@ Benchmark::Benchmark(const std::string& name, std::unique_ptr needsRecompile_(true) { sys::error_code ec; fs::create_directory(scratchDirectory(), ec); - CHECK(!ec) << "Failed to create scratch directory: " << scratchDirectory(); + CHECK(!ec) << "Failed to create scratch directory: " << scratchDirectory().string(); +} + +void Benchmark::close() { + sys::error_code ec; + fs::remove_all(scratchDirectory(), ec); + CHECK(!ec) << "Failed to delete scratch directory: " << scratchDirectory().string(); } std::unique_ptr Benchmark::clone(const fs::path& workingDirectory) const { diff --git a/compiler_gym/envs/llvm/service/Benchmark.h b/compiler_gym/envs/llvm/service/Benchmark.h index 83ab25b36..b0f034398 100644 --- a/compiler_gym/envs/llvm/service/Benchmark.h +++ b/compiler_gym/envs/llvm/service/Benchmark.h @@ -130,6 +130,8 @@ class Benchmark { std::unique_ptr module, const BenchmarkDynamicConfig& dynamicConfig, const boost::filesystem::path& workingDirectory, const BaselineCosts& baselineCosts); + void close(); + /** * Make a copy of the benchmark. * diff --git a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc index d90f7255f..3ad73da77 100644 --- a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc +++ b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc @@ -99,6 +99,7 @@ Status BenchmarkFactory::addBitcode(const std::string& uri, const Bitcode& bitco auto iterator = std::next(std::begin(benchmarks_), index); // Evict the benchmark from the pool of loaded benchmarks. + iterator->second.close(); benchmarks_.erase(iterator); } } From e3ce619cc39aeb450c9f83e6504933da525df840 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:16:19 +0000 Subject: [PATCH 009/142] [docs] Fix exception types in docstrings. --- compiler_gym/envs/llvm/llvm_benchmark.py | 4 +++- compiler_gym/envs/llvm/llvm_env.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/compiler_gym/envs/llvm/llvm_benchmark.py b/compiler_gym/envs/llvm/llvm_benchmark.py index 2530cecb4..14376bb38 100644 --- a/compiler_gym/envs/llvm/llvm_benchmark.py +++ b/compiler_gym/envs/llvm/llvm_benchmark.py @@ -259,7 +259,9 @@ def make_benchmark( :raises TypeError: If the inputs are of unsupported types. - :raises OSError: If a compilation job fails. + :raises OSError: If a suitable compiler cannot be found. + + :raises BenchmarkInitError: If a compilation job fails. :raises TimeoutExpired: If a compilation job exceeds :code:`timeout` seconds. diff --git a/compiler_gym/envs/llvm/llvm_env.py b/compiler_gym/envs/llvm/llvm_env.py index a5811a6c4..f814b6d8f 100644 --- a/compiler_gym/envs/llvm/llvm_env.py +++ b/compiler_gym/envs/llvm/llvm_env.py @@ -390,7 +390,9 @@ def make_benchmark( :raises TypeError: If the inputs are of unsupported types. - :raises OSError: If a compilation job fails. + :raises OSError: If a suitable compiler cannot be found. + + :raises BenchmarkInitError: If a compilation job fails. :raises TimeoutExpired: If a compilation job exceeds :code:`timeout` seconds. 
From 89ba0668ac3f46c272b31c4a562da6fa3b7d9420 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:46:11 +0000 Subject: [PATCH 010/142] [examples] Remove unused config key. --- examples/llvm_autotuning/config/default.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/llvm_autotuning/config/default.yaml b/examples/llvm_autotuning/config/default.yaml index 94f6df117..6d3987292 100644 --- a/examples/llvm_autotuning/config/default.yaml +++ b/examples/llvm_autotuning/config/default.yaml @@ -8,7 +8,6 @@ executor: # Specifies the number of parallel worker processes. If -1, defaults to the # number of cores on the machine. cpus: -1 -search_time_seconds: 3600 # The base directory for logs and other runtime artifacts. outputs: ${env:HOME}/logs/compiler_gym/llvm_autotuning # The top subdirectory to arrange files in. Use this to group related runs by From 75d5ff575a7e72f173d13025aea4fdcf3828267f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:45:47 +0000 Subject: [PATCH 011/142] [examples] Add support for file:/// URIs and make it zero-safe. --- .../llvm_autotuning/optimization_target.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/examples/llvm_autotuning/optimization_target.py b/examples/llvm_autotuning/optimization_target.py index 2be2833b3..b822d17b7 100644 --- a/examples/llvm_autotuning/optimization_target.py +++ b/examples/llvm_autotuning/optimization_target.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. import logging from enum import Enum +from pathlib import Path from threading import Lock import numpy as np @@ -32,7 +33,12 @@ def optimization_space_enum_name(self) -> str: }[self.value] def make_env(self, benchmark: str) -> LlvmEnv: - env: LlvmEnv = compiler_gym.make("llvm-v0", benchmark=benchmark) + env: LlvmEnv = compiler_gym.make("llvm-v0") + + if benchmark.startswith("file:///"): + benchmark = env.make_benchmark(Path(benchmark[len("file:///") :])) + + env.benchmark = benchmark if self.value == OptimizationTarget.CODESIZE: env.reward_space = "IrInstructionCountOz" @@ -70,15 +76,13 @@ def final_reward(self, env: LlvmEnv, runtime_count: int = 30) -> float: raise ValueError("Failed to replay environment's actions") if self.value == OptimizationTarget.CODESIZE: - return ( - env.observation.IrInstructionCountOz() - / env.observation.IrInstructionCount() + return env.observation.IrInstructionCountOz() / max( + env.observation.IrInstructionCount(), 1 ) if self.value == OptimizationTarget.BINSIZE: - return ( - env.observation.ObjectTextSizeOz() - / env.observation.ObjectTextSizeBytes() + return env.observation.ObjectTextSizeOz() / max( + env.observation.ObjectTextSizeBytes(), 1 ) if self.value == OptimizationTarget.RUNTIME: @@ -98,7 +102,7 @@ def final_reward(self, env: LlvmEnv, runtime_count: int = 30) -> float: logger.debug("O3 runtimes: %s", o3_runtimes) logger.debug("Final runtimes: %s", final_runtimes) - speedup = np.median(o3_runtimes) / np.median(final_runtimes) + speedup = np.median(o3_runtimes) / max(np.median(final_runtimes), 1e-12) logger.debug("Speedup: %.4f", speedup) return speedup From ea10acdcc3fd7f5103db7813ad356ea524cb3214 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:46:19 +0000 Subject: [PATCH 012/142] [examples] Write aggregated results to CSV. 
--- examples/llvm_autotuning/info.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/examples/llvm_autotuning/info.py b/examples/llvm_autotuning/info.py index 0e9777271..e93171809 100644 --- a/examples/llvm_autotuning/info.py +++ b/examples/llvm_autotuning/info.py @@ -42,6 +42,8 @@ def info( if not len(df): continue + df.to_csv(experiment.working_directory / "results.csv", index=False) + walltimes = df[["benchmark", "walltime"]].groupby("benchmark").mean() rewards = df[["benchmark", "reward"]].groupby("benchmark").agg(geometric_mean) num_results = len(df) From 83a9bb30507730369fa74c8f0d46df7775a77516 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:49:16 +0000 Subject: [PATCH 013/142] [examples] Log errors to files. --- examples/llvm_autotuning/experiment.py | 44 ++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/examples/llvm_autotuning/experiment.py b/examples/llvm_autotuning/experiment.py index 0f37d499f..3153cd07e 100644 --- a/examples/llvm_autotuning/experiment.py +++ b/examples/llvm_autotuning/experiment.py @@ -5,7 +5,7 @@ import json import logging from pathlib import Path -from typing import Iterable, List +from typing import Dict, Iterable, List import gym import pandas as pd @@ -80,13 +80,17 @@ def run(self) -> None: for benchmark in self.benchmarks.benchmark_uris_iterator(env): results_num += 1 results_path = ( - self.working_directory / f"results-{results_num}.csv" + self.working_directory / f"results-{results_num:03d}.csv" + ) + errors_path = ( + self.working_directory / f"errors-{results_num:03d}.csv" ) executor.submit( _experiment_worker, autotuner=self.autotuner, benchmark=benchmark, results_path=results_path, + errors_path=errors_path, seed=self.seed + replica_num, ) @@ -108,6 +112,18 @@ def results_paths(self) -> Iterable[Path]: if path.is_file() and path.name.startswith("results-"): yield path + @property + def errors(self) -> Iterable[Dict[str, str]]: + """Return an iterator over errors. + + An error is a dictionary with keys: "benchmark", "error_type", and + "error_message". + """ + for path in self.working_directory.iterdir(): + if path.is_file() and path.name.startswith("errors-"): + with open(path, "r") as f: + yield json.load(f) + @property def configuration_number(self) -> str: return self.working_directory.name.split("-")[-1] @@ -163,16 +179,30 @@ class Config: def _experiment_worker( - autotuner: Autotuner, benchmark: str, results_path: Path, seed: int + autotuner: Autotuner, + benchmark: str, + results_path: Path, + errors_path: Path, + seed: int, ) -> None: try: with autotuner.optimization_target.make_env(benchmark) as env: env.seed(seed) env.action_space.seed(seed) state = autotuner(env, seed=seed) - - logger.info("State %s", state) - with CompilerEnvStateWriter(open(results_path, "w")) as writer: - writer.write_state(state, flush=True) except Exception as e: # pylint: disable=broad-except logger.warning("Autotuner failed on benchmark %s: %s", benchmark, e) + with open(errors_path, "w") as f: + json.dump( + { + "benchmark": benchmark, + "error_type": type(e).__name__, + "error_message": str(e), + }, + f, + ) + return + + logger.info("State %s", state) + with CompilerEnvStateWriter(open(results_path, "w")) as writer: + writer.write_state(state, flush=True) From 9d2a433c51420c852010f1613cf91b678b8c8c01 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:50:07 +0000 Subject: [PATCH 014/142] [examples] Allow file:/// URIs. 
--- examples/llvm_autotuning/benchmarks.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/llvm_autotuning/benchmarks.py b/examples/llvm_autotuning/benchmarks.py index 31bfbfaa1..88bcfb5a8 100644 --- a/examples/llvm_autotuning/benchmarks.py +++ b/examples/llvm_autotuning/benchmarks.py @@ -62,7 +62,9 @@ def validate_uris(cls, value, *, values, **kwargs): del kwargs del values for uri in value: - assert BENCHMARK_URI_RE.match(uri), f"Invalid benchmark URI: {uri}" + assert BENCHMARK_URI_RE.match(uri) or uri.startswith( + "file:///" + ), f"Invalid benchmark URI: {uri}" return list(value) def _benchmark_iterator( From e356447ecb4bcd0c49acd68500182b80733032fe Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:55:44 +0000 Subject: [PATCH 015/142] [llvm] Ensure that custom benchmarks have legal URIs. --- compiler_gym/envs/llvm/llvm_benchmark.py | 8 +++++--- tests/llvm/custom_benchmarks_test.py | 7 +++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/compiler_gym/envs/llvm/llvm_benchmark.py b/compiler_gym/envs/llvm/llvm_benchmark.py index 14376bb38..cf8f5cb19 100644 --- a/compiler_gym/envs/llvm/llvm_benchmark.py +++ b/compiler_gym/envs/llvm/llvm_benchmark.py @@ -277,7 +277,7 @@ def _add_path(path: Path): raise FileNotFoundError(path) if path.suffix == ".bc": - bitcodes.append(path) + bitcodes.append(path.absolute()) elif path.suffix in {".c", ".cxx", ".cpp", ".cc"}: clang_jobs.append( ClangInvocation.from_c_file( @@ -307,7 +307,9 @@ def _add_path(path: Path): # Shortcut if we only have a single pre-compiled bitcode. if len(bitcodes) == 1 and not clang_jobs: bitcode = bitcodes[0] - return Benchmark.from_file(uri=f"file:///{bitcode}", path=bitcode) + return Benchmark.from_file( + uri=os.path.join("benchmark://file-v0", bitcode), path=bitcode + ) tmpdir_root = transient_cache_path(".") tmpdir_root.mkdir(exist_ok=True, parents=True) @@ -378,5 +380,5 @@ def _add_path(path: Path): ) timestamp = datetime.now().strftime("%Y%m%HT%H%M%S") - uri = f"benchmark://user/{timestamp}-{random.randrange(16**4):04x}" + uri = f"benchmark://user-v0/{timestamp}-{random.randrange(16**4):04x}" return Benchmark.from_file_contents(uri, bitcode) diff --git a/tests/llvm/custom_benchmarks_test.py b/tests/llvm/custom_benchmarks_test.py index bb856b3e2..d1555ac44 100644 --- a/tests/llvm/custom_benchmarks_test.py +++ b/tests/llvm/custom_benchmarks_test.py @@ -12,6 +12,7 @@ import pytest from compiler_gym.datasets import Benchmark +from compiler_gym.datasets.uri import BENCHMARK_URI_RE from compiler_gym.envs import LlvmEnv, llvm from compiler_gym.service.proto import Benchmark as BenchmarkProto from compiler_gym.service.proto import File @@ -115,7 +116,8 @@ def test_custom_benchmark_constructor(): def test_make_benchmark_single_bitcode(env: LlvmEnv): benchmark = llvm.make_benchmark(EXAMPLE_BITCODE_FILE) - assert benchmark == f"file:///{EXAMPLE_BITCODE_FILE}" + assert benchmark == f"benchmark://file-v0{EXAMPLE_BITCODE_FILE}" + assert BENCHMARK_URI_RE.match(benchmark.uri) with open(EXAMPLE_BITCODE_FILE, "rb") as f: contents = f.read() @@ -131,7 +133,8 @@ def test_make_benchmark_single_bitcode(env: LlvmEnv): def test_make_benchmark_single_ll(): """Test passing a single .ll file into make_benchmark().""" benchmark = llvm.make_benchmark(INVALID_IR_PATH) - assert benchmark.uri.startswith("benchmark://user/") + assert benchmark.uri.startswith("benchmark://user-v0/") + assert BENCHMARK_URI_RE.match(benchmark.uri) def test_make_benchmark_single_clang_job(env: LlvmEnv): From 
2eefcdab19f3742358ccc4c84aec054c0c7fc8cb Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 18:57:03 +0000 Subject: [PATCH 016/142] [examples] Permit whitespace in benchmark URIs. --- compiler_gym/datasets/uri.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler_gym/datasets/uri.py b/compiler_gym/datasets/uri.py index 350e224df..0a9db5eb4 100644 --- a/compiler_gym/datasets/uri.py +++ b/compiler_gym/datasets/uri.py @@ -18,7 +18,7 @@ # {{protocol}}://{{dataset}}/{{id}} # # Example matches: "benchmark://foo-v0/foo" or "generator://bar-v1/foo/bar.txt". -BENCHMARK_URI_PATTERN = r"(?P(?P[a-zA-z0-9-_]+)://(?P[a-zA-z0-9-_]+-v(?P[0-9]+)))/(?P[^\s]+)$" +BENCHMARK_URI_PATTERN = r"(?P(?P[a-zA-z0-9-_]+)://(?P[a-zA-z0-9-_]+-v(?P[0-9]+)))/(?P.+)$" BENCHMARK_URI_RE = re.compile(BENCHMARK_URI_PATTERN) From 22d671a358239e9b2e5f25dee612db9087e4484d Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 22:09:08 +0000 Subject: [PATCH 017/142] [www] Fix CompilerGym dependency version. --- www/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/www/requirements.txt b/www/requirements.txt index 9e96b1d79..b3dd04497 100644 --- a/www/requirements.txt +++ b/www/requirements.txt @@ -1,3 +1,3 @@ -compiler_gym==0.1.9 +compiler_gym==0.2.1 Flask==2.0.1 Flask-Cors==3.0.10 From b75bb0e60c1cc88efc74ea518246b8fd2bed8e74 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 22:11:32 +0000 Subject: [PATCH 018/142] [scripts] Set www/requirements.txt version when updating. --- packaging/set_version.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packaging/set_version.sh b/packaging/set_version.sh index 23033f89a..1742518b7 100644 --- a/packaging/set_version.sh +++ b/packaging/set_version.sh @@ -28,6 +28,9 @@ main() { sed -e 's/^date-released:.*$/date-released: '"$date"'/' -i CITATION.cff echo "Wrote CITATION.cff" - git add -p VERSION CITATION.cff + sed -e 's/^compiler_gym==.*$/compiler_gym=='"$version"'/' -i www/requirements.txt + echo "Wrote www/requirements.txt" + + git add -p VERSION CITATION.cff www/requirements.txt } main $@ From 877429d94d9e12190f2f9f57c8a1ec78bca2ae4f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 23 Nov 2021 14:27:17 +0000 Subject: [PATCH 019/142] [examples] Fix JSON file path. --- examples/llvm_autotuning/experiment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llvm_autotuning/experiment.py b/examples/llvm_autotuning/experiment.py index 3153cd07e..9e61e8591 100644 --- a/examples/llvm_autotuning/experiment.py +++ b/examples/llvm_autotuning/experiment.py @@ -83,7 +83,7 @@ def run(self) -> None: self.working_directory / f"results-{results_num:03d}.csv" ) errors_path = ( - self.working_directory / f"errors-{results_num:03d}.csv" + self.working_directory / f"errors-{results_num:03d}.json" ) executor.submit( _experiment_worker, From c0478e16334cf8e6df137b72f5aa09fb8ea32eff Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 23 Nov 2021 14:27:54 +0000 Subject: [PATCH 020/142] [llvm] Fix fast path URI and file count. 
--- compiler_gym/envs/llvm/llvm_benchmark.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/compiler_gym/envs/llvm/llvm_benchmark.py b/compiler_gym/envs/llvm/llvm_benchmark.py index cf8f5cb19..9e001b4f6 100644 --- a/compiler_gym/envs/llvm/llvm_benchmark.py +++ b/compiler_gym/envs/llvm/llvm_benchmark.py @@ -305,11 +305,9 @@ def _add_path(path: Path): raise TypeError(f"Invalid input type: {type(input).__name__}") # Shortcut if we only have a single pre-compiled bitcode. - if len(bitcodes) == 1 and not clang_jobs: + if len(bitcodes) == 1 and not clang_jobs and not ll_paths: bitcode = bitcodes[0] - return Benchmark.from_file( - uri=os.path.join("benchmark://file-v0", bitcode), path=bitcode - ) + return Benchmark.from_file(uri=f"benchmark://file-v0{bitcode}", path=bitcode) tmpdir_root = transient_cache_path(".") tmpdir_root.mkdir(exist_ok=True, parents=True) From 4b5fd35bc5ccceede87a0cf7afff94cbcb24dbb9 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 23 Nov 2021 14:28:24 +0000 Subject: [PATCH 021/142] [examples] Add TODO note about custom benchmarks. --- examples/llvm_autotuning/optimization_target.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/llvm_autotuning/optimization_target.py b/examples/llvm_autotuning/optimization_target.py index b822d17b7..58feddfc4 100644 --- a/examples/llvm_autotuning/optimization_target.py +++ b/examples/llvm_autotuning/optimization_target.py @@ -6,11 +6,13 @@ from enum import Enum from pathlib import Path from threading import Lock +from typing import Union import numpy as np from llvm_autotuning.just_keep_going_env import JustKeepGoingEnv import compiler_gym +from compiler_gym.datasets import Benchmark from compiler_gym.envs import LlvmEnv from compiler_gym.wrappers import RuntimePointEstimateReward @@ -32,10 +34,12 @@ def optimization_space_enum_name(self) -> str: OptimizationTarget.RUNTIME: "Runtime", }[self.value] - def make_env(self, benchmark: str) -> LlvmEnv: + def make_env(self, benchmark: Union[str, Benchmark]) -> LlvmEnv: env: LlvmEnv = compiler_gym.make("llvm-v0") - if benchmark.startswith("file:///"): + # TODO(cummins): This does not work with custom benchmarks, as the URI + # will not be known to the new environment. + if str(benchmark).startswith("file:///"): benchmark = env.make_benchmark(Path(benchmark[len("file:///") :])) env.benchmark = benchmark From 41a8b6c95746852e09bbd28430a82beca435f40d Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 30 Nov 2021 12:06:04 +0000 Subject: [PATCH 022/142] Update protobuf dependency version. 
--- WORKSPACE | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index 8335e7f71..a8caf96c0 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -102,11 +102,11 @@ py_repositories() http_archive( name = "rules_proto", - sha256 = "8e7d59a5b12b233be5652e3d29f42fba01c7cbab09f6b3a8d0a57ed6d1e9a0da", - strip_prefix = "rules_proto-7e4afce6fe62dbff0a4a03450143146f9f2d7488", + sha256 = "66bfdf8782796239d3875d37e7de19b1d94301e8972b3cbd2446b332429b4df1", + strip_prefix = "rules_proto-4.0.0", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/7e4afce6fe62dbff0a4a03450143146f9f2d7488.tar.gz", - "https://github.com/bazelbuild/rules_proto/archive/7e4afce6fe62dbff0a4a03450143146f9f2d7488.tar.gz", + "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz", + "https://github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz", ], ) From 30afdeecb4549c37943e91164430c86d297a49a7 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 30 Nov 2021 12:06:27 +0000 Subject: [PATCH 023/142] [examples] Add a --only-nonzero-reward flag to info. --- examples/llvm_autotuning/info.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/examples/llvm_autotuning/info.py b/examples/llvm_autotuning/info.py index e93171809..71149f85c 100644 --- a/examples/llvm_autotuning/info.py +++ b/examples/llvm_autotuning/info.py @@ -33,12 +33,19 @@ def info( log_dirs: List[Path] = ["~/logs/compiler_gym/llvm_autotuning"], all_runs: bool = False, group_by_working_directory: bool = False, + only_nonzero_reward: bool = False, ): experiments = experiments_from_paths(log_dirs) results = [] for experiment in experiments: df = experiment.dataframe + + # Exclude runs where reward was zero, used for pruning false results if + # the environment is flaky or can fail. + if only_nonzero_reward: + df = df[df.reward != 0] + if not len(df): continue From 5772c89f18103fd6c9d0eff71da35202c4e33bb8 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 30 Nov 2021 12:08:06 +0000 Subject: [PATCH 024/142] [ci] Add a step to empty the build cache. This is a temporary commit to force the CI's build cache to clean as it has become corrupted. --- .github/workflows/ci.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 305527009..872f438c6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -32,6 +32,9 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies + - name: Nuke the cache + run: make distclean + - name: Build Python wheel run: make bdist_wheel bdist_wheel-linux-rename env: @@ -70,6 +73,9 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies + - name: Nuke the cache + run: make distclean + - name: Build Python wheel run: make bdist_wheel env: From 74979bebc7a5457fe3007b74b84934abb558dc0b Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 30 Nov 2021 15:54:14 +0000 Subject: [PATCH 025/142] Bump tensorflow version. 
https://github.com/tensorflow/tensorflow/security/advisories/GHSA-3rcw-9p9x-582v --- examples/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/requirements.txt b/examples/requirements.txt index aa2f450b0..cc6f93435 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -12,7 +12,7 @@ pandas>=1.1.5 ray[default,rllib]==1.4.1 submitit>=1.2.0 submitit>=1.2.0 -tensorflow==2.6.0 +tensorflow==2.6.1 torch>=1.6.0 typer[all]>=0.3.2 typing-extensions~=3.7.4 # Pin version for tensorflow. From 4f32ac60157c2841e950127e131e21b6052015c3 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 16:33:55 +0000 Subject: [PATCH 026/142] [docs] Add a favicon for CompilerGym documentation site. --- docs/source/_static/img/favicon.png | Bin 0 -> 5400 bytes docs/source/conf.py | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 docs/source/_static/img/favicon.png diff --git a/docs/source/_static/img/favicon.png b/docs/source/_static/img/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..2468df08132fd78c20f3acbb0ede1cda102cc0f8 GIT binary patch literal 5400 [binary image data omitted]
Date: Mon, 22 Nov 2021 17:02:45 +0000 Subject: [PATCH 027/142] [www] Update the splash logo.
--- .../compiler_gym/src/compilerGym-logo.png | Bin 0 -> 36715 bytes www/frontends/compiler_gym/src/logo.svg | 1 - 2 files changed, 1 deletion(-) create mode 100644 www/frontends/compiler_gym/src/compilerGym-logo.png delete mode 100644 www/frontends/compiler_gym/src/logo.svg diff --git a/www/frontends/compiler_gym/src/compilerGym-logo.png b/www/frontends/compiler_gym/src/compilerGym-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..6dbd26488677a4a3c2de02c4f80133c7b9deb4bc GIT binary patch literal 36715 [binary image data omitted] literal 0 HcmV?d00001
diff --git a/www/frontends/compiler_gym/src/logo.svg b/www/frontends/compiler_gym/src/logo.svg deleted file mode 100644 index 716947603..000000000 --- a/www/frontends/compiler_gym/src/logo.svg +++ /dev/null @@ -1 +0,0 @@ - From 9488942a3a3c7f271d4079b48fa595e5b2e9c2f8 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:03:04 +0000 Subject: [PATCH 028/142] [www] Make set_api_ip.sh run from root. --- www/set_api_ip.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/www/set_api_ip.sh b/www/set_api_ip.sh index 8e0ce25ee..91fa76b1f 100644 --- a/www/set_api_ip.sh +++ b/www/set_api_ip.sh @@ -5,13 +5,13 @@ # # Update the hardocded IP endpoints for the backend API. Defaults to localhost.
# -# Usage: bash ./set_api_ip.sh +# Usage: bash www/set_api_ip.sh set -euo pipefail main() { local ip="$1" - for file in $(grep 127.0.0.1 --files-with-matches -R frontends/compiler_gym/src frontends/compiler_gym/package.json); do + for file in $(grep 127.0.0.1 --files-with-matches -R www/frontends/compiler_gym/src www/frontends/compiler_gym/package.json); do sed -i 's/127.0.0.1/'"$ip"'/' "$file" done } From 47afbf41898877b426f6f33759b313ce5cebf24f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:03:44 +0000 Subject: [PATCH 029/142] [Makefile] Clean up www build files. --- Makefile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8672909e7..5cf94f187 100644 --- a/Makefile +++ b/Makefile @@ -230,6 +230,12 @@ all: docs bdist_wheel bdist_wheel-linux # Web interface # ################# +# A list of in-tree files generated by the www project build. +WWW_OUTS = \ + www/frontends/compiler_gym/build \ + www/frontends/compiler_gym/node_modules \ + $(NULL) + www: www-build cd www && $(PYTHON) www.py @@ -384,7 +390,7 @@ COMPILER_GYM_DATA_FILE_LOCATIONS = \ .PHONY: clean distclean uninstall purge clean: - rm -rf $(GENERATED_DOCS) $(DISTTOOLS_OUTS) + rm -rf $(GENERATED_DOCS) $(DISTTOOLS_OUTS) $(WWW_OUTS) find . -type d -name __pycache__ -o -name .benchmarks -print0 | xargs -0 -I {} /bin/rm -rf "{}" find . -type f -name '.coverage*' -delete From 8a48afc945a311e695790fc449a10e725aa69253 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 22 Nov 2021 17:04:27 +0000 Subject: [PATCH 030/142] [Makefile] Make docker image tag customizable. --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 5cf94f187..46f0202db 100644 --- a/Makefile +++ b/Makefile @@ -236,6 +236,9 @@ WWW_OUTS = \ www/frontends/compiler_gym/node_modules \ $(NULL) +# The name of the docker image built by the "www-image" target. +WWW_IMAGE_TAG ?= chriscummins/compiler_gym-www + www: www-build cd www && $(PYTHON) www.py @@ -243,8 +246,8 @@ www-build: cd www/frontends/compiler_gym && npm ci && npm run build www-image: www-build - cd www && docker build -t chriscummins/compiler_gym-www . - docker run -p 5000:5000 chriscummins/compiler_gym-www + cd www && docker build -t "$(WWW_IMAGE_TAG)" . + docker run -p 5000:5000 "$(WWW_IMAGE_TAG)" .PHONY: www www-build From 5bb0ec87cde35f8152b98ca10562bbf8362cf205 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 30 Nov 2021 12:08:06 +0000 Subject: [PATCH 031/142] [ci] Add a step to empty the build cache. This is a temporary commit to force the CI's build cache to clean as it has become corrupted. --- .github/workflows/ci.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 305527009..872f438c6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -32,6 +32,9 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies + - name: Nuke the cache + run: make distclean + - name: Build Python wheel run: make bdist_wheel bdist_wheel-linux-rename env: @@ -70,6 +73,9 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies + - name: Nuke the cache + run: make distclean + - name: Build Python wheel run: make bdist_wheel env: From 8b60069c7b2e78e875873ba134a6ade2d5bc42e2 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:40:29 +0000 Subject: [PATCH 032/142] [examples] Fix GCC binary path. 
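run_search() previously ignored the --gcc_bin flag and always built the environment with the default GCC. A minimal sketch of the call it now makes, with a hypothetical compiler path:

    # Sketch only; "/usr/bin/gcc-11" is a placeholder for the user-supplied binary.
    import compiler_gym

    with compiler_gym.make("gcc-v0", gcc_bin="/usr/bin/gcc-11") as env:
        env.reset()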
--- examples/gcc_autotuning/tune.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/gcc_autotuning/tune.py b/examples/gcc_autotuning/tune.py index 3adaf828d..b7ffbe480 100644 --- a/examples/gcc_autotuning/tune.py +++ b/examples/gcc_autotuning/tune.py @@ -161,7 +161,7 @@ def scaled_best(self) -> float: def run_search(search: str, benchmark: str, seed: int) -> SearchResult: """Run a search and return the search class instance.""" with GCC_ENV_CONSTRUCTOR_LOCK: - env = compiler_gym.make("gcc-v0") + env = compiler_gym.make("gcc-v0", gcc_bin=FLAGS.gcc_bin) try: random.seed(seed) From 1a3bcfb97a519b466d73a57c7a96e97199ea3f57 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:41:04 +0000 Subject: [PATCH 033/142] [examples] Bump ray dependency. --- examples/requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/requirements.txt b/examples/requirements.txt index aa2f450b0..a3387aa83 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -1,4 +1,3 @@ -aiohttp<3.8.0 # Pin version for ray. aioredis<2.0.0 # Pin version for ray. dgl==0.6.1 geneticalgorithm>=1.0.2 @@ -9,7 +8,7 @@ nevergrad>=0.4.3 numpy~=1.19.2 # Pin version for tensorflow. opentuner>=0.8.5 pandas>=1.1.5 -ray[default,rllib]==1.4.1 +ray[default,rllib]==1.8.0 submitit>=1.2.0 submitit>=1.2.0 tensorflow==2.6.0 From 3e03d8459c6d8c0adcb7b3039f574b4523a3a607 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:41:13 +0000 Subject: [PATCH 034/142] [examples] Bump tensorflow dependency. --- examples/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/requirements.txt b/examples/requirements.txt index a3387aa83..820ca2a4e 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -11,7 +11,7 @@ pandas>=1.1.5 ray[default,rllib]==1.8.0 submitit>=1.2.0 submitit>=1.2.0 -tensorflow==2.6.0 +tensorflow==2.6.1 torch>=1.6.0 typer[all]>=0.3.2 typing-extensions~=3.7.4 # Pin version for tensorflow. From a95f5983c65139c19f0c6cf74e3d445f5cf640c3 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:41:49 +0000 Subject: [PATCH 035/142] [tests] Add a test for GCC path. --- tests/gcc/gcc_env_test.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/gcc/gcc_env_test.py b/tests/gcc/gcc_env_test.py index 3777c5832..4bda5d3ec 100644 --- a/tests/gcc/gcc_env_test.py +++ b/tests/gcc/gcc_env_test.py @@ -35,6 +35,13 @@ def test_docker_default_action_space(): assert env.action_spaces[0].names[0] == "-O0" +def test_gcc_bin(gcc_bin: str): + """Test that the environment reports the service's reward spaces.""" + with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: + env.reset() + assert env.gcc_spec.gcc.bin == gcc_bin + + @pytest.mark.xfail( not docker_is_available(), strict=True, From af90a81259e362bba17150cdd5de29cdf6e4132b Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:42:18 +0000 Subject: [PATCH 036/142] [docker] Move transient cache out of /dev/shm --- packaging/compiler_gym/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/packaging/compiler_gym/Dockerfile b/packaging/compiler_gym/Dockerfile index e29774cf9..075c471c4 100644 --- a/packaging/compiler_gym/Dockerfile +++ b/packaging/compiler_gym/Dockerfile @@ -14,6 +14,7 @@ ENV COMPILER_GYM_VERSION=0.1.10 # Put the runtime downloads in a convenient location. 
ENV COMPILER_GYM_CACHE=/compiler_gym/cache ENV COMPILER_GYM_SITE_DATA=/compiler_gym/site_data +ENV COMPILER_GYM_TRANSIENT_CACHE=/compiler_gym/cache/transient # We need a C/C++ toolchain to build the CompilerGym python dependencies and to # provide the system includes for the LLVM environment. From 3cc3a3246f5f047a561e3a1bca157e8ecb9b4f58 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 2 Dec 2021 20:42:38 +0000 Subject: [PATCH 037/142] [docker] Bump CompilerGym dependency. --- packaging/compiler_gym/Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packaging/compiler_gym/Dockerfile b/packaging/compiler_gym/Dockerfile index 075c471c4..cbf3e5bbe 100644 --- a/packaging/compiler_gym/Dockerfile +++ b/packaging/compiler_gym/Dockerfile @@ -9,8 +9,6 @@ FROM python:3.8-slim-buster LABEL maintainer="Chris Cummins " -# The version of CompilerGym to install. -ENV COMPILER_GYM_VERSION=0.1.10 # Put the runtime downloads in a convenient location. ENV COMPILER_GYM_CACHE=/compiler_gym/cache ENV COMPILER_GYM_SITE_DATA=/compiler_gym/site_data @@ -29,6 +27,9 @@ RUN apt-get update \ # Create an unversioned library for libtinfo5 so that -ltinfo works. RUN ln -s /lib/x86_64-linux-gnu/libtinfo.so.5 /lib/x86_64-linux-gnu/libtinfo.so +# The version of CompilerGym to install. +ENV COMPILER_GYM_VERSION=0.2.1 + # Install CompilerGym. RUN python3 -m pip install --no-cache-dir 'compiler_gym=='"$COMPILER_GYM_VERSION" From 699d199ad152d064ccb7730c9e2b0b483a66b1b5 Mon Sep 17 00:00:00 2001 From: Boian Petkantchin Date: Thu, 2 Dec 2021 17:03:06 -0800 Subject: [PATCH 038/142] Meaningless commit --- examples/loop_tool_sweep.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/loop_tool_sweep.py b/examples/loop_tool_sweep.py index eb2e78ba5..933737fc5 100644 --- a/examples/loop_tool_sweep.py +++ b/examples/loop_tool_sweep.py @@ -25,6 +25,7 @@ def wrapped_step(env, action): if done: logger.warning("Step failed: %s", info["error_details"]) env.reset() + return observation, reward, done, info From 134a412df1ad2d44729036aec051a88189eeef44 Mon Sep 17 00:00:00 2001 From: Yongqiang Tian <41743566+yqtianust@users.noreply.github.com> Date: Fri, 3 Dec 2021 23:58:41 +0800 Subject: [PATCH 039/142] replace the --logfile by --leaderboard_results --- leaderboard/llvm_instcount/random_search/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/leaderboard/llvm_instcount/random_search/README.md b/leaderboard/llvm_instcount/random_search/README.md index 86d47061d..101cf514b 100644 --- a/leaderboard/llvm_instcount/random_search/README.md +++ b/leaderboard/llvm_instcount/random_search/README.md @@ -126,5 +126,5 @@ value for the search on the test set. For example: $ python random_search.py \ --search_time=30 \ --patience_ratio=1.25 \ - --logfile=results_p125_t30.csv + --leaderboard_results=results_p125_t30.csv ``` From 1f54ea7f482d0e59bef21061a5216a31a9a76453 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 17:13:36 +0000 Subject: [PATCH 040/142] [tests] Update benchmark URI test. 
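Follows up on the earlier URI change that relaxed the benchmark name pattern from [^\s]+ to .+: whitespace is now accepted in benchmark names, so the whitespace case moves from the invalid-URI test to a positive match. A rough sketch of the behaviour under test, assuming BENCHMARK_URI_RE is imported from compiler_gym.datasets.uri:

    # Sketch; the URI is illustrative.
    from compiler_gym.datasets.uri import BENCHMARK_URI_RE

    match = BENCHMARK_URI_RE.match("benchmark://cbench-v1/ white space")
    assert match.group("benchmark_name") == " white space"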
--- tests/datasets/benchmark_test.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/datasets/benchmark_test.py b/tests/datasets/benchmark_test.py index 6392d5528..d92156dce 100644 --- a/tests/datasets/benchmark_test.py +++ b/tests/datasets/benchmark_test.py @@ -49,7 +49,6 @@ def test_invalid_benchmark_uris(): assert not BENCHMARK_URI_RE.match("benchmark://cbench-v0/") # Missing benchmark ID # Invalid benchmark ID - assert not BENCHMARK_URI_RE.match("benchmark://cbench-v1/ whitespace") # Whitespace assert not BENCHMARK_URI_RE.match("benchmark://cbench-v1/\t") # Whitespace @@ -90,6 +89,7 @@ def test_benchmark_uri_id(): ) == "foo/123.txt" ) + # Query parameters are allowed in benchmark URIs. assert ( _rgx_match( BENCHMARK_URI_RE, @@ -98,6 +98,12 @@ def test_benchmark_uri_id(): ) == "foo/123?param=true&false" ) + # Whitespace is allowed in benchmark URIs. + assert ( + _rgx_match( + BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/ white space" + ) + ) == " white space" def test_benchmark_attribute_outside_init(): From 7a4c072c5345b0576e27c752619109570480668d Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 17:14:15 +0000 Subject: [PATCH 041/142] [leaderboard] Correct the ETA estimation. --- compiler_gym/leaderboard/llvm_instcount.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler_gym/leaderboard/llvm_instcount.py b/compiler_gym/leaderboard/llvm_instcount.py index 26e3c0ad0..9e13d0d51 100644 --- a/compiler_gym/leaderboard/llvm_instcount.py +++ b/compiler_gym/leaderboard/llvm_instcount.py @@ -282,7 +282,7 @@ def main(argv): "\r\033[2A" "\033[K" f"Runtime: {humanize_duration_hms(time)}. " - f"Estimated completion: {humanize_duration_hms(time + mean_walltime * remaining_count)}. " + f"Estimated completion: {humanize_duration_hms(mean_walltime * remaining_count)}. " f"Completed: {humanize.intcomma(done_count)} / {humanize.intcomma(total_count)} " f"({done_count / total_count:.1%})." "\n\033[K" From 13d5dc326d078a3e29d6c81baec80cb3adec61a9 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 17:16:39 +0000 Subject: [PATCH 042/142] [leaderboard] Clarify the evaluation of stateful policies. See #502. --- compiler_gym/leaderboard/llvm_instcount.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/compiler_gym/leaderboard/llvm_instcount.py b/compiler_gym/leaderboard/llvm_instcount.py index 9e13d0d51..9dde6eaa8 100644 --- a/compiler_gym/leaderboard/llvm_instcount.py +++ b/compiler_gym/leaderboard/llvm_instcount.py @@ -182,6 +182,12 @@ def eval_llvm_instcount_policy(policy: Policy) -> None: >>> eval_llvm_instcount_policy(my_policy) + The :func:`eval_llvm_instcount_policy() + ` + function calls the policy function for each benchmark in the dataset, one at + a time, from a single thread. Stateful policies can assume thread safe + access to member variables. + Put together as a complete example, a leaderboard submission script may look like: @@ -203,7 +209,8 @@ def my_policy(env: LlvmEnv) -> None: defines a number of commandline flags that can be overriden to control the behavior of the evaluation. For example the flag :code:`--n` determines the number of times the policy is run on each benchmark (default is 10), and - :code:`--leaderboard_results` determines the path of the generated results file: + :code:`--leaderboard_results` determines the path of the generated results + file: .. 
code-block:: From a1bde8abb1ed941371f78102bbc1817ee1d22980 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 18:44:52 +0000 Subject: [PATCH 043/142] [examples] Fix GCC autotuning benchmark selection. --- examples/gcc_autotuning/tune.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/examples/gcc_autotuning/tune.py b/examples/gcc_autotuning/tune.py index b7ffbe480..b1613e06c 100644 --- a/examples/gcc_autotuning/tune.py +++ b/examples/gcc_autotuning/tune.py @@ -7,7 +7,7 @@ from itertools import islice, product from multiprocessing import Lock from pathlib import Path -from typing import List, NamedTuple +from typing import NamedTuple import numpy as np from absl import app, flags @@ -191,25 +191,23 @@ def main(argv): if search not in _SEARCH_FUNCTIONS: raise app.UsageError(f"Invalid --search value: {search}") - def get_benchmarks_from_all_datasets(): - """Enumerate first 50 benchmarks from each dataset.""" + def get_benchmarks(): benchmarks = [] with compiler_gym.make("gcc-v0", gcc_bin=FLAGS.gcc_bin) as env: env.reset() - for dataset in env.datasets: - benchmarks += islice(dataset.benchmark_uris(), 50) + if FLAGS.gcc_benchmark == ["all"]: + for dataset in env.datasets: + benchmarks += islice(dataset.benchmark_uris(), 50) + elif FLAGS.gcc_benchmark: + for uri in FLAGS.gcc_benchmark: + benchmarks.append(env.datasets.benchmark(uri).uri) + else: + benchmarks = list( + env.datasets["benchmark://chstone-v0"].benchmark_uris() + ) benchmarks.sort() return benchmarks - def get_chstone_benchmark_uris() -> List: - with compiler_gym.make("gcc-v0", gcc_bin=FLAGS.gcc_bin) as env: - return list(env.datasets["benchmark://chstone-v0"].benchmark_uris()) - - if FLAGS.gcc_benchmark == ["all"]: - benchmarks = get_benchmarks_from_all_datasets() - else: - benchmarks = FLAGS.gcc_benchmark or get_chstone_benchmark_uris() - logdir = ( Path(FLAGS.output_dir) if FLAGS.output_dir @@ -234,8 +232,13 @@ def get_chstone_benchmark_uris() -> List: with executor.get_executor(logs_dir=logdir) as session: jobs = [] # Submit each search instance as a separate job. - grid = product(range(FLAGS.gcc_search_repetitions), FLAGS.search, benchmarks) + grid = product( + range(FLAGS.gcc_search_repetitions), FLAGS.search, get_benchmarks() + ) for _, search, benchmark in grid: + if not benchmark: + raise app.UsageError("Empty benchmark name not allowed") + jobs.append( session.submit( run_search, From fe6314da05ce8176b338844fba269149742f7db4 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 18:45:14 +0000 Subject: [PATCH 044/142] [runtime] Log an exception in the backend. --- compiler_gym/service/runtime/compiler_gym_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler_gym/service/runtime/compiler_gym_service.py b/compiler_gym/service/runtime/compiler_gym_service.py index 50c4b5af4..88feef809 100644 --- a/compiler_gym/service/runtime/compiler_gym_service.py +++ b/compiler_gym/service/runtime/compiler_gym_service.py @@ -42,6 +42,7 @@ @contextmanager def exception_to_grpc_status(context): # pragma: no cover def handle_exception_as(exception, code): + logger.warning("%s: %s", type(exception).__name__, exception) context.set_code(code) context.set_details(str(exception)) From 1ee489b044e750d8bb1b19e3aa605e4598ab8f72 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 18:45:45 +0000 Subject: [PATCH 045/142] [runtime] Add missing fork() implementation for Python runtime. 
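The Python service runtime implemented StartSession, EndSession and Step but had no ForkSession handler, so env.fork() could not be serviced by Python-backed environments. Roughly the client-side behaviour this enables, where the environment id is a placeholder for any environment served by the Python runtime:

    # Sketch; "example-py-v0" is a hypothetical environment id.
    import compiler_gym

    with compiler_gym.make("example-py-v0") as env:
        env.reset()
        with env.fork() as fkd:
            # fkd starts from the same compiler state as env.
            fkd.step(fkd.action_space.sample())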
--- .../service/runtime/compiler_gym_service.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/compiler_gym/service/runtime/compiler_gym_service.py b/compiler_gym/service/runtime/compiler_gym_service.py index 88feef809..38a183b99 100644 --- a/compiler_gym/service/runtime/compiler_gym_service.py +++ b/compiler_gym/service/runtime/compiler_gym_service.py @@ -18,6 +18,8 @@ from compiler_gym.service.proto import ( EndSessionReply, EndSessionRequest, + ForkSessionReply, + ForkSessionRequest, GetSpacesReply, GetSpacesRequest, GetVersionReply, @@ -149,6 +151,22 @@ def StartSession(self, request: StartSessionRequest, context) -> StartSessionRep return reply + def ForkSession(self, request: ForkSessionRequest, context) -> ForkSessionReply: + logger.debug( + "ForkSession(id=%d), [%s]", + request.session_id, + self.next_session_id, + ) + + reply = ForkSessionReply() + with exception_to_grpc_status(context): + session = self.sessions[request.session_id] + self.sessions[reply.session_id] = session.fork() + reply.session_id = self.next_session_id + self.next_session_id += 1 + + return reply + def EndSession(self, request: EndSessionRequest, context) -> EndSessionReply: del context # Unused logger.debug( From 46978302d96272f7c6b4314500835ae0bd35152f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 18:45:57 +0000 Subject: [PATCH 046/142] [runtime] Handle exceptions in session lookup. --- compiler_gym/service/runtime/compiler_gym_service.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/compiler_gym/service/runtime/compiler_gym_service.py b/compiler_gym/service/runtime/compiler_gym_service.py index 38a183b99..ed5114eb5 100644 --- a/compiler_gym/service/runtime/compiler_gym_service.py +++ b/compiler_gym/service/runtime/compiler_gym_service.py @@ -189,11 +189,10 @@ def Step(self, request: StepRequest, context) -> StepReply: context.set_details(f"Session not found: {request.session_id}") return reply - session = self.sessions[request.session_id] - reply.action_had_no_effect = True with exception_to_grpc_status(context): + session = self.sessions[request.session_id] for action in request.action: reply.end_of_session, nas, ahne = session.apply_action(action) reply.action_had_no_effect &= ahne From 84c59513e2bb5a73181ecdd0eabe1bb5ef55b687 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 6 Dec 2021 18:47:40 +0000 Subject: [PATCH 047/142] [tests] Update URI test. --- tests/datasets/benchmark_test.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/datasets/benchmark_test.py b/tests/datasets/benchmark_test.py index d92156dce..80a16c8b7 100644 --- a/tests/datasets/benchmark_test.py +++ b/tests/datasets/benchmark_test.py @@ -48,9 +48,6 @@ def test_invalid_benchmark_uris(): assert not BENCHMARK_URI_RE.match("benchmark://cbench-v0") # Missing benchmark ID assert not BENCHMARK_URI_RE.match("benchmark://cbench-v0/") # Missing benchmark ID - # Invalid benchmark ID - assert not BENCHMARK_URI_RE.match("benchmark://cbench-v1/\t") # Whitespace - def test_benchmark_uri_dataset(): assert ( @@ -104,6 +101,10 @@ def test_benchmark_uri_id(): BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/ white space" ) ) == " white space" + # This URI makes no sense, but is valid I suppose. 
+ assert ( + _rgx_match(BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/\t") + ) == "\t" def test_benchmark_attribute_outside_init(): From 53a87fbb256a2f756dbbfba0a2576e8bbb0b7c35 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 8 Dec 2021 01:54:52 +0000 Subject: [PATCH 048/142] [www] Update package dependency. --- www/frontends/compiler_gym/package-lock.json | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/www/frontends/compiler_gym/package-lock.json b/www/frontends/compiler_gym/package-lock.json index baa455781..cc822c8a9 100644 --- a/www/frontends/compiler_gym/package-lock.json +++ b/www/frontends/compiler_gym/package-lock.json @@ -10714,9 +10714,10 @@ } }, "node_modules/immer": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/immer/-/immer-8.0.1.tgz", - "integrity": "sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA==", + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.7.tgz", + "integrity": "sha512-KGllzpbamZDvOIxnmJ0jI840g7Oikx58lBPWV0hUh7dtAyZpFqqrBZdKka5GlTwMTZ1Tjc/bKKW4VSFAt6BqMA==", + "dev": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/immer" @@ -31497,9 +31498,10 @@ "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==" }, "immer": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/immer/-/immer-8.0.1.tgz", - "integrity": "sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA==" + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/immer/-/immer-9.0.7.tgz", + "integrity": "sha512-KGllzpbamZDvOIxnmJ0jI840g7Oikx58lBPWV0hUh7dtAyZpFqqrBZdKka5GlTwMTZ1Tjc/bKKW4VSFAt6BqMA==", + "dev": true }, "import-cwd": { "version": "2.1.0", From 025c611bdd2495ebf8e631861a3ce1e4676afa40 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 8 Dec 2021 01:59:04 +0000 Subject: [PATCH 049/142] Add missing copyright headers. Add some missing copyright headers to files. --- examples/example_unrolling_service/benchmarks/conv2d.c | 4 ++++ examples/example_unrolling_service/benchmarks/offsets1.c | 4 ++++ examples/example_unrolling_service/example.py | 5 +++++ examples/setup.py | 7 +++++++ 4 files changed, 20 insertions(+) diff --git a/examples/example_unrolling_service/benchmarks/conv2d.c b/examples/example_unrolling_service/benchmarks/conv2d.c index 9848d9529..5366dc9b5 100644 --- a/examples/example_unrolling_service/benchmarks/conv2d.c +++ b/examples/example_unrolling_service/benchmarks/conv2d.c @@ -1,3 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. #include "header.h" // TODO: use templates instead of macros diff --git a/examples/example_unrolling_service/benchmarks/offsets1.c b/examples/example_unrolling_service/benchmarks/offsets1.c index b5206cabf..affc96fe9 100644 --- a/examples/example_unrolling_service/benchmarks/offsets1.c +++ b/examples/example_unrolling_service/benchmarks/offsets1.c @@ -1,3 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
#include "header.h" #ifndef N diff --git a/examples/example_unrolling_service/example.py b/examples/example_unrolling_service/example.py index af2298ba0..5e58ba3d3 100644 --- a/examples/example_unrolling_service/example.py +++ b/examples/example_unrolling_service/example.py @@ -1,3 +1,8 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + import compiler_gym import examples.example_unrolling_service as unrolling_service # noqa Register environments. diff --git a/examples/setup.py b/examples/setup.py index 427485921..3e375d78e 100644 --- a/examples/setup.py +++ b/examples/setup.py @@ -1,3 +1,10 @@ +#!/usr/bin/env python3 +# +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + import distutils.util import setuptools From a558f782f29a6bbf5c91788de60fe8468dc505e7 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 7 Dec 2021 15:25:53 +0000 Subject: [PATCH 050/142] [tests] Update xfail annotations. --- tests/gcc/datasets/csmith_test.py | 3 +++ tests/gcc/gcc_env_test.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/gcc/datasets/csmith_test.py b/tests/gcc/datasets/csmith_test.py index 5c9a05f3d..230fbe68a 100644 --- a/tests/gcc/datasets/csmith_test.py +++ b/tests/gcc/datasets/csmith_test.py @@ -43,6 +43,9 @@ def test_csmith_random_select(gcc_bin: str, index: int, tmpwd: Path): assert (tmpwd / "source.c").is_file() +@pytest.mark.xfail( + reason="github.com/facebookresearch/CompilerGym/issues/459", +) @with_gcc_support def test_random_benchmark(gcc_bin: str): with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: diff --git a/tests/gcc/gcc_env_test.py b/tests/gcc/gcc_env_test.py index 4bda5d3ec..fecf82776 100644 --- a/tests/gcc/gcc_env_test.py +++ b/tests/gcc/gcc_env_test.py @@ -35,6 +35,10 @@ def test_docker_default_action_space(): assert env.action_spaces[0].names[0] == "-O0" +@pytest.mark.xfail( + not docker_is_available(), + reason="github.com/facebookresearch/CompilerGym/issues/459", +) def test_gcc_bin(gcc_bin: str): """Test that the environment reports the service's reward spaces.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: @@ -44,7 +48,6 @@ def test_gcc_bin(gcc_bin: str): @pytest.mark.xfail( not docker_is_available(), - strict=True, reason="github.com/facebookresearch/CompilerGym/issues/459", ) def test_observation_spaces_failing_because_of_bug(gcc_bin: str): From dd29d8b8822a1d09b0ff42696e86e9226e30ac21 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 16:05:03 +0000 Subject: [PATCH 051/142] [www] Update dependent package. 
--- www/frontends/compiler_gym/package-lock.json | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/www/frontends/compiler_gym/package-lock.json b/www/frontends/compiler_gym/package-lock.json index cc822c8a9..4670408b5 100644 --- a/www/frontends/compiler_gym/package-lock.json +++ b/www/frontends/compiler_gym/package-lock.json @@ -13515,9 +13515,10 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "node_modules/json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true }, "node_modules/json-schema-traverse": { "version": "0.4.1", @@ -33542,9 +33543,10 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "dev": true }, "json-schema-traverse": { "version": "0.4.1", From 5ce7165da5fb80a1820b9a66599e68146222d3e6 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 16:20:58 +0000 Subject: [PATCH 052/142] [www] Update package dependency. --- www/frontends/compiler_gym/package-lock.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/www/frontends/compiler_gym/package-lock.json b/www/frontends/compiler_gym/package-lock.json index 4670408b5..f628a2700 100644 --- a/www/frontends/compiler_gym/package-lock.json +++ b/www/frontends/compiler_gym/package-lock.json @@ -13575,7 +13575,7 @@ "dependencies": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, From 155b241606d2b8d01ef117b59f276613f3c2c857 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 22:47:47 +0000 Subject: [PATCH 053/142] [util] Add C++ implementation of getCacheRootPath(). --- compiler_gym/util/BUILD | 1 + compiler_gym/util/RunfilesPath.cc | 37 ++++++++++++++++++++++++++----- compiler_gym/util/RunfilesPath.h | 7 ++++++ 3 files changed, 40 insertions(+), 5 deletions(-) diff --git a/compiler_gym/util/BUILD b/compiler_gym/util/BUILD index 6c1ecd42c..a9cad9397 100644 --- a/compiler_gym/util/BUILD +++ b/compiler_gym/util/BUILD @@ -68,6 +68,7 @@ cc_library( visibility = ["//visibility:public"], deps = [ "@boost//:filesystem", + "@fmt", ], ) diff --git a/compiler_gym/util/RunfilesPath.cc b/compiler_gym/util/RunfilesPath.cc index 8e6ebf888..5183b62f9 100644 --- a/compiler_gym/util/RunfilesPath.cc +++ b/compiler_gym/util/RunfilesPath.cc @@ -2,16 +2,24 @@ // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +#include + #include "boost/filesystem.hpp" namespace fs = boost::filesystem; namespace compiler_gym::util { -// When running under bazel, the working directory is the root of the -// CompilerGym workspace. Back up one level so that we can reference other -// workspaces. 
-static const std::string kDefaultBase{"."}; +namespace { + +static const char* UNKNOWN_USER_NAME = "unknown"; + +inline std::string getUser() { + const char* base = std::getenv("USER"); + return base ? base : UNKNOWN_USER_NAME; +} + +} // namespace fs::path getRunfilesPath(const std::string& relPath) { const char* base = std::getenv("COMPILER_GYM_RUNFILES"); @@ -37,7 +45,26 @@ fs::path getSiteDataPath(const std::string& relPath) { } else { // $HOME may not be set under testing conditions. In this case, use a // throwaway directory. - return fs::temp_directory_path() / "compiler_gym" / relPath; + return fs::temp_directory_path() / fmt::format("compiler_gym_{}", getUser()) / relPath; + } +} + +fs::path getCacheRootPath() { + // NOTE(cummins): This function has a related implementation in the Python + // sources, compiler_gym.util.runfiles_path.get_cache_path(). Any change to + // behavior here must be reflected in the Python version. + const char* force = std::getenv("COMPILER_GYM_CACHE"); + if (force) { + return fs::path(force); + } + + const char* home = std::getenv("HOME"); + if (home) { + return fs::path(home) / ".local/cache/compiler_gym"; + } else { + // $HOME may not be set under testing conditions. In this case, use a + // throwaway directory. + return fs::temp_directory_path() / fmt::format("compiler_gym_{}", getUser()); } } diff --git a/compiler_gym/util/RunfilesPath.h b/compiler_gym/util/RunfilesPath.h index f2308edd2..e38fe7907 100644 --- a/compiler_gym/util/RunfilesPath.h +++ b/compiler_gym/util/RunfilesPath.h @@ -22,4 +22,11 @@ boost::filesystem::path getRunfilesPath(const std::string& relPath); */ boost::filesystem::path getSiteDataPath(const std::string& relPath); +/** + * Resolve the root of the cache path. + * + * @return boost::filesystem::path A path. + */ +boost::filesystem::path getCacheRootPath(); + } // namespace compiler_gym::util From 98b82bddb5c92247d5eb520e9d1a410ecac2d403 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 22:50:40 +0000 Subject: [PATCH 054/142] [llvm] Add a BenchmarkFactory::close() method. --- compiler_gym/envs/llvm/service/BenchmarkFactory.cc | 9 +++++++++ compiler_gym/envs/llvm/service/BenchmarkFactory.h | 4 ++++ 2 files changed, 13 insertions(+) diff --git a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc index 3ad73da77..4da5984a0 100644 --- a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc +++ b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc @@ -38,6 +38,15 @@ BenchmarkFactory::BenchmarkFactory(const boost::filesystem::path& workingDirecto VLOG(2) << "BenchmarkFactory initialized"; } +BenchmarkFactory::~BenchmarkFactory() { close(); } + +void BenchmarkFactory::close() { + VLOG(2) << "BenchmarkFactory closing with " << benchmarks_.size() << " entries"; + for (auto& entry : benchmarks_) { + entry.second.close(); + } +} + Status BenchmarkFactory::getBenchmark(const BenchmarkProto& benchmarkMessage, std::unique_ptr* benchmark) { // Check if the benchmark has already been loaded into memory. diff --git a/compiler_gym/envs/llvm/service/BenchmarkFactory.h b/compiler_gym/envs/llvm/service/BenchmarkFactory.h index bd30974bd..d057785ab 100644 --- a/compiler_gym/envs/llvm/service/BenchmarkFactory.h +++ b/compiler_gym/envs/llvm/service/BenchmarkFactory.h @@ -62,6 +62,10 @@ class BenchmarkFactory { return instance; } + ~BenchmarkFactory(); + + void close(); + /** * Get the requested named benchmark. 
* From b9acc4c800f557162d9b2f2c0b49dc277effc62f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 22:51:14 +0000 Subject: [PATCH 055/142] [llvm] Use on-disk cache as benchmark scratch directory. Issue #465. --- compiler_gym/envs/llvm/service/Benchmark.cc | 34 +++++++++++++-------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/compiler_gym/envs/llvm/service/Benchmark.cc b/compiler_gym/envs/llvm/service/Benchmark.cc index 813dd8270..dcc034a18 100644 --- a/compiler_gym/envs/llvm/service/Benchmark.cc +++ b/compiler_gym/envs/llvm/service/Benchmark.cc @@ -67,6 +67,24 @@ RealizedBenchmarkDynamicConfig realizeDynamicConfig(const BenchmarkDynamicConfig return RealizedBenchmarkDynamicConfig(cfg); } +/** + * Create a temporary directory to use as a scratch pad for on-disk storage. + * This directory is guaranteed to exist. + * + * Errors in this function are fatal. + * + * @return fs::path A path. + */ +fs::path createScratchDirectoryOrDie() { + const fs::path cacheRoot = util::getCacheRootPath(); + const fs::path dir = fs::unique_path(cacheRoot / "benchmark-scratch-%%%%-%%%%"); + + sys::error_code ec; + fs::create_directories(dir, ec); + CHECK(!ec) << "Failed to create scratch directory: " << dir; + return dir; +} + } // anonymous namespace Status readBitcodeFile(const fs::path& path, Bitcode* bitcode) { @@ -135,7 +153,7 @@ Benchmark::Benchmark(const std::string& name, const Bitcode& bitcode, const BaselineCosts& baselineCosts) : context_(std::make_unique()), module_(makeModuleOrDie(*context_, bitcode, name)), - scratchDirectory_(fs::path(fs::unique_path(workingDirectory / "scratch-%%%%-%%%%"))), + scratchDirectory_(createScratchDirectoryOrDie()), dynamicConfigProto_(dynamicConfig), dynamicConfig_(realizeDynamicConfig(dynamicConfig, scratchDirectory_)), baselineCosts_(baselineCosts), @@ -143,11 +161,7 @@ Benchmark::Benchmark(const std::string& name, const Bitcode& bitcode, needsRecompile_(true), runtimesPerObservationCount_(kDefaultRuntimesPerObservationCount), warmupRunsPerRuntimeObservationCount_(kDefaultWarmupRunsPerRuntimeObservationCount), - buildtimesPerObservationCount_(kDefaultBuildtimesPerObservationCount) { - sys::error_code ec; - fs::create_directory(scratchDirectory(), ec); - CHECK(!ec) << "Failed to create scratch directory: " << scratchDirectory(); -} + buildtimesPerObservationCount_(kDefaultBuildtimesPerObservationCount) {} Benchmark::Benchmark(const std::string& name, std::unique_ptr context, std::unique_ptr module, @@ -155,16 +169,12 @@ Benchmark::Benchmark(const std::string& name, std::unique_ptr const BaselineCosts& baselineCosts) : context_(std::move(context)), module_(std::move(module)), - scratchDirectory_(fs::path(fs::unique_path(workingDirectory / "scratch-%%%%-%%%%"))), + scratchDirectory_(createScratchDirectoryOrDie()), dynamicConfigProto_(dynamicConfig), dynamicConfig_(realizeDynamicConfig(dynamicConfig, scratchDirectory_)), baselineCosts_(baselineCosts), name_(name), - needsRecompile_(true) { - sys::error_code ec; - fs::create_directory(scratchDirectory(), ec); - CHECK(!ec) << "Failed to create scratch directory: " << scratchDirectory().string(); -} + needsRecompile_(true) {} void Benchmark::close() { sys::error_code ec; From 65a6da8f0fef04b699012a51f095171b010b33ec Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 9 Dec 2021 23:21:16 +0000 Subject: [PATCH 056/142] [llvm] Use hidden file name for lockfile. 
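
The cbench runtime data is downloaded at most once and shared between
environments, so the download is guarded twice: a threading lock
serializes threads within a process, and a fasteners inter-process lock
file in the cache directory serializes separate processes. Using a
dot-prefixed name keeps the lock file hidden in casual listings of the
cache directory. A minimal sketch of the locking pattern, for reference
only (the download_runtime_data helper and the lock file name below are
placeholders, and the sketch assumes the fasteners package is
installed):

    import threading

    import fasteners

    _DOWNLOAD_THREAD_LOCK = threading.Lock()

    def download_once(cache_dir: str) -> None:
        # Serialize threads in this process first, then other processes
        # that share the same cache directory.
        with _DOWNLOAD_THREAD_LOCK:
            with fasteners.InterProcessLock(f"{cache_dir}/.runtime-data.LOCK"):
                download_runtime_data()  # placeholder for the actual download
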
--- compiler_gym/envs/llvm/datasets/cbench.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index f344f085c..992e67add 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -288,7 +288,7 @@ def _make_cBench_validator( def validator_cb(env: "LlvmEnv") -> Optional[ValidationError]: # noqa: F821 """The validation callback.""" with _CBENCH_DOWNLOAD_THREAD_LOCK: - with fasteners.InterProcessLock(cache_path("cbench-v1-runtime-data.LOCK")): + with fasteners.InterProcessLock(cache_path(".cbench-v1-runtime-data.LOCK")): download_cBench_runtime_data() cbench_data = site_data_path("llvm-v0/cbench-v1-runtime-data/runtime_data") @@ -557,7 +557,7 @@ def __init__(self, site_data_base: Path): def install(self): super().install() with _CBENCH_DOWNLOAD_THREAD_LOCK: - with fasteners.InterProcessLock(cache_path("cbench-v1-runtime-data.LOCK")): + with fasteners.InterProcessLock(cache_path(".cbench-v1-runtime-data.LOCK")): download_cBench_runtime_data() From 5aa521840dc3df3dcf4da17ed2220b904a550fc2 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Fri, 10 Dec 2021 02:26:41 +0000 Subject: [PATCH 057/142] [llvm] Update mibench to v1. This deprecates the mibench-v0 dataset and adds a new mibench-v1 dataset. The difference is that the files in v1 have the "optnone" attribute stripped, enabling the optimizer to modify the functions. Fixes #505. --- compiler_gym/envs/llvm/datasets/__init__.py | 26 +++++++++++++++++++++ docs/source/llvm/index.rst | 2 +- examples/llvm_rl/config/testing/all.yaml | 2 +- tests/llvm/datasets/llvm_datasets_test.py | 2 +- 4 files changed, 29 insertions(+), 3 deletions(-) diff --git a/compiler_gym/envs/llvm/datasets/__init__.py b/compiler_gym/envs/llvm/datasets/__init__.py index 8fcd298fe..31d714b32 100644 --- a/compiler_gym/envs/llvm/datasets/__init__.py +++ b/compiler_gym/envs/llvm/datasets/__init__.py @@ -109,6 +109,30 @@ def __init__(self, site_data_base: Path, sort_order: int = 0): class MibenchDataset(TarDatasetWithManifest): + def __init__(self, site_data_base: Path, sort_order: int = 0): + super().__init__( + name="benchmark://mibench-v1", + tar_urls=[ + "https://dl.fbaipublicfiles.com/compiler_gym/llvm_bitcodes-10.0.0-mibench-v1.tar.bz2" + ], + tar_sha256="795b80d3198bc96e394823a4cb294d256845beffccce52fea0e3446395212bb5", + manifest_urls=[ + "https://dl.fbaipublicfiles.com/compiler_gym/llvm_bitcodes-10.0.0-mibench-v0-manifest.bz2" + ], + manifest_sha256="8ed985d685b48f444a3312cd84ccc5debda4a839850e442a3cdc93910ba0dc5f", + references={ + "Paper": "http://vhosts.eecs.umich.edu/mibench/Publications/MiBench.pdf" + }, + license="BSD 3-Clause", + strip_prefix="mibench-v1", + description="C benchmarks", + benchmark_file_suffix=".bc", + site_data_base=site_data_base, + sort_order=sort_order, + ) + + +class MibenchV0Dataset(TarDatasetWithManifest): def __init__(self, site_data_base: Path, sort_order: int = 0): super().__init__( name="benchmark://mibench-v0", @@ -129,6 +153,7 @@ def __init__(self, site_data_base: Path, sort_order: int = 0): benchmark_file_suffix=".bc", site_data_base=site_data_base, sort_order=sort_order, + deprecated="Please use mibench-v1", ) @@ -259,6 +284,7 @@ def get_llvm_datasets(site_data_base: Optional[Path] = None) -> Iterable[Dataset yield LinuxDataset(site_data_base=site_data_base, sort_order=0) yield LlvmStressDataset(site_data_base=site_data_base, sort_order=0) yield 
MibenchDataset(site_data_base=site_data_base, sort_order=0) + yield MibenchV0Dataset(site_data_base=site_data_base, sort_order=100) yield NPBDataset(site_data_base=site_data_base, sort_order=0) yield OpenCVDataset(site_data_base=site_data_base, sort_order=0) yield POJ104Dataset(site_data_base=site_data_base, sort_order=0) diff --git a/docs/source/llvm/index.rst b/docs/source/llvm/index.rst index 5ffb8afbe..48302bbf9 100644 --- a/docs/source/llvm/index.rst +++ b/docs/source/llvm/index.rst @@ -42,7 +42,7 @@ We provide several datasets of open-source LLVM-IR benchmarks for use: +----------------------------+--------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------+ | benchmark://linux-v0 | 13,894 | Compile-only object files from C Linux kernel [`Homepage `__] | No | +----------------------------+--------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------+ -| benchmark://mibench-v0 | 40 | C benchmarks [`Paper `__] | No | +| benchmark://mibench-v1 | 40 | C benchmarks [`Paper `__] | No | +----------------------------+--------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------+ | benchmark://npb-v0 | 122 | NASA Parallel Benchmarks [`Paper `__] | No | +----------------------------+--------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------+ diff --git a/examples/llvm_rl/config/testing/all.yaml b/examples/llvm_rl/config/testing/all.yaml index 9584fd676..6827e5c66 100644 --- a/examples/llvm_rl/config/testing/all.yaml +++ b/examples/llvm_rl/config/testing/all.yaml @@ -21,7 +21,7 @@ benchmarks: max_benchmarks: 50 - dataset: generator://llvm-stress-v0 max_benchmarks: 50 - - dataset: benchmark://mibench-v0 + - dataset: benchmark://mibench-v1 max_benchmarks: 50 - dataset: benchmark://npb-v0 max_benchmarks: 50 diff --git a/tests/llvm/datasets/llvm_datasets_test.py b/tests/llvm/datasets/llvm_datasets_test.py index 697d26de5..61251f649 100644 --- a/tests/llvm/datasets/llvm_datasets_test.py +++ b/tests/llvm/datasets/llvm_datasets_test.py @@ -19,7 +19,7 @@ def test_default_dataset_list(): "benchmark://clgen-v0", "benchmark://github-v0", "benchmark://linux-v0", - "benchmark://mibench-v0", + "benchmark://mibench-v1", "benchmark://npb-v0", "benchmark://opencv-v0", "benchmark://poj104-v1", From a29ee0c2588f60cc25e88f17740042116f438ccf Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Fri, 10 Dec 2021 16:03:08 +0000 Subject: [PATCH 058/142] [llvm] Add a utility module for stripping optnone attributes. 
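
Clang attaches the optnone attribute to functions built without
optimization (for example at -O0), and LLVM's optimization passes skip
any function that carries it. That is why the mibench-v1 bitcodes in the
previous commit had the attribute stripped: with optnone removed, the
optimizer can actually modify the functions. The new tool takes a single
bitcode path and rewrites the file in place with the attribute removed.
A rough sketch of how it might be used in bulk when preparing a dataset
(illustrative only; the binary location and dataset directory below are
placeholder paths):

    import subprocess
    from pathlib import Path

    # Placeholder path to the binary produced by the
    # //compiler_gym/envs/llvm/service:strip-optnone-bin genrule.
    STRIP_OPTNONE = "bazel-bin/compiler_gym/envs/llvm/service/strip-optnone"

    for bitcode in Path("dataset-bitcodes").glob("*.bc"):
        # Strips optnone from every function and overwrites the file.
        subprocess.check_call([STRIP_OPTNONE, str(bitcode)])
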
--- compiler_gym/envs/llvm/service/BUILD | 52 ++++++++++++++ .../llvm/service/StripOptNoneAttribute.cc | 68 +++++++++++++++++++ 2 files changed, 120 insertions(+) create mode 100644 compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc diff --git a/compiler_gym/envs/llvm/service/BUILD b/compiler_gym/envs/llvm/service/BUILD index a283c1c27..35908779e 100644 --- a/compiler_gym/envs/llvm/service/BUILD +++ b/compiler_gym/envs/llvm/service/BUILD @@ -270,3 +270,55 @@ cc_library( "@programl//programl/proto:programl_cc", ], ) + +# The strip-optnone-attribute binary is a utility for stripping the "optnone" +# function attribute from LLVM bitcode files. It is used for preparing datasets +# of bitcodes. + +filegroup( + name = "strip-optnone-attribute-files", + srcs = [ + ":strip-optnone-attribute", + ] + select({ + "@llvm//:darwin": [], + "//conditions:default": [ + ":libLLVMPolly", + ], + }), +) + +cc_binary( + name = "strip-optnone-attribute-prelinked", + srcs = ["StripOptNoneAttribute.cc"], + copts = [ + "-DGOOGLE_PROTOBUF_NO_RTTI", + "-fno-rtti", + ], + deps = [ + ":BenchmarkFactory", + "//compiler_gym/service/proto:compiler_gym_service_cc", + "//compiler_gym/util:GrpcStatusMacros", + "@boost//:filesystem", + "@glog", + "@llvm//10.0.0", + "@magic_enum", + ], +) + +genrule( + name = "strip-optnone-bin", + srcs = [":strip-optnone-attribute-prelinked"], + outs = ["strip-optnone"], + cmd = select({ + "@llvm//:darwin": ( + "cp $(location :strip-optnone-attribute-prelinked) $@" + ), + "//conditions:default": ( + "cp $(location :strip-optnone-attribute-prelinked) $@ && " + + "chmod 666 $@ && " + + "patchelf --set-rpath '$$ORIGIN' $@ && " + + "chmod 555 $@" + ), + }), + visibility = ["//visibility:public"], +) diff --git a/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc b/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc new file mode 100644 index 000000000..891d47a6b --- /dev/null +++ b/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc @@ -0,0 +1,68 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. +#include +#include + +#include +#include + +#include "compiler_gym/envs/llvm/service/BenchmarkFactory.h" +#include "compiler_gym/envs/llvm/service/Observation.h" +#include "compiler_gym/envs/llvm/service/ObservationSpaces.h" +#include "compiler_gym/service/proto/compiler_gym_service.pb.h" +#include "compiler_gym/util/GrpcStatusMacros.h" +#include "llvm/IR/Module.h" +#include "llvm/IRReader/IRReader.h" +#include "llvm/Support/ErrorOr.h" +#include "llvm/Support/SourceMgr.h" + +namespace fs = boost::filesystem; + +using namespace compiler_gym; +using namespace compiler_gym::llvm_service; + +int main(int argc, char** argv) { + google::InitGoogleLogging(argv[0]); + + CHECK(argc == 2) << "Usage: compute_observation "; + + const fs::path workingDirectory{"."}; + + compiler_gym::Benchmark request; + request.set_uri("user"); + request.mutable_program()->set_uri(fmt::format("file:///{}", argv[1])); + + auto& benchmarkFactory = BenchmarkFactory::getSingleton(workingDirectory); + std::unique_ptr<::llvm_service::Benchmark> benchmark; + { + const auto status = benchmarkFactory.getBenchmark(request, &benchmark); + CHECK(status.ok()) << "Failed to load benchmark: " << status.error_message(); + } + + llvm::Module& module = benchmark->module(); + + // Iterate through the functions in the module, removing the optnone attribute + // where set. 
+ int removedOptNoneCount = 0; + for (llvm::Function& function : module.functions()) { + for (auto& attrSet : function.getAttributes()) { + for (auto& attr : attrSet) { + // NOTE(cummins): there is definitely a more efficient way of doing + // this than string-ifying all of the attributes, but I don't know + // it :-) + if (attr.getAsString() == "optnone") { + ++removedOptNoneCount; + function.removeFnAttr(attr.getKindAsEnum()); + } + } + } + } + + ASSERT_OK(benchmark->writeBitcodeToFile(argv[1])); + std::cerr << "Stripped " << removedOptNoneCount << " optnone attributes from " << argv[1] + << std::endl; + + return 0; +} From 893e7061e77496d2ebcb26e1364feefc9e86ba83 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Fri, 10 Dec 2021 16:15:28 +0000 Subject: [PATCH 059/142] [llvm] Better error message from build script. --- .../extract_passes_from_llvm_source_tree.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py b/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py index d3d23e71f..0ea0ccd4a 100644 --- a/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py +++ b/compiler_gym/envs/llvm/service/passes/extract_passes_from_llvm_source_tree.py @@ -242,10 +242,17 @@ def main(argv): else: # Get the names of all files which contain a pass definition. matching_paths = [] - grep = subprocess.check_output( - ["grep", "-l", "-E", rf"^\s*{INITIALIZE_PASS_RE}", "-R", "lib/"], - universal_newlines=True, - ) + try: + grep = subprocess.check_output( + ["grep", "-l", "-E", rf"^\s*{INITIALIZE_PASS_RE}", "-R", "lib/"], + universal_newlines=True, + ) + except subprocess.CalledProcessError: + print( + f"fatal: Failed to find any LLVM pass declarations in {root}", + file=sys.stderr, + ) + sys.exit(1) matching_paths += grep.strip().split("\n") logger.debug("Processing %s files ...", len(matching_paths)) paths = [Path(path) for path in matching_paths] From 81c7ab4dec9e78a31b605cdf753e43d18da42d69 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Fri, 10 Dec 2021 16:23:44 +0000 Subject: [PATCH 060/142] Update proto and gRPC dependencies. --- WORKSPACE | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index a8caf96c0..0e0339be5 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -102,11 +102,10 @@ py_repositories() http_archive( name = "rules_proto", - sha256 = "66bfdf8782796239d3875d37e7de19b1d94301e8972b3cbd2446b332429b4df1", - strip_prefix = "rules_proto-4.0.0", + sha256 = "83c8798f5a4fe1f6a13b5b6ae4267695b71eed7af6fbf2b6ec73a64cf01239ab", + strip_prefix = "rules_proto-b22f78685bf62775b80738e766081b9e4366cdf0", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz", - "https://github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz", + "https://github.com/bazelbuild/rules_proto/archive/b22f78685bf62775b80738e766081b9e4366cdf0.tar.gz", ], ) @@ -121,10 +120,10 @@ rules_proto_toolchains() # Version should be kept in step with compiler_gym/requirements.txt. 
http_archive( name = "com_github_grpc_grpc", - sha256 = "1a5127c81487f4e3e57973bb332f04b9159f94d860c207e096d8a587d371edbd", - strip_prefix = "grpc-1.36.0", + sha256 = "2b8a2c9ee689a23ce852ef010b27be80fe6aff827bf6c794bf1273e9fdf8dfb6", + strip_prefix = "grpc-240557a55cab84125e95beda54ceb5dcd5bba08c", urls = [ - "https://github.com/grpc/grpc/archive/v1.36.0.tar.gz", + "https://github.com/grpc/grpc/archive/240557a55cab84125e95beda54ceb5dcd5bba08c.tar.gz", ], ) From 4d71f1c652b280cbb46f41adcef7b77a9b535104 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Fri, 10 Dec 2021 16:32:05 +0000 Subject: [PATCH 061/142] Bump bazel requirement to workaround macOS build error. A segmentation fault in protoc caused by the macOS Monterey update is mitigated by updating bazel version to 4.2.2. See: https://github.com/protocolbuffers/protobuf/issues/9172#issuecomment-990117255 Issue #494. --- .bazelversion | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.bazelversion b/.bazelversion index fcdb2e109..af8c8ec7c 100644 --- a/.bazelversion +++ b/.bazelversion @@ -1 +1 @@ -4.0.0 +4.2.2 From 5e3516ee9f9328afda179d3226d0d445c970d183 Mon Sep 17 00:00:00 2001 From: Boian Petkantchin Date: Tue, 28 Sep 2021 22:57:33 -0700 Subject: [PATCH 062/142] Add Building with CMake of compiler_gym module Co-authored-by: kyle --- .../action.yaml | 27 + .github/workflows/ci.yaml | 41 ++ CMakeLists.txt | 68 +++ INSTALL.md | 80 ++- WORKSPACE | 6 +- build_tools/cmake/FindBazel.cmake | 53 ++ build_tools/cmake/FindClog.cmake | 42 ++ build_tools/cmake/FindCsmith.cmake | 75 +++ build_tools/cmake/FindLabm8.cmake | 146 ++++++ build_tools/cmake/FindProGraML.cmake | 179 +++++++ build_tools/cmake/FindSubprocess.cmake | 37 ++ .../cmake/build_external_cmake_project.cmake | 53 ++ build_tools/cmake/cg_add_all_subdirs.cmake | 32 ++ build_tools/cmake/cg_cc_binary.cmake | 148 ++++++ build_tools/cmake/cg_cc_library.cmake | 192 +++++++ build_tools/cmake/cg_cc_test.cmake | 96 ++++ build_tools/cmake/cg_copts.cmake | 124 +++++ build_tools/cmake/cg_filegroup.cmake | 51 ++ build_tools/cmake/cg_genrule.cmake | 73 +++ build_tools/cmake/cg_installed_test.cmake | 97 ++++ build_tools/cmake/cg_macros.cmake | 353 +++++++++++++ build_tools/cmake/cg_py_binary.cmake | 45 ++ build_tools/cmake/cg_py_library.cmake | 88 ++++ build_tools/cmake/cg_py_test.cmake | 87 ++++ build_tools/cmake/cg_python.cmake | 207 ++++++++ build_tools/cmake/grpc.cmake | 123 +++++ build_tools/cmake/protobuf.cmake | 154 ++++++ build_tools/cmake/run_test.sh | 30 ++ .../cmake/set_command_pythonpath.cmake | 23 + build_tools/cmake/write_cache_script.cmake | 36 ++ compiler_gym/CMakeLists.txt | 114 +++++ compiler_gym/bin/CMakeLists.txt | 99 ++++ compiler_gym/datasets/CMakeLists.txt | 32 ++ compiler_gym/envs/CMakeLists.txt | 36 ++ compiler_gym/envs/gcc/CMakeLists.txt | 25 + compiler_gym/envs/gcc/datasets/CMakeLists.txt | 23 + compiler_gym/envs/gcc/service/CMakeLists.txt | 13 + compiler_gym/envs/llvm/CMakeLists.txt | 101 ++++ compiler_gym/envs/llvm/__init__.py | 7 +- .../envs/llvm/datasets/CMakeLists.txt | 29 ++ compiler_gym/envs/llvm/make_specs.py | 12 +- compiler_gym/envs/llvm/service/CMakeLists.txt | 274 ++++++++++ .../envs/llvm/service/passes/CMakeLists.txt | 102 ++++ compiler_gym/envs/loop_tool/CMakeLists.txt | 20 + .../envs/loop_tool/service/CMakeLists.txt | 13 + compiler_gym/leaderboard/CMakeLists.txt | 29 ++ compiler_gym/requirements.txt | 1 + compiler_gym/service/CMakeLists.txt | 54 ++ compiler_gym/service/proto/CMakeLists.txt | 60 +++ 
compiler_gym/service/runtime/CMakeLists.txt | 127 +++++ compiler_gym/spaces/CMakeLists.txt | 94 ++++ compiler_gym/third_party/CMakeLists.txt | 6 + .../third_party/autophase/CMakeLists.txt | 63 +++ .../third_party/cbench/CMakeLists.txt | 237 +++++++++ .../third_party/csmith/CMakeLists.txt | 8 + .../third_party/inst2vec/CMakeLists.txt | 52 ++ compiler_gym/third_party/llvm/CMakeLists.txt | 81 +++ .../neuro-vectorizer/CMakeLists.txt | 11 + compiler_gym/util/CMakeLists.txt | 136 +++++ compiler_gym/util/flags/CMakeLists.txt | 26 + compiler_gym/views/CMakeLists.txt | 56 ++ compiler_gym/wrappers/CMakeLists.txt | 24 + external/absl/CMakeLists.txt | 20 + external/boost/CMakeLists.txt | 48 ++ external/cpuinfo/CMakeLists.txt | 19 + external/csmith/CMakeLists.txt | 20 + external/external.cmake | 481 ++++++++++++++++++ external/gflags/CMakeLists.txt | 21 + external/llvm/CMakeLists.txt | 53 ++ external/programl/CMakeLists.txt | 47 ++ external/protobuf/CMakeLists.txt | 39 ++ external/protobuf/build_protobuf.cmake | 66 +++ external/subprocess/CMakeLists.txt | 20 + tests/CMakeLists.txt | 126 +++++ tests/bin/CMakeLists.txt | 51 ++ tests/datasets/CMakeLists.txt | 50 ++ tests/fuzzing/CMakeLists.txt | 104 ++++ tests/gcc/CMakeLists.txt | 44 ++ tests/gcc/datasets/CMakeLists.txt | 32 ++ tests/leaderboard/CMakeLists.txt | 17 + tests/llvm/CMakeLists.txt | 316 ++++++++++++ tests/llvm/datasets/CMakeLists.txt | 133 +++++ tests/llvm/service/CMakeLists.txt | 20 + tests/loop_tool/CMakeLists.txt | 14 + tests/pytest_plugins/CMakeLists.txt | 56 ++ tests/service/CMakeLists.txt | 18 + tests/service/proto/CMakeLists.txt | 15 + tests/service/runtime/CMakeLists.txt | 30 ++ tests/spaces/CMakeLists.txt | 46 ++ tests/util/CMakeLists.txt | 167 ++++++ tests/version_test.py | 14 +- tests/views/CMakeLists.txt | 27 + tests/wrappers/CMakeLists.txt | 60 +++ 93 files changed, 6882 insertions(+), 23 deletions(-) create mode 100644 .github/actions/install-cmake-build-dependencies/action.yaml create mode 100644 CMakeLists.txt create mode 100644 build_tools/cmake/FindBazel.cmake create mode 100644 build_tools/cmake/FindClog.cmake create mode 100644 build_tools/cmake/FindCsmith.cmake create mode 100644 build_tools/cmake/FindLabm8.cmake create mode 100644 build_tools/cmake/FindProGraML.cmake create mode 100644 build_tools/cmake/FindSubprocess.cmake create mode 100644 build_tools/cmake/build_external_cmake_project.cmake create mode 100644 build_tools/cmake/cg_add_all_subdirs.cmake create mode 100644 build_tools/cmake/cg_cc_binary.cmake create mode 100644 build_tools/cmake/cg_cc_library.cmake create mode 100644 build_tools/cmake/cg_cc_test.cmake create mode 100644 build_tools/cmake/cg_copts.cmake create mode 100644 build_tools/cmake/cg_filegroup.cmake create mode 100644 build_tools/cmake/cg_genrule.cmake create mode 100644 build_tools/cmake/cg_installed_test.cmake create mode 100644 build_tools/cmake/cg_macros.cmake create mode 100644 build_tools/cmake/cg_py_binary.cmake create mode 100644 build_tools/cmake/cg_py_library.cmake create mode 100644 build_tools/cmake/cg_py_test.cmake create mode 100644 build_tools/cmake/cg_python.cmake create mode 100644 build_tools/cmake/grpc.cmake create mode 100644 build_tools/cmake/protobuf.cmake create mode 100755 build_tools/cmake/run_test.sh create mode 100644 build_tools/cmake/set_command_pythonpath.cmake create mode 100644 build_tools/cmake/write_cache_script.cmake create mode 100644 compiler_gym/CMakeLists.txt create mode 100644 compiler_gym/bin/CMakeLists.txt create mode 100644 
compiler_gym/datasets/CMakeLists.txt create mode 100644 compiler_gym/envs/CMakeLists.txt create mode 100644 compiler_gym/envs/gcc/CMakeLists.txt create mode 100644 compiler_gym/envs/gcc/datasets/CMakeLists.txt create mode 100644 compiler_gym/envs/gcc/service/CMakeLists.txt create mode 100644 compiler_gym/envs/llvm/CMakeLists.txt create mode 100644 compiler_gym/envs/llvm/datasets/CMakeLists.txt create mode 100644 compiler_gym/envs/llvm/service/CMakeLists.txt create mode 100644 compiler_gym/envs/llvm/service/passes/CMakeLists.txt create mode 100644 compiler_gym/envs/loop_tool/CMakeLists.txt create mode 100644 compiler_gym/envs/loop_tool/service/CMakeLists.txt create mode 100644 compiler_gym/leaderboard/CMakeLists.txt create mode 100644 compiler_gym/service/CMakeLists.txt create mode 100644 compiler_gym/service/proto/CMakeLists.txt create mode 100644 compiler_gym/service/runtime/CMakeLists.txt create mode 100644 compiler_gym/spaces/CMakeLists.txt create mode 100644 compiler_gym/third_party/CMakeLists.txt create mode 100644 compiler_gym/third_party/autophase/CMakeLists.txt create mode 100644 compiler_gym/third_party/cbench/CMakeLists.txt create mode 100644 compiler_gym/third_party/csmith/CMakeLists.txt create mode 100644 compiler_gym/third_party/inst2vec/CMakeLists.txt create mode 100644 compiler_gym/third_party/llvm/CMakeLists.txt create mode 100644 compiler_gym/third_party/neuro-vectorizer/CMakeLists.txt create mode 100644 compiler_gym/util/CMakeLists.txt create mode 100644 compiler_gym/util/flags/CMakeLists.txt create mode 100644 compiler_gym/views/CMakeLists.txt create mode 100644 compiler_gym/wrappers/CMakeLists.txt create mode 100644 external/absl/CMakeLists.txt create mode 100644 external/boost/CMakeLists.txt create mode 100644 external/cpuinfo/CMakeLists.txt create mode 100644 external/csmith/CMakeLists.txt create mode 100644 external/external.cmake create mode 100644 external/gflags/CMakeLists.txt create mode 100644 external/llvm/CMakeLists.txt create mode 100644 external/programl/CMakeLists.txt create mode 100644 external/protobuf/CMakeLists.txt create mode 100644 external/protobuf/build_protobuf.cmake create mode 100644 external/subprocess/CMakeLists.txt create mode 100644 tests/CMakeLists.txt create mode 100644 tests/bin/CMakeLists.txt create mode 100644 tests/datasets/CMakeLists.txt create mode 100644 tests/fuzzing/CMakeLists.txt create mode 100644 tests/gcc/CMakeLists.txt create mode 100644 tests/gcc/datasets/CMakeLists.txt create mode 100644 tests/leaderboard/CMakeLists.txt create mode 100644 tests/llvm/CMakeLists.txt create mode 100644 tests/llvm/datasets/CMakeLists.txt create mode 100644 tests/llvm/service/CMakeLists.txt create mode 100644 tests/loop_tool/CMakeLists.txt create mode 100644 tests/pytest_plugins/CMakeLists.txt create mode 100644 tests/service/CMakeLists.txt create mode 100644 tests/service/proto/CMakeLists.txt create mode 100644 tests/service/runtime/CMakeLists.txt create mode 100644 tests/spaces/CMakeLists.txt create mode 100644 tests/util/CMakeLists.txt create mode 100644 tests/views/CMakeLists.txt create mode 100644 tests/wrappers/CMakeLists.txt diff --git a/.github/actions/install-cmake-build-dependencies/action.yaml b/.github/actions/install-cmake-build-dependencies/action.yaml new file mode 100644 index 000000000..4d75b2538 --- /dev/null +++ b/.github/actions/install-cmake-build-dependencies/action.yaml @@ -0,0 +1,27 @@ +--- +name: Install CMake build dependencies +description: Install CMake build dependencies +runs: + using: composite + steps: + - uses: 
./.github/actions/install-build-dependencies + + - name: Install CMake dependencies + run: | + if [ "$(uname)" = "Darwin" ]; then + echo "CMake build for Darwin is unimplemented." + exit 1 + else + # Compiler + sudo apt-get install clang++-9 lld-9 tar bzip2 ninja-build + sudo apt-get install tar bzip2 ninja-build + # CMake + wget https://github.com/Kitware/CMake/releases/download/v3.20.5/cmake-3.20.5-linux-x86_64.sh -O /tmp/cmake.sh + sudo bash /tmp/cmake.sh --prefix=/usr/local --exclude-subdir --skip-license + rm /tmp/cmake.sh + # protobuf + sudo apt-get install autoconf libtool make + # Testing + sudo apt-get install coreutils + fi + shell: bash diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 872f438c6..f2de69995 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -52,6 +52,46 @@ jobs: if-no-files-found: error retention-days: 7 + build-linux-cmake: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Install build dependencies + uses: ./.github/actions/install-cmake-build-dependencies + + - name: CMake Build + run: | + cmake \ + -GNinja \ + -DCMAKE_C_COMPILER=clang-9 \ + -DCMAKE_CXX_COMPILER=clang++-9 \ + -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld" \ + -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld" \ + -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \ + -DPython3_FIND_VIRTUALENV=FIRST \ + -DCOMPILER_GYM_BUILD_TESTS=ON \ + -S . \ + -B ~/cmake_build + cmake --build ~/cmake_build + shell: bash + + - name: Install runtime dependencies + uses: ./.github/actions/install-runtime-dependencies + + - name: Install test dependencies + run: python -m pip install -r tests/requirements.txt + + - name: Run the test suite + run: | + cd ~/cmake_build + ctest --parallel $(nproc) --tests-regex tests/ --label-exclude manual + shell: bash + build-macos: runs-on: macos-latest steps: @@ -125,6 +165,7 @@ jobs: - name: Upload coverage report to Codecov uses: codecov/codecov-action@v2 + test-macos: needs: build-macos runs-on: macos-latest diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..4ed57c2bc --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,68 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cmake_minimum_required(VERSION 3.20) + +if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR) + message(FATAL_ERROR "In-source builds are unsupported. 
Please, build out of the source tree.") +endif() + +if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set(DARWIN TRUE) +endif() + +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +project(compiler_gym ASM C CXX) + +set(CMAKE_C_STANDARD 11 CACHE STRING "C standard to be used.") +set(CMAKE_CXX_STANDARD 17 CACHE STRING "C++ standard to be used.") + +set_property(GLOBAL PROPERTY USE_FOLDERS ON) + +list(APPEND CMAKE_MODULE_PATH + ${CMAKE_CURRENT_LIST_DIR}/build_tools/cmake/ +) + +set(COMPILER_GYM_BUILD_TESTS OFF CACHE BOOL "Enable Compiler Gym tests.") + +include(cg_macros) +include(cg_copts) +include(cg_genrule) +include(cg_cc_binary) +include(cg_cc_library) +include(cg_cc_test) +include(cg_py_binary) +include(cg_py_library) +include(cg_py_test) +include(cg_python) +include(cg_add_all_subdirs) +include(cg_filegroup) +include(grpc) +include(protobuf) + +set(COMPILER_GYM_PYTHONPATH "$ENV{PYTHONPATH}" CACHE STRING "PYTHONPATH environment variable during build step.") +if (COMPILER_GYM_PYTHONPATH) + string(PREPEND COMPILER_GYM_PYTHONPATH ":") +endif() +string(PREPEND COMPILER_GYM_PYTHONPATH "${CMAKE_BINARY_DIR}") +include(set_command_pythonpath) + +set(DEFAULT_CMAKE_BUILD_TYPE "Release") +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + message(STATUS "No build type selected, default to ${DEFAULT_CMAKE_BUILD_TYPE}") + set(CMAKE_BUILD_TYPE "${DEFAULT_CMAKE_BUILD_TYPE}" CACHE STRING "Build type (default ${DEFAULT_CMAKE_BUILD_TYPE})" FORCE) +endif() + +set(CMAKE_POSITION_INDEPENDENT_CODE TRUE) + +find_package(Python3 REQUIRED COMPONENTS Interpreter) + +include(external/external.cmake) +add_subdirectory(compiler_gym) +if(COMPILER_GYM_BUILD_TESTS) + enable_testing() + add_subdirectory(tests) +endif() diff --git a/INSTALL.md b/INSTALL.md index e05451afe..a1f6be475 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -7,12 +7,12 @@ Install the latest CompilerGym release using: CompilerGym requires Python >= 3.6. The binary works on macOS and Linux (on Ubuntu 18.04, Fedora 28, Debian 10 or newer equivalents). -## Building from Source +# Building from Source If you prefer, you may build from source. This requires a modern C++ toolchain and bazel. -### macOS +## macOS On macOS the required dependencies can be installed using [homebrew](https://docs.brew.sh/Installation): @@ -26,12 +26,13 @@ export PKG_CONFIG_PATH="/usr/local/opt/zlib/lib/pkgconfig" Now proceed to [All platforms](#all-platforms) below. -### Linux +## Linux On debian-based linux systems, install the required toolchain using: ```sh -sudo apt install clang-9 clang-format golang libjpeg-dev libtinfo5 m4 make patch zlib1g-dev +sudo apt install clang-9 clang++-9 clang-format golang libjpeg-dev \ + libtinfo5 m4 make patch zlib1g-dev tar bzip2 wget mkdir -pv ~/.local/bin wget https://github.com/bazelbuild/bazelisk/releases/download/v1.7.5/bazelisk-linux-amd64 -O ~/.local/bin/bazel wget https://github.com/hadolint/hadolint/releases/download/v1.19.0/hadolint-Linux-x86_64 -O ~/.local/bin/hadolint @@ -44,7 +45,7 @@ export CXX=clang++ ``` -### All platforms +## All platforms We recommend using [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/) @@ -70,6 +71,8 @@ your preferred branch and install the python development dependencies using: The `make init` target only needs to be run on initial setup and after pulling remote changes to the CompilerGym repository. 
+## Building from source with Bazel + Run the test suite to confirm that everything is working: make test @@ -87,3 +90,70 @@ environment using: conda deactivate conda env remove -n compiler_gym + +## Building from source with CMake + +### Dependency instructions for Ubuntu + +```bash +sudo apt-get install lld-9 \ + autoconf libtool ninja-build ccache git \ +``` + +Requires CMake (>=3.20). + +```bash +wget https://github.com/Kitware/CMake/releases/download/v3.20.5/cmake-3.20.5-linux-x86_64.sh -O cmake.sh +bash cmake.sh --prefix=$HOME/.local --exclude-subdir --skip-license +rm cmake.sh +``` + +### Dependency Arguments +By default most dependencies are built together with Compiler Gym. To search for a dependency instead use: + +``` +-DCOMPILER_GYM__PROVIDER=external +``` + +* `COMPILER_GYM_BOOST_PROVIDER` +* `COMPILER_GYM_GFLAGS_PROVIDER` +* `COMPILER_GYM_GLOG_PROVIDER` +* `COMPILER_GYM_GRPC_PROVIDER` +* `COMPILER_GYM_GTEST_PROVIDER` +* `COMPILER_GYM_NLOHMANN_JSON_PROVIDER` +* `COMPILER_GYM_PROTOBUF_PROVIDER` + +```bash +cmake -GNinja \ + -DCMAKE_C_COMPILER=clang-9 \ + -DCMAKE_CXX_COMPILER=clang++-9 \ + -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ # For faster rebuilds, can be removed + -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld" -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld" -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \ # For faster builds, can be removed + -DPython3_FIND_VIRTUALENV=FIRST \ + -S "" \ + -B "" + +cmake --build "" +``` +Additional optional configuration arguments: + +* Enables testing. + + ```bash + -DCOMPILER_GYM_BUILD_TESTS=ON + ``` + +* For faster linking. + + ```bash + -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld-9" + -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld-9" + -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld-9" + ``` + +* For faster rebuilds. + + ```bash + -DCMAKE_C_COMPILER_LAUNCHER=ccache + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache + ``` diff --git a/WORKSPACE b/WORKSPACE index a8caf96c0..95b207686 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -285,9 +285,9 @@ http_file( http_archive( name = "cpuinfo", build_file_content = all_content, - sha256 = "18a99130ced1eaacab2ba8f75a1435f9955aab54fa0436b60468f020876ee902", - strip_prefix = "cpuinfo-63b254577ed77a8004a9be6ac707f3dccc4e1fd9", - urls = ["https://github.com/pytorch/cpuinfo/archive/63b254577ed77a8004a9be6ac707f3dccc4e1fd9.tar.gz"], + sha256 = "b9874dbb2f9436c9d0d7f42aaf3f94f1af3da37bc0b250268760ada2507ca543", + strip_prefix = "cpuinfo-2e79955ecaec85da13ac8f1245a8b2afa10d31c2", + urls = ["https://github.com/pytorch/cpuinfo/archive/2e79955ecaec85da13ac8f1245a8b2afa10d31c2.tar.gz"], ) # === Csmith === diff --git a/build_tools/cmake/FindBazel.cmake b/build_tools/cmake/FindBazel.cmake new file mode 100644 index 000000000..4860d9415 --- /dev/null +++ b/build_tools/cmake/FindBazel.cmake @@ -0,0 +1,53 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``Bazel_FOUND`` + true if Bazel is available. +``Bazel_VERSION`` + the version of Bazel. +``Bazel_EXECUTABLE`` + Path to the Bazel executable. 
+ +#]=======================================================================] + +find_program(Bazel_EXECUTABLE bazel) + +execute_process(COMMAND "${Bazel_EXECUTABLE}" version + RESULT_VARIABLE _BAZEL_VERSION_EXECUTE_PROCESS_RESULT_VARIABLE + OUTPUT_VARIABLE _BAZEL_VERSION_EXECUTE_PROCESS_OUTPUT_VARIABLE + ERROR_QUIET +) + +set(Bazel_VERSION) + +if(_BAZEL_VERSION_EXECUTE_PROCESS_RESULT_VARIABLE EQUAL 0) + string(REGEX MATCH "Build label: ([0-9a-zA-Z.]+)" + _BAZEL_VERSION_REGEX_MATCH_OUTPUT_VARIABLE + "${_BAZEL_VERSION_EXECUTE_PROCESS_OUTPUT_VARIABLE}" + ) + + if(CMAKE_MATCH_1) + set(Bazel_VERSION "${CMAKE_MATCH_1}") + endif() + + unset(_BAZEL_VERSION_REGEX_MATCH_OUTPUT_VARIABLE) +endif() + +unset(_BAZEL_VERSION_EXECUTE_PROCESS_OUTPUT_VARIABLE) +unset(_BAZEL_VERSION_EXECUTE_PROCESS_RESULT_VARIABLE) + +include(FindPackageHandleStandardArgs) + +find_package_handle_standard_args(Bazel + FOUND_VAR Bazel_FOUND + REQUIRED_VARS Bazel_EXECUTABLE + VERSION_VAR Bazel_VERSION +) diff --git a/build_tools/cmake/FindClog.cmake b/build_tools/cmake/FindClog.cmake new file mode 100644 index 000000000..43c1ed177 --- /dev/null +++ b/build_tools/cmake/FindClog.cmake @@ -0,0 +1,42 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Find Clog headers and libraries. + +Imported Targets +^^^^^^^^^^^^^^^^ + +``Clog::libclog`` + +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``Clog_FOUND`` + true if Clog is available. + + +#]=======================================================================] + +include(FindPackageHandleStandardArgs) + +find_path(Clog_INCLUDE_DIRS clog.h + PATH_SUFFIXES include) + +find_library(Clog_LIBRARIES clog PATH_SUFFIXES lib) +if(Clog_INCLUDE_DIRS AND Clog_LIBRARIES) + add_library(Clog::libclog UNKNOWN IMPORTED) + set_target_properties(Clog::libclog PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Clog_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "C" + IMPORTED_LOCATION "${Clog_LIBRARIES}") +endif() +find_package_handle_standard_args( + Clog + REQUIRED_VARS + Clog_INCLUDE_DIRS + Clog_LIBRARIES) diff --git a/build_tools/cmake/FindCsmith.cmake b/build_tools/cmake/FindCsmith.cmake new file mode 100644 index 000000000..114f74f89 --- /dev/null +++ b/build_tools/cmake/FindCsmith.cmake @@ -0,0 +1,75 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Find Csmith headers and library. + +Imported Targets +^^^^^^^^^^^^^^^^ + +``Csmith::libcsmith`` + The Csmith library, if found. +``Csmith::csmith`` + The Csmith executable. + +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``Csmith_FOUND`` + true if Csmith is available. +``Csmith_VERSION`` + the version of Csmith. +``Csmith_ROOT_DIR`` +``Csmith_EXECUTABLE`` +``Csmith_LIBRARIES`` + the libraries to link against to use Csmith. +``Csmith_LIBRARY_DIRS`` + the directories of the Csmith libraries. +``Csmith_INCLUDE_DIRS`` + where to find the libinput headers. 
+ + +#]=======================================================================] + +include(FindPackageHandleStandardArgs) + +find_program(Csmith_EXECUTABLE csmith) +if (Csmith_EXECUTABLE) + execute_process( + COMMAND "${Csmith_EXECUTABLE}" --version + OUTPUT_VARIABLE Csmith_VERSION) + string(REGEX MATCH "[0-9]+\\.[0-9]+\\.[0-9]+" Csmith_VERSION "${Csmith_VERSION}") + + add_executable(Csmith::csmith IMPORTED GLOBAL) + set_target_properties(Csmith::csmith PROPERTIES IMPORTED_LOCATION "${Csmith_EXECUTABLE}") + + get_filename_component(Csmith_ROOT_DIR "${Csmith_EXECUTABLE}" DIRECTORY) + get_filename_component(Csmith_ROOT_DIR "${Csmith_ROOT_DIR}/.." ABSOLUTE) + set(Csmith_ROOT_DIR "${Csmith_ROOT_DIR}" CACHE string "Path to the root installation directory of Csmith.") +endif() +find_path(Csmith_INCLUDE_DIRS csmith.h PATH_SUFFIXES csmith csmith-2.3.0) +find_library(Csmith_LIBRARIES csmith) +if (Csmith_LIBRARIES) + get_filename_component(Csmith_LIBRARY_DIRS "${Csmith_LIBRARIES}" DIRECTORY) +endif() +if (Csmith_LIBRARIES AND Csmith_INCLUDE_DIRS) + add_library(Csmith::libcsmith UNKNOWN IMPORTED) + set_target_properties(Csmith::libcsmith PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Csmith_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "C" + IMPORTED_LOCATION "${Csmith_LIBRARIES}") +endif() + +find_package_handle_standard_args(Csmith + REQUIRED_VARS + Csmith_ROOT_DIR + Csmith_EXECUTABLE + Csmith_INCLUDE_DIRS + Csmith_LIBRARIES + Csmith_LIBRARY_DIRS + VERSION_VAR Csmith_VERSION + HANDLE_VERSION_RANGE) diff --git a/build_tools/cmake/FindLabm8.cmake b/build_tools/cmake/FindLabm8.cmake new file mode 100644 index 000000000..811028c2e --- /dev/null +++ b/build_tools/cmake/FindLabm8.cmake @@ -0,0 +1,146 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Find Labm8 headers and libraries. + +Imported Targets +^^^^^^^^^^^^^^^^ + +``Labm8::cpp::status`` +``Labm8::cpp::statusor`` +``Labm8::cpp::logging`` +``Labm8::cpp::string`` +``Labm8::cpp::stringpiece`` + +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``Labm8_FOUND`` + true if Labm8 is available. 
+ + +#]=======================================================================] + +include(FindPackageHandleStandardArgs) + +function(has_absl _RES_VAR) + if(TARGET absl::strings AND + TARGET absl::time) + set(${_RES_VAR} True PARENT_SCOPE) + else() + set(${_RES_VAR} False PARENT_SCOPE) + endif() +endfunction() + +function(has_fmt _RES_VAR) + if(TARGET fmt) + set(${_RES_VAR} True PARENT_SCOPE) + else() + set(${_RES_VAR} False PARENT_SCOPE) + endif() +endfunction() + +if(Labm8_FIND_REQUIRED) + set(_REQUIRED REQUIRED) +endif() + +has_absl(Labm8_HAS_absl) +if(NOT Labm8_HAS_absl) + find_package(absl ${_REQUIRED}) + has_absl(Labm8_HAS_absl) +endif() + +has_fmt(Labm8_HAS_fmt) +if(NOT Labm8_HAS_fmt) + find_package(fmt ${_REQUIRED}) + has_fmt(Labm8_HAS_fmt) +endif() + +find_path(Labm8_INCLUDE_DIRS + labm8/cpp/status.h + ) + +find_library(Labm8_cpp_string_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}string${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES labm8/cpp) +if(Labm8_INCLUDE_DIRS AND Labm8_cpp_string_LIBRARIES) + add_library(Labm8::cpp::string UNKNOWN IMPORTED) + set_target_properties(Labm8::cpp::string PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Labm8_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Labm8_cpp_string_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES absl::strings) +endif() + +find_library(Labm8_cpp_stringpiece_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}stringpiece${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES labm8/cpp) +if(Labm8_INCLUDE_DIRS AND Labm8_cpp_stringpiece_LIBRARIES) + add_library(Labm8::cpp::stringpiece UNKNOWN IMPORTED) + set_target_properties(Labm8::cpp::stringpiece PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Labm8_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Labm8_cpp_stringpiece_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES Labm8::cpp::string) +endif() + +find_library(Labm8_cpp_status_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}status${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES labm8/cpp) +if(Labm8_INCLUDE_DIRS AND Labm8_cpp_status_LIBRARIES) + add_library(Labm8::cpp::status UNKNOWN IMPORTED) + set(_LINK_LIBS + Labm8::cpp::string + Labm8::cpp::stringpiece + fmt) + set_target_properties(Labm8::cpp::status PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Labm8_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Labm8_cpp_status_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES "${_LINK_LIBS}") +endif() + +find_library(Labm8_cpp_statusor_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}statusor${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES labm8/cpp) +if(Labm8_INCLUDE_DIRS AND Labm8_cpp_statusor_LIBRARIES) + add_library(Labm8::cpp::statusor UNKNOWN IMPORTED) + set_target_properties(Labm8::cpp::statusor PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Labm8_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Labm8_cpp_statusor_LIBRARIES}") +endif() + +find_library(Labm8_cpp_logging_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}logging${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES labm8/cpp) +if(Labm8_INCLUDE_DIRS AND Labm8_cpp_logging_LIBRARIES) + add_library(Labm8::cpp::logging UNKNOWN IMPORTED) + set(_LINK_LIBS + Labm8::cpp::string + Labm8::cpp::stringpiece + absl::strings + absl::time) + set_target_properties(Labm8::cpp::logging PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Labm8_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Labm8_cpp_logging_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES "${_LINK_LIBS}") +endif() + 
+find_package_handle_standard_args( + Labm8 + REQUIRED_VARS + Labm8_HAS_absl + Labm8_HAS_fmt + Labm8_INCLUDE_DIRS + Labm8_cpp_string_LIBRARIES + Labm8_cpp_stringpiece_LIBRARIES + Labm8_cpp_status_LIBRARIES + Labm8_cpp_statusor_LIBRARIES + Labm8_cpp_logging_LIBRARIES) diff --git a/build_tools/cmake/FindProGraML.cmake b/build_tools/cmake/FindProGraML.cmake new file mode 100644 index 000000000..a459f25e2 --- /dev/null +++ b/build_tools/cmake/FindProGraML.cmake @@ -0,0 +1,179 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Find ProGraML headers and libraries. + +Imported Targets +^^^^^^^^^^^^^^^^ + +``ProGraML::graph::format::node_link_graph`` +``ProGraML::ir::llvm::llvm-10`` +``ProGraML::proto::programl_cc`` +``ProGraML::graph::program_graph_builder`` + +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``ProGraML_FOUND`` + true if ProGraML is available. + + +#]=======================================================================] + +include(FindPackageHandleStandardArgs) + +function(has_Labm8 _RES_VAR) + if(TARGET Labm8::cpp::status AND + TARGET Labm8::cpp::statusor AND + TARGET Labm8::cpp::logging AND + TARGET Labm8::cpp::string AND + TARGET Labm8::cpp::stringpiece) + set(${_RES_VAR} True PARENT_SCOPE) + else() + set(${_RES_VAR} False PARENT_SCOPE) + endif() +endfunction() + +function(has_absl _RES_VAR) + if(TARGET absl::flat_hash_map AND + TARGET absl::flat_hash_set) + set(${_RES_VAR} True PARENT_SCOPE) + else() + set(${_RES_VAR} False PARENT_SCOPE) + endif() +endfunction() + +if(ProGraML_FIND_REQUIRED) + set(_REQUIRED REQUIRED) +endif() + +has_Labm8(ProGraML_HAS_Labm8) +if(NOT ProGraML_HAS_Labm8) + find_package(Labm8 ${_REQUIRED}) + has_Labm8(ProGraML_HAS_Labm8) +endif() + +has_absl(ProGraML_HAS_absl) +if(NOT ProGraML_HAS_absl) + find_package(absl ${_REQUIRED}) + has_absl(ProGraML_HAS_absl) +endif() + +# Deliberately find static libs. +# For some reason the linker takes the path to the library +# instead of just the name for the dynamic section when linking to these libs. 
+# See https://stackoverflow.com/questions/70088552/linker-adds-the-path-to-library-in-the-dynamic-section-instead-of-its-name +find_library(ProGraML_proto_programl_cc_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}programl${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES programl/proto) +find_path(ProGraML_proto_programl_cc_INCLUDE_DIRS programl/proto/program_graph_options.pb.h) +if (ProGraML_proto_programl_cc_LIBRARIES AND ProGraML_proto_programl_cc_INCLUDE_DIRS) + add_library(ProGraML::proto::programl_cc UNKNOWN IMPORTED) + set_target_properties(ProGraML::proto::programl_cc PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${ProGraML_proto_programl_cc_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${ProGraML_proto_programl_cc_LIBRARIES}") +endif() + +find_library(ProGraML_graph_program_graph_builder_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}program_graph_builder${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES programl/graph/) +find_path(ProGraML_graph_program_graph_builder_INCLUDE_DIRS programl/graph/program_graph_builder.h) +if (ProGraML_graph_program_graph_builder_LIBRARIES AND + ProGraML_graph_program_graph_builder_INCLUDE_DIRS) + set(_INCLUDE_DIRS ${ProGraML_graph_program_graph_builder_INCLUDE_DIRS}) + add_library(ProGraML::graph::program_graph_builder UNKNOWN IMPORTED) + set_target_properties(ProGraML::graph::program_graph_builder PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES + "${ProGraML_graph_program_graph_builder_INCLUDE_DIRS}") + set_target_properties(ProGraML::graph::program_graph_builder PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${ProGraML_graph_program_graph_builder_LIBRARIES}") + set(_LINK_LIBS + ProGraML::proto::programl_cc + absl::flat_hash_map + absl::flat_hash_set + Labm8::cpp::logging + Labm8::cpp::status + Labm8::cpp::statusor + Labm8::cpp::string) + set_target_properties(ProGraML::graph::program_graph_builder + PROPERTIES IMPORTED_LINK_INTERFACE_LIBRARIES "${_LINK_LIBS}") +endif() + +find_library(ProGraML_graph_features_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}features${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES programl/graph/) +find_path(ProGraML_graph_features_INCLUDE_DIRS programl/graph/features.h) +if (ProGraML_graph_features_LIBRARIES AND + ProGraML_graph_features_INCLUDE_DIRS) + set(_INCLUDE_DIRS ${ProGraML_graph_features_INCLUDE_DIRS}) + add_library(ProGraML::graph::features UNKNOWN IMPORTED) + set_target_properties(ProGraML::graph::features PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${ProGraML_graph_features_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${ProGraML_graph_features_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES ProGraML::proto::programl_cc) +endif() + +find_library(ProGraML_graph_format_node_link_graph_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}node_link_graph${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES programl/graph/format) +find_path(ProGraML_graph_format_node_link_graph_INCLUDE_DIRS programl/graph/format/node_link_graph.h) +if (ProGraML_graph_format_node_link_graph_LIBRARIES AND + ProGraML_graph_format_node_link_graph_INCLUDE_DIRS) + add_library(ProGraML::graph::format::node_link_graph UNKNOWN IMPORTED) + set(_INCLUDE_DIRS ${ProGraML_graph_format_node_link_graph_INCLUDE_DIRS}) + set(_LINK_LIBS + ProGraML::proto::programl_cc + Labm8::cpp::status + Labm8::cpp::logging) + set_target_properties(ProGraML::graph::format::node_link_graph PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION 
"${ProGraML_graph_format_node_link_graph_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES "${_LINK_LIBS}" + ) +endif() + +find_library(ProGraML_ir_llvm_llvm_10_LIBRARIES + ${CMAKE_STATIC_LIBRARY_PREFIX}llvm-10${CMAKE_STATIC_LIBRARY_SUFFIX} + PATH_SUFFIXES programl/ir/llvm) +find_path(ProGraML_ir_llvm_llvm_10_INCLUDE_DIRS programl/ir/llvm/llvm.h) +if (ProGraML_ir_llvm_llvm_10_LIBRARIES AND ProGraML_ir_llvm_llvm_10_INCLUDE_DIRS) + add_library(ProGraML::ir::llvm::llvm-10 UNKNOWN IMPORTED) + set(_LINK_LIBS + ProGraML::graph::features + ProGraML::graph::program_graph_builder + ProGraML::proto::programl_cc + absl::flat_hash_map + absl::flat_hash_set + Labm8::cpp::status + Labm8::cpp::statusor + Labm8::cpp::string) + set_target_properties(ProGraML::ir::llvm::llvm-10 PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${ProGraML_ir_llvm_llvm_10_INCLUDE_DIRS}" + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${ProGraML_ir_llvm_llvm_10_LIBRARIES}" + IMPORTED_LINK_INTERFACE_LIBRARIES "${_LINK_LIBS}") +endif() + +find_package_handle_standard_args(ProGraML + REQUIRED_VARS + ProGraML_HAS_Labm8 + ProGraML_HAS_absl + ProGraML_graph_format_node_link_graph_LIBRARIES + ProGraML_graph_format_node_link_graph_INCLUDE_DIRS + ProGraML_graph_features_LIBRARIES + ProGraML_graph_features_INCLUDE_DIRS + ProGraML_graph_program_graph_builder_INCLUDE_DIRS + ProGraML_graph_program_graph_builder_LIBRARIES + ProGraML_ir_llvm_llvm_10_LIBRARIES + ProGraML_ir_llvm_llvm_10_INCLUDE_DIRS + ProGraML_proto_programl_cc_LIBRARIES + ProGraML_proto_programl_cc_INCLUDE_DIRS) diff --git a/build_tools/cmake/FindSubprocess.cmake b/build_tools/cmake/FindSubprocess.cmake new file mode 100644 index 000000000..f10729f1e --- /dev/null +++ b/build_tools/cmake/FindSubprocess.cmake @@ -0,0 +1,37 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +#[=======================================================================[.rst: +Find Subprocess headers and library. + +Imported Targets +^^^^^^^^^^^^^^^^ + +``Subprocess::libsubprocess`` + +Result Variables +^^^^^^^^^^^^^^^^ + +This will define the following variables in your project: + +``Subprocess_FOUND`` + true if Subprocess is available. + + +#]=======================================================================] + +include(FindPackageHandleStandardArgs) + +find_path(Subprocess_INCLUDE_DIRS subprocess/subprocess.hpp) +if (Subprocess_INCLUDE_DIRS) + add_library(Subprocess::libsubprocess INTERFACE IMPORTED) + set_target_properties(Subprocess::libsubprocess PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Subprocess_INCLUDE_DIRS}") +endif() + + +find_package_handle_standard_args(Subprocess + REQUIRED_VARS + Subprocess_INCLUDE_DIRS) diff --git a/build_tools/cmake/build_external_cmake_project.cmake b/build_tools/cmake/build_external_cmake_project.cmake new file mode 100644 index 000000000..a3ebb9437 --- /dev/null +++ b/build_tools/cmake/build_external_cmake_project.cmake @@ -0,0 +1,53 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +include_guard(GLOBAL) +include(CMakeParseArguments) +include(write_cache_script) + +function(build_external_cmake_project) + cmake_parse_arguments( + _RULE + "" + "NAME;SRC_DIR;INSTALL_PREFIX" + "CONFIG_ARGS" + ${ARGN} + ) + + set(_BIN_DIR "${CMAKE_CURRENT_BINARY_DIR}/external/${_RULE_NAME}") + if(_RULE_INSTALL_PREFIX) + set(_INSTALL_PREFIX "${_RULE_INSTALL_PREFIX}") + else() + set(_INSTALL_PREFIX "${_BIN_DIR}/install") + endif() + + set(_INTIAL_CACHE_PATH "${_BIN_DIR}/${_RULE_NAME}_initial_cache.cmake") + write_cache_script("${_INTIAL_CACHE_PATH}") + + execute_process( + COMMAND "${CMAKE_COMMAND}" + -G "${CMAKE_GENERATOR}" # For some reason the generator is not taken from the initial cache. + -C "${_INTIAL_CACHE_PATH}" + -S "${_RULE_SRC_DIR}" + -B "${_BIN_DIR}" + -D "CMAKE_INSTALL_PREFIX=${_INSTALL_PREFIX}" + ${_RULE_CONFIG_ARGS} + COMMAND_ERROR_IS_FATAL ANY + ) + execute_process( + COMMAND + "${CMAKE_COMMAND}" + --build "${_BIN_DIR}" + COMMAND_ERROR_IS_FATAL ANY + ) + execute_process( + COMMAND + "${CMAKE_COMMAND}" + --install "${_BIN_DIR}" + COMMAND_ERROR_IS_FATAL ANY + ) + list(PREPEND CMAKE_PREFIX_PATH "${_INSTALL_PREFIX}") + set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} PARENT_SCOPE) +endfunction() diff --git a/build_tools/cmake/cg_add_all_subdirs.cmake b/build_tools/cmake/cg_add_all_subdirs.cmake new file mode 100644 index 000000000..5a7cd2f02 --- /dev/null +++ b/build_tools/cmake/cg_add_all_subdirs.cmake @@ -0,0 +1,32 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# === +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_add_all_subdirs.cmake +# Copyright 2020 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +# cg_add_all_subidrs +# +# CMake function to add all subdirectories of the current directory that contain +# a CMakeLists.txt file +# +# Takes no arguments. +function(cg_add_all_subdirs) + FILE(GLOB _CHILDREN RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/*) + SET(_DIRLIST "") + foreach(_CHILD ${_CHILDREN}) + if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/${_CHILD} AND EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${_CHILD}/CMakeLists.txt) + LIST(APPEND _DIRLIST ${_CHILD}) + endif() + endforeach() + + foreach(subdir ${_DIRLIST}) + add_subdirectory(${subdir}) + endforeach() +endfunction() diff --git a/build_tools/cmake/cg_cc_binary.cmake b/build_tools/cmake/cg_cc_binary.cmake new file mode 100644 index 000000000..6eb377195 --- /dev/null +++ b/build_tools/cmake/cg_cc_binary.cmake @@ -0,0 +1,148 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_binary.cmake[ +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(CMakeParseArguments) + +# cg_cc_binary() +# +# CMake function to imitate Bazel's cc_binary rule. 
+# +# Parameters: +# NAME: name of target (see Usage below) +# SRCS: List of source files for the binary +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# COPTS: List of private compile options +# DEFINES: List of public defines +# LINKOPTS: List of link options +# TESTONLY: for testing; won't compile when tests are disabled +# HOSTONLY: host only; compile using host toolchain when cross-compiling +# +# Note: +# cg_cc_binary will create a binary called ${PACKAGE_NAME}_${NAME}, e.g. +# cmake_base_foo with two alias (readonly) targets, a qualified +# ${PACKAGE_NS}::${NAME} and an unqualified ${NAME}. Thus NAME must be globally +# unique in the project. +# +# Usage: +# cg_cc_library( +# NAME +# awesome +# HDRS +# "a.h" +# SRCS +# "a.cc" +# PUBLIC +# ) +# +# cg_cc_binary( +# NAME +# awesome_tool +# SRCS +# "awesome-tool-main.cc" +# DEPS +# compiler_gym::awesome +# ) +function(cg_cc_binary) + cmake_parse_arguments( + _RULE + "HOSTONLY;TESTONLY" + "NAME" + "SRCS;COPTS;DEFINES;LINKOPTS;DATA;DEPS;ABS_DEPS;INCLUDES" + ${ARGN} + ) + + if(_RULE_TESTONLY AND NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + cg_package_ns(_PACKAGE_NS) + # Prefix the library with the package name, so we get: cg_package_name + rename_bazel_targets(_NAME "${_RULE_NAME}") + + add_executable(${_NAME} "") + add_executable(${_PACKAGE_NS}::${_RULE_NAME} ALIAS ${_NAME}) + + # If the binary name matches the package then treat it as a default. For + # example, foo/bar/ library 'bar' would end up as 'foo::bar'. This isn't + # likely to be common for binaries, but is consistent with the behavior for + # libraries and in Bazel. + cg_package_dir(_PACKAGE_DIR) + if(${_RULE_NAME} STREQUAL ${_PACKAGE_DIR}) + add_executable(${_PACKAGE_NS} ALIAS ${_NAME}) + endif() + + # Finally, since we have so few binaries and we also want to support + # installing from a separate host build, binaries get an unqualified global + # alias. This means binary names must be unique across the whole project. + # (We could consider making this configurable). + add_executable(${_RULE_NAME} ALIAS ${_NAME}) + + set_target_properties(${_NAME} PROPERTIES OUTPUT_NAME "${_RULE_NAME}") + if(_RULE_SRCS) + target_sources(${_NAME} + PRIVATE + ${_RULE_SRCS} + ) + else() + set(_DUMMY_SRC "${CMAKE_CURRENT_BINARY_DIR}/${_NAME}_dummy.cc") + file(WRITE ${_DUMMY_SRC} "") + target_sources(${_NAME} + PRIVATE + ${_DUMMY_SRC} + ) + endif() + target_include_directories(${_NAME} SYSTEM + PUBLIC + "$" + "$" + ) + target_include_directories(${_NAME} + PUBLIC + "$" + ) + target_compile_definitions(${_NAME} + PUBLIC + ${_RULE_DEFINES} + ) + target_compile_options(${_NAME} + PRIVATE + ${COMPILER_GYM_DEFAULT_COPTS} + ${_RULE_COPTS} + ) + target_link_options(${_NAME} + PRIVATE + ${COMPILER_GYM_DEFAULT_LINKOPTS} + ${_RULE_LINKOPTS} + ) + + rename_bazel_targets(_RULE_DEPS "${_RULE_DEPS}") + + target_link_libraries(${_NAME} + PUBLIC + ${_RULE_DEPS} + ${_RULE_ABS_DEPS} + ) + + cg_add_data_dependencies(NAME ${_RULE_NAME} DATA ${_RULE_DATA}) + + # Add all targets to a folder in the IDE for organization. 
+ set_target_properties(${_NAME} PROPERTIES + FOLDER ${COMPILER_GYM_IDE_FOLDER}/binaries + CXX_STANDARD ${COMPILER_GYM_CXX_STANDARD} + CXX_STANDARD_REQUIRED ON) + + install(TARGETS ${_NAME} + RENAME ${_RULE_NAME} + COMPONENT ${_RULE_NAME} + RUNTIME DESTINATION bin) +endfunction() diff --git a/build_tools/cmake/cg_cc_library.cmake b/build_tools/cmake/cg_cc_library.cmake new file mode 100644 index 000000000..dfcd066d6 --- /dev/null +++ b/build_tools/cmake/cg_cc_library.cmake @@ -0,0 +1,192 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_library.cmake +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(CMakeParseArguments) + +# cg_cc_library() +# +# CMake function to imitate Bazel's cc_library rule. +# +# Parameters: +# NAME: name of target (see Note) +# HDRS: List of public header files for the library +# TEXTUAL_HDRS: List of public header files that cannot be compiled on their own +# SRCS: List of source files for the library +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# COPTS: List of private compile options +# DEFINES: List of public defines +# INCLUDES: Include directories to add to dependencies +# LINKOPTS: List of link options +# Also in IDE, target will appear in IREE folder while non PUBLIC will be in IREE/internal. +# TESTONLY: When added, this target will only be built if user passes -DCOMPILER_GYM_BUILD_TESTS=ON to CMake. +# SHARED: If set, will compile to a shared object. +# +# cg_cc_library( +# NAME +# awesome +# HDRS +# "a.h" +# SRCS +# "a.cc" +# ) +# cg_cc_library( +# NAME +# fantastic_lib +# SRCS +# "b.cc" +# DEPS +# package::awesome # not "awesome" ! +# PUBLIC +# ) +# +# cg_cc_library( +# NAME +# main_lib +# ... +# DEPS +# package::fantastic_lib +# ) +function(cg_cc_library) + cmake_parse_arguments( + _RULE + "PUBLIC;TESTONLY;SHARED" + "NAME" + "HDRS;TEXTUAL_HDRS;SRCS;COPTS;DEFINES;LINKOPTS;DATA;DEPS;ABS_DEPS;NON_LIB_DEPS;INCLUDES" + ${ARGN} + ) + + if(_RULE_TESTONLY AND NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + cg_package_ns(_PACKAGE_NS) + rename_bazel_targets(_DEPS "${_RULE_DEPS}") + list(APPEND _DEPS ${_RULE_ABS_DEPS}) + + # Prefix the library with the package name, so we get: cg_package_name. + rename_bazel_targets(_NAME "${_RULE_NAME}") + + # Check if this is a header-only library. + # Note that as of February 2019, many popular OS's (for example, Ubuntu + # 16.04 LTS) only come with cmake 3.5 by default. For this reason, we can't + # use list(FILTER...) 
+ set(_CC_SRCS "${_RULE_SRCS}") + foreach(src_file IN LISTS _CC_SRCS) + if(${src_file} MATCHES ".*\\.(h|inc)") + list(REMOVE_ITEM _CC_SRCS "${src_file}") + endif() + endforeach() + if("${_CC_SRCS}" STREQUAL "") + set(_RULE_IS_INTERFACE 1) + else() + set(_RULE_IS_INTERFACE 0) + endif() + + if(NOT _RULE_IS_INTERFACE) + if(_RULE_SHARED) + add_library(${_NAME} SHARED "") + else() + add_library(${_NAME} STATIC "") + endif() + if(_RULE_SRCS) + list(JOIN _RULE_SRCS ";\n" SRCSTR) + message("${SRCSTR}") + endif() + target_sources(${_NAME} + PRIVATE + ${_RULE_SRCS} + ${_RULE_TEXTUAL_HDRS} + ${_RULE_HDRS} + ) + target_include_directories(${_NAME} SYSTEM + PUBLIC + "$" + "$" + ) + target_include_directories(${_NAME} + PUBLIC + "$" + ${_RULE_INCLUDES} + ) + target_compile_options(${_NAME} + PRIVATE + ${COMPILER_GYM_DEFAULT_COPTS} + ${_RULE_COPTS} + ) + target_link_options(${_NAME} + PRIVATE + ${COMPILER_GYM_DEFAULT_LINKOPTS} + ${_RULE_LINKOPTS} + ) + target_link_libraries(${_NAME} + PUBLIC + ${_DEPS} + ) + + target_compile_definitions(${_NAME} + PUBLIC + ${_RULE_DEFINES} + ) + + # Add all targets to a folder in the IDE for organization. + if(_RULE_PUBLIC) + set_property(TARGET ${_NAME} PROPERTY FOLDER ${COMPILER_GYM_IDE_FOLDER}) + elseif(_RULE_TESTONLY) + set_property(TARGET ${_NAME} PROPERTY FOLDER ${COMPILER_GYM_IDE_FOLDER}/test) + else() + set_property(TARGET ${_NAME} PROPERTY FOLDER ${COMPILER_GYM_IDE_FOLDER}/internal) + endif() + + # INTERFACE libraries can't have the CXX_STANDARD property set. + set_property(TARGET ${_NAME} PROPERTY CXX_STANDARD ${COMPILER_GYM_CXX_STANDARD}) + set_property(TARGET ${_NAME} PROPERTY CXX_STANDARD_REQUIRED ON) + else() + # Generating header-only library. + add_library(${_NAME} INTERFACE ${_RULE_SRCS} ${_RULE_TEXTUAL_HDRS} ${_RULE_HDRS}) + target_include_directories(${_NAME} SYSTEM + INTERFACE + "$" + "$" + ) + target_link_options(${_NAME} + INTERFACE + ${COMPILER_GYM_DEFAULT_LINKOPTS} + ${_RULE_LINKOPTS} + ) + target_link_libraries(${_NAME} + INTERFACE + ${_DEPS} + ) + target_compile_definitions(${_NAME} + INTERFACE + ${_RULE_DEFINES} + ) + endif() + + cg_add_data_dependencies(NAME ${_RULE_NAME} DATA ${_RULE_DATA}) + + if (_RULE_NON_LIB_DEPS) + rename_bazel_targets(_NON_LIB_DEPS "${_RULE_NON_LIB_DEPS}") + add_dependencies(${_NAME} ${_NON_LIB_DEPS}) + endif() + + add_library(${_PACKAGE_NS}::${_RULE_NAME} ALIAS ${_NAME}) + + # If the library name matches the final component of the package then treat + # it as a default. For example, foo/bar/ library 'bar' would end up as + # 'foo::bar'. + cg_package_dir(_PACKAGE_DIR) + if(${_RULE_NAME} STREQUAL ${_PACKAGE_DIR}) + add_library(${_PACKAGE_NS} ALIAS ${_NAME}) + endif() +endfunction() diff --git a/build_tools/cmake/cg_cc_test.cmake b/build_tools/cmake/cg_cc_test.cmake new file mode 100644 index 000000000..d0df3b48c --- /dev/null +++ b/build_tools/cmake/cg_cc_test.cmake @@ -0,0 +1,96 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_test.cmake +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(CMakeParseArguments) +include(cg_installed_test) + +# cg_cc_test() +# +# CMake function to imitate Bazel's cc_test rule. 
+# +# Parameters: +# NAME: name of target. This name is used for the generated executable and +# SRCS: List of source files for the binary +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# COPTS: List of private compile options +# DEFINES: List of public defines +# LINKOPTS: List of link options +# LABELS: Additional labels to apply to the test. The package path is added +# automatically. +# +# Note: +# cg_cc_test will create a binary called ${PACKAGE_NAME}_${NAME}, e.g. +# cg_base_foo_test. +# +# +# Usage: +# cg_cc_library( +# NAME +# awesome +# HDRS +# "a.h" +# SRCS +# "a.cc" +# PUBLIC +# ) +# +# cg_cc_test( +# NAME +# awesome_test +# SRCS +# "awesome_test.cc" +# DEPS +# gtest_main +# compiler_gym::awesome +# ) +function(cg_cc_test) + if(NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + cmake_parse_arguments( + _RULE + "" + "NAME" + "SRCS;COPTS;DEFINES;LINKOPTS;DATA;DEPS;LABELS" + ${ARGN} + ) + + cg_cc_binary(${ARGV}) + + rename_bazel_targets(_NAME "${_RULE_NAME}") + cg_package_ns(_PACKAGE_NS) + string(REPLACE "::" "/" _PACKAGE_PATH ${_PACKAGE_NS}) + set(_TEST_NAME "${_PACKAGE_PATH}/${_RULE_NAME}") + set(_LABELS "${_RULE_LABELS}") + list(APPEND _LABELS "${_PACKAGE_PATH}") + + cg_add_installed_test( + TEST_NAME "${_TEST_NAME}" + LABELS "${_LABELS}" + COMMAND + # We run all our tests through a custom test runner to allow temp + # directory cleanup upon test completion. + "${CMAKE_SOURCE_DIR}/build_tools/cmake/run_test.${COMPILER_GYM_HOST_SCRIPT_EXT}" + "$" + INSTALLED_COMMAND + # Must match install destination below. + "${_PACKAGE_PATH}/$" + ) + + install(TARGETS ${_NAME} + DESTINATION "tests/${_PACKAGE_PATH}" + COMPONENT Tests + ) + +endfunction() diff --git a/build_tools/cmake/cg_copts.cmake b/build_tools/cmake/cg_copts.cmake new file mode 100644 index 000000000..9ae5edd69 --- /dev/null +++ b/build_tools/cmake/cg_copts.cmake @@ -0,0 +1,124 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +#------------------------------------------------------------------------------- +# C/C++ options as used within Compiler Gym +#------------------------------------------------------------------------------- +# +# ██ ██ █████ ██████ ███ ██ ██ ███ ██ ██████ +# ██ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██ ██ +# ██ █ ██ ███████ ██████ ██ ██ ██ ██ ██ ██ ██ ██ ███ +# ██ ███ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ +# ███ ███ ██ ██ ██ ██ ██ ████ ██ ██ ████ ██████ +# +# Everything here is added to *every* cg_cc_library/cg_cc_binary/etc. +# That includes both runtime and compiler components, and these may propagate +# out to user code interacting with either (such as custom modules). +# +# Be extremely judicious in the use of these flags. +# +# - Need to disable a warning? +# Usually these are encountered in compiler-specific code and can be disabled +# in a compiler-specific way. Only add global warning disables when it's clear +# that we never want them or that they'll show up in a lot of places. +# +# See: https://stackoverflow.com/questions/3378560/how-to-disable-gcc-warnings-for-a-few-lines-of-code +# +# - Need to add a linker dependency? 
+# First figure out if you *really* need it. If it's only required on specific +# platforms and in very specific files clang or msvc are used prefer +# autolinking. GCC is stubborn and doesn't have autolinking so additional +# flags may be required there. +# +# See: https://en.wikipedia.org/wiki/Auto-linking + + +set(COMPILER_GYM_CXX_STANDARD ${CMAKE_CXX_STANDARD}) + +# TODO(benvanik): fix these names (or remove entirely). +set(COMPILER_GYM_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}) +set(COMPILER_GYM_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}) +set(COMPILER_GYM_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}) + +# Compiler diagnostics. +cg_select_compiler_opts(COMPILER_GYM_DEFAULT_COPTS + # Clang diagnostics. These largely match the set of warnings used within + # Google. They have not been audited super carefully by the IREE team but are + # generally thought to be a good set and consistency with those used + # internally is very useful when importing. If you feel that some of these + # should be different (especially more strict), please raise an issue! + CLANG + "-Werror" + "-Wall" + + # Disable warnings we don't care about or that generally have a low + # signal/noise ratio. + "-Wno-ambiguous-member-template" + "-Wno-char-subscripts" + "-Wno-deprecated-declarations" + "-Wno-extern-c-compat" # Matches upstream. Cannot impact due to extern C inclusion method. + "-Wno-gnu-alignof-expression" + "-Wno-gnu-variable-sized-type-not-at-end" + "-Wno-ignored-optimization-argument" + "-Wno-invalid-offsetof" # Technically UB but needed for intrusive ptrs + "-Wno-invalid-source-encoding" + "-Wno-mismatched-tags" + "-Wno-pointer-sign" + "-Wno-reserved-user-defined-literal" + "-Wno-return-type-c-linkage" + "-Wno-self-assign-overloaded" + "-Wno-sign-compare" + "-Wno-signed-unsigned-wchar" + "-Wno-strict-overflow" + "-Wno-trigraphs" + "-Wno-unknown-pragmas" + "-Wno-unknown-warning-option" + "-Wno-unused-command-line-argument" + "-Wno-unused-const-variable" + "-Wno-unused-function" + "-Wno-unused-local-typedef" + "-Wno-unused-private-field" + "-Wno-user-defined-warnings" + + # Explicitly enable some additional warnings. + # Some of these aren't on by default, or under -Wall, or are subsets of + # warnings turned off above. + "-Wctad-maybe-unsupported" + "-Wfloat-overflow-conversion" + "-Wfloat-zero-conversion" + "-Wfor-loop-analysis" + "-Wformat-security" + "-Wgnu-redeclared-enum" + "-Wimplicit-fallthrough" + "-Winfinite-recursion" + "-Wliteral-conversion" + #"-Wnon-virtual-dtor" + "-Woverloaded-virtual" + "-Wself-assign" + "-Wstring-conversion" + "-Wtautological-overlap-compare" + "-Wthread-safety" + "-Wthread-safety-beta" + "-Wunused-comparison" + "-Wvla" + + # TODO(#6959): Enable -Werror once we have a presubmit CI. + GCC + "-Wall" + "-Wno-address-of-packed-member" + "-Wno-comment" + "-Wno-format-zero-length" + # Technically UB but needed for intrusive ptrs + $<$:-Wno-invalid-offsetof> + $<$:-Wno-pointer-sign> + "-Wno-sign-compare" + "-Wno-unused-function" +) diff --git a/build_tools/cmake/cg_filegroup.cmake b/build_tools/cmake/cg_filegroup.cmake new file mode 100644 index 000000000..5a12d0187 --- /dev/null +++ b/build_tools/cmake/cg_filegroup.cmake @@ -0,0 +1,51 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +function(cg_filegroup) + cmake_parse_arguments( + _ARG + "PUBLIC" + "NAME" + "FILES;DEPENDS" + ${ARGN} + ) + rename_bazel_targets(_NAME "${_ARG_NAME}") + add_custom_target(${_NAME}) + + foreach(FILE_ ${_ARG_FILES}) + if(IS_ABSOLUTE "${FILE_}") + set(_INPUT_PATH "${FILE_}") + get_filename_component(_FILE_NAME ${FILE_} NAME) + canonize_bazel_target_names(_FILE_TARGET "${_FILE_NAME}") + rename_bazel_targets(_TARGET "${_FILE_TARGET}") + set(_OUTPUT_PATH "${CMAKE_CURRENT_BINARY_DIR}/${_FILE_NAME}") + else() + canonize_bazel_target_names(_FILE_TARGET "${FILE_}") + rename_bazel_targets(_TARGET "${_FILE_TARGET}") + set(_INPUT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${FILE_}") + set(_OUTPUT_PATH "${CMAKE_CURRENT_BINARY_DIR}/${FILE_}") + endif() + + if(NOT TARGET ${_TARGET}) + if (NOT _INPUT_PATH STREQUAL _OUTPUT_PATH) + add_custom_command(OUTPUT "${_OUTPUT_PATH}" + COMMAND ${CMAKE_COMMAND} -E create_symlink "${_INPUT_PATH}" "${_OUTPUT_PATH}" + DEPENDS "${_INPUT_PATH}") + endif() + add_custom_target(${_TARGET} DEPENDS "${_OUTPUT_PATH}") + endif() + + add_dependencies(${_NAME} ${_TARGET}) + endforeach() + + if(_ARG_DEPENDS) + rename_bazel_targets(_DEPS "${_ARG_DEPENDS}") + add_dependencies(${_NAME} ${_DEPS}) + endif() + + set_target_properties(${_NAME} PROPERTIES + IS_FILEGROUP TRUE + OUTPUTS "${_SRCS}") +endfunction() diff --git a/build_tools/cmake/cg_genrule.cmake b/build_tools/cmake/cg_genrule.cmake new file mode 100644 index 000000000..cacae5bca --- /dev/null +++ b/build_tools/cmake/cg_genrule.cmake @@ -0,0 +1,73 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +include_guard(GLOBAL) + +include(CMakeParseArguments) +include(cg_macros) + +# cg_genrule() +# +# CMake function to imitate Bazel's genrule rule. 
+# +function(cg_genrule) + cmake_parse_arguments( + _RULE + "PUBLIC;TESTONLY" + "NAME;COMMAND" + "SRCS;OUTS;DEPENDS;ABS_DEPENDS" + ${ARGN} + ) + + if(_RULE_TESTONLY AND NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + # TODO(boian): remove this renaming when call sites do not include ":" in target dependency names + rename_bazel_targets(_DEPS "${_RULE_DEPENDS}") + + rename_bazel_targets(_NAME "${_RULE_NAME}") + + make_paths_absolute( + PATHS ${_RULE_SRCS} + BASE_DIR "${CMAKE_CURRENT_SOURCE_DIR}" + RESULT_VARIABLE _SRCS + ) + + make_paths_absolute( + PATHS ${_RULE_OUTS} + BASE_DIR "${CMAKE_CURRENT_BINARY_DIR}" + RESULT_VARIABLE _OUTS + ) + + list(LENGTH _OUTS _OUTS_LENGTH) + if(_OUTS_LENGTH EQUAL 1) + get_filename_component(_OUTS_DIR "${_OUTS}" DIRECTORY) + else() + set(_OUTS_DIR "${CMAKE_CURRENT_BINARY_DIR}") + endif() + + # Substitute special Bazel references + string(REPLACE "$@" "${_OUTS}" _CMD "${_RULE_COMMAND}") + string(REPLACE "$(@D)" "${_OUTS_DIR}" _CMD "${_CMD}") + #string(REPLACE "$<" "\"${_SRCS}\"" _CMD "${_CMD}") + + add_custom_command( + OUTPUT ${_OUTS} + COMMAND bash -c "${_CMD}" + DEPENDS ${_DEPS} ${_SRCS} + VERBATIM + ) + + add_custom_target(${_NAME} ALL DEPENDS ${_OUTS}) + set_target_properties(${_NAME} PROPERTIES + OUTPUTS "${_OUTS}") + + list(LENGTH _OUTS _OUTS_LENGTH) + if(_OUTS_LENGTH EQUAL "1") + set_target_properties(${_NAME} PROPERTIES LOCATION "${_OUTS}") + endif() + +endfunction() diff --git a/build_tools/cmake/cg_installed_test.cmake b/build_tools/cmake/cg_installed_test.cmake new file mode 100644 index 000000000..3f9e9c077 --- /dev/null +++ b/build_tools/cmake/cg_installed_test.cmake @@ -0,0 +1,97 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copyright 2020 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +# cg_add_installed_test() +# +# Creates a build-time and exported install-time test. All tests are installed +# into the tests/ tree. Calling code must arrange to install dependencies of the +# test into that tree. +# +# Parameters: +# TEST_NAME: Name of the test (as in "some/path/to/test"). +# COMMAND: Passed to add_test() as is. +# ENVIRONMENT: Set as the ENVIRONMENT property of the build-time test. +# INSTALLED_COMMAND: Corrollary to the 'COMMAND' argument but added to the +# install time definition. +# WORKING_DIRECTORY: Passed to add_test() as is. Note that in the install tree +# all tests run in the tests/ directory. +# LABELS: Labels to pass to add_test() and installed tests. +function(cg_add_installed_test) + cmake_parse_arguments( + _RULE + "" + "TEST_NAME" + "COMMAND;ENVIRONMENT;INSTALLED_COMMAND;WORKING_DIRECTORY;LABELS" + ${ARGN} + ) + + + add_test( + NAME + ${_RULE_TEST_NAME} + COMMAND + ${_RULE_COMMAND} + ) + if (DEFINED _RULE_WORKING_DIRECTORY) + set_property( + TEST + ${_RULE_TEST_NAME} + PROPERTY WORKING_DIRECTORY + "${_RULE_WORKING_DIRECTORY}" + ) + endif() + set_property( + TEST + ${_RULE_TEST_NAME} + PROPERTY LABELS + "${_RULE_LABELS}" + ) + set_property( + TEST + ${_RULE_TEST_NAME} + PROPERTY ENVIRONMENT + "TEST_TMPDIR=${CMAKE_BINARY_DIR}/${_RULE_TEST_NAME}_test_tmpdir" + ${_RULE_ENVIRONMENT} + ) + cg_add_test_environment_properties(${_RULE_TEST_NAME}) + + # Write the to the installed ctest file template. 
+ set(_installed_ctest_input_file + "${CMAKE_BINARY_DIR}/cg_installed_tests.cmake.in") + get_property(_has_tests GLOBAL PROPERTY COMPILER_GYM_HAS_INSTALLED_TESTS) + if(NOT _has_tests) + # First time. + file(WRITE "${_installed_ctest_input_file}") # Truncate. + set_property(GLOBAL PROPERTY COMPILER_GYM_HAS_INSTALLED_TESTS ON) + endif() + + # Now write directives to the installed tests cmake file. + file(APPEND "${_installed_ctest_input_file}" + "add_test(${_RULE_TEST_NAME} ${_RULE_INSTALLED_COMMAND})\n" + "set_tests_properties(${_RULE_TEST_NAME} PROPERTIES LABELS \"${_RULE_LABELS}\")\n" + ) + + # First time generation and setup to install. Note that since this all runs + # at the generate phase, it doesn't matter that we trigger it before all + # tests accumulate. + if(NOT _has_tests) + set(_installed_ctest_output_file "${CMAKE_BINARY_DIR}/cg_installed_tests.cmake") + file(GENERATE + OUTPUT "${_installed_ctest_output_file}" + INPUT "${_installed_ctest_input_file}" + ) + install(FILES "${_installed_ctest_output_file}" + DESTINATION tests + RENAME "CTestTestfile.cmake" + COMPONENT Tests + ) + endif() +endfunction() diff --git a/build_tools/cmake/cg_macros.cmake b/build_tools/cmake/cg_macros.cmake new file mode 100644 index 000000000..28e0cdd98 --- /dev/null +++ b/build_tools/cmake/cg_macros.cmake @@ -0,0 +1,353 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include_guard(GLOBAL) +include(CMakeParseArguments) + +#------------------------------------------------------------------------------- +# Missing CMake Variables +#------------------------------------------------------------------------------- + +if(${CMAKE_HOST_SYSTEM_NAME} STREQUAL "Windows") + set(COMPILER_GYM_HOST_SCRIPT_EXT "bat") + # https://gitlab.kitware.com/cmake/cmake/-/issues/17553 + set(COMPILER_GYM_HOST_EXECUTABLE_SUFFIX ".exe") +else() + set(COMPILER_GYM_HOST_SCRIPT_EXT "sh") + set(COMPILER_GYM_HOST_EXECUTABLE_SUFFIX "") +endif() + +#------------------------------------------------------------------------------- +# General utilities +#------------------------------------------------------------------------------- + +# cg_to_bool +# +# Sets `variable` to `ON` if `value` is true and `OFF` otherwise. +function(cg_to_bool VARIABLE VALUE) + if(VALUE) + set(${VARIABLE} "ON" PARENT_SCOPE) + else() + set(${VARIABLE} "OFF" PARENT_SCOPE) + endif() +endfunction() + +# cg_append_list_to_string +# +# Joins ${ARGN} together as a string separated by " " and appends it to +# ${VARIABLE}. +function(cg_append_list_to_string VARIABLE) + if(NOT "${ARGN}" STREQUAL "") + string(JOIN " " _ARGN_STR ${ARGN}) + set(${VARIABLE} "${${VARIABLE}} ${_ARGN_STR}" PARENT_SCOPE) + endif() +endfunction() + + +#------------------------------------------------------------------------------- +# Packages and Paths +#------------------------------------------------------------------------------- + +# Sets ${PACKAGE_NS} to the root relative package name in C++ namespace +# format (::). 
+# +# Example when called from proj/base/CMakeLists.txt: +# proj::base +function(cg_package_ns PACKAGE_NS) + string(REPLACE ${COMPILER_GYM_ROOT_DIR} "" _PACKAGE ${CMAKE_CURRENT_LIST_DIR}) + string(SUBSTRING ${_PACKAGE} 1 -1 _PACKAGE) + string(REPLACE "/" "::" _PACKAGE_NS ${_PACKAGE}) + set(${PACKAGE_NS} ${_PACKAGE_NS} PARENT_SCOPE) +endfunction() + +# Sets ${PACKAGE_NAME} to the root relative package name. +# +# Example when called from proj/base/CMakeLists.txt: +# proj__base +function(cg_package_name PACKAGE_NAME) + cg_package_ns(_PACKAGE_NS) + string(REPLACE "::" "__" _PACKAGE_NAME ${_PACKAGE_NS}) + set(${PACKAGE_NAME} ${_PACKAGE_NAME} PARENT_SCOPE) +endfunction() + +# Sets ${PACKAGE_PATH} to the root relative package path. +# +# Example when called from proj/base/CMakeLists.txt: +# proj/base +function(cg_package_path PACKAGE_PATH) + cg_package_ns(_PACKAGE_NS) + string(REPLACE "::" "/" _PACKAGE_PATH ${_PACKAGE_NS}) + set(${PACKAGE_PATH} ${_PACKAGE_PATH} PARENT_SCOPE) +endfunction() + +# Sets ${PACKAGE_DIR} to the directory name of the current package. +# +# Example when called from proj/base/CMakeLists.txt: +# base +function(cg_package_dir PACKAGE_DIR) + cg_package_ns(_PACKAGE_NS) + string(FIND ${_PACKAGE_NS} "::" _END_OFFSET REVERSE) + math(EXPR _END_OFFSET "${_END_OFFSET} + 2") + string(SUBSTRING ${_PACKAGE_NS} ${_END_OFFSET} -1 _PACKAGE_DIR) + set(${PACKAGE_DIR} ${_PACKAGE_DIR} PARENT_SCOPE) +endfunction() + +function(canonize_bazel_target_names _RESULT _BAZEL_TARGETS) + unset(_RES) + cg_package_ns(_PACKAGE_NS) + foreach(_TARGET ${_BAZEL_TARGETS}) + if (NOT _TARGET MATCHES ":") + # local target + set(_TARGET "${_PACKAGE_NS}::${_TARGET}") + endif() + list(APPEND _RES "${_TARGET}") + endforeach() + list(TRANSFORM _RES REPLACE "^::" "${_PACKAGE_NS}::") + set(${_RESULT} ${_RES} PARENT_SCOPE) +endfunction() + +function(rename_bazel_targets _RESULT _BAZEL_TARGETS) + canonize_bazel_target_names(_RES "${_BAZEL_TARGETS}") + list(TRANSFORM _RES REPLACE ":" "_") + set(${_RESULT} ${_RES} PARENT_SCOPE) +endfunction() + +function(get_target_as_relative_dir _TARGET _RESULT) + set(_RES "${_TARGET}") + list(TRANSFORM _RES REPLACE "__" "/") + get_filename_component(_RES "${_RES}" DIRECTORY) + set(${_RESULT} "${_RES}" PARENT_SCOPE) +endfunction() + +function(get_target_out_cxx_header_dir _TARGET _RESULT) + get_target_property(_BIN_DIR ${_TARGET} BINARY_DIR) + get_target_as_relative_dir(${_TARGET} _REL_HEADER_DIR) + set(${_RESULT} "${_BIN_DIR}/include/${_REL_HEADER_DIR}" PARENT_SCOPE) +endfunction() + +function(make_paths_absolute) + cmake_parse_arguments( + _ARG + "" + "BASE_DIR;RESULT_VARIABLE" + "PATHS" + ${ARGN} + ) + + unset(_RES) + foreach(_PATH ${_ARG_PATHS}) + if(NOT IS_ABSOLUTE _PATH) + get_filename_component(_PATH "${_PATH}" ABSOLUTE BASE_DIR "${_ARG_BASE_DIR}") + endif() + list(APPEND _RES "${_PATH}") + endforeach() + + set(${_ARG_RESULT_VARIABLE} "${_RES}" PARENT_SCOPE) +endfunction() + +function(paths_to_targets) + cmake_parse_arguments( + _ARG + "" + "RESULT" + "PATHS" + ${ARGN} + ) + + string(REGEX REPLACE "[^A-Za-z0-9_+-]" "_" _TARGETS "${_ARG_PATHS}") + set(${_ARG_RESULT} "${_TARGETS}" PARENT_SCOPE) +endfunction() + +#------------------------------------------------------------------------------- +# select()-like Evaluation +#------------------------------------------------------------------------------- + +# Appends ${OPTS} with a list of values based on the current compiler. 
+# +# Example: +# cg_select_compiler_opts(COPTS +# CLANG +# "-Wno-foo" +# "-Wno-bar" +# CLANG_CL +# "/W3" +# GCC +# "-Wsome-old-flag" +# MSVC +# "/W3" +# ) +# +# Note that variables are allowed, making it possible to share options between +# different compiler targets. +function(cg_select_compiler_opts OPTS) + cmake_parse_arguments( + PARSE_ARGV 1 + _COMPILER_GYM_SELECTS + "" + "" + "ALL;CLANG;CLANG_CL;MSVC;GCC;CLANG_OR_GCC;MSVC_OR_CLANG_CL" + ) + # OPTS is a variable containing the *name* of the variable being populated, so + # we need to dereference it twice. + set(_OPTS "${${OPTS}}") + list(APPEND _OPTS "${_COMPILER_GYM_SELECTS_ALL}") + if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + list(APPEND _OPTS "${_COMPILER_GYM_SELECTS_GCC}") + list(APPEND _OPTS "${_COMPILER_GYM_SELECTS_CLANG_OR_GCC}") + elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") + if(MSVC) + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_CLANG_CL}) + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_MSVC_OR_CLANG_CL}) + else() + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_CLANG}) + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_CLANG_OR_GCC}) + endif() + elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_MSVC}) + list(APPEND _OPTS ${_COMPILER_GYM_SELECTS_MSVC_OR_CLANG_CL}) + else() + message(ERROR "Unknown compiler: ${CMAKE_CXX_COMPILER}") + list(APPEND _OPTS "") + endif() + set(${OPTS} ${_OPTS} PARENT_SCOPE) +endfunction() + +#------------------------------------------------------------------------------- +# Data dependencies +#------------------------------------------------------------------------------- + +# Adds 'data' dependencies to a target. +# +# Parameters: +# NAME: name of the target to add data dependencies to +# DATA: List of targets and/or files in the source tree. Files should use the +# same format as targets (i.e. iree::package::subpackage::file.txt) +function(cg_add_data_dependencies) + cmake_parse_arguments( + _RULE + "" + "NAME" + "DATA" + ${ARGN} + ) + # TODO(boian): Make runtime targets that depend on data + + if(NOT DEFINED _RULE_DATA) + return() + endif() + + rename_bazel_targets(_NAME "${_RULE_NAME}") + unset(_DEPS) + + foreach(_DATA ${_RULE_DATA}) + if(IS_ABSOLUTE "${_DATA}") + get_filename_component(FILE_ "${_DATA}" ABSOLUTE) + paths_to_targets(PATHS "${FILE_}" RESULT _TARGET) + string(PREPEND _TARGET "${_NAME}_data_") + get_filename_component(_FILE_NAME "${FILE_}" NAME) + set(_DST_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(_DST_PATH "${_DST_DIR}/${_FILE_NAME}") + if(NOT _DST_PATH STREQUAL _DATA) + add_custom_command( + OUTPUT "${_DST_PATH}" + COMMAND + ${CMAKE_COMMAND} -E make_directory "${_DST_DIR}" + COMMAND ${CMAKE_COMMAND} -E create_symlink + "${FILE_}" "${_DST_PATH}" + DEPENDS "${FILE_}" + VERBATIM + ) + endif() + add_custom_target(${_TARGET} DEPENDS "${_DST_PATH}") + else() + rename_bazel_targets(_TARGET "${_DATA}") + endif() + list(APPEND _DEPS "${_TARGET}") + endforeach() + + add_dependencies(${_NAME} ${_DEPS}) +endfunction() + +#------------------------------------------------------------------------------- +# Tool symlinks +#------------------------------------------------------------------------------- + +# cg_symlink_tool +# +# Adds a command to TARGET which symlinks a tool from elsewhere +# (FROM_TOOL_TARGET_NAME) to a local file name (TO_EXE_NAME) in the current +# binary directory. +# +# Parameters: +# TARGET: Local target to which to add the symlink command (i.e. an +# cg_py_library, etc). 
+# FROM_TOOL_TARGET: Target of the tool executable that is the source of the +# link. +# TO_EXE_NAME: The executable name to output in the current binary dir. +function(cg_symlink_tool) + cmake_parse_arguments( + ARG + "" + "TARGET;FROM_TOOL_TARGET;TO_EXE_NAME" + "" + ${ARGN} + ) + + # Transform TARGET + cg_package_ns(_PACKAGE_NS) + cg_package_name(_PACKAGE_NAME) + set(_TARGET "${_PACKAGE_NAME}_${ARG_TARGET}") + set(_FROM_TOOL_TARGET ${ARG_FROM_TOOL_TARGET}) + set(_TO_TOOL_PATH "${CMAKE_CURRENT_BINARY_DIR}/${ARG_TO_EXE_NAME}${CMAKE_EXECUTABLE_SUFFIX}") + get_filename_component(_TO_TOOL_DIR "${_TO_TOOL_PATH}" DIRECTORY) + + + add_custom_command( + TARGET "${_TARGET}" + BYPRODUCTS + "${CMAKE_CURRENT_BINARY_DIR}/${ARG_TO_EXE_NAME}${CMAKE_EXECUTABLE_SUFFIX}" + COMMAND + ${CMAKE_COMMAND} -E make_directory "${_TO_TOOL_DIR}" + COMMAND + ${CMAKE_COMMAND} -E create_symlink + "$" + "${_TO_TOOL_PATH}" + VERBATIM + ) +endfunction() + + +#------------------------------------------------------------------------------- +# Tests +#------------------------------------------------------------------------------- + +# cg_add_test_environment_properties +# +# Adds test environment variable properties based on the current build options. +# +function(cg_add_test_environment_properties TEST_NAME) + # COMPILER_GYM_*_DISABLE environment variables may used to skip test cases which + # require both a compiler target backend and compatible runtime HAL driver. + # + # These variables may be set by the test environment, typically as a property + # of some continuous execution test runner or by an individual developer, or + # here by the build system. + # + # Tests which only depend on a compiler target backend or a runtime HAL + # driver, but not both, should generally use a different method of filtering. + if(NOT "${COMPILER_GYM_TARGET_BACKEND_VULKAN-SPIRV}" OR NOT "${COMPILER_GYM_HAL_DRIVER_VULKAN}") + set_property(TEST ${TEST_NAME} APPEND PROPERTY ENVIRONMENT "COMPILER_GYM_VULKAN_DISABLE=1") + endif() + if(NOT "${COMPILER_GYM_TARGET_BACKEND_DYLIB-LLVM-AOT}" OR NOT "${COMPILER_GYM_HAL_DRIVER_DYLIB}" + OR NOT "${COMPILER_GYM_HAL_DRIVER_DYLIB_SYNC}") + set_property(TEST ${TEST_NAME} APPEND PROPERTY ENVIRONMENT "COMPILER_GYM_LLVMAOT_DISABLE=1") + endif() +endfunction() diff --git a/build_tools/cmake/cg_py_binary.cmake b/build_tools/cmake/cg_py_binary.cmake new file mode 100644 index 000000000..286bc0512 --- /dev/null +++ b/build_tools/cmake/cg_py_binary.cmake @@ -0,0 +1,45 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_binary.cmake +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(cg_py_library) + +# cg_cc_binary() +# +# CMake function to imitate Bazel's py_binary rule. +# +# Parameters: +# NAME: name of target (see Note) +# SRCS: List of source files for the binary +# GENERATED_SRCS: List of source files for the binary that are generated by other targets +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# TESTONLY: When added, this target will only be built if user passes -DCOMPILER_GYM_BUILD_TESTS=ON to CMake. 
+# +# Note: +# cg_py_binary will create a binary called ${PACKAGE_NAME}_${NAME}, e.g. +# cg_base_foo with two alias (readonly) targets, a qualified +# ${PACKAGE_NS}::${NAME} and an unqualified ${NAME}. Thus NAME must be globally +# unique in the project. +# +function(cg_py_binary) + cmake_parse_arguments( + _RULE + "PUBLIC;TESTONLY" + "NAME;SRCS;GENERATED_SRCS" + "DATA;DEPS" + ${ARGN} + ) + + # Currently the same as adding a library. + # When install rules are added they will need to split. + cg_py_library(${ARGV}) +endfunction() diff --git a/build_tools/cmake/cg_py_library.cmake b/build_tools/cmake/cg_py_library.cmake new file mode 100644 index 000000000..3847a065c --- /dev/null +++ b/build_tools/cmake/cg_py_library.cmake @@ -0,0 +1,88 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_library.cmake +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include_guard(GLOBAL) + +include(CMakeParseArguments) +include(cg_macros) + +# cg_py_library() +# +# CMake function to imitate Bazel's py_library rule. +# +# Parameters: +# NAME: name of target (see Note) +# SRCS: List of source files for the library +# GENERATED_SRCS: List of source files for the library that are generated by other targets +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# TESTONLY: When added, this target will only be built if user passes -DCOMPILER_GYM_BUILD_TESTS=ON to CMake. +# +function(cg_py_library) + cmake_parse_arguments( + _RULE + "PUBLIC;TESTONLY" + "NAME" + "SRCS;GENERATED_SRCS;DATA;DEPS" + ${ARGN} + ) + + if(_RULE_TESTONLY AND NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + # TODO(boian): remove this renaming when call sites do not include ":" in target dependency names + rename_bazel_targets(_RULE_DEPS "${_RULE_DEPS}") + + # Prefix the library with the package name, so we get: cg_package_name. + rename_bazel_targets(_NAME "${_RULE_NAME}") + + unset(_BIN_PATHS) + # Symlink each file as its own target. + foreach(_SRC_FILE ${_RULE_SRCS}) + if(IS_ABSOLUTE _SRC_FILE) + message(FATAL_ERROR "Absolute path for SRCS not allowed.") + endif() + + # _SRC_FILE could have other path components in it, so we need to make a + # directory for it. Ninja does this automatically, but make doesn't. See + # https://github.com/google/iree/issues/6801 + set(_SRC_BIN_PATH "${CMAKE_CURRENT_BINARY_DIR}/${_SRC_FILE}") + get_filename_component(_SRC_BIN_DIR "${_SRC_BIN_PATH}" DIRECTORY) + add_custom_command( + OUTPUT "${_SRC_BIN_PATH}" + COMMAND + ${CMAKE_COMMAND} -E make_directory "${_SRC_BIN_DIR}" + COMMAND ${CMAKE_COMMAND} -E create_symlink + "${CMAKE_CURRENT_SOURCE_DIR}/${_SRC_FILE}" "${_SRC_BIN_PATH}" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${_SRC_FILE}" + VERBATIM + ) + list(APPEND _BIN_PATHS "${_SRC_BIN_PATH}") + endforeach() + + list(APPEND _BIN_PATHS ${_RULE_GENERATED_SRCS}) + + set(_DEPS ${_RULE_DEPS} ${_BIN_PATHS}) + add_custom_target(${_NAME} ALL DEPENDS ${_DEPS}) + + cg_add_data_dependencies(NAME ${_RULE_NAME} DATA ${_RULE_DATA}) + + # If only one src file set the LOCATION target property to point to it. 
+ list(LENGTH _BIN_PATHS _BIN_PATHS_LENGTH) + if(_BIN_PATHS_LENGTH EQUAL "1") + set_target_properties(${_NAME} PROPERTIES LOCATION "${_BIN_PATHS}") + endif() + + # TODO(boian): add install rules + +endfunction() diff --git a/build_tools/cmake/cg_py_test.cmake b/build_tools/cmake/cg_py_test.cmake new file mode 100644 index 000000000..588304e02 --- /dev/null +++ b/build_tools/cmake/cg_py_test.cmake @@ -0,0 +1,87 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copied from https://github.com/google/iree/blob/main/build_tools/cmake/iree_cc_test.cmake +# Copyright 2019 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(CMakeParseArguments) +include(cg_installed_test) + +# cg_py_test() +# +# CMake function to imitate Bazel's cc_test rule. +# +# Parameters: +# NAME: name of target. +# SRCS: List of source files +# DATA: List of other targets and files required for this binary +# DEPS: List of other libraries to be linked in to the binary targets +# LABELS: Additional labels to apply to the test. The package path is added +# automatically. +# ARGS command line arguments for the test. +# +# Note: +# cg_cc_test will create a binary called ${PACKAGE_NAME}_${NAME}, e.g. +# cg_base_foo_test. +# +function(cg_py_test) + if(NOT COMPILER_GYM_BUILD_TESTS) + return() + endif() + + cmake_parse_arguments( + _RULE + "" + "NAME;SRCS" + "ARGS;LABELS;DATA;DEPS" + ${ARGN} + ) + + cg_py_binary( + NAME ${_RULE_NAME} + SRCS ${_RULE_SRCS} + DEPS ${_RULE_DEPS} + DATA ${_RULE_DATA} + ) + + rename_bazel_targets(_NAME "${_RULE_NAME}") + cg_package_ns(_PACKAGE_NS) + string(REPLACE "::" "/" _PACKAGE_PATH ${_PACKAGE_NS}) + set(_TEST_NAME "${_PACKAGE_PATH}/${_RULE_NAME}") + set(_LABELS "${_RULE_LABELS}") + list(APPEND _LABELS "${_PACKAGE_PATH}") + + cg_add_installed_test( + TEST_NAME "${_TEST_NAME}" + LABELS "${_LABELS}" + ENVIRONMENT + "PYTHONPATH=${CMAKE_BINARY_DIR}:$ENV{PYTHONPATH}" + "TEST_WORKSPACE=compiler_gym" + #"COMPILER_GYM_RUNFILES=${CMAKE_CURRENT_BINARY_DIR}" + COMMAND + "${CMAKE_SOURCE_DIR}/build_tools/cmake/run_test.${COMPILER_GYM_HOST_SCRIPT_EXT}" + "${Python3_EXECUTABLE}" + "${CMAKE_CURRENT_BINARY_DIR}/${_RULE_SRCS}" + ${_RULE_ARGS} + INSTALLED_COMMAND + python + "${_PACKAGE_PATH}/${_RULE_SRCS}" + ) + + #cg_add_data_dependencies(NAME ${_RULE_NAME} DATA ${_RULE_DATA}) + + install(FILES ${_RULE_SRCS} + DESTINATION "tests/${_PACKAGE_PATH}" + COMPONENT Tests + ) + + # TODO(boian): Find out how to add deps to tests. + # CMake seems to not allow build targets to be dependencies for tests. + # One way to achieve this is to make the test execution a target. +endfunction() diff --git a/build_tools/cmake/cg_python.cmake b/build_tools/cmake/cg_python.cmake new file mode 100644 index 000000000..3cabcc98e --- /dev/null +++ b/build_tools/cmake/cg_python.cmake @@ -0,0 +1,207 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copyright 2020 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. 
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +include(CMakeParseArguments) +include(cg_installed_test) + +############################################################################### +# Main user rules +############################################################################### + +# Declares that the current source directory is part of a python package +# that will: +# - Will create an install target install-COMPONENT (global, not package +# scoped) +# - Be installed under python_packages/PACKAGE_NAME +# - Have a local path of MODULE_PATH (i.e. namespace package path) +# - Process a setup.py.in from the current directory (if NOT AUGMENT_EXISTING_PACKAGE) +# - Process a version.py.in from the current directory (if NOT AUGMENT_EXISTING_PACKAGE) +# Will set parent scope variables: +# - PY_INSTALL_COMPONENT: Install component. Echoed back from the argument +# for easier addition after this call. +# - PY_INSTALL_PACKAGES_DIR: The python_packages/PACKAGE_NAME path +# - PY_INSTALL_MODULE_DIR: The path to the module directory under +# INSTALL_PACKAGES_DIR. +# +# Add any built deps to DEPS (you will need to add install actions to them +# after). +# +# Any python files in the source directory will be automatically installed +# (recursive). +# +# Also adds a *-stripped target which strips any binaries that are +# present. +# +# Arguments: +# AUGMENT_EXISTING_PACKAGE: Whether to add install artifacts to an existing +# package. +# COMPONENT: Install component +# PACKAGE_NAME: Name of the Python package in the install directory tree. +# MODULE_PATH: Relative path within the package to the module being installed. +# FILES_MATCHING: Explicit arguments to the install FILES_MATCHING directive. +# (Defaults to "PATTERN *.py") +# DEPS: Dependencies. +function(cg_py_install_package) + cmake_parse_arguments(ARG + "AUGMENT_EXISTING_PACKAGE" + "COMPONENT;PACKAGE_NAME;MODULE_PATH" + "DEPS;ADDL_PACKAGE_FILES;FILES_MATCHING" + ${ARGN}) + set(_install_component ${ARG_COMPONENT}) + set(_install_packages_dir "${CMAKE_INSTALL_PREFIX}/python_packages/${ARG_PACKAGE_NAME}") + set(_install_module_dir "${_install_packages_dir}/${ARG_MODULE_PATH}") + set(_target_name install-${_install_component}) + + if(NOT FILES_MATCHING) + set(_files_matching PATTERN "*.py") + else() + set(_files_matching ${ARG_FILES_MATCHING}) + endif() + + if(NOT ARG_AUGMENT_EXISTING_PACKAGE) + configure_file(setup.py.in setup.py) + install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/setup.py + ${ARG_ADDL_PACKAGE_FILES} + COMPONENT ${_install_component} + DESTINATION "${_install_packages_dir}" + ) + configure_file(version.py.in version.py) + install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/version.py + COMPONENT ${_install_component} + DESTINATION "${_install_module_dir}" + ) + + set(_component_option -DCMAKE_INSTALL_COMPONENT="${ARG_COMPONENT}") + add_custom_target(${_target_name} + COMMAND "${CMAKE_COMMAND}" + ${_component_option} + -P "${CMAKE_BINARY_DIR}/cg_install.cmake" + USES_TERMINAL) + add_custom_target(${_target_name}-stripped + COMMAND "${CMAKE_COMMAND}" + ${_component_option} + -DCMAKE_INSTALL_DO_STRIP=1 + -P "${CMAKE_BINARY_DIR}/cg_install.cmake" + USES_TERMINAL) + endif() + + # Explicit add dependencies in case if we are just extending a package + # vs adding the targets. 
+ if(ARG_DEPS) + add_dependencies(${_target_name} ${ARG_DEPS}) + add_dependencies(${_target_name}-stripped ${ARG_DEPS}) + endif() + + install( + DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/ + COMPONENT ${_install_component} + DESTINATION "${_install_module_dir}" + FILES_MATCHING ${_files_matching} + ) + + set(PY_INSTALL_COMPONENT ${_install_component} PARENT_SCOPE) + set(PY_INSTALL_PACKAGES_DIR "${_install_packages_dir}" PARENT_SCOPE) + set(PY_INSTALL_MODULE_DIR "${_install_module_dir}" PARENT_SCOPE) +endfunction() + +# cg_pyext_module() +# +# Builds a native python module (.so/.dylib/.pyd). +# +# Parameters: +# NAME: name of target +# MODULE_NAME: Base-name of the module. +# SRCS: List of source files for the library +# DEPS: List of other targets the test python libraries require +function(cg_pyext_module) + cmake_parse_arguments(ARG + "" + "NAME;MODULE_NAME;UNIX_LINKER_SCRIPT" + "SRCS;DEPS;COPTS;INCLUDES" + ${ARGN}) + + cg_package_ns(_PACKAGE_NS) + list(TRANSFORM ARG_DEPS REPLACE "^::" "${_PACKAGE_NS}::") + list(TRANSFORM ARG_PYEXT_DEPS REPLACE "^::" "${_PACKAGE_NS}::") + # Prefix the library with the package name, so we get: cg_package_name. + rename_bazel_targets(_NAME "${_RULE_NAME}") + + pybind11_add_module( + ${_NAME} + ${ARG_SRCS} + ) + + # Alias the library so that we can + # refer to this target with the namespaced format. + add_library(${_PACKAGE_NS}::${ARG_NAME} ALIAS ${_NAME}) + + target_link_libraries( + ${_NAME} + PRIVATE ${ARG_DEPS} + ) + + set_target_properties( + ${_NAME} PROPERTIES + OUTPUT_NAME "${ARG_MODULE_NAME}" + ) + + target_include_directories(${_NAME} + PUBLIC + "$" + ) + + # pybind11 requires both RTTI and Exceptions, and it does not know that + # we have disabled them globally, so turn them back on. Since this is + # *the only* place in the codebase where we do this, just inline here. + # Note that this is playing with fire and the extension code is structured + # so as not to cause problems with RTTI cross-module issues. + cg_select_compiler_opts(_RTTI_AND_EXCEPTION_COPTS + CLANG_OR_GCC + "-frtti" + "-fexceptions" + MSVC_OR_CLANG_CL + # Configure exception handling for standard C++ behavior. + # - /EHs enables C++ catch-style exceptions + # - /EHc breaks unwinding across extern C boundaries, dramatically reducing + # unwind table size and associated exception handling overhead as the + # compiler can assume no exception will ever be thrown within any function + # annotated with extern "C". + # https://docs.microsoft.com/en-us/cpp/build/reference/eh-exception-handling-model + "/EHsc" + # Configure RTTI generation. + # - /GR - Enable generation of RTTI (default) + # - /GR- - Disables generation of RTTI + # https://docs.microsoft.com/en-us/cpp/build/reference/gr-enable-run-time-type-information?view=msvc-160 + "/GR" + ) + + set_property(TARGET ${_NAME} PROPERTY CXX_STANDARD 17) + set_property(TARGET ${_NAME} PROPERTY CXX_STANDARD_REQUIRED ON) + + target_compile_options( + ${_NAME} PRIVATE + ${ARG_COPTS} + ${COMPILER_GYM_DEFAULT_COPTS} + ${_RTTI_AND_EXCEPTION_COPTS} + ) + + # Link flags. + if(UNIX AND NOT APPLE) # Apple does not support linker scripts. + if(ARG_UNIX_LINKER_SCRIPT) + set_target_properties(${_NAME} PROPERTIES LINK_FLAGS + "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/${ARG_UNIX_LINKER_SCRIPT}") + endif() + endif() +endfunction() diff --git a/build_tools/cmake/grpc.cmake b/build_tools/cmake/grpc.cmake new file mode 100644 index 000000000..2818e2c0f --- /dev/null +++ b/build_tools/cmake/grpc.cmake @@ -0,0 +1,123 @@ +# Copyright (c) Facebook, Inc. 
and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +include_guard(GLOBAL) +include(CMakeParseArguments) +include(cg_macros) +include(cg_py_library) +include(protobuf) + +function(get_cc_grpc_proto_out_files _PROTO_FILENAME _RESULT) + set(_PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto\\.bin$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME_WITHOUT_EXT}") + set(${_RESULT} + "${_PROTO_FILENAME_WITHOUT_EXT}.grpc.pb.h" + "${_PROTO_FILENAME_WITHOUT_EXT}.grpc.pb.cc" + PARENT_SCOPE) +endfunction() + +function(cc_grpc_library) + cmake_parse_arguments( + _RULE + "PUBLIC;GRPC_ONLY" + "NAME;SRCS" + "DEPS" + ${ARGN} + ) + + if (NOT _RULE_GRPC_ONLY) + message("GRPC_ONLY=False unsupported.") + endif() + + rename_bazel_targets(_DEPS "${_RULE_DEPS}") + rename_bazel_targets(_NAME "${_RULE_NAME}") + rename_bazel_targets(_SRCS "${_RULE_SRCS}") + + get_target_as_relative_dir(${_NAME} _HEADER_DST_DIR) + set(_HEADER_DST_DIR "${CMAKE_CURRENT_BINARY_DIR}/include/") + get_target_property(_DESCRIPTOR_SET_FILE ${_SRCS} PROTO_DESCRIPTOR_SETS) + + get_target_property(_PROTO_FILE ${_SRCS} PROTO_FILES) + file(RELATIVE_PATH _RELATIVE_PROTO_FILE "${CMAKE_SOURCE_DIR}" "${_PROTO_FILE}") + + get_filename_component(_RELATIVE_PROTO_DIR "${_RELATIVE_PROTO_FILE}" DIRECTORY) + get_filename_component(_SRC_FILENAME "${_DESCRIPTOR_SET_FILE}" NAME) + get_cc_grpc_proto_out_files("${_SRC_FILENAME}" _GRPC_PROTO_FILES) + list(TRANSFORM _GRPC_PROTO_FILES PREPEND "${_HEADER_DST_DIR}/${_RELATIVE_PROTO_DIR}/") + + add_custom_command( + OUTPUT ${_GRPC_PROTO_FILES} + COMMAND ${CMAKE_COMMAND} -E make_directory "${_HEADER_DST_DIR}" + COMMAND "${Protobuf_PROTOC_EXECUTABLE}" + --proto_path "${CMAKE_SOURCE_DIR}" + --descriptor_set_in "${_DESCRIPTOR_SET_FILE}" + --grpc_out "${_HEADER_DST_DIR}" + --plugin "protoc-gen-grpc=${_GRPC_CPP_PLUGIN_EXECUTABLE}" + "${_RELATIVE_PROTO_FILE}" + DEPENDS "${Protobuf_PROTOC_EXECUTABLE}" "${_DESCRIPTOR_SET_FILE}" "${_PROTO_FILE}" ${_DEPS} + VERBATIM) + + cg_cc_library( + NAME ${_RULE_NAME} + SRCS ${_GRPC_PROTO_FILES} + ABS_DEPS grpc++ + INCLUDES "${CMAKE_CURRENT_BINARY_DIR}/include" + PUBLIC + ) +endfunction() + +function(get_py_grpc_proto_out_files _PROTO_FILENAME _RESULT) + set(_PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto\\.bin$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME_WITHOUT_EXT}") + set(${_RESULT} + "${_PROTO_FILENAME_WITHOUT_EXT}_pb2_grpc.py" + PARENT_SCOPE) +endfunction() + +function(py_grpc_library) + cmake_parse_arguments( + _RULE + "" + "NAME;SRCS" + "DEPS" + ${ARGN} + ) + + rename_bazel_targets(_DEPS "${_RULE_DEPS}") + rename_bazel_targets(_SRCS "${_RULE_SRCS}") + + get_target_property(_DESCRIPTOR_SET_FILE ${_SRCS} PROTO_DESCRIPTOR_SETS) + get_filename_component(_SRC_FILENAME "${_DESCRIPTOR_SET_FILE}" NAME) + get_py_grpc_proto_out_files("${_SRC_FILENAME}" _PY_GRPC_PROTO_FILES) + set(_PYTHON_DST_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(_ABS_PATH_PY_GRPC_PROTO_FILES ${_PY_GRPC_PROTO_FILES}) + list(TRANSFORM _ABS_PATH_PY_GRPC_PROTO_FILES PREPEND "${_PYTHON_DST_DIR}/") + + get_target_property(_PROTO_FILE ${_SRCS} PROTO_FILES) + 
get_filename_component(_PROTO_FILENAME "${_PROTO_FILE}" NAME) + file(RELATIVE_PATH _RELATIVE_PROTO_FILE "${CMAKE_SOURCE_DIR}" "${_PROTO_FILE}") + + add_custom_command( + OUTPUT ${_ABS_PATH_PY_GRPC_PROTO_FILES} + COMMAND ${CMAKE_COMMAND} -E make_directory "${_PYTHON_DST_DIR}" + COMMAND "${Python3_EXECUTABLE}" + -m grpc_tools.protoc + --proto_path "${CMAKE_SOURCE_DIR}" + --descriptor_set_in "${_DESCRIPTOR_SET_FILE}" + --grpc_python_out "${CMAKE_BINARY_DIR}" + "${_RELATIVE_PROTO_FILE}" + DEPENDS "${Python3_EXECUTABLE}" "${_DESCRIPTOR_SET_FILE}" "${_PROTO_FILE}" ${_DEPS} + VERBATIM) + + cg_py_library( + NAME "${_RULE_NAME}" + GENERATED_SRCS ${_PY_GRPC_PROTO_FILES} + ) +endfunction() diff --git a/build_tools/cmake/protobuf.cmake b/build_tools/cmake/protobuf.cmake new file mode 100644 index 000000000..ce4526303 --- /dev/null +++ b/build_tools/cmake/protobuf.cmake @@ -0,0 +1,154 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +include_guard(GLOBAL) +include(CMakeParseArguments) +include(cg_macros) +include(cg_py_library) + +function(proto_library) + cmake_parse_arguments( + _RULE + "PUBLIC" + "NAME;SRCS" + "DEPS" + ${ARGN} + ) + + rename_bazel_targets(_RULE_DEPS "${_RULE_DEPS}") + rename_bazel_targets(_RULE_NAME "${_RULE_NAME}") + + set(_SRC_FILE "${CMAKE_CURRENT_SOURCE_DIR}/${_RULE_SRCS}") + set(_DST_FILE "${CMAKE_CURRENT_BINARY_DIR}/${_RULE_SRCS}.bin") + get_filename_component(_DST_DIR "${_DST_FILE}" DIRECTORY) + file(RELATIVE_PATH _RELATIVE_PROTO_FILE "${CMAKE_SOURCE_DIR}" "${_SRC_FILE}") + + add_custom_command( + OUTPUT "${_DST_FILE}" + COMMAND ${CMAKE_COMMAND} -E make_directory "${_DST_DIR}" + COMMAND "${Protobuf_PROTOC_EXECUTABLE}" + --proto_path "${CMAKE_SOURCE_DIR}" + --descriptor_set_out "${_DST_FILE}" + "${_RELATIVE_PROTO_FILE}" + DEPENDS "${Protobuf_PROTOC_EXECUTABLE}" "${_SRC_FILE}" ${_RULE_DEPS} + VERBATIM) + + add_custom_target(${_RULE_NAME} ALL DEPENDS "${_DST_FILE}") + set_target_properties(${_RULE_NAME} PROPERTIES PROTO_DESCRIPTOR_SETS "${_DST_FILE}") + set_target_properties(${_RULE_NAME} PROPERTIES PROTO_FILES "${_SRC_FILE}") +endfunction() + +function(get_cc_proto_out_files _PROTO_FILENAME _RESULT) + set(_PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto\\.bin$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME_WITHOUT_EXT}") + set(${_RESULT} + "${_PROTO_FILENAME_WITHOUT_EXT}.pb.h" + "${_PROTO_FILENAME_WITHOUT_EXT}.pb.cc" + PARENT_SCOPE) +endfunction() + +function(cc_proto_library) + cmake_parse_arguments( + _RULE + "PUBLIC" + "NAME;DEPS" + "" + ${ARGN} + ) + + rename_bazel_targets(_DEPS "${_RULE_DEPS}") + rename_bazel_targets(_NAME "${_RULE_NAME}") + + get_target_as_relative_dir(${_NAME} _HEADER_DST_DIR) + set(_HEADER_DST_DIR "${CMAKE_CURRENT_BINARY_DIR}/include") + get_target_property(_DESCRIPTOR_SET_FILE ${_DEPS} PROTO_DESCRIPTOR_SETS) + + get_target_property(_PROTO_FILE ${_DEPS} PROTO_FILES) + get_filename_component(_PROTO_FILENAME "${_PROTO_FILE}" NAME) + file(RELATIVE_PATH _RELATIVE_PROTO_FILE "${CMAKE_SOURCE_DIR}" "${_PROTO_FILE}") + + get_filename_component(_RELATIVE_PROTO_DIR "${_RELATIVE_PROTO_FILE}" DIRECTORY) + get_filename_component(_SRC_FILENAME "${_DESCRIPTOR_SET_FILE}" NAME) + 
get_cc_proto_out_files("${_SRC_FILENAME}" _CC_PROTO_FILES) + list(TRANSFORM _CC_PROTO_FILES PREPEND "${_HEADER_DST_DIR}/${_RELATIVE_PROTO_DIR}/") + + add_custom_command( + OUTPUT ${_CC_PROTO_FILES} + COMMAND ${CMAKE_COMMAND} -E make_directory "${_HEADER_DST_DIR}" + COMMAND "${Protobuf_PROTOC_EXECUTABLE}" + --proto_path "${CMAKE_SOURCE_DIR}" + --descriptor_set_in "${_DESCRIPTOR_SET_FILE}" + --cpp_out "${_HEADER_DST_DIR}" + "${_RELATIVE_PROTO_FILE}" + DEPENDS + "${Protobuf_PROTOC_EXECUTABLE}" + "${_DESCRIPTOR_SET_FILE}" + "${_PROTO_FILE}" + ${_DEPS} + VERBATIM) + + cg_cc_library( + NAME ${_RULE_NAME} + SRCS ${_CC_PROTO_FILES} + ABS_DEPS protobuf::libprotobuf + INCLUDES "${CMAKE_CURRENT_BINARY_DIR}/include" + PUBLIC + ) +endfunction() + +function(get_py_proto_out_files _PROTO_FILENAME _RESULT) + set(_PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME}") + string(REGEX REPLACE "\\.proto\\.bin$" "" _PROTO_FILENAME_WITHOUT_EXT "${_PROTO_FILENAME_WITHOUT_EXT}") + set(${_RESULT} + "${_PROTO_FILENAME_WITHOUT_EXT}_pb2.py" + PARENT_SCOPE) +endfunction() + +function(py_proto_library) + cmake_parse_arguments( + _RULE + "PUBLIC" + "NAME;DEPS" + "" + ${ARGN} + ) + + rename_bazel_targets(_DEPS "${_RULE_DEPS}") + + get_target_property(_DESCRIPTOR_SET_FILE ${_DEPS} PROTO_DESCRIPTOR_SETS) + get_filename_component(_SRC_FILENAME "${_DESCRIPTOR_SET_FILE}" NAME) + get_py_proto_out_files("${_SRC_FILENAME}" _PY_PROTO_FILES) + set(_PYTHON_DST_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(_ABS_PATH_PY_PROTO_FILES ${_PY_PROTO_FILES}) + list(TRANSFORM _ABS_PATH_PY_PROTO_FILES PREPEND "${_PYTHON_DST_DIR}/") + + get_target_property(_PROTO_FILE ${_DEPS} PROTO_FILES) + file(RELATIVE_PATH _RELATIVE_PROTO_FILE "${CMAKE_SOURCE_DIR}" "${_PROTO_FILE}") + + add_custom_command( + OUTPUT ${_ABS_PATH_PY_PROTO_FILES} + COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}" + COMMAND "${Protobuf_PROTOC_EXECUTABLE}" + --proto_path "${CMAKE_SOURCE_DIR}" + --descriptor_set_in "${_DESCRIPTOR_SET_FILE}" + --python_out "${CMAKE_BINARY_DIR}" + "${_RELATIVE_PROTO_FILE}" + DEPENDS + "${Protobuf_PROTOC_EXECUTABLE}" + "${_DESCRIPTOR_SET_FILE}" + "${_PROTO_FILE}" + ${_DEPS} + VERBATIM) + + cg_py_library( + NAME "${_RULE_NAME}" + GENERATED_SRCS ${_PY_PROTO_FILES} + ) +endfunction() diff --git a/build_tools/cmake/run_test.sh b/build_tools/cmake/run_test.sh new file mode 100755 index 000000000..686114d87 --- /dev/null +++ b/build_tools/cmake/run_test.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Copyright 2020 The IREE Authors +# +# Licensed under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +# A wrapper around a test command that performs setup and teardown. This is +# appranetly not supported natively in ctest/cmake. + +set -x +set -e + +function cleanup() { + echo "Cleaning up test environment" + rm -rf ${TEST_TMPDIR?} +} + +echo "Creating test environment" +rm -rf "${TEST_TMPDIR?}" # In case this wasn't cleaned up previously +mkdir -p "${TEST_TMPDIR?}" +trap cleanup EXIT +# Execute whatever we were passed. 
+"$@" diff --git a/build_tools/cmake/set_command_pythonpath.cmake b/build_tools/cmake/set_command_pythonpath.cmake new file mode 100644 index 000000000..96a9c2a56 --- /dev/null +++ b/build_tools/cmake/set_command_pythonpath.cmake @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +include(CMakeParseArguments) + +function(set_command_pythonpath) + cmake_parse_arguments( + _ARG + "" + "COMMAND;RESULT" + "" + ${ARGN} + ) + + if(COMPILER_GYM_PYTHONPATH) + set(${_ARG_RESULT} "\"${CMAKE_COMMAND}\" -E env \"PYTHONPATH=${COMPILER_GYM_PYTHONPATH}\" ${_ARG_COMMAND}" PARENT_SCOPE) + else() + set(${_ARG_RESULT} ${_ARG_COMMAND} PARENT_SCOPE) + endif() + +endfunction() diff --git a/build_tools/cmake/write_cache_script.cmake b/build_tools/cmake/write_cache_script.cmake new file mode 100644 index 000000000..4d452d8c8 --- /dev/null +++ b/build_tools/cmake/write_cache_script.cmake @@ -0,0 +1,36 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +include_guard(GLOBAL) + +function(write_cache_script _DST_FILE) + file(WRITE "${_DST_FILE}" "") + set(_VARS + CMAKE_BUILD_TYPE + CMAKE_GENERATOR + CMAKE_C_COMPILER + CMAKE_CXX_COMPILER + CMAKE_CXX_STANDARD + CMAKE_CXX_FLAGS + CMAKE_C_FLAGS + CMAKE_GENERATOR_TOOLSET + CMAKE_GENERATOR_PLATFORM + CMAKE_C_COMPILER_LAUNCHER + CMAKE_CXX_COMPILER_LAUNCHER + CMAKE_MODULE_LINKER_FLAGS_INIT + CMAKE_MODULE_LINKER_FLAGS + CMAKE_STATIC_LINKER_FLAGS_INIT + CMAKE_STATIC_LINKER_FLAGS + CMAKE_SHARED_LINKER_FLAGS_INIT + CMAKE_SHARED_LINKER_FLAGS + CMAKE_EXE_LINKER_FLAGS_INIT + CMAKE_EXE_LINKER_FLAGS + ) + foreach(_VAR in ${_VARS}) + if(DEFINED ${_VAR}) + file(APPEND "${_DST_FILE}" "set(${_VAR} \"${${_VAR}}\" CACHE STRING \"\")\n") + endif() + endforeach() +endfunction() diff --git a/compiler_gym/CMakeLists.txt b/compiler_gym/CMakeLists.txt new file mode 100644 index 000000000..402c6dd1c --- /dev/null +++ b/compiler_gym/CMakeLists.txt @@ -0,0 +1,114 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +# This target trickery with compiler_gym and compiler_gym_partial +# is needed because specs.py imports the compiler_gym module, +# therefore creating a circular dependency. +# compiler_gym_partial is all the other bits of the proto package so that +# specs.py can import it. 
+cg_py_library( + NAME + compiler_gym + GENERATED_SRCS + "$" + DEPS + ::compiler_gym_partial + compiler_gym::envs::llvm::specs + PUBLIC +) + +cg_py_library( + NAME + compiler_gym_partial + SRCS + "__init__.py" + DEPS + ::random_replay + ::random_search + ::validate + compiler_gym::bin::bin + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::leaderboard::leaderboard + compiler_gym::service::service + compiler_gym::spaces::spaces + compiler_gym::util::util + compiler_gym::util::flags::flags + compiler_gym::wrappers::wrappers + PUBLIC +) + +cg_py_library( + NAME + compiler_env_state + SRCS + "compiler_env_state.py" + DEPS + compiler_gym::datasets::uri + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + random_replay + SRCS + "random_replay.py" + DEPS + ::random_search + compiler_gym::envs::envs + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + random_search + SRCS + "random_search.py" + DATA + compiler_gym::envs::llvm::service::service + DEPS + compiler_gym::envs::envs + compiler_gym::service::connection + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + validate + SRCS + "validate.py" + DEPS + ::validation_error + ::validation_result + compiler_gym::envs::compiler_env + compiler_gym::spaces::spaces + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + validation_error + SRCS + "validation_error.py" + PUBLIC +) + +cg_py_library( + NAME + validation_result + SRCS + "validation_result.py" + DEPS + ::compiler_env_state + ::validation_error + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/bin/CMakeLists.txt b/compiler_gym/bin/CMakeLists.txt new file mode 100644 index 000000000..98c437f73 --- /dev/null +++ b/compiler_gym/bin/CMakeLists.txt @@ -0,0 +1,99 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
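The compiler_gym / compiler_gym_partial pair above is the general recipe this build uses whenever a generated source needs to import the very package it belongs to. A minimal sketch of the same pattern, using the hypothetical names mypkg, mypkg_partial and gen_spec (none of which exist in this patch):

# Hypothetical illustration of the two-target pattern used above.
cg_py_library(
  NAME mypkg_partial       # everything except the generated file
  SRCS "__init__.py"
  PUBLIC
)

cg_genrule(
  NAME gen_spec            # the generator may import mypkg_partial at build time
  OUTS "spec.py"
  COMMAND "echo 'SPEC = 1' > $@"
  DEPENDS ::mypkg_partial
)

cg_py_library(
  NAME mypkg               # full package = partial package + generated file
  GENERATED_SRCS "spec.py"
  DEPS ::mypkg_partial
  PUBLIC
)

Because the generator only ever depends on the partial target, the CMake dependency graph stays acyclic.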
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + bin + DEPS + ::datasets + ::manual_env + ::random_replay + ::random_search + ::service + ::validate + PUBLIC +) + +cg_py_binary( + NAME + datasets + SRCS + "datasets.py" + DEPS + ::service + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::util::util + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + manual_env + SRCS + "manual_env.py" + DEPS + compiler_gym::envs::envs + compiler_gym::util::util + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + random_eval + SRCS + "random_eval.py" + DEPS + compiler_gym::random_search + compiler_gym::util::util + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + random_search + SRCS + "random_search.py" + DEPS + compiler_gym::random_search + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + random_replay + SRCS + "random_replay.py" + DEPS + compiler_gym::random_search + compiler_gym::util::util + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + service + SRCS + "service.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::spaces::spaces + compiler_gym::util::util + compiler_gym::util::flags::flags +) + +cg_py_binary( + NAME + validate + SRCS + "validate.py" + DEPS + compiler_gym::util::util + compiler_gym::util::flags::flags + compiler_gym::validate +) diff --git a/compiler_gym/datasets/CMakeLists.txt b/compiler_gym/datasets/CMakeLists.txt new file mode 100644 index 000000000..2191dc664 --- /dev/null +++ b/compiler_gym/datasets/CMakeLists.txt @@ -0,0 +1,32 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + datasets + SRCS + "__init__.py" + "benchmark.py" + "dataset.py" + "datasets.py" + "files_dataset.py" + "tar_dataset.py" + DEPS + ::uri + compiler_gym::service::proto::proto + compiler_gym::util::util + compiler_gym::validation_result + PUBLIC +) + +cg_py_library( + NAME + uri + SRCS + "uri.py" + PUBLIC +) diff --git a/compiler_gym/envs/CMakeLists.txt b/compiler_gym/envs/CMakeLists.txt new file mode 100644 index 000000000..15a1f64ca --- /dev/null +++ b/compiler_gym/envs/CMakeLists.txt @@ -0,0 +1,36 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + envs + SRCS + "__init__.py" + DEPS + ::compiler_env + compiler_gym::envs::gcc::gcc + compiler_gym::envs::llvm::llvm + compiler_gym::envs::loop_tool::loop_tool + PUBLIC +) + +cg_py_library( + NAME + compiler_env + SRCS + "compiler_env.py" + DEPS + compiler_gym::compiler_env_state + compiler_gym::datasets::datasets + compiler_gym::service::service + compiler_gym::service::proto::proto + compiler_gym::spaces::spaces + compiler_gym::util::util + compiler_gym::validation_result + compiler_gym::views::views + PUBLIC +) diff --git a/compiler_gym/envs/gcc/CMakeLists.txt b/compiler_gym/envs/gcc/CMakeLists.txt new file mode 100644 index 000000000..301315d1b --- /dev/null +++ b/compiler_gym/envs/gcc/CMakeLists.txt @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
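All of the bin/ entry points above follow the same shape: one cg_py_binary per command-line tool, with the shared flag modules pulled in through compiler_gym::util::flags::flags. A hypothetical additional tool (my_tool.py, not part of this patch) would be declared the same way:

# Hypothetical example only; my_tool.py is not part of this patch.
cg_py_binary(
  NAME
    my_tool
  SRCS
    "my_tool.py"
  DEPS
    compiler_gym::envs::envs
    compiler_gym::util::util
    compiler_gym::util::flags::flags
)

The ::name and compiler_gym::a::b forms mirror the Bazel labels and are translated into plain CMake target names by helpers such as rename_bazel_targets() seen earlier in this patch.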
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + gcc + SRCS + "__init__.py" + "gcc.py" + "gcc_env.py" + "gcc_rewards.py" + DATA + compiler_gym::envs::gcc::service::service + DEPS + compiler_gym::envs::compiler_env + compiler_gym::envs::gcc::datasets::datasets + compiler_gym::service::service + compiler_gym::service::runtime::runtime + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/envs/gcc/datasets/CMakeLists.txt b/compiler_gym/envs/gcc/datasets/CMakeLists.txt new file mode 100644 index 000000000..4ea3d2070 --- /dev/null +++ b/compiler_gym/envs/gcc/datasets/CMakeLists.txt @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + datasets + SRCS + "__init__.py" + "anghabench.py" + "chstone.py" + "csmith.py" + DATA + compiler_gym::third_party::csmith::all + DEPS + compiler_gym::datasets::datasets + compiler_gym::service::proto::proto + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/envs/gcc/service/CMakeLists.txt b/compiler_gym/envs/gcc/service/CMakeLists.txt new file mode 100644 index 000000000..b94bf8f55 --- /dev/null +++ b/compiler_gym/envs/gcc/service/CMakeLists.txt @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_filegroup( + NAME "service" + FILES + "${CMAKE_CURRENT_LIST_DIR}/gcc_service.py" + "${CMAKE_CURRENT_LIST_DIR}/compiler_gym-gcc-service" +) diff --git a/compiler_gym/envs/llvm/CMakeLists.txt b/compiler_gym/envs/llvm/CMakeLists.txt new file mode 100644 index 000000000..84bd9b75b --- /dev/null +++ b/compiler_gym/envs/llvm/CMakeLists.txt @@ -0,0 +1,101 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
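A recurring split in the rules above is DEPS for Python import dependencies versus DATA for files that are only needed at run time, such as the service scripts grouped by cg_filegroup. A hedged sketch with hypothetical names (my_service_files, my_env, my_service.py):

# Hypothetical sketch of the DATA vs. DEPS split used by the gcc rules above.
cg_filegroup(
  NAME my_service_files                  # files needed at run time, not imported
  FILES
    "${CMAKE_CURRENT_LIST_DIR}/my_service.py"
)

cg_py_library(
  NAME my_env
  SRCS "my_env.py"
  DATA ::my_service_files                # staged next to the package
  DEPS compiler_gym::service::service    # imported by my_env.py
  PUBLIC
)

Following the Bazel convention these rules mirror, the intent is that DATA files travel with the package without being importable dependencies.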
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + llvm + SRCS + "__init__.py" + DATA + compiler_gym::envs::llvm::service::service + DEPS + ::compute_observation + ::llvm_benchmark + ::llvm_env + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME compute_observation + SRCS compute_observation.py + DATA compiler_gym::envs::llvm::service::compute_observation-files + DEPS compiler_gym::util::util +) + +cg_py_library( + NAME + llvm_benchmark + SRCS + "llvm_benchmark.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::service::proto::proto + compiler_gym::third_party::llvm::llvm + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + llvm_env + SRCS + "llvm_env.py" + DEPS + ::llvm_benchmark + ::llvm_rewards + compiler_gym::datasets::datasets + compiler_gym::envs::compiler_env + compiler_gym::envs::llvm::datasets::datasets + compiler_gym::spaces::spaces + compiler_gym::third_party::autophase::autophase + compiler_gym::third_party::inst2vec::inst2vec + compiler_gym::third_party::llvm::llvm + compiler_gym::third_party::llvm::instcount + PUBLIC +) + +cg_py_library( + NAME + llvm_rewards + SRCS + "llvm_rewards.py" + DEPS + compiler_gym::service::service + compiler_gym::spaces::spaces + compiler_gym::util::util + compiler_gym::views::views + PUBLIC +) + +string(CONCAT _CMD + "\"${Python3_EXECUTABLE}\" " + "\"$\" " + "\"$\" " + "\"$@\"") +set_command_pythonpath(COMMAND "${_CMD}" RESULT _CMD) +cg_genrule( + NAME specs + OUTS "specs.py" + COMMAND "${_CMD}" + DEPENDS + ::make_specs + compiler_gym::compiler_gym_partial + compiler_gym::envs::llvm::service::service + compiler_gym::envs::llvm::service::compiler_gym-llvm-service +) + +cg_py_binary( + NAME + make_specs + SRCS + "make_specs.py" + DATA "${CMAKE_CURRENT_BINARY_DIR}/service/passes/flag_descriptions.txt" + DEPS + ::llvm_env + compiler_gym::util::util +) diff --git a/compiler_gym/envs/llvm/__init__.py b/compiler_gym/envs/llvm/__init__.py index 0f7475ffd..ebbeaff00 100644 --- a/compiler_gym/envs/llvm/__init__.py +++ b/compiler_gym/envs/llvm/__init__.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """Register the LLVM environments.""" +import sys from itertools import product from compiler_gym.envs.llvm.compute_observation import compute_observation @@ -12,7 +13,11 @@ make_benchmark, ) from compiler_gym.envs.llvm.llvm_env import LlvmEnv -from compiler_gym.envs.llvm.specs import observation_spaces, reward_spaces + +# TODO(github.com/facebookresearch/CompilerGym/issues/506): Tidy up. +if "compiler_gym.envs.llvm.is_making_specs" not in sys.modules: + from compiler_gym.envs.llvm.specs import observation_spaces, reward_spaces + from compiler_gym.util.registration import register from compiler_gym.util.runfiles_path import runfiles_path diff --git a/compiler_gym/envs/llvm/datasets/CMakeLists.txt b/compiler_gym/envs/llvm/datasets/CMakeLists.txt new file mode 100644 index 000000000..c20fe166d --- /dev/null +++ b/compiler_gym/envs/llvm/datasets/CMakeLists.txt @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
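The specs rule above is the standard recipe in this build for running a Python script at build time: compose the command with string(CONCAT), wrap it with set_command_pythonpath() so the in-tree compiler_gym sources are importable, then hand the result to cg_genrule. A minimal sketch, assuming a hypothetical generator script gen.py in the current source directory:

# Minimal sketch; gen.py, gen_py_binary and generated_txt are hypothetical.
string(CONCAT _CMD
  "\"${Python3_EXECUTABLE}\" "
  "\"${CMAKE_CURRENT_SOURCE_DIR}/gen.py\" > $@")
# Prefixes the command with PYTHONPATH=... only when COMPILER_GYM_PYTHONPATH is set.
set_command_pythonpath(COMMAND "${_CMD}" RESULT _CMD)

cg_genrule(
  NAME generated_txt
  OUTS "generated.txt"
  COMMAND "${_CMD}"
  DEPENDS ::gen_py_binary   # hypothetical cg_py_binary wrapping gen.py
)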
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + datasets + SRCS + "__init__.py" + "anghabench.py" + "cbench.py" + "chstone.py" + "clgen.py" + "csmith.py" + "llvm_stress.py" + "poj104.py" + DATA + compiler_gym::third_party::csmith::all + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::llvm::llvm_benchmark + compiler_gym::service::proto::proto + compiler_gym::third_party::llvm::llvm + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/envs/llvm/make_specs.py b/compiler_gym/envs/llvm/make_specs.py index c44ce0d3d..76f53dcf9 100644 --- a/compiler_gym/envs/llvm/make_specs.py +++ b/compiler_gym/envs/llvm/make_specs.py @@ -11,10 +11,18 @@ # TODO: As we add support for more compilers we could generalize this script # to work with other compiler services rather than hardcoding to LLVM. import sys +import types from pathlib import Path -from compiler_gym.envs.llvm.llvm_env import LlvmEnv -from compiler_gym.util.runfiles_path import runfiles_path +# TODO(github.com/facebookresearch/CompilerGym/issues/506): Avoids circular +# dependency during specs.py generation, because it is imported from +# compiler_gym.envs.llvm before being generated. +sys.modules["compiler_gym.envs.llvm.is_making_specs"] = types.ModuleType( + "compiler_gym.envs.llvm.is_making_specs" +) + +from compiler_gym.envs.llvm.llvm_env import LlvmEnv # noqa: E402 +from compiler_gym.util.runfiles_path import runfiles_path # noqa: E402 with open( runfiles_path("compiler_gym/envs/llvm/service/passes/flag_descriptions.txt") diff --git a/compiler_gym/envs/llvm/service/CMakeLists.txt b/compiler_gym/envs/llvm/service/CMakeLists.txt new file mode 100644 index 000000000..3fdd67f9d --- /dev/null +++ b/compiler_gym/envs/llvm/service/CMakeLists.txt @@ -0,0 +1,274 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +set(_DEPS "compiler_gym-llvm-service") +if(DARWIN) + list(APPEND _DEPS ::libLLVMPolly) + #TODO(boian): figure out what is this target. 
+ #list(APPEND _DEPS "@llvm//:darwin") +endif() +cg_filegroup( + NAME "service" + DEPENDS ${_DEPS} +) + +cg_genrule( + NAME libLLVMPolly + OUTS "libLLVMPolly.so" + COMMAND + "cp $ $@" + ABS_DEPENDS + LLVMPolly +) + +cg_cc_binary( + NAME + compiler_gym-llvm-service + SRCS + "RunService.cc" + DEPS + ::LlvmSession + compiler_gym::service::runtime::cc_runtime +) + +cg_cc_library( + NAME + ActionSpace + HDRS + "ActionSpace.h" + "$/ActionEnum.h" + SRCS + "ActionSpace.cc" + DEPS + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::util::EnumUtil + compiler_gym::util::Unreachable + ABS_DEPS + fmt + magic_enum + NON_LIB_DEPS + compiler_gym::envs::llvm::service::passes::actions_genfiles + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS core support bitwriter) +cg_cc_library( + NAME + Benchmark + HDRS + "Benchmark.h" + SRCS + "Benchmark.cc" + DEPS + ::Cost + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::util::GrpcStatusMacros + compiler_gym::util::RunfilesPath + compiler_gym::util::Subprocess + ABS_DEPS + Boost::filesystem + grpc++ + fmt + glog::glog + ${_LLVM_LIBS} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS core) +cg_cc_library( + NAME + BenchmarkFactory + HDRS + "BenchmarkFactory.h" + SRCS + "BenchmarkFactory.cc" + DEPS + ::Benchmark + ::Cost + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::util::GrpcStatusMacros + compiler_gym::util::RunfilesPath + compiler_gym::util::StrLenConstexpr + ABS_DEPS + Boost::filesystem + grpc++ + fmt + glog::glog + ${_LLVM_LIBS} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS core support irreader) +cg_cc_binary( + NAME compute_observation + SRCS ComputeObservation.cc + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + DEPS + ::BenchmarkFactory + ::Observation + ::ObservationSpaces + compiler_gym::service::proto::compiler_gym_service_cc + ABS_DEPS + Boost::filesystem + glog::glog + ${_LLVM_LIBS} + magic_enum + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} +) + +set(_FILES "${CMAKE_CURRENT_BINARY_DIR}/compute_observation") +if(DARWIN) + message(FATAL_ERROR "TODO(boian): implement") +endif() +cg_filegroup( + NAME compute_observation-files + FILES ${_FILES} +) + +llvm_map_components_to_libnames(_LLVM_LIBS core transformutils ipo) +cg_cc_library( + NAME + Cost + HDRS + "Cost.h" + SRCS + "Cost.cc" + DEPS + compiler_gym::util::GrpcStatusMacros + compiler_gym::util::RunfilesPath + compiler_gym::util::Subprocess + compiler_gym::util::Unreachable + ABS_DEPS + Boost::filesystem + Boost::headers + grpc++ + fmt + glog::glog + ${_LLVM_LIBS} + magic_enum + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS + core analysis coroutines objcarcopts target codegen + x86codegen x86asmparser #TODO(boian): can these be found programmatically + ) +cg_cc_library( + NAME + LlvmSession + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + HDRS + "$/ActionHeaders.h" + "$/ActionSwitch.h" + "LlvmSession.h" + SRCS + "LlvmSession.cc" + DEPS + ::ActionSpace + ::Benchmark + ::BenchmarkFactory + ::Cost + ::Observation + ::ObservationSpaces + compiler_gym::service::CompilationSession + compiler_gym::service::proto::compiler_gym_service_cc_grpc + compiler_gym::third_party::autophase::InstCount + compiler_gym::util::EnumUtil + compiler_gym::util::GrpcStatusMacros + 
compiler_gym::util::RunfilesPath + ABS_DEPS + Boost::filesystem + Boost::headers + fmt + glog::glog + ${_LLVM_LIBS} + magic_enum + nlohmann_json::nlohmann_json + ProGraML::graph::format::node_link_graph + ProGraML::ir::llvm::llvm-10 + ProGraML::proto::programl_cc + Subprocess::libsubprocess + CpuInfo::cpuinfo + Clog::libclog + INCLUDES + ${LLVM_INCLUDE_DIRS} + "$" + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS + core support bitwriter + ) +cg_cc_library( + NAME Observation + SRCS Observation.cc + HDRS Observation.h + DEPS + ::Benchmark + ::Cost + ::ObservationSpaces + compiler_gym::service::proto::compiler_gym_service_cc_grpc + compiler_gym::third_party::autophase::InstCount + compiler_gym::util::GrpcStatusMacros + ABS_DEPS + CpuInfo::cpuinfo + Boost::filesystem + glog::glog + ${_LLVM_LIBS} + magic_enum + nlohmann_json::nlohmann_json + ProGraML::graph::format::node_link_graph + ProGraML::ir::llvm::llvm-10 + ProGraML::proto::programl_cc + Clog::libclog + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} +) + +cg_cc_library( + NAME + ObservationSpaces + HDRS + "ObservationSpaces.h" + SRCS + "ObservationSpaces.cc" + DEPS + ::Benchmark + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::third_party::llvm::InstCount + compiler_gym::util::EnumUtil + ABS_DEPS + glog::glog + magic_enum + nlohmann_json::nlohmann_json + ProGraML::graph::format::node_link_graph + ProGraML::proto::programl_cc + PUBLIC +) diff --git a/compiler_gym/envs/llvm/service/passes/CMakeLists.txt b/compiler_gym/envs/llvm/service/passes/CMakeLists.txt new file mode 100644 index 000000000..fde84228c --- /dev/null +++ b/compiler_gym/envs/llvm/service/passes/CMakeLists.txt @@ -0,0 +1,102 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
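Every C++ rule above that touches LLVM repeats the same three ingredients: llvm_map_components_to_libnames() to resolve component names into library names, plus the include directories and preprocessor definitions exported by find_package(LLVM). A minimal hedged sketch for a hypothetical MyAnalysis library:

# Hypothetical example; MyAnalysis.h/.cc are placeholders.
llvm_map_components_to_libnames(_LLVM_LIBS core support)
cg_cc_library(
  NAME
    MyAnalysis
  HDRS
    "MyAnalysis.h"
  SRCS
    "MyAnalysis.cc"
  ABS_DEPS
    ${_LLVM_LIBS}
  INCLUDES
    ${LLVM_INCLUDE_DIRS}
  DEFINES
    ${LLVM_DEFINITIONS}
  PUBLIC
)

The component lists differ per target only because each target links the minimal set of LLVM components it actually uses.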
+ +cg_add_all_subdirs() + +string(CONCAT _CMD + "\"${Python3_EXECUTABLE}\" " + "\"$\"" + " \"${LLVM_SRC_DIR}/llvm\" > $@") +set_command_pythonpath(COMMAND "${_CMD}" RESULT _CMD) +cg_genrule( + NAME passes_list + OUTS "passes_list.csv" + COMMAND ${_CMD} + DEPENDS + ::extract_passes_from_llvm_source_tree +) + +string(CONCAT _CMD + "\"${Python3_EXECUTABLE}\" " + "\"$\" < " + " \"$\" > $@") +set_command_pythonpath(COMMAND "${_CMD}" RESULT _CMD) +cg_genrule( + NAME actions_csv + OUTS "actions.csv" + COMMAND ${_CMD} + DEPENDS + ::passes_list + ::filter_action_space +) + +cg_py_library( + NAME + common + SRCS + "common.py" + PUBLIC +) + +cg_py_library( + NAME + config + SRCS + "config.py" + DEPS + ::common + PUBLIC +) + +cg_py_binary( + NAME + extract_passes_from_llvm_source_tree + SRCS + "extract_passes_from_llvm_source_tree.py" + DEPS + ::common + ::config +) + +cg_py_binary( + NAME + make_action_space_genfiles + SRCS + "make_action_space_genfiles.py" + DEPS + ::common + ::config +) + +cg_py_binary( + NAME + filter_action_space + SRCS + "filter_action_space.py" + DEPS + ::common + ::config +) + +string(CONCAT _CMD + "\"${Python3_EXECUTABLE}\" " + "\"$\"" + " $(@D) < \"$\"" +) +set_command_pythonpath(COMMAND "${_CMD}" RESULT _CMD) +cg_genrule( + NAME actions_genfiles + OUTS + "ActionEnum.h" + "ActionSwitch.h" + "ActionHeaders.h" + "flags.txt" + "flag_descriptions.txt" + COMMAND + ${_CMD} + DEPENDS + ::actions_csv + ::make_action_space_genfiles +) diff --git a/compiler_gym/envs/loop_tool/CMakeLists.txt b/compiler_gym/envs/loop_tool/CMakeLists.txt new file mode 100644 index 000000000..1b707400f --- /dev/null +++ b/compiler_gym/envs/loop_tool/CMakeLists.txt @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME loop_tool + SRCS + "__init__.py" + "loop_tool_env.py" + DATA compiler_gym::envs::loop_tool::service::service + DEPS + compiler_gym::envs::compiler_env + compiler_gym::service::service + compiler_gym::service::proto::proto + compiler_gym::service::runtime::runtime + PUBLIC +) diff --git a/compiler_gym/envs/loop_tool/service/CMakeLists.txt b/compiler_gym/envs/loop_tool/service/CMakeLists.txt new file mode 100644 index 000000000..aba317c95 --- /dev/null +++ b/compiler_gym/envs/loop_tool/service/CMakeLists.txt @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_filegroup( + NAME service + FILES + "${CMAKE_CURRENT_LIST_DIR}/compiler_gym-loop_tool-service" + "${CMAKE_CURRENT_LIST_DIR}/loop_tool_compilation_session.py" +) diff --git a/compiler_gym/leaderboard/CMakeLists.txt b/compiler_gym/leaderboard/CMakeLists.txt new file mode 100644 index 000000000..9c95813e3 --- /dev/null +++ b/compiler_gym/leaderboard/CMakeLists.txt @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
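The passes rules above chain three cg_genrule steps, each consuming the previous step's output and declaring the ordering through DEPENDS. A simplified sketch of chaining two genrules, with hypothetical outputs (raw.csv, filtered.csv) and plain shell commands in place of the Python generators; it assumes, as the rules above suggest, that OUTS land in CMAKE_CURRENT_BINARY_DIR:

# Hypothetical two-stage pipeline; stage one writes raw.csv, stage two filters it.
cg_genrule(
  NAME raw_csv
  OUTS "raw.csv"
  COMMAND "printf 'a\\nb\\n' > $@"
)

cg_genrule(
  NAME filtered_csv
  OUTS "filtered.csv"
  COMMAND "grep -v b \"${CMAKE_CURRENT_BINARY_DIR}/raw.csv\" > $@"
  DEPENDS ::raw_csv
)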
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + leaderboard + SRCS + "__init__.py" + DEPS + ::llvm_instcount + PUBLIC +) + +cg_py_library( + NAME + llvm_instcount + SRCS + "llvm_instcount.py" + DEPS + compiler_gym::bin::validate + compiler_gym::compiler_env_state + compiler_gym::envs::envs + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/requirements.txt b/compiler_gym/requirements.txt index 721c9590d..04dfeec4f 100644 --- a/compiler_gym/requirements.txt +++ b/compiler_gym/requirements.txt @@ -3,6 +3,7 @@ deprecated>=1.2.12 docker>=4.0.0 fasteners>=0.15 grpcio>=1.32.0 +grpcio_tools>=1.32.0 gym>=0.18.0,<0.21 humanize>=2.6.0 loop_tool_py==0.0.7 diff --git a/compiler_gym/service/CMakeLists.txt b/compiler_gym/service/CMakeLists.txt new file mode 100644 index 000000000..9ad17ae9c --- /dev/null +++ b/compiler_gym/service/CMakeLists.txt @@ -0,0 +1,54 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + service + SRCS + "__init__.py" + DEPS + ::compilation_session + ::connection + compiler_gym::service::proto::proto + PUBLIC +) + +cg_py_library( + NAME + compilation_session + SRCS + "compilation_session.py" + DEPS + compiler_gym::service::proto::proto + PUBLIC +) + +cg_cc_library( + NAME + CompilationSession + HDRS + "CompilationSession.h" + SRCS + "CompilationSession.cc" + DEPS + compiler_gym::service::proto::compiler_gym_service_cc + ABS_DEPS + Boost::filesystem + grpc++ + PUBLIC +) + +cg_py_library( + NAME + connection + SRCS + "connection.py" + DEPS + compiler_gym::service::proto::proto + compiler_gym::util::util + PUBLIC +) diff --git a/compiler_gym/service/proto/CMakeLists.txt b/compiler_gym/service/proto/CMakeLists.txt new file mode 100644 index 000000000..015d87b70 --- /dev/null +++ b/compiler_gym/service/proto/CMakeLists.txt @@ -0,0 +1,60 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME "proto" + SRCS + "__init__.py" + "py_converters.py" + PUBLIC + DEPS + "::compiler_gym_service_py" + "::compiler_gym_service_py_grpc" + compiler_gym::spaces::commandline + compiler_gym::spaces::dict + compiler_gym::spaces::discrete + compiler_gym::spaces::named_discrete + compiler_gym::spaces::scalar + compiler_gym::spaces::tuple +) + +proto_library( + NAME + compiler_gym_service + SRCS + compiler_gym_service.proto + PUBLIC +) + +py_proto_library( + NAME + compiler_gym_service_py + DEPS + ::compiler_gym_service +) + +cc_proto_library( + NAME + compiler_gym_service_cc + DEPS + ::compiler_gym_service + PUBLIC +) + +cc_grpc_library( + NAME compiler_gym_service_cc_grpc + SRCS ::compiler_gym_service + GRPC_ONLY + PUBLIC + DEPS ::compiler_gym_service_cc +) + +py_grpc_library( + NAME "compiler_gym_service_py_grpc" + SRCS "::compiler_gym_service" + DEPS "::compiler_gym_service_py" +) diff --git a/compiler_gym/service/runtime/CMakeLists.txt b/compiler_gym/service/runtime/CMakeLists.txt new file mode 100644 index 000000000..4cf6344e3 --- /dev/null +++ b/compiler_gym/service/runtime/CMakeLists.txt @@ -0,0 +1,127 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
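The proto rules above are the intended usage of the helpers defined in protobuf.cmake and grpc.cmake: a single proto_library produces a descriptor set, and the C++/Python proto and gRPC rules all fan out from it. For a hypothetical my_service.proto the full chain would look like:

# Hypothetical my_service.proto; mirrors the compiler_gym_service rules above.
proto_library(
  NAME my_service
  SRCS my_service.proto
  PUBLIC
)

cc_proto_library(
  NAME my_service_cc
  DEPS ::my_service
  PUBLIC
)

py_proto_library(
  NAME my_service_py
  DEPS ::my_service
)

cc_grpc_library(
  NAME my_service_cc_grpc
  SRCS ::my_service
  GRPC_ONLY
  PUBLIC
  DEPS ::my_service_cc
)

py_grpc_library(
  NAME my_service_py_grpc
  SRCS ::my_service
  DEPS ::my_service_py
)

py_grpc_library shells out to python -m grpc_tools.protoc at build time, which appears to be why grpcio_tools>=1.32.0 was added to requirements.txt above.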
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + runtime + SRCS + "__init__.py" + DEPS + ::create_and_run_compiler_gym_service + PUBLIC +) + +cg_cc_library( + NAME + cc_runtime + HDRS + "Runtime.h" + DEPS + ::CreateAndRunCompilerGymServiceImpl + PUBLIC +) + +cg_py_library( + NAME + benchmark_cache + SRCS + "benchmark_cache.py" + DEPS + compiler_gym::service::proto::proto + PUBLIC +) + +cg_cc_library( + NAME + BenchmarkCache + HDRS + "BenchmarkCache.h" + SRCS + "BenchmarkCache.cc" + DEPS + compiler_gym::service::proto::compiler_gym_service_cc + ABS_DEPS + Boost::filesystem + grpc++ + glog::glog + PUBLIC +) + +cg_py_library( + NAME + compiler_gym_service + SRCS + "compiler_gym_service.py" + DEPS + ::benchmark_cache + compiler_gym::service::compilation_session + compiler_gym::service::proto::proto + compiler_gym::util::util + PUBLIC +) + +cg_cc_library( + NAME + CompilerGymService + HDRS + "CompilerGymService.h" + "CompilerGymServiceImpl.h" + DEPS + ::BenchmarkCache + ::CompilerGymServiceImpl + compiler_gym::service::CompilationSession + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::service::proto::compiler_gym_service_cc_grpc + ABS_DEPS + Boost::filesystem + grpc++ + PUBLIC +) + +cg_cc_library( + NAME + CompilerGymServiceImpl + HDRS + "CompilerGymServiceImpl.h" + DEPS + compiler_gym::util::GrpcStatusMacros + compiler_gym::util::Version + ABS_DEPS + fmt + glog::glog + PUBLIC +) + +cg_py_library( + NAME + create_and_run_compiler_gym_service + SRCS + "create_and_run_compiler_gym_service.py" + DEPS + ::compiler_gym_service + compiler_gym::service::proto::proto + compiler_gym::util::util + PUBLIC +) + +cg_cc_library( + NAME + CreateAndRunCompilerGymServiceImpl + HDRS + "CreateAndRunCompilerGymServiceImpl.h" + SRCS + "CreateAndRunCompilerGymServiceImpl.cc" + DEPS + ::CompilerGymService + compiler_gym::util::GrpcStatusMacros + ABS_DEPS + Boost::filesystem + grpc++ + gflags + glog::glog + PUBLIC +) diff --git a/compiler_gym/spaces/CMakeLists.txt b/compiler_gym/spaces/CMakeLists.txt new file mode 100644 index 000000000..f209fbc96 --- /dev/null +++ b/compiler_gym/spaces/CMakeLists.txt @@ -0,0 +1,94 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + spaces + SRCS + "__init__.py" + DEPS + ::box + ::commandline + ::dict + ::discrete + ::named_discrete + ::reward + ::scalar + ::sequence + ::tuple + PUBLIC +) + +cg_py_library( + NAME box + SRCS box.py +) + +cg_py_library( + NAME + commandline + SRCS + "commandline.py" + DEPS + ::named_discrete + PUBLIC +) + +cg_py_library( + NAME dict + SRCS dict.py +) + +cg_py_library( + NAME discrete + SRCS discrete.py +) + +cg_py_library( + NAME + named_discrete + SRCS + "named_discrete.py" + DEPS + ::discrete + PUBLIC +) + +cg_py_library( + NAME + reward + SRCS + "reward.py" + DEPS + ::scalar + compiler_gym::service::service + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + scalar + SRCS + "scalar.py" + PUBLIC +) + +cg_py_library( + NAME + sequence + SRCS + "sequence.py" + DEPS + ::scalar + PUBLIC +) + +cg_py_library( + NAME tuple + SRCS "tuple.py" +) diff --git a/compiler_gym/third_party/CMakeLists.txt b/compiler_gym/third_party/CMakeLists.txt new file mode 100644 index 000000000..8c0906a57 --- /dev/null +++ b/compiler_gym/third_party/CMakeLists.txt @@ -0,0 +1,6 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() diff --git a/compiler_gym/third_party/autophase/CMakeLists.txt b/compiler_gym/third_party/autophase/CMakeLists.txt new file mode 100644 index 000000000..61ac60c25 --- /dev/null +++ b/compiler_gym/third_party/autophase/CMakeLists.txt @@ -0,0 +1,63 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + autophase + SRCS + "__init__.py" + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS analysis core support) +cg_cc_library( + NAME + InstCount + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + HDRS + "InstCount.h" + SRCS + "InstCount.cc" + ABS_DEPS + ${_LLVM_LIBS} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +llvm_map_components_to_libnames(_LLVM_LIBS analysis core irreader support) +cg_cc_binary( + NAME + compute_autophase + SRCS + "compute_autophase.cc" + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + DEPS + ::InstCount + ABS_DEPS + glog::glog + ${_LLVM_LIBS} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} +) + +cg_genrule( + NAME libLLVMPolly + OUTS "libLLVMPolly.so" + COMMAND + "cp $ $@" + ABS_DEPENDS + LLVMPolly +) diff --git a/compiler_gym/third_party/cbench/CMakeLists.txt b/compiler_gym/third_party/cbench/CMakeLists.txt new file mode 100644 index 000000000..f87a87412 --- /dev/null +++ b/compiler_gym/third_party/cbench/CMakeLists.txt @@ -0,0 +1,237 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_genrule( + NAME cbench_tar + DEPENDS ::cbench + OUTS "llvm_bitcodes-10.0.0-cbench-v1.tar.bz2" + CMD "tar cjfh \"$@\" -C \"$(@D)\" cbench-v1" + PUBLIC +) + +cg_filegroup( + NAME cbench + DEPENDS + ::adpcm + ::bitcount + ::blowfish + ::bzip2 + ::crc32 + ::dijkstra + ::ghostscript + ::gsm + ::ispell + ::jpeg-c + ::jpeg-d + ::lame + ::patricia + ::qsort + ::rijndael + ::sha + ::stringsearch + ::stringsearch2 + ::susan + ::tiff2bw + ::tiff2rgba + ::tiffdither + ::tiffmedian + PUBLIC +) + +set(cBench_RUNTIME_DATA_SRC_FILES + ${cBench_consumer_tiff_data_FILE} + ${cBench_office_data_FILE} + ${cBench_telecom_data_FILE} + ${cBench_consumer_jpeg_data_FILE} + ${cBench_telecom_gsm_data_FILE} + ${cBench_consumer_data_FILE} + ${cBench_bzip2_data_FILE} + ${cBench_network_patricia_data_FILE} + ${cBench_network_dijkstra_data_FILE} + ${cBench_automotive_susan_data_FILE} + ${cBench_automotive_qsort_data_FILE}) +set(_CMD "cp -R \"${CMAKE_CURRENT_SOURCE_DIR}/runtime_data\" \"$@\"") +foreach(FILE_ IN LISTS cBench_RUNTIME_DATA_SRC_FILES) + string(CONCAT _CMD "${_CMD}" " && tar xzf \"${FILE_}\" -C \"$(@D)/runtime_data\"") +endforeach() +file(GLOB_RECURSE LOCAL_RUNTIME_DATA_FILES + LIST_DIRECTORIES true "${CMAKE_CURRENT_SOURCE_DIR}/runtime_data") +cg_genrule( + NAME make_runtime_data + SRCS + ${cBench_RUNTIME_DATA_SRC_FILES} + ${LOCAL_RUNTIME_DATA_FILES} + OUTS "runtime_data" + COMMAND "${_CMD}" +) + +cg_filegroup( + NAME benchmarks_list + FILES "${CMAKE_CURRENT_SOURCE_DIR}/benchmarks.txt" + PUBLIC +) + +cg_py_binary( + NAME make_llvm_module + SRCS "make_llvm_module.py" + DEPS compiler_gym::envs::llvm::llvm_benchmark +) + +set(CBENCH_MODULES + crc32 + jpeg-c + jpeg-d + stringsearch2 + adpcm + bitcount + blowfish + bzip2 + dijkstra + patricia + qsort + rijndael + sha + stringsearch + susan + tiff2bw + tiff2rgba + tiffdither + tiffmedian) +set(CBENCH_MODULE_DIRS + "${cBench_SRC_DIR}/telecom_CRC32" + "${ctuning_ai_SRC_DIR}/program/cbench-consumer-jpeg-c" + "${ctuning_ai_SRC_DIR}/program/cbench-consumer-jpeg-d" + "${ctuning_ai_SRC_DIR}/program/cbench-office-stringsearch2" + "${cBench_SRC_DIR}/telecom_adpcm_c" + "${cBench_SRC_DIR}/automotive_bitcount" + "${cBench_SRC_DIR}/security_blowfish_d" + "${cBench_SRC_DIR}/bzip2d" + "${cBench_SRC_DIR}/network_dijkstra" + "${cBench_SRC_DIR}/network_patricia" + "${cBench_SRC_DIR}/automotive_qsort1" + "${cBench_SRC_DIR}/security_rijndael_d" + "${cBench_SRC_DIR}/security_sha" + "${cBench_SRC_DIR}/office_stringsearch1" + "${cBench_SRC_DIR}/automotive_susan_c" + "${cBench_SRC_DIR}/consumer_tiff2bw" + "${cBench_SRC_DIR}/consumer_tiff2rgba" + "${cBench_SRC_DIR}/consumer_tiffdither" + "${cBench_SRC_DIR}/consumer_tiffmedian") +foreach(MODULE_ DIR_ IN ZIP_LISTS CBENCH_MODULES CBENCH_MODULE_DIRS) + file(GLOB_RECURSE MODULE_FILES LIST_DIRECTORIES true "${DIR_}/*") + string(CONCAT _CMD_PY + "\"${Python3_EXECUTABLE}\" " + "\"${CMAKE_CURRENT_BINARY_DIR}/make_llvm_module.py\" \"${DIR_}\" \"$@\"") + set_command_pythonpath(COMMAND "${_CMD_PY}" RESULT _CMD_PY) + string(CONCAT _CMD + "mkdir -p \"$(@D)\" && " + "${_CMD_PY}") + cg_genrule( + NAME ${MODULE_} + SRCS + ${MODULE_FILES} + OUTS "cbench-v1/${MODULE_}.bc" + COMMAND "${_CMD}" + DEPENDS + ::make_llvm_module + PUBLIC + ) +endforeach() + +file(GLOB_RECURSE office_ghostscript_FILES LIST_DIRECTORIES true + "${cBench_SRC_DIR}/office_ghostscript/*") +string(CONCAT _CMD + "mkdir -p \"$(@D)\" && " + "rsync -rL \"${cBench_SRC_DIR}/office_ghostscript/\" \"$(@D)/office_ghostscript_src/\" && " + "patch --quiet --forward 
\"$(@D)/office_ghostscript_src/src/idebug.c\" < \"${CMAKE_CURRENT_SOURCE_DIR}/cBench-ghostscript-idebug.c.patch\" && " + "patch --quiet --forward \"$(@D)/office_ghostscript_src/src/std.h\" < \"${CMAKE_CURRENT_SOURCE_DIR}/cBench-ghostscript-std.h.patch\" && ") +string(CONCAT _CMD_PY + "\"${Python3_EXECUTABLE}\" " + "\"${CMAKE_CURRENT_BINARY_DIR}/make_llvm_module.py\" \"$(@D)/office_ghostscript_src\" \"$@\"") +set_command_pythonpath(COMMAND "${_CMD_PY}" RESULT _CMD_PY) +string(CONCAT _CMD "${_CMD}" "${_CMD_PY}") +cg_genrule( + NAME ghostscript + SRCS + ${office_ghostscript_FILES} + "cBench-ghostscript-std.h.patch" + "cBench-ghostscript-idebug.c.patch" + OUTS "cbench-v1/ghostscript.bc" + COMMAND "${_CMD}" + DEPENDS + ::make_llvm_module + PUBLIC +) + +file(GLOB_RECURSE telecom_gsm_FILES LIST_DIRECTORIES true + "${cBench_SRC_DIR}/telecom_gsm/*") +string(CONCAT _CMD + "mkdir -p \"$(@D)\" && " + "rsync -rL \"${cBench_SRC_DIR}/telecom_gsm/\" \"$(@D)/telecom_gsm_src/\" && " + "patch --quiet --forward \"$(@D)/telecom_gsm_src/src/add.c\"" + " < \"${CMAKE_CURRENT_SOURCE_DIR}/cBench-gsm-add.c.patch\" && ") +string(CONCAT _CMD_PY + "\"${Python3_EXECUTABLE}\" " + "\"${CMAKE_CURRENT_BINARY_DIR}/make_llvm_module.py\" \"$(@D)/telecom_gsm_src\" \"$@\" " + "-DSASR -DSTUPID_COMPILER -DNeedFunctionPrototypes=1") +set_command_pythonpath(COMMAND "${_CMD_PY}" RESULT _CMD_PY) +string(CONCAT _CMD "${_CMD}" "${_CMD_PY}") +cg_genrule( + NAME gsm + SRCS + ${telecom_gsm_FILES} + "cBench-gsm-add.c.patch" + OUTS "cbench-v1/gsm.bc" + COMMAND "${_CMD}" + DEPENDS + ::make_llvm_module + PUBLIC +) + +file(GLOB_RECURSE office_ispell_FILES LIST_DIRECTORIES true + "${cBench_SRC_DIR}/office_ispell/*") +string(CONCAT _CMD + "mkdir -p \"$(@D)\" && " + "rsync -rL \"${cBench_SRC_DIR}/office_ispell/\" \"$(@D)/office_ispell_src/\" && " + "patch --quiet --forward \"$(@D)/office_ispell_src/src/correct.c\" < \"${CMAKE_CURRENT_SOURCE_DIR}/cBench-ispell-correct.c.patch\" && ") +string(CONCAT _CMD_PY + "\"${Python3_EXECUTABLE}\" " + "\"${CMAKE_CURRENT_BINARY_DIR}/make_llvm_module.py\" \"$(@D)/office_ispell_src\" \"$@\"") +set_command_pythonpath(COMMAND "${_CMD_PY}" RESULT _CMD_PY) +string(CONCAT _CMD "${_CMD}" "${_CMD_PY}") +cg_genrule( + NAME ispell + SRCS + ${office_ispell_FILES} + "cBench-ispell-correct.c.patch" + OUTS "cbench-v1/ispell.bc" + COMMAND "${_CMD}" + DEPENDS + ::make_llvm_module + PUBLIC +) + +file(GLOB_RECURSE consumer_lame_FILES LIST_DIRECTORIES true + "${cBench_SRC_DIR}/consumer_lame/*") +string(CONCAT _CMD + "mkdir -p \"$(@D)\" && ") +string(CONCAT _CMD_PY + "\"${Python3_EXECUTABLE}\" " + "\"${CMAKE_CURRENT_BINARY_DIR}/make_llvm_module.py\" " + "\"${cBench_SRC_DIR}/consumer_lame\" \"$@\" -DLAMESNDFILE -DHAVEMPGLIB -DLAMEPARSE") +set_command_pythonpath(COMMAND "${_CMD_PY}" RESULT _CMD_PY) +string(CONCAT _CMD "${_CMD}" "${_CMD_PY}") +cg_genrule( + NAME lame + SRCS + ${consumer_lame_FILES} + OUTS "cbench-v1/lame.bc" + COMMAND "${_CMD}" + DEPENDS + ::make_llvm_module + PUBLIC +) diff --git a/compiler_gym/third_party/csmith/CMakeLists.txt b/compiler_gym/third_party/csmith/CMakeLists.txt new file mode 100644 index 000000000..ec5273001 --- /dev/null +++ b/compiler_gym/third_party/csmith/CMakeLists.txt @@ -0,0 +1,8 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_filegroup(NAME all FILES "${Csmith_ROOT_DIR}") diff --git a/compiler_gym/third_party/inst2vec/CMakeLists.txt b/compiler_gym/third_party/inst2vec/CMakeLists.txt new file mode 100644 index 000000000..b1f1659d3 --- /dev/null +++ b/compiler_gym/third_party/inst2vec/CMakeLists.txt @@ -0,0 +1,52 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_genrule( + NAME dictionary + OUTS "dictionary.pickle" + SRCS "dictionary.tar.bz2" + COMMAND "tar xjf ${CMAKE_CURRENT_SOURCE_DIR}/dictionary.tar.bz2 -C $(@D)" +) + +cg_genrule( + NAME embeddings + OUTS "embeddings.pickle" + SRCS "embeddings.tar.bz2" + COMMAND "tar xjf ${CMAKE_CURRENT_SOURCE_DIR}/embeddings.tar.bz2 -C $(@D)" +) + +cg_py_library( + NAME + inst2vec + SRCS + "__init__.py" + DATA + ::dictionary + ::embeddings + DEPS + ::inst2vec_preprocess + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + inst2vec_preprocess + SRCS + "inst2vec_preprocess.py" + DEPS + ::rgx_utils + PUBLIC +) + +cg_py_library( + NAME + rgx_utils + SRCS + "rgx_utils.py" + PUBLIC +) diff --git a/compiler_gym/third_party/llvm/CMakeLists.txt b/compiler_gym/third_party/llvm/CMakeLists.txt new file mode 100644 index 000000000..700e2dede --- /dev/null +++ b/compiler_gym/third_party/llvm/CMakeLists.txt @@ -0,0 +1,81 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + llvm + SRCS + "__init__.py" + DEPS + compiler_gym::util::util + PUBLIC +) + +llvm_map_components_to_libnames(llvm_libs support irreader core) +cg_cc_binary( + NAME + compute_ir_instruction_count + SRCS + "compute_ir_instruction_count.cc" + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + ABS_DEPS + glog::glog + ${llvm_libs} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} +) + +cg_genrule( + NAME libLLVMPolly + OUTS "libLLVMPolly.so" + COMMAND + "cp $ $@" + ABS_DEPENDS + LLVMPolly +) + +cg_py_library( + NAME + instcount + SRCS + "instcount.py" + PUBLIC +) + +llvm_map_components_to_libnames(llvm_libs support core analysis) +cg_cc_library( + NAME + InstCount + COPTS + "-DGOOGLE_PROTOBUF_NO_RTTI" + "-fno-rtti" + HDRS + "InstCount.h" + SRCS + "InstCount.cc" + ABS_DEPS + glog::glog + ${llvm_libs} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} + PUBLIC +) + +cg_cc_binary( + NAME + PrintInstCountFeatureNames + SRCS + "PrintInstCountFeatureNames.cc" + DEPS + ::InstCount +) diff --git a/compiler_gym/third_party/neuro-vectorizer/CMakeLists.txt b/compiler_gym/third_party/neuro-vectorizer/CMakeLists.txt new file mode 100644 index 000000000..60db208cc --- /dev/null +++ b/compiler_gym/third_party/neuro-vectorizer/CMakeLists.txt @@ -0,0 +1,11 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_filegroup( + NAME header + FILES "${CMAKE_CURRENT_LIST_DIR}/header.h" +) diff --git a/compiler_gym/util/CMakeLists.txt b/compiler_gym/util/CMakeLists.txt new file mode 100644 index 000000000..5bffc6add --- /dev/null +++ b/compiler_gym/util/CMakeLists.txt @@ -0,0 +1,136 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + util + SRCS + "__init__.py" + "capture_output.py" + "commands.py" + "debug_util.py" + "decorators.py" + "download.py" + "executor.py" + "filesystem.py" + "gym_type_hints.py" + "logging.py" + "logs.py" + "minimize_trajectory.py" + "parallelization.py" + "registration.py" + "runfiles_path.py" + "shell_format.py" + "statistics.py" + "tabulate.py" + "temporary_working_directory.py" + "thread_pool.py" + "timer.py" + "truncate.py" + GENERATED_SRCS + "$" + DEPS + make_version + # TODO(boian): verify if needed + #unconverted_name:@rules_python//python/runfiles + PUBLIC +) + +cg_cc_library( + NAME + EnumUtil + SRCS + "EnumUtil.h" + ABS_DEPS + grpc++ + fmt + magic_enum + PUBLIC +) + +cg_cc_library( + NAME + GrpcStatusMacros + HDRS + "GrpcStatusMacros.h" + ABS_DEPS + Boost::headers + grpc++ + fmt + glog::glog + PUBLIC +) + +cg_cc_library( + NAME + RunfilesPath + HDRS + "RunfilesPath.h" + SRCS + "RunfilesPath.cc" + ABS_DEPS + Boost::filesystem + fmt + PUBLIC +) + +cg_cc_library( + NAME + StrLenConstexpr + HDRS + "StrLenConstexpr.h" + PUBLIC +) + +cg_cc_library( + NAME + Subprocess + HDRS + "Subprocess.h" + SRCS + "Subprocess.cc" + DEPS + compiler_gym::service::proto::compiler_gym_service_cc + ABS_DEPS + Boost::filesystem + Boost::headers + grpc++ + fmt + PUBLIC +) + +cg_cc_library( + NAME + Unreachable + HDRS + "Unreachable.h" + ABS_DEPS + glog::glog + PUBLIC +) + +cg_genrule( + NAME make_version + OUTS "version.py" + SRCS "${CMAKE_CURRENT_SOURCE_DIR}/../../VERSION" + COMMAND "echo \"__version__ = \\\"$(cat \"${CMAKE_CURRENT_SOURCE_DIR}/../../VERSION\")\\\"\" > $@" +) + +cg_cc_library( + NAME + Version + HDRS + "$/Version.h" + PUBLIC +) + +cg_genrule( + NAME make_version_header + OUTS "Version.h" + SRCS "${CMAKE_CURRENT_SOURCE_DIR}/../../VERSION" + COMMAND "echo \"#define COMPILER_GYM_VERSION \\\"$(cat \"${CMAKE_CURRENT_SOURCE_DIR}/../../VERSION\")\\\"\" > $@" +) diff --git a/compiler_gym/util/flags/CMakeLists.txt b/compiler_gym/util/flags/CMakeLists.txt new file mode 100644 index 000000000..3b748ce48 --- /dev/null +++ b/compiler_gym/util/flags/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + flags + SRCS + "benchmark_from_flags.py" + "env_from_flags.py" + "episode_length.py" + "episodes.py" + "learning_rate.py" + "nproc.py" + "output_dir.py" + "seed.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::service::service + compiler_gym::service::proto::proto + PUBLIC +) diff --git a/compiler_gym/views/CMakeLists.txt b/compiler_gym/views/CMakeLists.txt new file mode 100644 index 000000000..f41e879dd --- /dev/null +++ b/compiler_gym/views/CMakeLists.txt @@ -0,0 +1,56 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
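make_version above stamps the top-level VERSION file into a generated version.py and folds it back into the util library through GENERATED_SRCS. The same genrule-plus-GENERATED_SRCS pattern, reduced to hypothetical names (make_build_info, my_lib); how GENERATED_SRCS paths are resolved depends on cg_py_library's implementation, which is defined outside this section:

# Hypothetical sketch of folding a genrule output into a Python library,
# as the util rule above does with the make_version output.
cg_genrule(
  NAME make_build_info
  OUTS "build_info.py"
  COMMAND "echo 'BUILD_INFO = \"dev\"' > $@"
)

cg_py_library(
  NAME my_lib
  SRCS "__init__.py"
  GENERATED_SRCS "${CMAKE_CURRENT_BINARY_DIR}/build_info.py"
  DEPS ::make_build_info
  PUBLIC
)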
+ +cg_add_all_subdirs() + +cg_py_library( + NAME + views + SRCS + "__init__.py" + DEPS + ::observation + ::reward + PUBLIC +) + +cg_py_library( + NAME + observation + SRCS + "observation.py" + DEPS + ::observation_space_spec + compiler_gym::service::service + compiler_gym::service::proto::proto + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + observation_space_spec + SRCS + "observation_space_spec.py" + DEPS + compiler_gym::service::service + compiler_gym::service::proto::proto + compiler_gym::spaces::spaces + compiler_gym::util::util + PUBLIC +) + +cg_py_library( + NAME + reward + SRCS + "reward.py" + DEPS + ::observation + compiler_gym::datasets::datasets + compiler_gym::service::proto::proto + compiler_gym::spaces::spaces + PUBLIC +) diff --git a/compiler_gym/wrappers/CMakeLists.txt b/compiler_gym/wrappers/CMakeLists.txt new file mode 100644 index 000000000..326ba828e --- /dev/null +++ b/compiler_gym/wrappers/CMakeLists.txt @@ -0,0 +1,24 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + wrappers + SRCS + "__init__.py" + "commandline.py" + "core.py" + "datasets.py" + "llvm.py" + "time_limit.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::util::util + compiler_gym::views::views + PUBLIC +) diff --git a/external/absl/CMakeLists.txt b/external/absl/CMakeLists.txt new file mode 100644 index 000000000..0aa5c47c7 --- /dev/null +++ b/external/absl/CMakeLists.txt @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(abls) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +ExternalProject_Add( + absl + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/absl" + URL "https://github.com/abseil/abseil-cpp/archive/997aaf3a28308eba1b9156aa35ab7bca9688e9f6.tar.gz" + URL_HASH "SHA256=35f22ef5cb286f09954b7cc4c85b5a3f6221c9d4df6b8c4a1e9d399555b366ee" + CMAKE_ARGS + -C "${CMAKE_CURRENT_BINARY_DIR}/absl_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" + -DCMAKE_POSITION_INDEPENDENT_CODE=ON +) diff --git a/external/boost/CMakeLists.txt b/external/boost/CMakeLists.txt new file mode 100644 index 000000000..d889dc661 --- /dev/null +++ b/external/boost/CMakeLists.txt @@ -0,0 +1,48 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +project(protobuf) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) +include(ProcessorCount) + +find_package(Git REQUIRED) + +if(DEFINED CMAKE_C_FLAGS) + list(APPEND _ADDITIONAL_ARGS "cflags=\"${CMAKE_C_FLAGS}\"") +endif() +if(DEFINED CMAKE_CXX_FLAGS) + list(APPEND _ADDITIONAL_ARGS "cxxflags=\"${CMAKE_CXX_FLAGS}\"") +endif() +if (DEFINED CMAKE_STATIC_LINKER_FLAGS_INIT OR + DEFINED CMAKE_SHARED_LINKER_FLAGS_INIT OR + DEFINED CMAKE_STATIC_LINKER_FLAGS OR + DEFINED CMAKE_SHARED_LINKER_FLAGS) + list(APPEND _ADDITIONAL_ARGS "linkflags=\"${CMAKE_STATIC_LINKER_FLAGS_INIT} ${CMAKE_SHARED_LINKER_FLAGS_INIT} ${CMAKE_STATIC_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}\"") +endif() + +ProcessorCount(_JOBS) +ExternalProject_Add( + boost + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/boost" + GIT_REPOSITORY "https://github.com/boostorg/boost.git" + GIT_TAG afb333b7c5101041f0280b2edf155c55114c9c95 #tag: boost-1.71.0 + BUILD_IN_SOURCE TRUE + CONFIGURE_COMMAND + "${CMAKE_COMMAND}" -E env "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" + ./bootstrap.sh + BUILD_COMMAND + "${CMAKE_COMMAND}" -E env "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" + ./b2 + -j${_JOBS} + "--prefix=${CMAKE_INSTALL_PREFIX}" + "--build-dir=${CMAKE_CURRENT_BINARY_DIR}/build" + --with-filesystem + --with-headers + cxxstd=${CMAKE_CXX_STANDARD} + install + INSTALL_COMMAND "" +) diff --git a/external/cpuinfo/CMakeLists.txt b/external/cpuinfo/CMakeLists.txt new file mode 100644 index 000000000..090095318 --- /dev/null +++ b/external/cpuinfo/CMakeLists.txt @@ -0,0 +1,19 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +ExternalProject_Add( + cpuinfo + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/cpuinfo" + GIT_REPOSITORY "https://github.com/pytorch/cpuinfo.git" + GIT_TAG 2e79955ecaec85da13ac8f1245a8b2afa10d31c2 + CMAKE_ARGS + -C "${CMAKE_CURRENT_BINARY_DIR}/cpuinfo_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" +) diff --git a/external/csmith/CMakeLists.txt b/external/csmith/CMakeLists.txt new file mode 100644 index 000000000..bd58189fc --- /dev/null +++ b/external/csmith/CMakeLists.txt @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +ExternalProject_Add( + csmith + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/csmith" + GIT_REPOSITORY "https://github.com/csmith-project/csmith.git" + GIT_TAG 30dccd73b78652c4719f36572994778a5b233a4e #tag csmith-2.3.0 + CMAKE_ARGS + -C "${CMAKE_CURRENT_BINARY_DIR}/csmith_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" + "-DCOMPILE_DEFINITIONS=_LIBCPP_ENABLE_CXX17_REMOVED_FEATURES" +) diff --git a/external/external.cmake b/external/external.cmake new file mode 100644 index 000000000..6185c3dd1 --- /dev/null +++ b/external/external.cmake @@ -0,0 +1,481 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +include(ExternalProject) +include(FetchContent) +include(write_cache_script) +include(build_external_cmake_project) + +unset(FETCH_CONTENT_LIST) + +# # === Google test === + +set(COMPILER_GYM_GTEST_PROVIDER "internal" CACHE STRING "Find or build gtest together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_GTEST_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_GTEST_PROVIDER STREQUAL "internal") + FetchContent_Declare( + gtest + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/gtest" + GIT_REPOSITORY "https://github.com/google/googletest.git" + GIT_TAG 703bd9caab50b139428cea1aaff9974ebee5742e #tag release-1.10.0 + ) + FetchContent_MakeAvailable(gtest) + add_library(GTest::GTest ALIAS gtest) + add_library(GTest::Main ALIAS gtest_main) +else() + find_package(GTest REQUIRED) +endif() + +# # === Google benchmark === + +set(COMPILER_GYM_BENCHMARK_PROVIDER "internal" CACHE STRING "Find or build benchmark together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_BENCHMARK_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_BENCHMARK_PROVIDER STREQUAL "internal") + FetchContent_Declare( + benchmark + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/benchmark" + GIT_REPOSITORY "https://github.com/google/benchmark.git" + GIT_TAG 9913418d323e64a0111ca0da81388260c2bbe1e9 #tag v1.4.0 + ) + + if(NOT benchmark_POPULATED) + FetchContent_Populate(benchmark) + + # Benchmark v1.4.0 requires C++03. + set(_CMAKE_CXX_STANDARD_OLD ${CMAKE_CXX_STANDARD}) + unset(CMAKE_CXX_STANDARD CACHE) + + option(BENCHMARK_ENABLE_TESTING "Enable testing of the benchmark library." OFF) + + add_subdirectory(${benchmark_SOURCE_DIR} ${benchmark_BINARY_DIR}) + + set(CMAKE_CXX_STANDARD ${_CMAKE_CXX_STANDARD_OLD} CACHE STRING "C++ standard to be used." FORCE) + endif() +else() + find_package(benchmark REQUIRED) +endif() + +# # === Abseil === + +set(COMPILER_GYM_ABSEIL_PROVIDER "internal" CACHE STRING "Find or build abseil together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_ABSEIL_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_ABSEIL_PROVIDER STREQUAL "internal") + build_external_cmake_project( + NAME absl + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/absl") +endif() +find_package(absl REQUIRED) + +# # === Google flags === + +set(COMPILER_GYM_GFLAGS_PROVIDER "internal" CACHE STRING "Find or build gflags together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_GFLAGS_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_GFLAGS_PROVIDER STREQUAL "internal") + build_external_cmake_project( + NAME gflags + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/gflags") +endif() +find_package(gflags REQUIRED) + + +# # === Google logging === + +set(COMPILER_GYM_GLOG_PROVIDER "internal" CACHE STRING "Find or build glog together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_GLOG_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_GLOG_PROVIDER STREQUAL "internal") + FetchContent_Declare( + glog + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/glog" + GIT_REPOSITORY "https://github.com/google/glog.git" + GIT_TAG 96a2f23dca4cc7180821ca5f32e526314395d26a #tag v0.4.0 + ) + list(APPEND FETCH_CONTENT_LIST glog) +else() + find_package(glog REQUIRED) +endif() + +# # C++ subprocess management. 
https://github.com/arun11299/cpp-subprocess + +set(COMPILER_GYM_SUBPROCESS_PROVIDER "internal" CACHE STRING "Find or build subprocess together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_SUBPROCESS_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_SUBPROCESS_PROVIDER STREQUAL "internal") + build_external_cmake_project( + NAME subprocess + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/subprocess" + ) +endif() +find_package(Subprocess REQUIRED) + +# # === LLVM === + +set(COMPILER_GYM_LLVM_PROVIDER "internal" CACHE STRING "Find or build llvm together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_LLVM_PROVIDER PROPERTY STRINGS "internal" "external") +build_external_cmake_project( + NAME llvm + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/llvm" + CONFIG_ARGS "-DCOMPILER_GYM_LLVM_PROVIDER=${COMPILER_GYM_LLVM_PROVIDER}") +set(LLVM_SRC_DIR "${CMAKE_CURRENT_BINARY_DIR}/external/llvm/llvm/src/llvm") +find_package(LLVM 10.0.0 EXACT REQUIRED) +# In a bunch of places in the code it is used "#include " +list(APPEND LLVM_INCLUDE_DIRS "${CMAKE_CURRENT_BINARY_DIR}/external/llvm/install") + + +# # === Protocol buffers === + +set(COMPILER_GYM_PROTOBUF_PROVIDER "internal" CACHE STRING "Find or build protobuf together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_PROTOBUF_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_PROTOBUF_PROVIDER STREQUAL "internal") + write_cache_script("${CMAKE_CURRENT_BINARY_DIR}/external/protobuf/protobuf_initial_cache.cmake") + execute_process( + COMMAND "${CMAKE_COMMAND}" + -C "${CMAKE_CURRENT_BINARY_DIR}/external/protobuf/protobuf_initial_cache.cmake" + -S "${CMAKE_CURRENT_LIST_DIR}/protobuf" + -B "${CMAKE_CURRENT_BINARY_DIR}/external/protobuf" + -D "CMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_BINARY_DIR}/external/protobuf/install" + COMMAND_ERROR_IS_FATAL ANY + ) + execute_process( + COMMAND + "${CMAKE_COMMAND}" + --build "${CMAKE_CURRENT_BINARY_DIR}/external/protobuf" + COMMAND_ERROR_IS_FATAL ANY + ) + list(PREPEND CMAKE_PREFIX_PATH "${CMAKE_CURRENT_BINARY_DIR}/external/protobuf/install") +endif() +find_package(Protobuf REQUIRED) + +# # === GRPC === + +set(COMPILER_GYM_GRPC_PROVIDER "internal" CACHE STRING "Find or build gRPC together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_GRPC_PROVIDER PROPERTY STRINGS "internal" "external") + +set(gRPC_ABSL_PROVIDER package) +if(COMPILER_GYM_GRPC_PROVIDER STREQUAL "internal") + if (NOT DEFINED gRPC_ABSL_PROVIDER OR gRPC_ABSL_PROVIDER STREQUAL "module") + list(APPEND _gRPC_GIT_SUBMODULES "third_party/abseil-cpp") + endif() + + if (NOT DEFINED gRPC_ZLIB_PROVIDER OR gRPC_ZLIB_PROVIDER STREQUAL "module") + list(APPEND _gRPC_GIT_SUBMODULES "third_party/zlib") + endif() + + if (NOT DEFINED gRPC_CARES_PROVIDER OR gRPC_CARES_PROVIDER STREQUAL "module") + list(APPEND _gRPC_GIT_SUBMODULES "third_party/cares/cares") + endif() + + if (NOT DEFINED gRPC_RE2_PROVIDER OR gRPC_RE2_PROVIDER STREQUAL "module") + list(APPEND _gRPC_GIT_SUBMODULES "third_party/re2") + endif() + + if (NOT DEFINED gRPC_SSL_PROVIDER OR gRPC_SSL_PROVIDER STREQUAL "module") + list(APPEND _gRPC_GIT_SUBMODULES "third_party/boringssl-with-bazel") + endif() + + set(gRPC_PROTOBUF_PROVIDER "package" CACHE STRING "") + + # In CMake v3.19.6 if GIT_SUBMODULES changes during reconfiguration + # the FetchContent will not populate new submodules. + # The PREFIX directory will have to be deleted manually. 
+ FetchContent_Declare( + grpc + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/grpc" + GIT_REPOSITORY "https://github.com/grpc/grpc.git" + GIT_TAG 736e3758351ced3cd842bad3ba4e2540f01bbc48 # v1.36.0 + GIT_SUBMODULES ${_gRPC_GIT_SUBMODULES} + ) + FetchContent_MakeAvailable(grpc) + set(_GRPC_CPP_PLUGIN_EXECUTABLE $) + #TODO(boian): remove this when GrpcStatusMacros.h uses the correct include path. + target_include_directories(grpc++ INTERFACE "${grpc_SOURCE_DIR}") +else() + find_package(gRPC REQUIRED) + set(_GRPC_CPP_PLUGIN_EXECUTABLE $) +endif() + +# # === C++ enum trickery === +# # https://github.com/Neargye/magic_enum + +set(COMPILER_GYM_MAGIC_ENUM_PROVIDER "internal" CACHE STRING "Find or build magic_enum together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_MAGIC_ENUM_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_MAGIC_ENUM_PROVIDER STREQUAL "internal") + FetchContent_Declare( + magic_enum + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/magic_enum" + GIT_REPOSITORY "https://github.com/Neargye/magic_enum.git" + GIT_TAG 6e932ef66dbe054e039d4dba77a41a12f9f52e0c #tag 0.7.3 + ) + list(APPEND FETCH_CONTENT_LIST magic_enum) +else() + find_package(magic_enum REQUIRED) +endif() + +# # === ctuning-programs === +# # https://github.com/ChrisCummins/ctuning-programs + +# This seems to be unused. +#ExternalProject_Add( +# ctuning-programs +# PREFIX "${CMAKE_BINARY_DIR}/ctuning-programs" +# URL "https://github.com/ChrisCummins/ctuning-programs/archive/c3c126fcb400f3a14b69b152f15d15eae78ef908.tar.gz" +# URL_HASH "SHA256=5e14a49f87c70999a082cb5cf19b780d0b56186f63356f8f994dd9ffc79ec6f3" +# CONFIGURE_COMMAND "" +# BUILD_COMMAND "" +# INSTALL_COMMAND "" +#) + +file(GLOB CTUNING-PROGRAMS-SRCS "ctuning-programs/**") + +source_group( + ctuning-programs-all + FILES CTUNING-PROGRAMS-SRCS +) + +source_group( + ctuning-programs-readme + FILES "ctuning-programs/README.md" +) + +# # === cBench === +# # https://ctuning.org/wiki/index.php/CTools:CBench + +FetchContent_Declare( + cBench + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cbench" + URL "https://dl.fbaipublicfiles.com/compiler_gym/cBench_V1.1.tar.gz" + URL_HASH "SHA256=8908d742f5223f09f9a4d10f7e06bc805a0c1694aa70974d2aae91ab627b51e6" + DOWNLOAD_NO_EXTRACT FALSE +) +FetchContent_MakeAvailable(cBench) +FetchContent_GetProperties(cBench SOURCE_DIR cBench_SRC_DIR) + +FetchContent_Declare( + ctuning-ai + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/ctuning-ai" + URL "https://github.com/ChrisCummins/ck-mlops/archive/406738ad6d1fb2c1da9daa2c09d26fccab4e0938.tar.gz" + URL_HASH "SHA256=a82c13733696c46b5201c614fcf7229c3a74a83ce485cab2fbf17309b7564f9c" +) +FetchContent_MakeAvailable(ctuning-ai) +FetchContent_GetProperties(ctuning-ai SOURCE_DIR ctuning_ai_SRC_DIR) + +# # Datasets. 
+ +FetchContent_Declare( + cBench_consumer_tiff_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_tiff_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_consumer_tiff_data.tar.gz" + URL_HASH "SHA256=779abb7b7fee8733313e462e6066c16375e9209a9f7ff692fd06c7598946939a" +) +FetchContent_MakeAvailable(cBench_consumer_tiff_data) +set(cBench_consumer_tiff_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_tiff_data/src/cDatasets_V1.1_consumer_tiff_data.tar.gz") + +FetchContent_Declare( + cBench_office_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_office_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_office_data.tar.gz" + URL_HASH "SHA256=cfa09cd37cb93aba57415033905dc6308653c7b833feba5a25067bfb62999f32" +) +FetchContent_MakeAvailable(cBench_office_data) +set(cBench_office_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_office_data/src/cDatasets_V1.1_office_data.tar.gz") + +FetchContent_Declare( + cBench_telecom_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_telecom_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_telecom_data.tar.gz" + URL_HASH "SHA256=e5cb6663beefe32fd12f90c8f533f8e1bce2f05ee4e3836efb5556d5e1089df0" +) +FetchContent_MakeAvailable(cBench_telecom_data) +set(cBench_telecom_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_telecom_data/src/cDatasets_V1.1_telecom_data.tar.gz") + +FetchContent_Declare( + cBench_consumer_jpeg_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_jpeg_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_consumer_jpeg_data.tar.gz" + URL_HASH "SHA256=bec5ffc15cd2f952d9a786f3cd31d90955c318a5e4f69c5ba472f79d5a3e8f0b" +) +FetchContent_MakeAvailable(cBench_consumer_jpeg_data) +set(cBench_consumer_jpeg_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_jpeg_data/src/cDatasets_V1.1_consumer_jpeg_data.tar.gz") + +FetchContent_Declare( + cBench_telecom_gsm_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_telecom_gsm_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_telecom_gsm_data.tar.gz" + URL_HASH "SHA256=52545d3a0ce15021131c62d96d3a3d7e6670e2d6c34226ac9a3d5191a1ee214a" +) +FetchContent_MakeAvailable(cBench_telecom_gsm_data) +set(cBench_telecom_gsm_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_telecom_gsm_data/src/cDatasets_V1.1_telecom_gsm_data.tar.gz") + +FetchContent_Declare( + cBench_consumer_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_consumer_data.tar.gz" + URL_HASH "SHA256=a4d40344af3022bfd7b4c6fcf6d59d598825b07d9e37769dbf1b3effa39aa445" +) +FetchContent_MakeAvailable(cBench_consumer_data) +set(cBench_consumer_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_consumer_data/src/cDatasets_V1.1_consumer_data.tar.gz") + +FetchContent_Declare( + cBench_bzip2_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_bzip2_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_bzip2_data.tar.gz" + URL_HASH "SHA256=46e5760eeef77e6b0c273af92de971bc45f33a59e0efc183073d9aa6b716c302" +) +FetchContent_MakeAvailable(cBench_bzip2_data) +set(cBench_bzip2_data_FILE + 
"${CMAKE_CURRENT_BINARY_DIR}/external/cBench_bzip2_data/src/cDatasets_V1.1_bzip2_data.tar.gz") + +FetchContent_Declare( + cBench_network_patricia_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_network_patricia_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_network_patricia_data.tar.gz" + URL_HASH "SHA256=72dae0e670d93ef929e50aca7a138463e0915502281ccafe793e378cb2a85dfb" +) +FetchContent_MakeAvailable(cBench_network_patricia_data) +set(cBench_network_patricia_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_network_patricia_data/src/cDatasets_V1.1_network_patricia_data.tar.gz") + +FetchContent_Declare( + cBench_network_dijkstra_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_network_dijkstra_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_network_dijkstra_data.tar.gz" + URL_HASH "SHA256=41c13f59cdfbc772081cd941f499b030370bc570fc2ba60a5c4b7194bc36ca5f" +) +FetchContent_MakeAvailable(cBench_network_dijkstra_data) +set(cBench_network_dijkstra_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_network_dijkstra_data/src/cDatasets_V1.1_network_dijkstra_data.tar.gz") + +FetchContent_Declare( + cBench_automotive_susan_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_automotive_susan_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_automotive_susan_data.tar.gz" + URL_HASH "SHA256=df56e1e44ccc560072381cdb001d770003ac74f92593dd5dbdfdd4ff9332a8e6" +) +FetchContent_MakeAvailable(cBench_automotive_susan_data) +set(cBench_automotive_susan_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_automotive_susan_data/src/cDatasets_V1.1_automotive_susan_data.tar.gz") + +FetchContent_Declare( + cBench_automotive_qsort_data + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_automotive_qsort_data" + URL "https://downloads.sourceforge.net/project/cbenchmark/cDatasets/V1.1/cDatasets_V1.1_automotive_qsort_data.tar.gz" + URL_HASH "SHA256=510b4225021408ac190f6f793e7d7171d3553c9916cfa8b2fb4ace005105e768" +) +FetchContent_MakeAvailable(cBench_automotive_qsort_data) +set(cBench_automotive_qsort_data_FILE + "${CMAKE_CURRENT_BINARY_DIR}/external/cBench_automotive_qsort_data/src/cDatasets_V1.1_automotive_qsort_data.tar.gz") + +# # === C++ cpuinfo === + +set(COMPILER_GYM_CPUINFO_PROVIDER "internal" CACHE STRING "Find or build cpuinfo together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_CPUINFO_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_CPUINFO_PROVIDER STREQUAL "internal") + build_external_cmake_project( + NAME cpuinfo + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/cpuinfo") +endif() +set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH TRUE) +find_package(PkgConfig REQUIRED) +pkg_check_modules(CpuInfo REQUIRED IMPORTED_TARGET libcpuinfo) +add_library(CpuInfo::cpuinfo ALIAS PkgConfig::CpuInfo) + +find_package(Clog REQUIRED) +# For some reason this does not propagate to the linker when CpuInfo::cpuinfo is included +#get_target_property(_CpuInfo_LINK_LIBS PkgConfig::CpuInfo IMPORTED_LINK_INTERFACE_LIBRARIES) +#if (NOT _CpuInfo_LINK_LIBS) +# set(_CpuInfo_LINK_LIBS Clog::libclog) +#else() +# list(APPEND _CpuInfo_LINK_LIBS Clog::libclog) +#endif() +#set_target_properties(PkgConfig::CpuInfo +# PROPERTIES IMPORTED_LINK_INTERFACE_LIBRARIES +# "${_CpuInfo_LINK_LIBS}") + + +# # === Csmith === +# # https://embed.cs.utah.edu/csmith/ + +build_external_cmake_project( + NAME csmith + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/csmith" + 
INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/csmith/install/csmith") +find_package(Csmith REQUIRED) + +# # === DeepDataFlow === +# # https://zenodo.org/record/4122437 + +#FetchContent_Declare( +# DeepDataFlow +# PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/DeepDataFlow" +# SOURCE_DIR "${CMAKE_BINARY_DIR}/compiler_gym/third_party/DeepDataFlow" +# URL "https://zenodo.org/record/4122437/files/llvm_bc_20.06.01.tar.bz2?download=1" +# URL_HASH "SHA256=ea6accbeb005889db3ecaae99403933c1008e0f2f4adc3c4afae3d7665c54004" +#) +#list(APPEND FETCH_CONTENT_LIST DeepDataFlow) + +# === A modern C++ formatting library === +# https://fmt.dev + +set(COMPILER_GYM_FMT_PROVIDER "internal" CACHE STRING "Find or build fmt together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_FMT_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_FMT_PROVIDER STREQUAL "internal") + FetchContent_Declare( + fmt + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/fmt" + GIT_REPOSITORY "https://github.com/fmtlib/fmt.git" + GIT_TAG f94b7364b9409f05207c3af3fa4666730e11a854 #tag 6.1.2 + ) + FetchContent_MakeAvailable(fmt) +else() + find_package(fmt REQUIRED) +endif() + +# # === Boost === + +set(COMPILER_GYM_BOOST_PROVIDER "internal" CACHE STRING "Find or build boost together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_BOOST_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_BOOST_PROVIDER STREQUAL "internal") + build_external_cmake_project( + NAME boost + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/boost") +endif() +find_package(Boost REQUIRED COMPONENTS filesystem headers) + +# # === nlohmann_json === + +set(COMPILER_GYM_NLOHMANN_JSON_PROVIDER "internal" CACHE STRING "Find or build nlohmann_json together with Compiler Gym.") +set_property(CACHE COMPILER_GYM_NLOHMANN_JSON_PROVIDER PROPERTY STRINGS "internal" "external") +if(COMPILER_GYM_NLOHMANN_JSON_PROVIDER STREQUAL "internal") + FetchContent_Declare( + nlohmann_json + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/external/nlohmann_json" + GIT_REPOSITORY "https://github.com/nlohmann/json.git" + GIT_TAG e7b3b40b5a95bc74b9a7f662830a27c49ffc01b4 #tag: v3.7.3 + ) + list(APPEND FETCH_CONTENT_LIST nlohmann_json) +else() + find_package(nlohmann_json REQUIRED) +endif() + +# # === ProGraML === +# # https://github.com/ChrisCummins/ProGraML + +build_external_cmake_project( + NAME programl + SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/programl") +list(PREPEND CMAKE_PREFIX_PATH + "${CMAKE_CURRENT_BINARY_DIR}/external/programl/programl/src/programl/bazel-bin" + "${CMAKE_CURRENT_BINARY_DIR}/external/programl/programl/src/programl/bazel-bin/external/labm8" + "${CMAKE_CURRENT_BINARY_DIR}/external/programl/programl/src/programl/bazel-programl" + "${CMAKE_CURRENT_BINARY_DIR}/external/programl/programl/src/programl/bazel-programl/external/labm8" + ) +find_package(Labm8 REQUIRED) +find_package(ProGraML REQUIRED) + +FetchContent_MakeAvailable(${FETCH_CONTENT_LIST}) diff --git a/external/gflags/CMakeLists.txt b/external/gflags/CMakeLists.txt new file mode 100644 index 000000000..48a1e1a20 --- /dev/null +++ b/external/gflags/CMakeLists.txt @@ -0,0 +1,21 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +# FetchContent is not used here because for some reason the install step +# fails with not installing all necessary files in CMake v3.19.6. 
+ExternalProject_Add( + gflags + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/gflags" + GIT_REPOSITORY "https://github.com/gflags/gflags.git" + GIT_TAG e171aa2d15ed9eb17054558e0b3a6a413bb01067 #tag v2.2.2 + CMAKE_ARGS + -C "${CMAKE_CURRENT_BINARY_DIR}/gflags_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" +) diff --git a/external/llvm/CMakeLists.txt b/external/llvm/CMakeLists.txt new file mode 100644 index 000000000..ce3465463 --- /dev/null +++ b/external/llvm/CMakeLists.txt @@ -0,0 +1,53 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +include(ProcessorCount) +if (DEFINED ENV{CMAKE_BUILD_PARALLEL_LEVEL}) + set(_JOBS $ENV{CMAKE_BUILD_PARALLEL_LEVEL}) +else() + ProcessorCount(_JOBS) +endif() + +# LLVM is memory hungry during linking +cmake_host_system_information(RESULT _RAM QUERY AVAILABLE_PHYSICAL_MEMORY) +set(_RAM_PER_LINK_JOB 10240) # 10 GiB +math(EXPR _LINK_JOBS "${_RAM} / ${_RAM_PER_LINK_JOB}" OUTPUT_FORMAT DECIMAL) +if (NOT _LINK_JOBS) + set(_LINK_JOBS 1) +endif() + +# The source is still required even if version is external. +if(NOT COMPILER_GYM_LLVM_PROVIDER STREQUAL "internal") + set(_BUILD_COMMAND "") + set(_INSTALL_COMMAND "") +else() + set(_BUILD_COMMAND "${CMAKE_COMMAND}" --build "") + set(_INSTALL_COMMAND "${CMAKE_COMMAND}" --install "") +endif() + +file(APPEND "${CMAKE_CURRENT_BINARY_DIR}/llvm_initial_cache.cmake" "set(LLVM_ENABLE_PROJECTS \"clang;polly\" CACHE STRING \"\")\n") + +ExternalProject_Add( + llvm + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/llvm" + GIT_REPOSITORY "https://github.com/llvm/llvm-project.git" + GIT_TAG d32170dbd5b0d54436537b6b75beaf44324e0c28 #tag llvmorg-10.0.0 + SOURCE_SUBDIR llvm + CMAKE_ARGS + -G Ninja + -C "${CMAKE_CURRENT_BINARY_DIR}/llvm_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" + -DLLVM_ABI_BREAKING_CHECKS=FORCE_OFF + -DLLVM_PARALLEL_LINK_JOBS=${_LINK_JOBS} + -DLLVM_PARALLEL_COMPILE_JOBS=${_JOBS} + -DLLVM_TARGETS_TO_BUILD=host + BUILD_COMMAND ${_BUILD_COMMAND} + INSTALL_COMMAND ${_INSTALL_COMMAND} +) diff --git a/external/programl/CMakeLists.txt b/external/programl/CMakeLists.txt new file mode 100644 index 000000000..525a392b6 --- /dev/null +++ b/external/programl/CMakeLists.txt @@ -0,0 +1,47 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../build_tools/cmake") +find_package(Bazel REQUIRED) + +ExternalProject_Add( + programl + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/programl" + URL "https://github.com/ChrisCummins/ProGraML/archive/4f0981d7a0d27aecef3d6e918c886642b231562d.tar.gz" + URL_HASH "SHA256=c56360aade351eda1c138a594177fcb7cd2cda2a0a6c5c0d9aa62c7f856194bd" + DOWNLOAD_NO_EXTRACT FALSE + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" +) + +ExternalProject_Add_Step(programl build_programl + ALWAYS TRUE + COMMAND + "${CMAKE_COMMAND}" -E env "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" + "${Bazel_EXECUTABLE}" build + --verbose_failures + "--cxxopt=-std=c++${CMAKE_CXX_STANDARD}" + -- + //programl/graph:features + //programl/graph:program_graph_builder + //programl/graph/format:node_link_graph + //programl/ir/llvm:llvm-10 + //programl/proto:programl_cc + //programl/proto:programl + @labm8//labm8/cpp:logging + @labm8//labm8/cpp:status + @labm8//labm8/cpp:status_macros + @labm8//labm8/cpp:statusor + @labm8//labm8/cpp:string + @labm8//labm8/cpp:stringpiece + DEPENDEES update + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/programl/src/programl" +) diff --git a/external/protobuf/CMakeLists.txt b/external/protobuf/CMakeLists.txt new file mode 100644 index 000000000..5467dec36 --- /dev/null +++ b/external/protobuf/CMakeLists.txt @@ -0,0 +1,39 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(protobuf) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) +include(ProcessorCount) + +find_package(Git REQUIRED) + +ProcessorCount(_JOBS) +ExternalProject_Add( + protobuf + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/protobuf" + GIT_REPOSITORY "https://github.com/protocolbuffers/protobuf.git" + # TODO: update version to match that in Bazel once ProGraML also advances protobuf version. + # see: https://github.com/facebookresearch/CompilerGym/pull/498#issuecomment-991236132 + GIT_TAG fde7cf7358ec7cd69e8db9be4f1fa6a5c431386a #v3.13.0 + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" +) + +ExternalProject_Add_Step(protobuf build_protobuf + ALWAYS TRUE + COMMAND + "${CMAKE_COMMAND}" -E env "CC=${CMAKE_C_COMPILER}" "CXX=${CMAKE_CXX_COMPILER}" + "${CMAKE_COMMAND}" + -C "${CMAKE_CURRENT_BINARY_DIR}/protobuf_initial_cache.cmake" + "-DGIT_EXECUTABLE=${GIT_EXECUTABLE}" + "-DGIT_REPOSITORY_DIR=${CMAKE_CURRENT_BINARY_DIR}/protobuf/src/protobuf" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" + -P "${CMAKE_CURRENT_SOURCE_DIR}/build_protobuf.cmake" + DEPENDEES update + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/protobuf/src/protobuf" +) diff --git a/external/protobuf/build_protobuf.cmake b/external/protobuf/build_protobuf.cmake new file mode 100644 index 000000000..35bdba322 --- /dev/null +++ b/external/protobuf/build_protobuf.cmake @@ -0,0 +1,66 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cmake_minimum_required(VERSION 3.15) + +# Trickery to circumvent https://gitlab.kitware.com/cmake/cmake/-/issues/19703 +# Avoids rebuilding if the git was in the same state as in the previous build. 
+ +execute_process( + COMMAND "${GIT_EXECUTABLE}" log -n 1 --pretty=format:%H + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + OUTPUT_VARIABLE _GIT_HASH + COMMAND_ERROR_IS_FATAL ANY + ) + +execute_process( + COMMAND "${GIT_EXECUTABLE}" diff --quiet + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + RESULT_VARIABLE _GIT_DIFF_RES) +if(_GIT_DIFF_RES STREQUAL 0) + set(_IS_GIT_DIRTY FALSE) +else() + set(_IS_GIT_DIRTY TRUE) +endif() + +if(NOT _IS_GIT_DIRTY AND EXISTS "${GIT_REPOSITORY_DIR}/../build_git_hash" AND + "${GIT_REPOSITORY_DIR}/../build_git_hash" IS_NEWER_THAN "${CMAKE_CURRENT_LIST_DIR}") + file(READ "${GIT_REPOSITORY_DIR}/../build_git_hash" _PREV_GIT_HASH) + if (_GIT_HASH STREQUAL _PREV_GIT_HASH) + return() + endif() +endif() + +file(REMOVE "${GIT_REPOSITORY_DIR}/../build_git_hash") + +execute_process( + COMMAND ./autogen.sh + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + COMMAND_ERROR_IS_FATAL ANY) + +execute_process( + COMMAND "${CMAKE_COMMAND}" + -E env + "CC=${CMAKE_C_COMPILER}" + "CXX=${CMAKE_CXX_COMPILER}" + "CFLAGS=${CMAKE_C_FLAGS} $ENV{CFLAGS}" + "CXXFLAGS=-std=c++${CMAKE_CXX_STANDARD} ${CMAKE_CXX_FLAGS} $ENV{CFLAGS}" + "LDFLAGS=-Wl,-rpath,${CMAKE_INSTALL_PREFIX}/lib ${CMAKE_STATIC_LINKER_FLAGS_INIT} ${CMAKE_SHARED_LINKER_FLAGS_INIT} ${CMAKE_EXE_LINKER_FLAGS_INIT} ${CMAKE_STATIC_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS} $ENV{LDFLAGS}" + ./configure "--prefix=${CMAKE_INSTALL_PREFIX}" + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + COMMAND_ERROR_IS_FATAL ANY) + +include(ProcessorCount) +ProcessorCount(_JOBS) +execute_process( + COMMAND make -j${_JOBS} install + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + COMMAND_ERROR_IS_FATAL ANY) + +if(_IS_GIT_DIRTY) + return() +endif() + +file(WRITE "${GIT_REPOSITORY_DIR}/../build_git_hash" "${_GIT_HASH}") diff --git a/external/subprocess/CMakeLists.txt b/external/subprocess/CMakeLists.txt new file mode 100644 index 000000000..d5aad65b6 --- /dev/null +++ b/external/subprocess/CMakeLists.txt @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +project(gflags) +cmake_minimum_required(VERSION 3.15) + +include(ExternalProject) + +ExternalProject_Add( + subprocess + PREFIX "${CMAKE_CURRENT_BINARY_DIR}/subprocess" + GIT_REPOSITORY "https://github.com/arun11299/cpp-subprocess.git" + GIT_TAG 9c624ce4e3423cce9f148bafbae56abfd6437ea0 #tag v2.0 + CMAKE_ARGS + -C "${CMAKE_CURRENT_BINARY_DIR}/subprocess_initial_cache.cmake" + "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}" + "-DCMAKE_CXX_FLAGS=-pthread" +) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt new file mode 100644 index 000000000..dc113d8b8 --- /dev/null +++ b/tests/CMakeLists.txt @@ -0,0 +1,126 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME + compiler_env_test + SRCS + "compiler_env_test.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + compiler_env_state_test + SRCS + "compiler_env_state_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + make_test + SRCS + "make_test.py" + DEPS + compiler_gym::compiler_gym + tests::test_main +) + +cg_py_test( + NAME + random_search_test + SRCS + "random_search_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::random_replay + compiler_gym::random_search + tests::pytest_plugins::common + tests::test_main +) + +cg_py_library( + NAME + test_main + SRCS + "test_main.py" + DEPS + compiler_gym::util::util + TESTONLY + PUBLIC +) + +cg_cc_library( + NAME + TestMacros + SRCS + "TestMacros.h" + PUBLIC +) + +cg_cc_library( + NAME + TestMain + SRCS + "TestMain.cc" + ABS_DEPS + glog::glog + GTest::GTest + TESTONLY + PUBLIC +) + +cg_py_test( + NAME + validate_test + SRCS + "validate_test.py" + DEPS + ::test_main + compiler_gym::compiler_gym +) + +cg_py_test( + NAME + validation_result_test + SRCS + "validation_result_test.py" + DEPS + ::test_main + compiler_gym::compiler_gym +) + +string(CONCAT _CMD + "\"${CMAKE_COMMAND}\" -E create_symlink" + " \"${CMAKE_CURRENT_SOURCE_DIR}/../VERSION\"" + " \"${CMAKE_CURRENT_BINARY_DIR}/../VERSION\"") +cg_genrule( + NAME version_file + SRCS "${CMAKE_CURRENT_SOURCE_DIR}/../VERSION" + OUTS "${CMAKE_CURRENT_BINARY_DIR}/../VERSION" + COMMAND "${_CMD}" +) + +cg_py_test( + NAME + version_test + SRCS + "version_test.py" + DEPS + ::test_main + ::version_file + compiler_gym::compiler_gym + tests::pytest_plugins::common +) diff --git a/tests/bin/CMakeLists.txt b/tests/bin/CMakeLists.txt new file mode 100644 index 000000000..4e207d373 --- /dev/null +++ b/tests/bin/CMakeLists.txt @@ -0,0 +1,51 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + datasets_bin_test + SRCS + "datasets_bin_test.py" + DEPS + compiler_gym::bin::datasets + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + manual_env_bin_test + SRCS + "manual_env_bin_test.py" + DEPS + compiler_gym::bin::manual_env + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME + service_bin_test + SRCS + "service_bin_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::bin::service + tests::test_main +) + +cg_py_test( + NAME + validate_bin_test + SRCS + "validate_bin_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::bin::validate + tests::pytest_plugins::common + tests::test_main +) diff --git a/tests/datasets/CMakeLists.txt b/tests/datasets/CMakeLists.txt new file mode 100644 index 000000000..ce8622c3d --- /dev/null +++ b/tests/datasets/CMakeLists.txt @@ -0,0 +1,50 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME + benchmark_test + SRCS + "benchmark_test.py" + DEPS + compiler_gym::datasets::datasets + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + dataset_test + SRCS + "dataset_test.py" + DEPS + compiler_gym::datasets::datasets + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + datasets_test + SRCS + "datasets_test.py" + DEPS + compiler_gym::datasets::datasets + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + files_dataset_test + SRCS + "files_dataset_test.py" + DEPS + compiler_gym::datasets::datasets + tests::pytest_plugins::common + tests::test_main +) diff --git a/tests/fuzzing/CMakeLists.txt b/tests/fuzzing/CMakeLists.txt new file mode 100644 index 000000000..2201b1aa1 --- /dev/null +++ b/tests/fuzzing/CMakeLists.txt @@ -0,0 +1,104 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + llvm_cbench_validate_fuzz_test + SRCS + "llvm_cbench_validate_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::llvm + tests::test_main + LABELS + "manual" +) + +cg_py_test( + NAME + llvm_commandline_opt_equivalence_fuzz_test + SRCS + "llvm_commandline_opt_equivalence_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::pytest_plugins::random_util + tests::test_main + LABELS + "manual" +) + +cg_py_test( + NAME + llvm_deterministic_action_fuzz_test + SRCS + "llvm_deterministic_action_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::llvm + tests::test_main + LABELS + "manual" +) + +cg_py_test( + NAME + llvm_fork_env_fuzz_test + SRCS + "llvm_fork_env_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::llvm + tests::test_main + LABELS + "manual" +) + +# TODO(boian): fix when benchmarks_random_actions_test.py is present in source. +#cg_py_test( +# NAME +# benchmarks_random_actions_test +# SRCS +# "benchmarks_random_actions_test.py" +# DEPS +# compiler_gym::compiler_gym +# compiler_gym::envs::envs +# compiler_gym::third_party::autophase::autophase +# tests::pytest_plugins::llvm +# tests::test_main +# LABELS +# "manual" +#) + +cg_py_test( + NAME + llvm_trajectory_replay_fuzz_test + SRCS + "llvm_trajectory_replay_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::llvm + tests::pytest_plugins::random_util + tests::test_main + LABELS + "manual" +) + +cg_py_test( + NAME + llvm_stress_fuzz_test + SRCS + "llvm_stress_fuzz_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::llvm + tests::pytest_plugins::random_util + tests::test_main + LABELS + "manual" +) diff --git a/tests/gcc/CMakeLists.txt b/tests/gcc/CMakeLists.txt new file mode 100644 index 000000000..da35bc57b --- /dev/null +++ b/tests/gcc/CMakeLists.txt @@ -0,0 +1,44 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME + gcc_bin_test + SRCS + "gcc_bin_test.py" + DEPS + compiler_gym::envs::gcc::gcc + compiler_gym::service::service + tests::pytest_plugins::gcc + tests::test_main +) + +cg_py_test( + NAME + gcc_docker_test + SRCS + "gcc_docker_test.py" + DEPS + compiler_gym::envs::gcc::gcc + compiler_gym::service::service + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + gcc_env_test + SRCS + "gcc_env_test.py" + DEPS + compiler_gym::envs::gcc::gcc + compiler_gym::service::service + compiler_gym::spaces::spaces + tests::pytest_plugins::common + tests::pytest_plugins::gcc + tests::test_main +) diff --git a/tests/gcc/datasets/CMakeLists.txt b/tests/gcc/datasets/CMakeLists.txt new file mode 100644 index 000000000..0bb4b6cb1 --- /dev/null +++ b/tests/gcc/datasets/CMakeLists.txt @@ -0,0 +1,32 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + anghabench_test + SRCS + "anghabench_test.py" + DEPS + compiler_gym::envs::gcc::gcc + compiler_gym::envs::gcc::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::gcc + tests::test_main +) + +cg_py_test( + NAME + csmith_test + SRCS + "csmith_test.py" + DEPS + compiler_gym::envs::gcc::gcc + compiler_gym::envs::gcc::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::gcc + tests::test_main +) diff --git a/tests/leaderboard/CMakeLists.txt b/tests/leaderboard/CMakeLists.txt new file mode 100644 index 000000000..306bc3cfb --- /dev/null +++ b/tests/leaderboard/CMakeLists.txt @@ -0,0 +1,17 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + llvm_instcount_test + SRCS + "llvm_instcount_test.py" + DEPS + compiler_gym::leaderboard::llvm_instcount + tests::pytest_plugins::common + tests::test_main +) diff --git a/tests/llvm/CMakeLists.txt b/tests/llvm/CMakeLists.txt new file mode 100644 index 000000000..1b6a7c40a --- /dev/null +++ b/tests/llvm/CMakeLists.txt @@ -0,0 +1,316 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME action_space_test + SRCS "action_space_test.py" + DEPS + compiler_gym::envs::envs + tests::test_main + tests::pytest_plugins::llvm +) + +cg_py_test( + NAME + all_actions_single_step_test + SRCS + "all_actions_single_step_test.py" + DEPS + compiler_gym::envs::envs + compiler_gym::third_party::autophase::autophase + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + all_benchmarks_init_close_test + SRCS + "all_benchmarks_init_close_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + autophase_test + SRCS + "autophase_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_filegroup( + NAME custom_benchmarks_test_files + FILES "${CMAKE_CURRENT_LIST_DIR}/invalid_ir.ll") +cg_py_test( + NAME + custom_benchmarks_test + SRCS + "custom_benchmarks_test.py" + DATA + compiler_gym::third_party::cbench::crc32 + ::custom_benchmarks_test_files + DEPS + compiler_gym::envs::envs + compiler_gym::service::proto::proto + compiler_gym::util::util + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + datasets_pickle_test + SRCS + "datasets_pickle_test.py" + DEPS + compiler_gym::datasets::datasets + compiler_gym::envs::llvm::llvm + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + download_llvm_test + SRCS + "download_llvm_test.py" + DEPS + compiler_gym::third_party::llvm::llvm + compiler_gym::util::util + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + episode_reward_test + SRCS + "episode_reward_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + fork_env_test + SRCS + "fork_env_test.py" + DATA + compiler_gym::third_party::cbench::crc32 + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +py_test( + NAME fork_regression_test + SRCS fork_regression_test.py + DEPS + compiler_gym::envs::envs + tests::test_main + tests::pytest_plugins::llvm +) + +cg_py_test( + NAME + fresh_environment_observation_reward_test + SRCS + "fresh_environment_observation_reward_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + fuzzing_regression_test + SRCS + "fuzzing_regression_test.py" + DEPS + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + gym_interface_compatability + SRCS + "gym_interface_compatability.py" + DEPS + compiler_gym::envs::llvm::llvm + tests::pytest_plugins::llvm + tests::test_main +) + +cg_filegroup( + NAME invalid_ir_test_files + FILES "${CMAKE_CURRENT_LIST_DIR}/invalid_ir.ll") +cg_py_test( + NAME + invalid_ir_test + SRCS + "invalid_ir_test.py" + DATA + invalid_ir_test_files + DEPS + compiler_gym::util::util + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + llvm_benchmarks_test + SRCS + "llvm_benchmarks_test.py" + DEPS + compiler_gym::envs::envs + compiler_gym::service::proto::proto + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + llvm_env_test + SRCS + "llvm_env_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + llvm_session_parameters_test + SRCS + "llvm_session_parameters_test.py" + DEPS + 
compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + module_id_test + SRCS + "module_id_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + multiprocessing_test + SRCS + "multiprocessing_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + observation_spaces_test + SRCS + "observation_spaces_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + reward_spaces_test + SRCS + "reward_spaces_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME runtime_test + SRCS "runtime_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::service::connection + tests::test_main + tests::pytest_plugins::llvm +) + +cg_py_test( + NAME + service_connection_test + SRCS + "service_connection_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::envs::envs + compiler_gym::third_party::autophase::autophase + compiler_gym::util::util + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + threading_test + SRCS + "threading_test.py" + DEPS + compiler_gym::compiler_gym + tests::test_main +) + +cg_py_test( + NAME + validate_test + SRCS + "validate_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::datasets::datasets + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + validation_regression_test + SRCS + "validation_regression_test.py" + DEPS + compiler_gym::compiler_gym + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) diff --git a/tests/llvm/datasets/CMakeLists.txt b/tests/llvm/datasets/CMakeLists.txt new file mode 100644 index 000000000..528822dad --- /dev/null +++ b/tests/llvm/datasets/CMakeLists.txt @@ -0,0 +1,133 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME + anghabench_test + SRCS + "anghabench_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + cbench_test + SRCS + "cbench_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + cbench_validate_test + SRCS + "cbench_validate_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::datasets::datasets + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + chstone_test + SRCS + "chstone_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + clgen_test + SRCS + "clgen_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + csmith_test + SRCS + "csmith_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + github_test + SRCS + "github_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + llvm_datasets_test + SRCS + "llvm_datasets_test.py" + DEPS + compiler_gym::envs::llvm::llvm + tests::test_main +) + +cg_py_test( + NAME + llvm_stress_test + SRCS + "llvm_stress_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + poj104_test + SRCS + "poj104_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::envs::llvm::datasets::datasets + tests::pytest_plugins::common + tests::pytest_plugins::llvm + tests::test_main +) diff --git a/tests/llvm/service/CMakeLists.txt b/tests/llvm/service/CMakeLists.txt new file mode 100644 index 000000000..548f06620 --- /dev/null +++ b/tests/llvm/service/CMakeLists.txt @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_cc_test( + NAME + ActionSpaceTest + SRCS + "ActionSpaceTest.cc" + DEPS + compiler_gym::envs::llvm::service::ActionSpace + tests::TestMacros + tests::TestMain + ABS_DEPS + GTest::GTest + magic_enum +) diff --git a/tests/loop_tool/CMakeLists.txt b/tests/loop_tool/CMakeLists.txt new file mode 100644 index 000000000..97adba4d7 --- /dev/null +++ b/tests/loop_tool/CMakeLists.txt @@ -0,0 +1,14 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() + +cg_py_test( + NAME "actions_test" + SRCS "actions_test.py" + DEPS + compiler_gym::compiler_gym + tests::test_main +) diff --git a/tests/pytest_plugins/CMakeLists.txt b/tests/pytest_plugins/CMakeLists.txt new file mode 100644 index 000000000..a1175952b --- /dev/null +++ b/tests/pytest_plugins/CMakeLists.txt @@ -0,0 +1,56 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_library( + NAME + gcc + SRCS + "gcc.py" + DEPS + ::common + compiler_gym::envs::gcc::gcc + TESTONLY + PUBLIC +) + +cg_py_library( + NAME + llvm + SRCS + "llvm.py" + DATA + compiler_gym::envs::llvm::service::passes::actions_genfiles + compiler_gym::third_party::cbench::benchmarks_list + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::third_party::llvm::llvm + compiler_gym::util::util + TESTONLY + PUBLIC +) + +cg_py_library( + NAME + common + SRCS + "common.py" + DEPS + compiler_gym::util::util + TESTONLY + PUBLIC +) + +cg_py_library( + NAME + random_util + SRCS + "random_util.py" + DEPS + compiler_gym::compiler_gym + TESTONLY + PUBLIC +) diff --git a/tests/service/CMakeLists.txt b/tests/service/CMakeLists.txt new file mode 100644 index 000000000..274fee9f5 --- /dev/null +++ b/tests/service/CMakeLists.txt @@ -0,0 +1,18 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + connection_test + SRCS + "connection_test.py" + DEPS + compiler_gym::compiler_gym + compiler_gym::envs::envs + compiler_gym::service::service + tests::test_main +) diff --git a/tests/service/proto/CMakeLists.txt b/tests/service/proto/CMakeLists.txt new file mode 100644 index 000000000..92f2026cf --- /dev/null +++ b/tests/service/proto/CMakeLists.txt @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +py_test( + NAME py_converters_test + SRCS "py_converters_test.py" + DEPS + compiler_gym::service::proto::proto + compiler_gym::spaces::spaces + tests::test_main +) diff --git a/tests/service/runtime/CMakeLists.txt b/tests/service/runtime/CMakeLists.txt new file mode 100644 index 000000000..b18ef2a40 --- /dev/null +++ b/tests/service/runtime/CMakeLists.txt @@ -0,0 +1,30 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + benchmark_cache_test + SRCS + "benchmark_cache_test.py" + DEPS + compiler_gym::service::proto::proto + compiler_gym::service::runtime::benchmark_cache + tests::test_main +) + +cg_cc_test( + NAME + BenchmarkCacheTest + SRCS + "BenchmarkCacheTest.cc" + DEPS + compiler_gym::service::proto::compiler_gym_service_cc + compiler_gym::service::runtime::BenchmarkCache + tests::TestMain + ABS_DEPS + GTest::GTest +) diff --git a/tests/spaces/CMakeLists.txt b/tests/spaces/CMakeLists.txt new file mode 100644 index 000000000..d80905b62 --- /dev/null +++ b/tests/spaces/CMakeLists.txt @@ -0,0 +1,46 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + commandline_test + SRCS + "commandline_test.py" + DEPS + compiler_gym::spaces::spaces + tests::test_main +) + +cg_py_test( + NAME + named_discrete_test + SRCS + "named_discrete_test.py" + DEPS + compiler_gym::spaces::spaces + tests::test_main +) + +cg_py_test( + NAME + scalar_test + SRCS + "scalar_test.py" + DEPS + compiler_gym::spaces::spaces + tests::test_main +) + +cg_py_test( + NAME + sequence_test + SRCS + "sequence_test.py" + DEPS + compiler_gym::spaces::spaces + tests::test_main +) diff --git a/tests/util/CMakeLists.txt b/tests/util/CMakeLists.txt new file mode 100644 index 000000000..92a607442 --- /dev/null +++ b/tests/util/CMakeLists.txt @@ -0,0 +1,167 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + capture_output_test + SRCS + "capture_output_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME + debug_util_test + SRCS + "debug_util_test.py" + DEPS + compiler_gym::util::util + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + download_test + SRCS + "download_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME executor_test + SRCS executor_test.py + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_cc_test( + NAME + EnumUtilTest + SRCS + "EnumUtilTest.cc" + DEPS + compiler_gym::util::EnumUtil + tests::TestMacros + tests::TestMain + ABS_DEPS + GTest::GTest +) + +cg_py_test( + NAME + filesystem_test + SRCS + "filesystem_test.py" + DEPS + compiler_gym::util::util + tests::pytest_plugins::common + tests::test_main +) + +cg_py_test( + NAME + minimize_trajectory_test + SRCS + "minimize_trajectory_test.py" + DEPS + compiler_gym::util::util + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + parallelization_test + SRCS + "parallelization_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME runfiles_path_test + SRCS "runfiles_path_test.py" + DEPS + compiler_gym::util::util + tests::test_main + tests::pytest_plugins::common +) + +cg_py_test( + NAME shell_format_test + SRCS shell_format_test.py + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME + statistics_test + SRCS + "statistics_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_cc_test( + NAME + StrLenConstexprTest + SRCS + "StrLenConstexprTest.cc" + DEPS + compiler_gym::util::StrLenConstexpr + tests::TestMain +) + +cg_cc_test( + NAME + SubprocessTest + SRCS + "SubprocessTest.cc" + DEPS + compiler_gym::util::Subprocess + tests::TestMain +) + +cg_py_test( + NAME + temporary_working_directory_test + SRCS + "temporary_working_directory_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME + timer_test + SRCS + "timer_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) + +cg_py_test( + NAME + truncate_test + SRCS + "truncate_test.py" + DEPS + compiler_gym::util::util + tests::test_main +) diff --git a/tests/version_test.py b/tests/version_test.py index b5af4aa48..4533770ef 100644 --- a/tests/version_test.py +++ b/tests/version_test.py @@ -2,24 +2,14 @@ # # This source code is licensed under the MIT license found in the # LICENSE file 
in the root directory of this source tree. -import os - import pkg_resources -import pytest import compiler_gym from compiler_gym.util.runfiles_path import runfiles_path from packaging import version -from tests.pytest_plugins.common import bazel_only +from tests.pytest_plugins.common import bazel_only, install_test_only from tests.test_main import main -# Marker to skip a test if running under bazel. -# This uses $TEST_WORKSPACE, set by the bazel test runner. -# See: https://docs.bazel.build/versions/master/test-encyclopedia.html#initial-conditions -install_test = pytest.mark.skipif( - bool(os.environ.get("TEST_WORKSPACE")), reason="Install test" -) - def test_version_dunder(): assert isinstance(compiler_gym.__version__, str) @@ -29,7 +19,7 @@ def test_version_dunder_format(): version.parse(compiler_gym.__version__) -@install_test +@install_test_only def test_setuptools_version(): version = pkg_resources.require("compiler_gym")[0].version assert version == compiler_gym.__version__ diff --git a/tests/views/CMakeLists.txt b/tests/views/CMakeLists.txt new file mode 100644 index 000000000..623239c5b --- /dev/null +++ b/tests/views/CMakeLists.txt @@ -0,0 +1,27 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + observation_test + SRCS + "observation_test.py" + DEPS + compiler_gym::service::proto::proto + compiler_gym::views::views + tests::test_main +) + +cg_py_test( + NAME + reward_test + SRCS + "reward_test.py" + DEPS + compiler_gym::views::views + tests::test_main +) diff --git a/tests/wrappers/CMakeLists.txt b/tests/wrappers/CMakeLists.txt new file mode 100644 index 000000000..3b7e2303e --- /dev/null +++ b/tests/wrappers/CMakeLists.txt @@ -0,0 +1,60 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() + +cg_py_test( + NAME + commandline_wrappers_test + SRCS + "commandline_wrappers_test.py" + DEPS + compiler_gym::wrappers::wrappers + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + core_wrappers_test + SRCS + "core_wrappers_test.py" + DEPS + compiler_gym::wrappers::wrappers + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME + datasets_wrappers_test + SRCS + "datasets_wrappers_test.py" + DEPS + compiler_gym::wrappers::wrappers + tests::pytest_plugins::llvm + tests::test_main +) + +cg_py_test( + NAME llvm_test + SRCS "llvm_test.py" + DEPS + compiler_gym::envs::llvm::llvm + compiler_gym::wrappers::wrappers + tests::test_main + tests::pytest_plugins::llvm +) + +cg_py_test( + NAME + time_limit_wrappers_test + SRCS + "time_limit_wrappers_test.py" + DEPS + compiler_gym::wrappers::wrappers + tests::pytest_plugins::llvm + tests::test_main +) From 64a4e85b21fabf99e42915e4f2b47574fbd8953d Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Sat, 11 Dec 2021 13:17:35 +0000 Subject: [PATCH 063/142] Revert "Update proto and gRPC dependencies." This reverts commit 81c7ab4dec9e78a31b605cdf753e43d18da42d69. 
---
 WORKSPACE | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/WORKSPACE b/WORKSPACE
index 0e0339be5..a8caf96c0 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -102,10 +102,11 @@ py_repositories()

 http_archive(
     name = "rules_proto",
-    sha256 = "83c8798f5a4fe1f6a13b5b6ae4267695b71eed7af6fbf2b6ec73a64cf01239ab",
-    strip_prefix = "rules_proto-b22f78685bf62775b80738e766081b9e4366cdf0",
+    sha256 = "66bfdf8782796239d3875d37e7de19b1d94301e8972b3cbd2446b332429b4df1",
+    strip_prefix = "rules_proto-4.0.0",
     urls = [
-        "https://github.com/bazelbuild/rules_proto/archive/b22f78685bf62775b80738e766081b9e4366cdf0.tar.gz",
+        "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz",
+        "https://github.com/bazelbuild/rules_proto/archive/refs/tags/4.0.0.tar.gz",
     ],
 )

@@ -120,10 +121,10 @@ rules_proto_toolchains()
 # Version should be kept in step with compiler_gym/requirements.txt.
 http_archive(
     name = "com_github_grpc_grpc",
-    sha256 = "2b8a2c9ee689a23ce852ef010b27be80fe6aff827bf6c794bf1273e9fdf8dfb6",
-    strip_prefix = "grpc-240557a55cab84125e95beda54ceb5dcd5bba08c",
+    sha256 = "1a5127c81487f4e3e57973bb332f04b9159f94d860c207e096d8a587d371edbd",
+    strip_prefix = "grpc-1.36.0",
     urls = [
-        "https://github.com/grpc/grpc/archive/240557a55cab84125e95beda54ceb5dcd5bba08c.tar.gz",
+        "https://github.com/grpc/grpc/archive/v1.36.0.tar.gz",
     ],
 )


From aa2fb0c0c42850cada2b6f7f63d263b619a3dabd Mon Sep 17 00:00:00 2001
From: Chris Cummins
Date: Mon, 13 Dec 2021 17:49:31 +0000
Subject: [PATCH 064/142] [llvm] Fix invalid use of BitcodeWriter API.

Use the WriteBitcodeToFile() function rather than the
BitcodeWriter::writeModule() method to prevent a debugging assertion.

Issue #514.
---
 compiler_gym/envs/llvm/service/Benchmark.cc | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/compiler_gym/envs/llvm/service/Benchmark.cc b/compiler_gym/envs/llvm/service/Benchmark.cc
index dcc034a18..93ae45207 100644
--- a/compiler_gym/envs/llvm/service/Benchmark.cc
+++ b/compiler_gym/envs/llvm/service/Benchmark.cc
@@ -34,12 +34,15 @@ namespace {

 BenchmarkHash getModuleHash(const llvm::Module& module) {
   BenchmarkHash hash;
-  llvm::SmallVector buffer;
+  Bitcode bitcode;
+
   // Writing the entire bitcode to a buffer that is then discarded is
   // inefficient.
-  llvm::BitcodeWriter writer(buffer);
-  writer.writeModule(module, /*ShouldPreserveUseListOrder=*/false,
-                     /*Index=*/nullptr, /*GenerateHash=*/true, &hash);
+  llvm::raw_svector_ostream ostream(bitcode);
+  llvm::WriteBitcodeToFile(module, ostream,
+                           /*ShouldPreserveUseListOrder=*/false,
+                           /*Index=*/nullptr, /*GenerateHash=*/true, &hash);
+
   return hash;
 }


From 085e79c25802daed0c2ef0fa1f25dccd68125402 Mon Sep 17 00:00:00 2001
From: Chris Cummins
Date: Mon, 13 Dec 2021 17:59:07 +0000
Subject: [PATCH 065/142] [llvm] Fix strip-optnone-attribute build.
---
 compiler_gym/envs/llvm/service/BUILD                    | 2 +-
 compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc | 2 --
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/compiler_gym/envs/llvm/service/BUILD b/compiler_gym/envs/llvm/service/BUILD
index 35908779e..29c4b046d 100644
--- a/compiler_gym/envs/llvm/service/BUILD
+++ b/compiler_gym/envs/llvm/service/BUILD
@@ -308,7 +308,7 @@ cc_binary(
 genrule(
     name = "strip-optnone-bin",
     srcs = [":strip-optnone-attribute-prelinked"],
-    outs = ["strip-optnone"],
+    outs = ["strip-optnone-attribute"],
     cmd = select({
         "@llvm//:darwin": (
             "cp $(location :strip-optnone-attribute-prelinked) $@"
diff --git a/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc b/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc
index 891d47a6b..e74a54d26 100644
--- a/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc
+++ b/compiler_gym/envs/llvm/service/StripOptNoneAttribute.cc
@@ -9,8 +9,6 @@
 #include 

 #include "compiler_gym/envs/llvm/service/BenchmarkFactory.h"
-#include "compiler_gym/envs/llvm/service/Observation.h"
-#include "compiler_gym/envs/llvm/service/ObservationSpaces.h"
 #include "compiler_gym/service/proto/compiler_gym_service.pb.h"
 #include "compiler_gym/util/GrpcStatusMacros.h"
 #include "llvm/IR/Module.h"

From 353d01f72ae700fec7342bad3502048fcb0a948d Mon Sep 17 00:00:00 2001
From: Chris Cummins
Date: Mon, 13 Dec 2021 21:52:54 +0000
Subject: [PATCH 066/142] [docs] Add Google Analytics ID.

---
 docs/source/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 64babe21a..d422cfdfd 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -66,7 +66,7 @@

 html_theme = "sphinx_rtd_theme"
 html_theme_options = {
-    "analytics_id": "G-T95G5EVYXM",
+    "analytics_id": "G-WJN2CKJJKH",
     "collapse_navigation": True,
     "display_version": True,
     "logo_only": True,

From d43645719ca3acbd41166729fdd35eba6a6a1b03 Mon Sep 17 00:00:00 2001
From: Chris Cummins
Date: Tue, 14 Dec 2021 15:44:18 +0000
Subject: [PATCH 067/142] [datasets] Make the `site_data_base` path optional.

This allows datasets to be specified without needing to pass a
`site_data_base` argument. The purpose of `site_data_base` is to
provide an on-disk cache for persistent files, but not all datasets
need such a cache.
---
 compiler_gym/datasets/dataset.py | 38 +++++++++++++++++++++++++++----
 tests/datasets/dataset_test.py   | 25 +++++++++++++++++++++
 2 files changed, 57 insertions(+), 6 deletions(-)

diff --git a/compiler_gym/datasets/dataset.py b/compiler_gym/datasets/dataset.py
index 1ecc1a184..42cb98549 100644
--- a/compiler_gym/datasets/dataset.py
+++ b/compiler_gym/datasets/dataset.py
@@ -45,7 +45,7 @@ def __init__(
         name: str,
         description: str,
         license: str,  # pylint: disable=redefined-builtin
-        site_data_base: Path,
+        site_data_base: Optional[Path] = None,
         benchmark_class=Benchmark,
         references: Optional[Dict[str, str]] = None,
         deprecated: Optional[str] = None,
@@ -61,8 +61,15 @@

         :param license: The name of the dataset's license.

-        :param site_data_base: The base path of a directory that will be used to
-            store installed files.
+        :param site_data_base: An optional directory that can be used by the
+            dataset to house the "site data", i.e. persistent files on disk. The
+            site data directory is a subdirectory of this :code:`site_data_base`
+            path, which can be shared by multiple datasets. If not provided, the
+            :attr:`dataset.site_data_path
+            ` attribute will raise
+            an error. 
Use :attr:`dataset.has_site_data + ` to check if a site + data path was set. :param benchmark_class: The class to use when instantiating benchmarks. It must have the same constructor signature as :class:`Benchmark @@ -110,8 +117,11 @@ def __init__( self.benchmark_class = benchmark_class # Set up the site data name. - basename = components.group("dataset_name") - self._site_data_path = Path(site_data_base).resolve() / self.protocol / basename + if site_data_base: + basename = components.group("dataset_name") + self._site_data_path = ( + Path(site_data_base).resolve() / self.protocol / basename + ) def __repr__(self): return self.name @@ -212,6 +222,14 @@ def validatable(self) -> str: """ return self._validatable + @property + def has_site_data(self) -> bool: + """Return whether the dataset has a site data directory. + + :type: bool + """ + return hasattr(self, "_site_data_path") + @property def site_data_path(self) -> Path: """The filesystem path used to store persistent dataset files. @@ -219,7 +237,12 @@ def site_data_path(self) -> Path: This directory may not exist. :type: Path + + :raises ValueError: If no site data path was specified at constructor + time. """ + if not self.has_site_data: + raise ValueError(f"Dataset has no site data path: {self.name}") return self._site_data_path @property @@ -228,6 +251,9 @@ def site_data_size_in_bytes(self) -> int: :type: int """ + if not self.has_site_data: + return 0 + if not self.site_data_path.is_dir(): return 0 @@ -314,7 +340,7 @@ def uninstall(self) -> None: `. The dataset can still be used after calling this method. """ - if self.site_data_path.is_dir(): + if self.has_site_data() and self.site_data_path.is_dir(): shutil.rmtree(self.site_data_path) def benchmarks(self) -> Iterable[Benchmark]: diff --git a/tests/datasets/dataset_test.py b/tests/datasets/dataset_test.py index 99645c256..82351b0e1 100644 --- a/tests/datasets/dataset_test.py +++ b/tests/datasets/dataset_test.py @@ -243,5 +243,30 @@ def test_logger_is_deprecated(): dataset.logger +def test_with_site_data(): + """Test the dataset property values.""" + dataset = Dataset( + name="benchmark://test-v0", + description="A test dataset", + license="MIT", + site_data_base="test", + ) + assert dataset.has_site_data + + +def test_without_site_data(): + """Test the dataset property values.""" + dataset = Dataset( + name="benchmark://test-v0", + description="A test dataset", + license="MIT", + ) + assert not dataset.has_site_data + with pytest.raises( + ValueError, match=r"^Dataset has no site data path: benchmark://test-v0$" + ): + dataset.site_data_path # noqa + + if __name__ == "__main__": main() From b797c33173797bbd8a657176cb8d0fe3b2fc9224 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 14 Dec 2021 15:46:35 +0000 Subject: [PATCH 068/142] [loop_tool] Remove unused site data path from datasets. 
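
The loop_tool datasets keep no persistent files on disk, so after the
previous change they no longer need a `site_data_base` at all. As a
minimal sketch of the pattern this enables (the dataset name below is a
placeholder, not a real dataset):

    from compiler_gym.datasets import Dataset

    dataset = Dataset(
        name="benchmark://my-dataset-v0",
        description="A dataset that keeps no site data on disk",
        license="MIT",
    )
    assert not dataset.has_site_data
    # Accessing dataset.site_data_path would now raise a ValueError.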
--- compiler_gym/envs/loop_tool/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/compiler_gym/envs/loop_tool/__init__.py b/compiler_gym/envs/loop_tool/__init__.py index 6d73f5caf..4367093ae 100644 --- a/compiler_gym/envs/loop_tool/__init__.py +++ b/compiler_gym/envs/loop_tool/__init__.py @@ -9,7 +9,7 @@ from compiler_gym.datasets import Benchmark, Dataset, benchmark from compiler_gym.spaces import Reward from compiler_gym.util.registration import register -from compiler_gym.util.runfiles_path import runfiles_path, site_data_path +from compiler_gym.util.runfiles_path import runfiles_path LOOP_TOOL_SERVICE_BINARY: Path = runfiles_path( "compiler_gym/envs/loop_tool/service/compiler_gym-loop_tool-service" @@ -56,7 +56,6 @@ def __init__(self, *args, **kwargs): name="benchmark://loop_tool-cuda-v0", license="MIT", description="loop_tool dataset", - site_data_base=site_data_path("loop_tool_dataset"), ) def benchmark_uris(self) -> Iterable[str]: @@ -72,7 +71,6 @@ def __init__(self, *args, **kwargs): name="benchmark://loop_tool-cpu-v0", license="MIT", description="loop_tool dataset", - site_data_base=site_data_path("loop_tool_dataset"), ) def benchmark_uris(self) -> Iterable[str]: From e8676f30da11fc5a0db956593dd2c1024944af09 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 14 Dec 2021 16:12:44 +0000 Subject: [PATCH 069/142] [examples] Remove unused site_data_base argument. --- examples/example_compiler_gym_service/__init__.py | 3 +-- examples/example_compiler_gym_service/demo_without_bazel.py | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/examples/example_compiler_gym_service/__init__.py b/examples/example_compiler_gym_service/__init__.py index 7e4a7b62e..f86cbd3ae 100644 --- a/examples/example_compiler_gym_service/__init__.py +++ b/examples/example_compiler_gym_service/__init__.py @@ -9,7 +9,7 @@ from compiler_gym.datasets import Benchmark, Dataset from compiler_gym.spaces import Reward from compiler_gym.util.registration import register -from compiler_gym.util.runfiles_path import runfiles_path, site_data_path +from compiler_gym.util.runfiles_path import runfiles_path EXAMPLE_CC_SERVICE_BINARY: Path = runfiles_path( "examples/example_compiler_gym_service/service_cc/compiler_gym-example-service-cc" @@ -58,7 +58,6 @@ def __init__(self, *args, **kwargs): name="benchmark://example-v0", license="MIT", description="An example dataset", - site_data_base=site_data_path("example_dataset"), ) self._benchmarks = { "benchmark://example-v0/foo": Benchmark.from_file_contents( diff --git a/examples/example_compiler_gym_service/demo_without_bazel.py b/examples/example_compiler_gym_service/demo_without_bazel.py index 24f58c8da..f18aa7d2e 100644 --- a/examples/example_compiler_gym_service/demo_without_bazel.py +++ b/examples/example_compiler_gym_service/demo_without_bazel.py @@ -19,7 +19,6 @@ from compiler_gym.spaces import Reward from compiler_gym.util.logging import init_logging from compiler_gym.util.registration import register -from compiler_gym.util.runfiles_path import site_data_path EXAMPLE_PY_SERVICE_BINARY: Path = Path( "example_compiler_gym_service/service_py/example_service.py" @@ -65,7 +64,6 @@ def __init__(self, *args, **kwargs): name="benchmark://example-v0", license="MIT", description="An example dataset", - site_data_base=site_data_path("example_dataset"), ) self._benchmarks = { "benchmark://example-v0/foo": Benchmark.from_file_contents( From 830bf7c3a2a5209cb2ab131fc3cc749787b0af30 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 
14 Dec 2021 16:26:18 +0000 Subject: [PATCH 070/142] [examples] Tweak requirements.txt. * Bump ray dependency 1.8 -> 1.9 after having problems installing 1.8. * Don't specify numpy version (#501). --- examples/requirements.txt | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/examples/requirements.txt b/examples/requirements.txt index 820ca2a4e..2899a759c 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -3,12 +3,11 @@ dgl==0.6.1 geneticalgorithm>=1.0.2 hydra-core==1.1.0 keras==2.6.0 -matplotlib>=3.3.0 +matplotlib>=3.3.4 nevergrad>=0.4.3 -numpy~=1.19.2 # Pin version for tensorflow. opentuner>=0.8.5 pandas>=1.1.5 -ray[default,rllib]==1.8.0 +ray[default,rllib]==1.9.0 submitit>=1.2.0 submitit>=1.2.0 tensorflow==2.6.1 From 81d708435518d413e4b10e5544c45637c4389de1 Mon Sep 17 00:00:00 2001 From: Boian Petkantchin Date: Fri, 10 Dec 2021 08:44:43 -0800 Subject: [PATCH 071/142] CMake: add building and installing of the python package --- .github/workflows/ci.yaml | 50 ++++++++--- INSTALL.md | 2 + build_tools/cmake/cg_genrule.cmake | 36 ++++++-- build_tools/cmake/cg_macros.cmake | 15 ++-- compiler_gym/CMakeLists.txt | 47 ++++++++++ external/external.cmake | 6 ++ external/protobuf/build_protobuf.cmake | 11 ++- setup.py | 117 ++++++++++++++++++++----- tests/CMakeLists.txt | 12 +++ 9 files changed, 248 insertions(+), 48 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f2de69995..9d6427a45 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -80,17 +80,13 @@ jobs: cmake --build ~/cmake_build shell: bash - - name: Install runtime dependencies - uses: ./.github/actions/install-runtime-dependencies - - - name: Install test dependencies - run: python -m pip install -r tests/requirements.txt - - - name: Run the test suite - run: | - cd ~/cmake_build - ctest --parallel $(nproc) --tests-regex tests/ --label-exclude manual - shell: bash + - name: Upload Python wheel + uses: actions/upload-artifact@v2 + with: + name: linux-wheel-cmake + path: ~/cmake_build/py_pkg/dist/*.whl + if-no-files-found: error + retention-days: 7 build-macos: runs-on: macos-latest @@ -166,6 +162,38 @@ jobs: - name: Upload coverage report to Codecov uses: codecov/codecov-action@v2 + test-linux-cmake: + needs: build-linux-cmake + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python: [3.9] + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - name: Download Python wheel + uses: actions/download-artifact@v2 + with: + name: linux-wheel-cmake + + - name: Install wheel + run: python -m pip install *.whl + + - name: Install runtime dependencies + uses: ./.github/actions/install-runtime-dependencies + + - name: Install test dependencies + run: python -m pip install -r tests/requirements.txt + + - name: Run the test suite + run: make install-test + test-macos: needs: build-macos runs-on: macos-latest diff --git a/INSTALL.md b/INSTALL.md index a1f6be475..4eb577d4f 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -93,6 +93,8 @@ environment using: ## Building from source with CMake +Darwin is not supported with CMake. 
+ ### Dependency instructions for Ubuntu ```bash diff --git a/build_tools/cmake/cg_genrule.cmake b/build_tools/cmake/cg_genrule.cmake index cacae5bca..64e72213a 100644 --- a/build_tools/cmake/cg_genrule.cmake +++ b/build_tools/cmake/cg_genrule.cmake @@ -15,7 +15,7 @@ include(cg_macros) function(cg_genrule) cmake_parse_arguments( _RULE - "PUBLIC;TESTONLY" + "PUBLIC;TESTONLY;EXCLUDE_FROM_ALL" "NAME;COMMAND" "SRCS;OUTS;DEPENDS;ABS_DEPENDS" ${ARGN} @@ -52,16 +52,34 @@ function(cg_genrule) # Substitute special Bazel references string(REPLACE "$@" "${_OUTS}" _CMD "${_RULE_COMMAND}") string(REPLACE "$(@D)" "${_OUTS_DIR}" _CMD "${_CMD}") - #string(REPLACE "$<" "\"${_SRCS}\"" _CMD "${_CMD}") - add_custom_command( - OUTPUT ${_OUTS} - COMMAND bash -c "${_CMD}" - DEPENDS ${_DEPS} ${_SRCS} - VERBATIM - ) + if(_OUTS) + add_custom_command( + OUTPUT ${_OUTS} + COMMAND bash -c "${_CMD}" + DEPENDS ${_DEPS} ${_SRCS} + VERBATIM + USES_TERMINAL + ) + endif() + + if(_RULE_EXCLUDE_FROM_ALL) + unset(_ALL) + else() + set(_ALL ALL) + endif() + + if(_OUTS) + add_custom_target(${_NAME} ${_ALL} DEPENDS ${_OUTS}) + else() + add_custom_target( + ${_NAME} ${_ALL} + COMMAND bash -c "${_CMD}" + DEPENDS ${_DEPS} ${_SRCS} + VERBATIM + USES_TERMINAL) + endif() - add_custom_target(${_NAME} ALL DEPENDS ${_OUTS}) set_target_properties(${_NAME} PROPERTIES OUTPUTS "${_OUTS}") diff --git a/build_tools/cmake/cg_macros.cmake b/build_tools/cmake/cg_macros.cmake index 28e0cdd98..ee4c2cedd 100644 --- a/build_tools/cmake/cg_macros.cmake +++ b/build_tools/cmake/cg_macros.cmake @@ -62,10 +62,13 @@ endfunction() # Example when called from proj/base/CMakeLists.txt: # proj::base function(cg_package_ns PACKAGE_NS) - string(REPLACE ${COMPILER_GYM_ROOT_DIR} "" _PACKAGE ${CMAKE_CURRENT_LIST_DIR}) - string(SUBSTRING ${_PACKAGE} 1 -1 _PACKAGE) - string(REPLACE "/" "::" _PACKAGE_NS ${_PACKAGE}) - set(${PACKAGE_NS} ${_PACKAGE_NS} PARENT_SCOPE) + string(REPLACE "${COMPILER_GYM_ROOT_DIR}" "" _PACKAGE "${CMAKE_CURRENT_LIST_DIR}") + string(LENGTH "${_PACKAGE}" _LENGTH) + if (_LENGTH) + string(SUBSTRING "${_PACKAGE}" 1 -1 _PACKAGE) + endif() + string(REPLACE "/" "::" _PACKAGE_NS "${_PACKAGE}") + set(${PACKAGE_NS} "${_PACKAGE_NS}" PARENT_SCOPE) endfunction() # Sets ${PACKAGE_NAME} to the root relative package name. 
@@ -106,7 +109,9 @@ function(canonize_bazel_target_names _RESULT _BAZEL_TARGETS) foreach(_TARGET ${_BAZEL_TARGETS}) if (NOT _TARGET MATCHES ":") # local target - set(_TARGET "${_PACKAGE_NS}::${_TARGET}") + if (NOT _PACKAGE_NS STREQUAL "") + set(_TARGET "${_PACKAGE_NS}::${_TARGET}") + endif() endif() list(APPEND _RES "${_TARGET}") endforeach() diff --git a/compiler_gym/CMakeLists.txt b/compiler_gym/CMakeLists.txt index 402c6dd1c..3264c331e 100644 --- a/compiler_gym/CMakeLists.txt +++ b/compiler_gym/CMakeLists.txt @@ -112,3 +112,50 @@ cg_py_library( compiler_gym::util::util PUBLIC ) + +set(_PY_PKG_IN_DIR "${CMAKE_CURRENT_BINARY_DIR}/..") +cmake_path(RELATIVE_PATH _PY_PKG_IN_DIR BASE_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/..") +set(_PY_PKG_OUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/../py_pkg") +string(CONCAT _CMD + "cd \"${CMAKE_CURRENT_SOURCE_DIR}/..\"" + " && \"${Python3_EXECUTABLE}\" setup.py" + " --build-dir \"${_PY_PKG_OUT_DIR}/build\"" + " bdist_wheel" + " --package-dir \"${_PY_PKG_IN_DIR}\"" + " --dist-dir \"${_PY_PKG_OUT_DIR}/dist\"" + " --bdist-dir \"${_PY_PKG_OUT_DIR}/bdist\"") +string(CONCAT _CMD_GET_WHEEL_FILENAME "${_CMD}" " --get-wheel-filename") +execute_process(COMMAND + bash -c "${_CMD_GET_WHEEL_FILENAME}" + OUTPUT_VARIABLE _PY_PGK_FILE_NAME + COMMAND_ERROR_IS_FATAL ANY) +string(CONCAT _BUILD_CMD + "rm -rf \"${_PY_PKG_OUT_DIR}/build\"" + " && ${_CMD}") +cg_genrule( + NAME python_package + COMMAND "${_BUILD_CMD}" + OUTS "${_PY_PKG_OUT_DIR}/dist/${_PY_PGK_FILE_NAME}" + DEPENDS + compiler_gym::third_party::cbench::benchmarks_list + compiler_gym::third_party::cbench::crc32 + compiler_gym::compiler_gym + compiler_gym::bin::bin + compiler_gym::datasets::datasets + compiler_gym::envs::envs + compiler_gym::service::service + compiler_gym::service::runtime::runtime + compiler_gym::spaces::spaces + compiler_gym::util::util + compiler_gym::util::flags::flags + compiler_gym::views::views +) + +string(CONCAT _CMD + "\"${Python3_EXECUTABLE}\" -m pip install --upgrade --no-deps --force-reinstall" + " \"${_PY_PKG_OUT_DIR}/dist/${_PY_PGK_FILE_NAME}\"") +cg_genrule( + NAME install_python_package + COMMAND "${_CMD}" + DEPENDS ::python_package + EXCLUDE_FROM_ALL) diff --git a/external/external.cmake b/external/external.cmake index 6185c3dd1..b4644703c 100644 --- a/external/external.cmake +++ b/external/external.cmake @@ -143,6 +143,9 @@ if(COMPILER_GYM_PROTOBUF_PROVIDER STREQUAL "internal") COMMAND_ERROR_IS_FATAL ANY ) list(PREPEND CMAKE_PREFIX_PATH "${CMAKE_CURRENT_BINARY_DIR}/external/protobuf/install") + if(NOT DEFINED Protobuf_USE_STATIC_LIBS) + set(Protobuf_USE_STATIC_LIBS ON) + endif() endif() find_package(Protobuf REQUIRED) @@ -444,6 +447,9 @@ if(COMPILER_GYM_BOOST_PROVIDER STREQUAL "internal") build_external_cmake_project( NAME boost SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/boost") + if (NOT DEFINED Boost_USE_STATIC_LIBS) + set(Boost_USE_STATIC_LIBS ON) + endif() endif() find_package(Boost REQUIRED COMPONENTS filesystem headers) diff --git a/external/protobuf/build_protobuf.cmake b/external/protobuf/build_protobuf.cmake index 35bdba322..36060bab1 100644 --- a/external/protobuf/build_protobuf.cmake +++ b/external/protobuf/build_protobuf.cmake @@ -48,7 +48,16 @@ execute_process( "CFLAGS=${CMAKE_C_FLAGS} $ENV{CFLAGS}" "CXXFLAGS=-std=c++${CMAKE_CXX_STANDARD} ${CMAKE_CXX_FLAGS} $ENV{CFLAGS}" "LDFLAGS=-Wl,-rpath,${CMAKE_INSTALL_PREFIX}/lib ${CMAKE_STATIC_LINKER_FLAGS_INIT} ${CMAKE_SHARED_LINKER_FLAGS_INIT} ${CMAKE_EXE_LINKER_FLAGS_INIT} ${CMAKE_STATIC_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS} 
${CMAKE_EXE_LINKER_FLAGS} $ENV{LDFLAGS}" - ./configure "--prefix=${CMAKE_INSTALL_PREFIX}" + ./configure + "--prefix=${CMAKE_INSTALL_PREFIX}" + # --enable-shared=no seems to not work. It still produces shared libraries. + "--enable-shared=no" + WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" + COMMAND_ERROR_IS_FATAL ANY) + + +execute_process( + COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}" WORKING_DIRECTORY "${GIT_REPOSITORY_DIR}" COMMAND_ERROR_IS_FATAL ANY) diff --git a/setup.py b/setup.py index 5a95da963..3e21ef5df 100644 --- a/setup.py +++ b/setup.py @@ -5,10 +5,36 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import argparse +import distutils.command.build import distutils.util +import fnmatch import io +import sys +from pathlib import Path import setuptools +from setuptools.command.build_py import build_py as build_py_orig +from setuptools.dist import Distribution + +argparser = argparse.ArgumentParser(add_help=False) +argparser.add_argument( + "--package-dir", + help="Source directory of package files.", + default="bazel-bin/package.runfiles/CompilerGym", +) +argparser.add_argument( + "--get-wheel-filename", + action="store_true", + help="Print only output filename without building it.", +) +argparser.add_argument( + "--build-dir", + help="Path to build dir. This is where this script copies files from the source before making the wheel package.", + default="build", +) +args, unknown = argparser.parse_known_args() +sys.argv = [sys.argv[0]] + unknown with open("VERSION") as f: version = f.read().strip() @@ -38,16 +64,55 @@ def get_tag(self): except ImportError: bdist_wheel = None -setuptools.setup( - name="compiler_gym", - version=version, - description="Reinforcement learning environments for compiler research", - author="Facebook AI Research", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/facebookresearch/CompilerGym", - license="MIT", - packages=[ + +class build(distutils.command.build.build): + def initialize_options(self): + distutils.command.build.build.initialize_options(self) + self.build_base = args.build_dir + + +# Add files that should be excluded from the package. +# The argument exclude_package_data of setuptools.setup(...) +# does not work with py files. They have to be excluded here. 
+excluded = [ + str(Path(args.package_dir) / "compiler_gym/envs/llvm/make_specs.py"), + str(Path(args.package_dir) / "compiler_gym/bin/random_eval.py"), +] + + +class build_py(build_py_orig): + def find_package_modules(self, package, package_dir): + modules = super().find_package_modules(package, package_dir) + res = [ + (pkg, mod, file) + for (pkg, mod, file) in modules + if not any(fnmatch.fnmatchcase(file, pat=pattern) for pattern in excluded) + ] + return res + + +def wheel_filename(**kwargs): + # create a fake distribution from arguments + dist = Distribution(attrs=kwargs) + # finalize bdist_wheel command + bdist_wheel_cmd = dist.get_command_obj("bdist_wheel") + bdist_wheel_cmd.ensure_finalized() + # assemble wheel file name + distname = bdist_wheel_cmd.wheel_dist_name + tag = "-".join(bdist_wheel_cmd.get_tag()) + return f"{distname}-{tag}.whl" + + +setup_kwargs = { + "name": "compiler_gym", + "version": version, + "description": "Reinforcement learning environments for compiler research", + "author": "Facebook AI Research", + "long_description": long_description, + "long_description_content_type": "text/markdown", + "url": "https://github.com/facebookresearch/CompilerGym", + "license": "MIT", + "packages": [ "compiler_gym.bin", "compiler_gym.datasets", "compiler_gym.envs.gcc.datasets", @@ -75,10 +140,10 @@ def get_tag(self): "compiler_gym.wrappers", "compiler_gym", ], - package_dir={ - "": "bazel-bin/package.runfiles/CompilerGym", + "package_dir": { + "": args.package_dir, }, - package_data={ + "package_data": { "compiler_gym": [ "envs/gcc/service/compiler_gym-gcc-service", "envs/llvm/service/compiler_gym-llvm-service", @@ -86,16 +151,16 @@ def get_tag(self): "envs/llvm/service/libLLVMPolly.so", "envs/llvm/service/compute_observation", "third_party/cbench/benchmarks.txt", - "third_party/cbench/cbench-v*/*", + "third_party/cbench/cbench-v*/crc32.bc", "third_party/csmith/csmith/bin/csmith", "third_party/csmith/csmith/include/csmith-2.3.0/*.h", "third_party/inst2vec/*.pickle", ] }, - install_requires=requirements, - include_package_data=True, - python_requires=">=3.6", - classifiers=[ + "install_requires": requirements, + "include_package_data": True, + "python_requires": ">=3.6", + "classifiers": [ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", @@ -109,7 +174,15 @@ def get_tag(self): "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Software Development :: Compilers", ], - cmdclass={"bdist_wheel": bdist_wheel}, - platforms=[distutils.util.get_platform()], - zip_safe=False, -) + "cmdclass": {"bdist_wheel": bdist_wheel, "build": build, "build_py": build_py}, + "platforms": [distutils.util.get_platform()], + "zip_safe": False, +} + +if args.get_wheel_filename: + # Instead of generating the wheel file, + # print its filename. 
+ file_name = wheel_filename(**setup_kwargs) + sys.stdout.write(file_name) +else: + setuptools.setup(**setup_kwargs) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index dc113d8b8..6d9b7bf3d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -124,3 +124,15 @@ cg_py_test( compiler_gym::compiler_gym tests::pytest_plugins::common ) + +file(GLOB_RECURSE _SRC_TEST_FILES + LIST_DIRECTORIES true "${CMAKE_CURRENT_SOURCE_DIR}") +set(_TESTS_BIN_DIR "${CMAKE_CURRENT_BINARY_DIR}/py_pkg") +string(CONCAT _CMD + "cd \"${CMAKE_CURRENT_SOURCE_DIR}/..\"" + " && make install-test") +cg_genrule( + NAME python_package_tests + COMMAND "${_CMD}" + DEPENDS compiler_gym::install_python_package + EXCLUDE_FROM_ALL) From 7eb513a50b6d7be36f8337daad680cb504babc8c Mon Sep 17 00:00:00 2001 From: Boian Petkantchin Date: Wed, 15 Dec 2021 10:20:47 -0800 Subject: [PATCH 072/142] Restrict setuptools version Fixes failing test_setuptools_version, where pkg_resources.require(...) raises an exception with setuptools v59. --- .github/workflows/pre_commit.yaml | 1 - compiler_gym/requirements.txt | 1 - compiler_gym/requirements_build.txt | 3 +++ requirements.txt | 1 + requirements_pre_commit.txt | 1 + tests/requirements.txt | 1 + 6 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 compiler_gym/requirements_build.txt diff --git a/.github/workflows/pre_commit.yaml b/.github/workflows/pre_commit.yaml index b77f50302..8df61e9ff 100644 --- a/.github/workflows/pre_commit.yaml +++ b/.github/workflows/pre_commit.yaml @@ -51,7 +51,6 @@ jobs: - name: Install Python dependencies run: | - sudo apt-get install python3-setuptools python3 -m pip install --upgrade wheel python3 -m pip install -r requirements_pre_commit.txt python3 -m isort --version diff --git a/compiler_gym/requirements.txt b/compiler_gym/requirements.txt index 04dfeec4f..721c9590d 100644 --- a/compiler_gym/requirements.txt +++ b/compiler_gym/requirements.txt @@ -3,7 +3,6 @@ deprecated>=1.2.12 docker>=4.0.0 fasteners>=0.15 grpcio>=1.32.0 -grpcio_tools>=1.32.0 gym>=0.18.0,<0.21 humanize>=2.6.0 loop_tool_py==0.0.7 diff --git a/compiler_gym/requirements_build.txt b/compiler_gym/requirements_build.txt new file mode 100644 index 000000000..f6fe2b47d --- /dev/null +++ b/compiler_gym/requirements_build.txt @@ -0,0 +1,3 @@ +-r requirements.txt +grpcio_tools>=1.32.0 +setuptools diff --git a/requirements.txt b/requirements.txt index 2780e5c8d..30eb41d05 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +-r compiler_gym/requirements_build.txt -r compiler_gym/requirements.txt -r docs/requirements.txt -r examples/requirements.txt diff --git a/requirements_pre_commit.txt b/requirements_pre_commit.txt index 7f650a7f1..23e389839 100644 --- a/requirements_pre_commit.txt +++ b/requirements_pre_commit.txt @@ -2,3 +2,4 @@ black==19.10b0 flake8==3.9.2 isort==4.3.21 pre-commit>=2.12.1 +setuptools diff --git a/tests/requirements.txt b/tests/requirements.txt index a93d91ec0..8456fb2f5 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -9,3 +9,4 @@ pytest-stress==1.0.1 pytest-sugar==0.9.4 pytest-timeout==1.4.2 pytest-xdist==2.4.0 +setuptools<59 # v59 causes pkg_resources.require in test_setuptools_version to fail From df53252c4033672ad07a729931100f0ef05db76b Mon Sep 17 00:00:00 2001 From: Boian Petkantchin Date: Thu, 16 Dec 2021 16:18:20 -0800 Subject: [PATCH 073/142] In CI install requirements_build.txt --- .github/actions/install-build-dependencies/action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/actions/install-build-dependencies/action.yaml b/.github/actions/install-build-dependencies/action.yaml index f1d4f442f..13e77c11b 100644 --- a/.github/actions/install-build-dependencies/action.yaml +++ b/.github/actions/install-build-dependencies/action.yaml @@ -15,7 +15,7 @@ runs: sudo apt-get install -y clang-9 patchelf fi python -m pip install -U pip wheel - python -m pip install -r compiler_gym/requirements.txt + python -m pip install -r compiler_gym/requirements_build.txt shell: bash env: LDFLAGS: -L/usr/local/opt/zlib/lib From 95b6e0fbe789642b4ee087b3ed39faddbaa90fa0 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 11:28:08 +0000 Subject: [PATCH 074/142] [examples] Move loop unroller passes into anonymous namespace. This appears to be the way that the LLVM sources structure their passes, with the implementation in an anonymous namespace and the INITIALIZE_PASS_xxx macros in no namespace. Fixes #509. --- .../loop_unroller/loop_unroller.cc | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/examples/example_unrolling_service/loop_unroller/loop_unroller.cc b/examples/example_unrolling_service/loop_unroller/loop_unroller.cc index 29c87559a..ddd55ef8b 100644 --- a/examples/example_unrolling_service/loop_unroller/loop_unroller.cc +++ b/examples/example_unrolling_service/loop_unroller/loop_unroller.cc @@ -35,7 +35,8 @@ using namespace llvm; -namespace llvm { +namespace { + /// Input LLVM module file name. cl::opt InputFilename(cl::Positional, cl::desc("Specify input filename"), cl::value_desc("filename"), cl::init("-")); @@ -65,9 +66,6 @@ static cl::opt PreserveAssemblyUseListOrder( "preserve-ll-uselistorder", cl::desc("Preserve use-list order when writing LLVM assembly."), cl::init(false), cl::Hidden); -// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. -void initializeLoopCounterPass(PassRegistry& Registry); - class LoopCounter : public llvm::FunctionPass { public: static char ID; @@ -90,15 +88,6 @@ class LoopCounter : public llvm::FunctionPass { } }; -// Initialise the pass. We have to declare the dependencies we use. -char LoopCounter::ID = 0; -INITIALIZE_PASS_BEGIN(LoopCounter, "count-loops", "Count loops", false, false) -INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) -INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) - -// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. -void initializeLoopUnrollConfiguratorPass(PassRegistry& Registry); - class LoopUnrollConfigurator : public llvm::FunctionPass { public: static char ID; @@ -125,13 +114,7 @@ class LoopUnrollConfigurator : public llvm::FunctionPass { } }; -// Initialise the pass. We have to declare the dependencies we use. char LoopUnrollConfigurator::ID = 1; -INITIALIZE_PASS_BEGIN(LoopUnrollConfigurator, "unroll-loops-configurator", - "Configurates loop unrolling", false, false) -INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) -INITIALIZE_PASS_END(LoopUnrollConfigurator, "unroll-loops-configurator", - "Configurates loop unrolling", false, false) /// Reads a module from a file. /// On error, messages are written to stderr and null is returned. @@ -148,8 +131,29 @@ static std::unique_ptr readModule(LLVMContext& Context, StringRef Name) return Module; } +char LoopCounter::ID = 0; + +} // anonymous namespace + +namespace llvm { + +// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. 
+void initializeLoopCounterPass(PassRegistry& Registry); +void initializeLoopUnrollConfiguratorPass(PassRegistry& Registry); + } // namespace llvm +// Initialise the passes. We have to declare the dependencies we use. +INITIALIZE_PASS_BEGIN(LoopUnrollConfigurator, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopUnrollConfigurator, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) + +INITIALIZE_PASS_BEGIN(LoopCounter, "count-loops", "Count loops", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) + int main(int argc, char** argv) { cl::ParseCommandLineOptions(argc, argv, " LLVM-Counter\n\n" From d7caf3b76e8af19c77ccaeda05b78acb61e04563 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 11:40:05 +0000 Subject: [PATCH 075/142] [llvm] Begin support for multiple cBench datasets. Issue #370. --- compiler_gym/envs/llvm/datasets/cbench.py | 49 +++++++++++++---------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index 992e67add..ff58f85c7 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -494,23 +494,25 @@ def validator( # Create the BenchmarkDynamicConfig object. cbench_data = site_data_path("llvm-v0/cbench-v1-runtime-data/runtime_data") - DYNAMIC_CONFIGS[benchmark] = BenchmarkDynamicConfig( - build_cmd=Command( - argument=["$CC", "$IN"] + linkopts, - timeout_seconds=60, - outfile=["a.out"], - ), - run_cmd=Command( - argument=cmd.replace("$BIN", "./a.out") - .replace("$D", str(cbench_data)) - .split(), - timeout_seconds=300, - infile=["a.out", "_finfo_dataset"], - outfile=[str(s) for s in outfiles], - ), - pre_run_cmd=[ - Command(argument=["echo", "1", ">_finfo_dataset"], timeout_seconds=30), - ], + DYNAMIC_CONFIGS[benchmark].append( + BenchmarkDynamicConfig( + build_cmd=Command( + argument=["$CC", "$IN"] + linkopts, + timeout_seconds=60, + outfile=["a.out"], + ), + run_cmd=Command( + argument=cmd.replace("$BIN", "./a.out") + .replace("$D", str(cbench_data)) + .split(), + timeout_seconds=300, + infile=["a.out", "_finfo_dataset"], + outfile=[str(s) for s in outfiles], + ), + pre_run_cmd=[ + Command(argument=["echo", "1", ">_finfo_dataset"], timeout_seconds=30), + ], + ) ) return True @@ -523,9 +525,11 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) for val in VALIDATORS.get(self.uri, []): self.add_validation_callback(val) - self.proto.dynamic_config.MergeFrom( - DYNAMIC_CONFIGS.get(self.uri, BenchmarkDynamicConfig()) - ) + if DYNAMIC_CONFIGS[self.uri]: + # TODO(github.com/facebookresearch/CompilerGym/issues/370): Add + # support for multiple datasets. + config = DYNAMIC_CONFIGS[self.uri][-1] + self.proto.dynamic_config.MergeFrom(config) class CBenchDataset(TarDatasetWithManifest): @@ -653,8 +657,9 @@ def __init__(self, site_data_base: Path): ] = defaultdict(list) -# A map from benchmark name to BenchmarkDynamicConfig messages. -DYNAMIC_CONFIGS: Dict[str, Optional[BenchmarkDynamicConfig]] = {} +# A map from benchmark name to a list of BenchmarkDynamicConfig messages, one +# per dataset. 
+DYNAMIC_CONFIGS: Dict[str, List[BenchmarkDynamicConfig]] = defaultdict(list) def validate_sha_output(result: BenchmarkExecutionResult) -> Optional[str]: From d3d52bea05019673866f5dc92681ec325b1f5451 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 12:04:41 +0000 Subject: [PATCH 076/142] [datasets] Prevent name shadowing of module. --- compiler_gym/datasets/dataset.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler_gym/datasets/dataset.py b/compiler_gym/datasets/dataset.py index 42cb98549..ada612155 100644 --- a/compiler_gym/datasets/dataset.py +++ b/compiler_gym/datasets/dataset.py @@ -10,7 +10,9 @@ from typing import Dict, Iterable, Optional, Union import numpy as np -from deprecated.sphinx import deprecated + +# The "deprecated" name is used as a constructor argument to Dataset, so rename +# this import to prevent shadowing. from deprecated.sphinx import deprecated as mark_deprecated from compiler_gym.datasets.benchmark import Benchmark @@ -127,7 +129,7 @@ def __repr__(self): return self.name @property - @deprecated( + @mark_deprecated( version="0.2.1", reason=( "The `Dataset.logger` attribute is deprecated. All Dataset " From 79ea2e1b9a2093230fe35758680b1aab844f2fc9 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 12:49:51 +0000 Subject: [PATCH 077/142] [datasets] Deprecate resolve_uri_protocol(). Issue #524. --- compiler_gym/datasets/uri.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/compiler_gym/datasets/uri.py b/compiler_gym/datasets/uri.py index 0a9db5eb4..6f6b9db81 100644 --- a/compiler_gym/datasets/uri.py +++ b/compiler_gym/datasets/uri.py @@ -5,6 +5,8 @@ """This module contains utility code for working with URIs.""" import re +from deprecated.sphinx import deprecated + # Regular expression that matches the full two-part URI prefix of a dataset: # {{protocol}}://{{dataset}} # @@ -22,6 +24,10 @@ BENCHMARK_URI_RE = re.compile(BENCHMARK_URI_PATTERN) +@deprecated( + version="0.2.2", + reason=("Use the new compiler_gym.datasets.BenchmarkUri class to parse URIs"), +) def resolve_uri_protocol(uri: str) -> str: """Require that the URI has a protocol by applying a default "benchmark" protocol if none is set.""" From 375a88e6d13dc5ca8343e7fe039cb19fa2e0c6c3 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 14:56:58 +0000 Subject: [PATCH 078/142] [datasets] Add a new BenchmarkUri class. This new class handles the parsing logic for URIs, and will be used to enable a greater set of customization options for benchmark construction. 
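
For illustration, a minimal sketch of how the new class is expected to
be used (the URI below is an arbitrary example; the field values follow
from the parsing rules documented on the class):

    from compiler_gym.datasets import BenchmarkUri

    uri = BenchmarkUri.from_string("benchmark://cbench-v1/qsort?dataset=1")
    # uri.scheme == "benchmark", uri.dataset == "cbench-v1"
    # uri.path == "/qsort", uri.params == {"dataset": ["1"]}

    # A URI without a scheme falls back to the default "benchmark" scheme:
    assert BenchmarkUri.canonicalize("cbench-v1/qsort") == "benchmark://cbench-v1/qsort"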
--- compiler_gym/bin/manual_env.py | 2 +- compiler_gym/datasets/__init__.py | 2 + compiler_gym/datasets/dataset.py | 67 +++++++--- compiler_gym/datasets/datasets.py | 17 +-- compiler_gym/datasets/uri.py | 117 +++++++++++++++++- .../envs/llvm/service/BenchmarkFactory.cc | 4 +- .../util/flags/benchmark_from_flags.py | 6 +- compiler_gym/validation_result.py | 4 +- docs/source/compiler_gym/datasets.rst | 7 ++ tests/compiler_env_test.py | 2 +- tests/datasets/BUILD | 11 ++ tests/datasets/benchmark_test.py | 92 -------------- tests/datasets/dataset_test.py | 43 +++---- tests/datasets/datasets_test.py | 2 +- tests/datasets/uri_test.py | 99 +++++++++++++++ tests/llvm/custom_benchmarks_test.py | 6 +- tests/llvm/llvm_benchmarks_test.py | 6 +- 17 files changed, 324 insertions(+), 163 deletions(-) create mode 100644 tests/datasets/uri_test.py diff --git a/compiler_gym/bin/manual_env.py b/compiler_gym/bin/manual_env.py index babe31ebc..14b82f00c 100644 --- a/compiler_gym/bin/manual_env.py +++ b/compiler_gym/bin/manual_env.py @@ -291,7 +291,7 @@ def __init__(self, env: CompilerEnv): self.benchmarks += islice(dataset.benchmark_uris(), 50) self.benchmarks.sort() - # Strip default benchmark:// protocol. + # Strip default benchmark:// scheme. for i, benchmark in enumerate(self.benchmarks): if benchmark.startswith("benchmark://"): self.benchmarks[i] = benchmark[len("benchmark://") :] diff --git a/compiler_gym/datasets/__init__.py b/compiler_gym/datasets/__init__.py index df9b009a4..5b13ae55c 100644 --- a/compiler_gym/datasets/__init__.py +++ b/compiler_gym/datasets/__init__.py @@ -19,12 +19,14 @@ from compiler_gym.datasets.datasets import Datasets from compiler_gym.datasets.files_dataset import FilesDataset from compiler_gym.datasets.tar_dataset import TarDataset, TarDatasetWithManifest +from compiler_gym.datasets.uri import BenchmarkUri __all__ = [ "activate", "Benchmark", "BenchmarkInitError", "BenchmarkSource", + "BenchmarkUri", "Dataset", "DatasetInitError", "Datasets", diff --git a/compiler_gym/datasets/dataset.py b/compiler_gym/datasets/dataset.py index ada612155..cea11310e 100644 --- a/compiler_gym/datasets/dataset.py +++ b/compiler_gym/datasets/dataset.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. import logging import os +import re import shutil import warnings from pathlib import Path @@ -16,7 +17,7 @@ from deprecated.sphinx import deprecated as mark_deprecated from compiler_gym.datasets.benchmark import Benchmark -from compiler_gym.datasets.uri import DATASET_NAME_RE +from compiler_gym.datasets.uri import BenchmarkUri logger = logging.getLogger(__name__) @@ -25,6 +26,9 @@ # attribute is removed, scheduled for release 0.2.3. _logger = logger +_DATASET_VERSION_PATTERN = r"[a-zA-z0-9-_]+-v(?P[0-9]+)" +_DATASET_VERSION_RE = re.compile(_DATASET_VERSION_PATTERN) + class Dataset: """A dataset is a collection of benchmarks. @@ -56,8 +60,8 @@ def __init__( ): """Constructor. - :param name: The name of the dataset. Must conform to the pattern - :code:`{{protocol}}://{{name}}-v{{version}}`. + :param name: The name of the dataset, in the format: + :code:`scheme://name`. :param description: A short human-readable description of the dataset. @@ -101,16 +105,15 @@ def __init__( :raises ValueError: If :code:`name` does not match the expected type. """ self._name = name - components = DATASET_NAME_RE.match(name) - if not components: - raise ValueError( - f"Invalid dataset name: '{name}'. 
" - "Dataset name must be in the form: '{{protocol}}://{{name}}-v{{version}}'" - ) + + uri = BenchmarkUri.from_string(name) + self._description = description self._license = license - self._protocol = components.group("dataset_protocol") - self._version = int(components.group("dataset_version")) + self._scheme = uri.scheme + + match = _DATASET_VERSION_RE.match(uri.dataset) + self._version = int(match.group("version") if match else 0) self._references = references or {} self._deprecation_message = deprecated self._validatable = validatable @@ -120,9 +123,8 @@ def __init__( # Set up the site data name. if site_data_base: - basename = components.group("dataset_name") self._site_data_path = ( - Path(site_data_base).resolve() / self.protocol / basename + Path(site_data_base).resolve() / uri.scheme / uri.dataset ) def __repr__(self): @@ -168,16 +170,27 @@ def license(self) -> str: return self._license @property + @mark_deprecated( + version="0.2.2", reason="The `protocol` attribute has been renamed `scheme`" + ) def protocol(self) -> str: - """The URI protocol that is used to identify benchmarks in this dataset. + """The URI scheme that is used to identify benchmarks in this dataset. :type: str """ - return self._protocol + return self.scheme + + @property + def scheme(self) -> str: + """The URI scheme that is used to identify benchmarks in this dataset. + + :type: str + """ + return self._scheme @property def version(self) -> int: - """The version tag for this dataset. + """The version tag for this dataset. Defaults to zero. :type: int """ @@ -395,6 +408,24 @@ def benchmark_uris(self) -> Iterable[str]: """ raise NotImplementedError("abstract class") + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + """Select a benchmark. + + Subclasses must implement this method. Implementors may assume that the + URI is well formed and that the :code:`scheme` and :code:`dataset` + components are correct. + + :param uri: The parsed URI of the benchmark to return. + + :return: A :class:`Benchmark ` + instance. + + :raise LookupError: If :code:`uri` is not found. + + :raise ValueError: If the URI is invalid. + """ + raise NotImplementedError("abstract class") + def benchmark(self, uri: str) -> Benchmark: """Select a benchmark. @@ -404,8 +435,10 @@ def benchmark(self, uri: str) -> Benchmark: instance. :raise LookupError: If :code:`uri` is not found. + + :raise ValueError: If the URI is invalid. """ - raise NotImplementedError("abstract class") + return self.benchmark_from_parsed_uri(BenchmarkUri.from_string(uri)) def random_benchmark( self, random_state: Optional[np.random.Generator] = None diff --git a/compiler_gym/datasets/datasets.py b/compiler_gym/datasets/datasets.py index 3d21c00f0..9b9e7b319 100644 --- a/compiler_gym/datasets/datasets.py +++ b/compiler_gym/datasets/datasets.py @@ -9,7 +9,7 @@ from compiler_gym.datasets.benchmark import Benchmark from compiler_gym.datasets.dataset import Dataset -from compiler_gym.datasets.uri import BENCHMARK_URI_RE, resolve_uri_protocol +from compiler_gym.datasets.uri import BENCHMARK_URI_RE, BenchmarkUri T = TypeVar("T") @@ -131,12 +131,13 @@ def dataset(self, dataset: str) -> Dataset: :raises LookupError: If :code:`dataset` is not found. 
""" - dataset_name = resolve_uri_protocol(dataset) + uri = BenchmarkUri.from_string(dataset) + key = f"{uri.scheme}://{uri.dataset}" - if dataset_name not in self._datasets: - raise LookupError(f"Dataset not found: {dataset_name}") + if key not in self._datasets: + raise LookupError(f"Dataset not found: {key}") - return self._datasets[dataset_name] + return self._datasets[key] def __getitem__(self, dataset: str) -> Dataset: """Lookup a dataset. @@ -155,7 +156,7 @@ def __setitem__(self, key: str, dataset: Dataset): :param key: The name of the dataset. :param dataset: The dataset to add. """ - dataset_name = resolve_uri_protocol(key) + dataset_name = BenchmarkUri.canonicalize(key) self._datasets[dataset_name] = dataset if not dataset.deprecated: @@ -173,7 +174,7 @@ def __delitem__(self, dataset: str): :return: :code:`True` if the dataset was removed, :code:`False` if it was already removed. """ - dataset_name = resolve_uri_protocol(dataset) + dataset_name = BenchmarkUri.canonicalize(dataset) if dataset_name in self._visible_datasets: self._visible_datasets.remove(dataset_name) del self._datasets[dataset_name] @@ -242,7 +243,7 @@ def benchmark(self, uri: str) -> Benchmark: :return: A :class:`Benchmark ` instance. """ - uri = resolve_uri_protocol(uri) + uri = BenchmarkUri.canonicalize(uri) match = BENCHMARK_URI_RE.match(uri) if not match: diff --git a/compiler_gym/datasets/uri.py b/compiler_gym/datasets/uri.py index 6f6b9db81..265aac4d1 100644 --- a/compiler_gym/datasets/uri.py +++ b/compiler_gym/datasets/uri.py @@ -4,11 +4,20 @@ # LICENSE file in the root directory of this source tree. """This module contains utility code for working with URIs.""" import re +from typing import Dict, List +from urllib.parse import ParseResult, parse_qs, urlencode, urlparse, urlunparse from deprecated.sphinx import deprecated +from pydantic import BaseModel + +# === BEGIN DEPRECATED DECLARATIONS === +# +# The following regular expression definitions have been deprecated and will be +# removed in a future release! Please update your code to use the new +# BenchmarkUri class defined in this file. # Regular expression that matches the full two-part URI prefix of a dataset: -# {{protocol}}://{{dataset}} +# {{scheme}}://{{dataset}} # # An optional trailing slash is permitted. # @@ -17,20 +26,118 @@ DATASET_NAME_RE = re.compile(DATASET_NAME_PATTERN) # Regular expression that matches the full three-part format of a benchmark URI: -# {{protocol}}://{{dataset}}/{{id}} +# {{sceme}}://{{dataset}}/{{id}} # # Example matches: "benchmark://foo-v0/foo" or "generator://bar-v1/foo/bar.txt". BENCHMARK_URI_PATTERN = r"(?P(?P[a-zA-z0-9-_]+)://(?P[a-zA-z0-9-_]+-v(?P[0-9]+)))/(?P.+)$" BENCHMARK_URI_RE = re.compile(BENCHMARK_URI_PATTERN) +# === END DEPRECATED DECLARATIONS === + @deprecated( version="0.2.2", - reason=("Use the new compiler_gym.datasets.BenchmarkUri class to parse URIs"), + reason=("Use compiler_gym.datasets.BenchmarkUri.canonicalize()"), ) def resolve_uri_protocol(uri: str) -> str: - """Require that the URI has a protocol by applying a default "benchmark" - protocol if none is set.""" + """Require that the URI has a scheme by applying a default "benchmark" + scheme if none is set.""" if "://" not in uri: return f"benchmark://{uri}" return uri + + +class BenchmarkUri(BaseModel): + """A URI string used to look up a benchmark. 
+ + A benchmark URI has the following format: + + :code:`scheme://dataset/path?params#fragment` + + where: + + * :code:`scheme` (optional, default :code:`benchmark`): An arbitrary string + used to group datasets, for example :code:`generator` if the dataset is a + benchmark generator. + + * :code:`dataset`: The name of a dataset, optionally with a version tag, for + example :code:`linux-v0`. + + * :code:`path` (optional, default empty string): The path of a benchmark + within a dataset. + + * :code:`params` (optional, default empty dictionary): A set of query + parameters for the benchmark. This is parsed a dictionary of string keys + to a list of string values. For example :code:`dataset=1&debug=true` which + will be parsed as :code:`{"dataset": ["1"], "debug": ["true"]}`. + + * :code:`fragment` (optional, default empty string): An optional fragment + within the benchmark. + + The :code:`scheme` and :code:`dataset` components are used to resolve a + :class:`Dataset ` class that can serve the + benchmark. The :meth:`Dataset.benchmark_from_parsed_uri()` method is then + used to interpret the remainder of the URI components. + + A benchmark URI may resolve to zero or more benchmarks, for example: + + * :code:`csmith-v0` resolves to any benchmark from the + :code:`benchmark://csmith-v0` dataset. + + * :code:`benchmark://cbench-v0/qsort` resolves to the path :code:`/qsort` + within the dataset :code:`benchmark://cbench-v0`. + + * :code:`benchmark://cbench-v0/qsort?debug=true` also resolves to the path + :code:`/qsort` within the dataset :code:`benchmark://cbench-v0`, but with + an additional parameter :code:`debug=true`. + """ + + scheme: str + """The benchmark scheme. Defaults to :code:`benchmark`.""" + + dataset: str + """The name of the dataset.""" + + path: str + """The path of the benchmark. Empty string if not set.""" + + params: Dict[str, List[str]] = {} + """A dictionary of query parameters. Empty dictionary if not set.""" + + fragment: str = "" + """The URL fragment. Empty string if not set.""" + + @staticmethod + def canonicalize(uri: str): + return str(BenchmarkUri.from_string(uri)) + + @classmethod + def from_string(cls, uri: str) -> "BenchmarkUri": + components = urlparse(uri) + + # Add the default "benchmark://" scheme if required. + if not components.scheme and not components.netloc: + components = urlparse(f"benchmark://{uri}") + + return cls( + scheme=components.scheme, + dataset=components.netloc, + path=components.path, + params=parse_qs(components.query), + fragment=components.fragment, + ) + + def __repr__(self): + return urlunparse( + ParseResult( + scheme=self.scheme, + netloc=self.dataset, + path=self.path, + query=urlencode(self.params, doseq=True), + fragment=self.fragment, + params="", # Field not used. + ) + ) + + def __str__(self) -> str: + return repr(self) diff --git a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc index 4da5984a0..dbcf0fc66 100644 --- a/compiler_gym/envs/llvm/service/BenchmarkFactory.cc +++ b/compiler_gym/envs/llvm/service/BenchmarkFactory.cc @@ -70,11 +70,11 @@ Status BenchmarkFactory::getBenchmark(const BenchmarkProto& benchmarkMessage, } case compiler_gym::File::DataCase::kUri: { VLOG(3) << "LLVM benchmark cache miss, read from URI: " << benchmarkMessage.uri(); - // Check the protocol of the benchmark URI. + // Check the scheme of the benchmark URI. if (programFile.uri().find("file:///") != 0) { return Status(StatusCode::INVALID_ARGUMENT, fmt::format("Invalid benchmark data URI. 
" - "Only the file:/// protocol is supported: \"{}\"", + "Only the file:/// scheme is supported: \"{}\"", programFile.uri())); } diff --git a/compiler_gym/util/flags/benchmark_from_flags.py b/compiler_gym/util/flags/benchmark_from_flags.py index e9d606e5d..34e260d4a 100644 --- a/compiler_gym/util/flags/benchmark_from_flags.py +++ b/compiler_gym/util/flags/benchmark_from_flags.py @@ -13,9 +13,9 @@ flags.DEFINE_string( "benchmark", None, - "The URI of the benchmark to use. Use the benchmark:// protocol to " - "reference named benchmarks, or the file:/// protocol to reference paths " - "to program data. If no protocol is specified, benchmark:// is implied.", + "The URI of the benchmark to use. Use the benchmark:// scheme to " + "reference named benchmarks, or the file:/// scheme to reference paths " + "to program data. If no scheme is specified, benchmark:// is implied.", ) FLAGS = flags.FLAGS diff --git a/compiler_gym/validation_result.py b/compiler_gym/validation_result.py index 14531bcb5..031fdad4b 100644 --- a/compiler_gym/validation_result.py +++ b/compiler_gym/validation_result.py @@ -113,8 +113,8 @@ def okay(self) -> bool: ) def __repr__(self): - # Remove default-protocol prefix to improve output readability. - benchmark = re.sub(r"^benchmark://", "", self.state.benchmark) + # Remove default-scheme prefix to improve output readability. + benchmark = re.sub(r"^benchmark://", "", str(self.state.benchmark)) if not self.okay(): msg = ", ".join(self.error_details.strip().split("\n")) diff --git a/docs/source/compiler_gym/datasets.rst b/docs/source/compiler_gym/datasets.rst index e39b7bb97..c7946c9ab 100644 --- a/docs/source/compiler_gym/datasets.rst +++ b/docs/source/compiler_gym/datasets.rst @@ -12,6 +12,13 @@ that can be installed and made available for use. .. currentmodule:: compiler_gym.datasets +BenchmarkUri +------------ + +.. autoclass:: BenchmarkUri + :members: + + Benchmark --------- diff --git a/tests/compiler_env_test.py b/tests/compiler_env_test.py index 9ff33814d..41352ba6d 100644 --- a/tests/compiler_env_test.py +++ b/tests/compiler_env_test.py @@ -52,7 +52,7 @@ def test_uri_substring_no_match(env: LlvmEnv): env.reset(benchmark="benchmark://cbench-v1/cr") -def test_uri_substring_candidate_no_match_infer_protocol(env: LlvmEnv): +def test_uri_substring_candidate_no_match_infer_scheme(env: LlvmEnv): env.reset(benchmark="cbench-v1/crc32") assert env.benchmark == "benchmark://cbench-v1/crc32" diff --git a/tests/datasets/BUILD b/tests/datasets/BUILD index 1dfa72a47..3d435ff55 100644 --- a/tests/datasets/BUILD +++ b/tests/datasets/BUILD @@ -46,3 +46,14 @@ py_test( "//tests/pytest_plugins:common", ], ) + +py_test( + name = "uri_test", + timeout = "short", + srcs = ["uri_test.py"], + deps = [ + "//compiler_gym/datasets", + "//tests:test_main", + "//tests/pytest_plugins:common", + ], +) diff --git a/tests/datasets/benchmark_test.py b/tests/datasets/benchmark_test.py index 80a16c8b7..1cd9a70af 100644 --- a/tests/datasets/benchmark_test.py +++ b/tests/datasets/benchmark_test.py @@ -3,13 +3,11 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
"""Unit tests for //compiler_gym/datasets:benchmark.""" -import re from pathlib import Path import pytest from compiler_gym.datasets import Benchmark, BenchmarkSource -from compiler_gym.datasets.uri import BENCHMARK_URI_RE, DATASET_NAME_RE from compiler_gym.service.proto import Benchmark as BenchmarkProto from compiler_gym.validation_error import ValidationError from tests.test_main import main @@ -17,96 +15,6 @@ pytest_plugins = ["tests.pytest_plugins.common"] -def _rgx_match(regex, groupname, string) -> str: - """Match the regex and return a named group.""" - match = re.match(regex, string) - assert match, f"Failed to match regex '{regex}' using string '{groupname}'" - return match.group(groupname) - - -def test_benchmark_uri_protocol(): - assert ( - _rgx_match(DATASET_NAME_RE, "dataset_protocol", "benchmark://cbench-v1/") - == "benchmark" - ) - assert ( - _rgx_match(DATASET_NAME_RE, "dataset_protocol", "Generator13://gen-v11/") - == "Generator13" - ) - - -def test_invalid_benchmark_uris(): - # Invalid protocol - assert not DATASET_NAME_RE.match("B?://cbench-v1/") # Invalid characters - assert not DATASET_NAME_RE.match("cbench-v1/") # Missing protocol - - # Invalid dataset name - assert not BENCHMARK_URI_RE.match("benchmark://cbench?v0/foo") # Invalid character - assert not BENCHMARK_URI_RE.match( - "benchmark://cbench/foo" - ) # Missing version suffix - assert not BENCHMARK_URI_RE.match("benchmark://cbench-v0") # Missing benchmark ID - assert not BENCHMARK_URI_RE.match("benchmark://cbench-v0/") # Missing benchmark ID - - -def test_benchmark_uri_dataset(): - assert ( - _rgx_match(BENCHMARK_URI_RE, "dataset_name", "benchmark://cbench-v1/foo") - == "cbench-v1" - ) - assert ( - _rgx_match(BENCHMARK_URI_RE, "dataset_name", "Generator13://gen-v11/foo") - == "gen-v11" - ) - - -def test_benchmark_dataset_name(): - assert ( - _rgx_match(BENCHMARK_URI_RE, "dataset", "benchmark://cbench-v1/foo") - == "benchmark://cbench-v1" - ) - assert ( - _rgx_match(BENCHMARK_URI_RE, "dataset", "Generator13://gen-v11/foo") - == "Generator13://gen-v11" - ) - - -def test_benchmark_uri_id(): - assert ( - _rgx_match(BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/foo") - == "foo" - ) - assert ( - _rgx_match(BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/foo/123") - == "foo/123" - ) - assert ( - _rgx_match( - BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/foo/123.txt" - ) - == "foo/123.txt" - ) - # Query parameters are allowed in benchmark URIs. - assert ( - _rgx_match( - BENCHMARK_URI_RE, - "benchmark_name", - "benchmark://cbench-v1/foo/123?param=true&false", - ) - == "foo/123?param=true&false" - ) - # Whitespace is allowed in benchmark URIs. - assert ( - _rgx_match( - BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/ white space" - ) - ) == " white space" - # This URI makes no sense, but is valid I suppose. 
- assert ( - _rgx_match(BENCHMARK_URI_RE, "benchmark_name", "benchmark://cbench-v1/\t") - ) == "\t" - - def test_benchmark_attribute_outside_init(): """Test that new attributes cannot be added to Benchmark.""" benchmark = Benchmark(None) diff --git a/tests/datasets/dataset_test.py b/tests/datasets/dataset_test.py index 82351b0e1..c9a4c16bf 100644 --- a/tests/datasets/dataset_test.py +++ b/tests/datasets/dataset_test.py @@ -15,27 +15,6 @@ # pylint: disable=abstract-method -@pytest.mark.parametrize( - "invalid_name", ["benchmark://test", "test-v0", "benchmark://v0"] -) -def test_dataset__invalid_name(invalid_name: str): - """Test that invalid dataset names raise an error on init.""" - - with pytest.raises( - ValueError, - match=( - f"Invalid dataset name: '{invalid_name}'. " - "Dataset name must be in the form: '{{protocol}}://{{name}}-v{{version}}'" - ), - ): - Dataset( - name=invalid_name, - description="A test dataset", - license="MIT", - site_data_base="test", - ) - - def test_dataset_properties(): """Test the dataset property values.""" dataset = Dataset( @@ -46,7 +25,7 @@ def test_dataset_properties(): ) assert dataset.name == "benchmark://test-v0" - assert dataset.protocol == "benchmark" + assert dataset.scheme == "benchmark" assert dataset.description == "A test dataset" assert dataset.license == "MIT" @@ -66,6 +45,20 @@ def test_dataset_optional_properties(): assert dataset.validatable == "No" +def test_dataset_default_version(): + """Test the dataset property values.""" + dataset = Dataset( + name="benchmark://test", + description="A test dataset", + license="MIT", + site_data_base="test", + ) + + assert dataset.name == "benchmark://test" + assert dataset.scheme == "benchmark" + assert dataset.version == 0 + + def test_dataset_optional_properties_explicit_values(): """Test the non-default values of optional dataset properties.""" dataset = Dataset( @@ -90,14 +83,14 @@ def test_dataset_optional_properties_explicit_values(): def test_dataset_inferred_properties(): """Test the values of inferred dataset properties.""" dataset = Dataset( - name="benchmark://test-v0", + name="benchmark://test-v2", description="A test dataset", license="MIT", site_data_base="test", ) - assert dataset.protocol == "benchmark" - assert dataset.version == 0 + assert dataset.scheme == "benchmark" + assert dataset.version == 2 def test_dataset_properties_read_only(tmpwd: Path): diff --git a/tests/datasets/datasets_test.py b/tests/datasets/datasets_test.py index 14394abf4..da2b65244 100644 --- a/tests/datasets/datasets_test.py +++ b/tests/datasets/datasets_test.py @@ -135,7 +135,7 @@ def test_datasets_get_item(): assert datasets["benchmark://foo-v0"] == da -def test_datasets_get_item_default_protocol(): +def test_datasets_get_item_default_scheme(): da = MockDataset("benchmark://foo-v0") datasets = Datasets([da]) diff --git a/tests/datasets/uri_test.py b/tests/datasets/uri_test.py new file mode 100644 index 000000000..f781e5eac --- /dev/null +++ b/tests/datasets/uri_test.py @@ -0,0 +1,99 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+"""Unit tests for compiler_gym.datasets.uri.""" +from compiler_gym.datasets import BenchmarkUri +from tests.test_main import main + +pytest_plugins = ["tests.pytest_plugins.common"] + + +def test_from_string_1(): + uri = BenchmarkUri.from_string("benchmark://test-v0") + assert uri.scheme == "benchmark" + assert uri.dataset == "test-v0" + assert uri.path == "" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "benchmark://test-v0" + + +def test_from_string_2(): + uri = BenchmarkUri.from_string("test-v0") + assert uri.scheme == "benchmark" + assert uri.dataset == "test-v0" + assert uri.path == "" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "benchmark://test-v0" + + +def test_from_string_3(): + uri = BenchmarkUri.from_string("benchmark://test-v0") + assert uri.scheme == "benchmark" + assert uri.dataset == "test-v0" + assert uri.path == "" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "benchmark://test-v0" + + +def test_from_string_4(): + uri = BenchmarkUri.from_string( + "generator://csmith-v0/this path has whitespace/in/it" + ) + assert uri.scheme == "generator" + assert uri.dataset == "csmith-v0" + assert uri.path == "/this path has whitespace/in/it" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "generator://csmith-v0/this path has whitespace/in/it" + + +def test_from_string_5(): + uri = BenchmarkUri.from_string("generator://csmith-v0/0") + assert uri.scheme == "generator" + assert uri.dataset == "csmith-v0" + assert uri.path == "/0" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "generator://csmith-v0/0" + + +def test_from_string_6(): + uri = BenchmarkUri.from_string("generator://csmith-v0?a=b&c=d#foo") + assert uri.scheme == "generator" + assert uri.dataset == "csmith-v0" + assert uri.path == "" + assert uri.params == {"a": ["b"], "c": ["d"]} + assert uri.fragment == "foo" + assert str(uri) == "generator://csmith-v0?a=b&c=d#foo" + + +def test_from_string_7(): + uri = BenchmarkUri.from_string("") + assert uri.scheme == "benchmark" + assert uri.dataset == "" + assert uri.path == "" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "benchmark:" + + +def test_from_string_8(): + uri = BenchmarkUri.from_string("generator:") + assert uri.scheme == "generator" + assert uri.dataset == "" + assert uri.path == "" + assert uri.params == {} + assert uri.fragment == "" + assert str(uri) == "generator:" + + +def test_canonicalize_1(): + assert BenchmarkUri.canonicalize("test-v0") == "benchmark://test-v0" + + +if __name__ == "__main__": + main() diff --git a/tests/llvm/custom_benchmarks_test.py b/tests/llvm/custom_benchmarks_test.py index d1555ac44..0a15c4c34 100644 --- a/tests/llvm/custom_benchmarks_test.py +++ b/tests/llvm/custom_benchmarks_test.py @@ -83,10 +83,10 @@ def test_invalid_benchmark_path_contents(env: LlvmEnv): env.reset(benchmark=benchmark) -def test_benchmark_path_invalid_protocol(env: LlvmEnv): +def test_benchmark_path_invalid_scheme(env: LlvmEnv): benchmark = Benchmark( BenchmarkProto( - uri="benchmark://new", program=File(uri="invalid_protocol://test") + uri="benchmark://new", program=File(uri="invalid_scheme://test") ), ) @@ -94,7 +94,7 @@ def test_benchmark_path_invalid_protocol(env: LlvmEnv): ValueError, match=( "Invalid benchmark data URI. 
" - 'Only the file:/// protocol is supported: "invalid_protocol://test"' + 'Only the file:/// scheme is supported: "invalid_scheme://test"' ), ): env.reset(benchmark=benchmark) diff --git a/tests/llvm/llvm_benchmarks_test.py b/tests/llvm/llvm_benchmarks_test.py index 0986da359..fe4c5dbcd 100644 --- a/tests/llvm/llvm_benchmarks_test.py +++ b/tests/llvm/llvm_benchmarks_test.py @@ -17,18 +17,18 @@ pytest_plugins = ["tests.pytest_plugins.llvm"] -def test_add_benchmark_invalid_protocol(env: CompilerEnv): +def test_add_benchmark_invalid_scheme(env: CompilerEnv): with pytest.raises(ValueError) as ctx: env.reset( benchmark=Benchmark( BenchmarkProto( - uri="benchmark://foo", program=File(uri="https://invalid/protocol") + uri="benchmark://foo", program=File(uri="https://invalid/scheme") ), ) ) assert str(ctx.value) == ( "Invalid benchmark data URI. " - 'Only the file:/// protocol is supported: "https://invalid/protocol"' + 'Only the file:/// scheme is supported: "https://invalid/scheme"' ) From 5bd7213e6346c3629e50f3e1b7003aef1697efe7 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 14:57:33 +0000 Subject: [PATCH 079/142] [Makefile] Build docs from source dir to prevent import error. --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 46f0202db..eabaeff5a 100644 --- a/Makefile +++ b/Makefile @@ -278,10 +278,10 @@ doxygen-rst: cd docs && $(PYTHON) generate_cc_rst.py docs: gendocs bazel-build doxygen - PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-build -M html docs/source docs/build $(SPHINXOPTS) + cd docs/source && PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-build -M html . ../build $(SPHINXOPTS) livedocs: gendocs doxygen - PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-autobuild docs/source docs/build $(SPHINXOPTS) --pre-build 'make gendocs bazel-build doxygen' --watch compiler_gym + cd docs/source && PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-autobuild . ../build $(SPHINXOPTS) --pre-build 'make -C ../.. gendocs bazel-build doxygen' --watch ../../compiler_gym .PHONY: doxygen doxygen-rst From 51be732dc9151e4e8f8f51a66a895210b62c8af3 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 16:35:08 +0000 Subject: [PATCH 080/142] [datasets] Accept BenchmarkUri instance in Benchmark.from_file() Issue #524. --- compiler_gym/datasets/benchmark.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler_gym/datasets/benchmark.py b/compiler_gym/datasets/benchmark.py index 9d53503dd..b0c9730a1 100644 --- a/compiler_gym/datasets/benchmark.py +++ b/compiler_gym/datasets/benchmark.py @@ -6,6 +6,7 @@ from pathlib import Path from typing import Callable, Iterable, List, NamedTuple, Optional, Union +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.service.proto import Benchmark as BenchmarkProto from compiler_gym.service.proto import File from compiler_gym.util import thread_pool @@ -263,7 +264,7 @@ def write_sources_to_directory(self, directory: Path) -> int: return len(uniq_paths) @classmethod - def from_file(cls, uri: str, path: Path): + def from_file(cls, uri: Union[str, BenchmarkUri], path: Path): """Construct a benchmark from a file. :param uri: The URI of the benchmark. @@ -284,10 +285,10 @@ def from_file(cls, uri: str, path: Path): # don't share a filesystem. 
with open(path, "rb") as f: contents = f.read() - return cls(proto=BenchmarkProto(uri=uri, program=File(contents=contents))) + return cls(proto=BenchmarkProto(uri=str(uri), program=File(contents=contents))) @classmethod - def from_file_contents(cls, uri: str, data: bytes): + def from_file_contents(cls, uri: Union[str, BenchmarkUri], data: bytes): """Construct a benchmark from raw data. :param uri: The URI of the benchmark. @@ -295,7 +296,7 @@ def from_file_contents(cls, uri: str, data: bytes): :param data: An array of bytes that will be passed to the compiler service. """ - return cls(proto=BenchmarkProto(uri=uri, program=File(contents=data))) + return cls(proto=BenchmarkProto(uri=str(uri), program=File(contents=data))) def __eq__(self, other: Union[str, "Benchmark"]): if isinstance(other, Benchmark): From 434f4582bed4a5ea0d98138520ce3c6e91d3eaf2 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 16:36:33 +0000 Subject: [PATCH 081/142] [datasets] Implement benchmark_from_parsed_uri(). --- compiler_gym/datasets/files_dataset.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/compiler_gym/datasets/files_dataset.py b/compiler_gym/datasets/files_dataset.py index 5b4e31883..8fc5f8bae 100644 --- a/compiler_gym/datasets/files_dataset.py +++ b/compiler_gym/datasets/files_dataset.py @@ -9,6 +9,7 @@ import numpy as np from compiler_gym.datasets.dataset import Benchmark, Dataset +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.util.decorators import memoized_property @@ -111,14 +112,19 @@ def benchmark_uris(self) -> Iterable[str]: else: yield from self._benchmark_uris_iter - def benchmark(self, uri: str) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - relpath = f"{uri[len(self.name) + 1:]}{self.benchmark_file_suffix}" - abspath = self.dataset_root / relpath - if not abspath.is_file(): - raise LookupError(f"Benchmark not found: {uri} (file not found: {abspath})") - return self.benchmark_class.from_file(uri, abspath) + path = Path( + # Use normpath() rather than joinpath() because uri.path may start + # with a leading '/'. + os.path.normpath( + f"{self.dataset_root}/{uri.path}{self.benchmark_file_suffix}" + ) + ) + if not path.is_file(): + raise LookupError(f"Benchmark not found: {uri} (file not found: {path})") + return self.benchmark_class.from_file(uri, path) def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark: return self.benchmark(random_state.choice(list(self.benchmark_uris()))) From 669ed0353f085f1e4dfbc6531b3cf38b142e2dcc Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 17:28:34 +0000 Subject: [PATCH 082/142] Update benchmark URI parsing logic to BenchmarkUri class. 
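The cases in tests/datasets/uri_test.py above show the intent of the new
parser. As a rough sketch of the behaviour this patch builds on (the URI
below is illustrative, combining the tested cases), BenchmarkUri.from_string()
splits a URI into scheme, dataset, path, params, and fragment, applying the
default "benchmark" scheme when none is given:

    from compiler_gym.datasets import BenchmarkUri

    uri = BenchmarkUri.from_string("generator://csmith-v0/0?a=b&c=d#foo")
    assert uri.scheme == "generator"
    assert uri.dataset == "csmith-v0"
    assert uri.path == "/0"
    assert uri.params == {"a": ["b"], "c": ["d"]}
    assert uri.fragment == "foo"

    # canonicalize() applies the default scheme and round-trips via str().
    assert BenchmarkUri.canonicalize("test-v0") == "benchmark://test-v0"

Dataset lookups below key on the "scheme://dataset" pair (see
_dataset_key_from_uri()), while the rest of the parsed URI is handed to
Dataset.benchmark_from_parsed_uri() to resolve an individual benchmark.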
--- compiler_gym/compiler_env_state.py | 2 - compiler_gym/datasets/benchmark.py | 4 +- compiler_gym/datasets/datasets.py | 61 +++++++++++++------ compiler_gym/datasets/uri.py | 11 +++- compiler_gym/envs/compiler_env.py | 2 +- compiler_gym/envs/gcc/datasets/chstone.py | 14 ++--- compiler_gym/envs/gcc/datasets/csmith.py | 5 +- compiler_gym/envs/llvm/datasets/anghabench.py | 5 +- compiler_gym/envs/llvm/datasets/cbench.py | 39 ++++++------ compiler_gym/envs/llvm/datasets/chstone.py | 5 +- compiler_gym/envs/llvm/datasets/clgen.py | 5 +- compiler_gym/envs/llvm/datasets/csmith.py | 6 +- .../envs/llvm/datasets/llvm_stress.py | 6 +- compiler_gym/envs/llvm/datasets/poj104.py | 11 +++- compiler_gym/envs/loop_tool/__init__.py | 9 +-- .../example_compiler_gym_service/__init__.py | 9 +-- .../demo_without_bazel.py | 7 ++- .../example_unrolling_service/__init__.py | 11 ++-- examples/llvm_autotuning/benchmarks.py | 10 ++- examples/llvm_rl/model/benchmarks.py | 8 +-- tests/datasets/dataset_test.py | 8 +-- tests/datasets/datasets_test.py | 7 ++- tests/gcc/gcc_env_test.py | 4 +- tests/llvm/custom_benchmarks_test.py | 7 ++- 24 files changed, 155 insertions(+), 101 deletions(-) diff --git a/compiler_gym/compiler_env_state.py b/compiler_gym/compiler_env_state.py index 90c7ea80e..b8ec68a48 100644 --- a/compiler_gym/compiler_env_state.py +++ b/compiler_gym/compiler_env_state.py @@ -9,7 +9,6 @@ from pydantic import BaseModel, Field, validator -from compiler_gym.datasets.uri import BENCHMARK_URI_PATTERN from compiler_gym.util.truncate import truncate @@ -23,7 +22,6 @@ class CompilerEnvState(BaseModel): benchmark: str = Field( allow_mutation=False, - regex=BENCHMARK_URI_PATTERN, examples=[ "benchmark://cbench-v1/crc32", "generator://csmith-v0/0", diff --git a/compiler_gym/datasets/benchmark.py b/compiler_gym/datasets/benchmark.py index b0c9730a1..6bfab49a1 100644 --- a/compiler_gym/datasets/benchmark.py +++ b/compiler_gym/datasets/benchmark.py @@ -103,7 +103,7 @@ def __hash__(self) -> int: return hash(self.uri) @property - def uri(self) -> str: + def uri(self) -> BenchmarkUri: """The URI of the benchmark. Benchmark URIs should be unique, that is, that two URIs with the same @@ -113,7 +113,7 @@ def uri(self) -> str: :return: A URI string. :type: string """ - return self._proto.uri + return BenchmarkUri.from_string(self._proto.uri) @property def proto(self) -> BenchmarkProto: diff --git a/compiler_gym/datasets/datasets.py b/compiler_gym/datasets/datasets.py index 9b9e7b319..28ee7307f 100644 --- a/compiler_gym/datasets/datasets.py +++ b/compiler_gym/datasets/datasets.py @@ -9,7 +9,7 @@ from compiler_gym.datasets.benchmark import Benchmark from compiler_gym.datasets.dataset import Dataset -from compiler_gym.datasets.uri import BENCHMARK_URI_RE, BenchmarkUri +from compiler_gym.datasets.uri import BenchmarkUri T = TypeVar("T") @@ -131,14 +131,34 @@ def dataset(self, dataset: str) -> Dataset: :raises LookupError: If :code:`dataset` is not found. """ - uri = BenchmarkUri.from_string(dataset) - key = f"{uri.scheme}://{uri.dataset}" + return self.dataset_from_parsed_uri(BenchmarkUri.from_string(dataset)) + + def dataset_from_parsed_uri(self, uri: BenchmarkUri) -> Dataset: + """Get a dataset. + + Return the corresponding :meth:`Dataset + `. Name lookup will succeed whether or + not the dataset is deprecated. + + :param uri: A parsed URI. + + :return: A :meth:`Dataset ` instance. + + :raises LookupError: If :code:`dataset` is not found. 
+ """ + key = self._dataset_key_from_uri(uri) if key not in self._datasets: raise LookupError(f"Dataset not found: {key}") return self._datasets[key] + @staticmethod + def _dataset_key_from_uri(uri: BenchmarkUri) -> str: + if not (uri.scheme and uri.dataset): + raise ValueError(f"Invalid benchmark URI: '{uri}'") + return f"{uri.scheme}://{uri.dataset}" + def __getitem__(self, dataset: str) -> Dataset: """Lookup a dataset. @@ -156,11 +176,11 @@ def __setitem__(self, key: str, dataset: Dataset): :param key: The name of the dataset. :param dataset: The dataset to add. """ - dataset_name = BenchmarkUri.canonicalize(key) + key = self._dataset_key_from_uri(BenchmarkUri.from_string(key)) - self._datasets[dataset_name] = dataset + self._datasets[key] = dataset if not dataset.deprecated: - self._visible_datasets.add(dataset_name) + self._visible_datasets.add(key) def __delitem__(self, dataset: str): """Remove a dataset from the collection. @@ -174,10 +194,11 @@ def __delitem__(self, dataset: str): :return: :code:`True` if the dataset was removed, :code:`False` if it was already removed. """ - dataset_name = BenchmarkUri.canonicalize(dataset) - if dataset_name in self._visible_datasets: - self._visible_datasets.remove(dataset_name) - del self._datasets[dataset_name] + key = self._dataset_key_from_uri(BenchmarkUri.from_string(dataset)) + + if key in self._visible_datasets: + self._visible_datasets.remove(key) + del self._datasets[key] def __contains__(self, dataset: str) -> bool: """Returns whether the dataset is contained.""" @@ -243,16 +264,22 @@ def benchmark(self, uri: str) -> Benchmark: :return: A :class:`Benchmark ` instance. """ - uri = BenchmarkUri.canonicalize(uri) + return self.benchmark_from_parsed_uri(BenchmarkUri.from_string(uri)) - match = BENCHMARK_URI_RE.match(uri) - if not match: - raise ValueError(f"Invalid benchmark URI: '{uri}'") + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + """Select a benchmark. + + Returns the corresponding :class:`Benchmark + `, regardless of whether the containing + dataset is installed or deprecated. - dataset_name = match.group("dataset") - dataset = self._datasets[dataset_name] + :param uri: The parsed URI of the benchmark to return. - return dataset.benchmark(uri) + :return: A :class:`Benchmark ` + instance. + """ + dataset = self.dataset_from_parsed_uri(uri) + return dataset.benchmark_from_parsed_uri(uri) def random_benchmark( self, diff --git a/compiler_gym/datasets/uri.py b/compiler_gym/datasets/uri.py index 265aac4d1..6dfb34e23 100644 --- a/compiler_gym/datasets/uri.py +++ b/compiler_gym/datasets/uri.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. 
"""This module contains utility code for working with URIs.""" import re -from typing import Dict, List +from typing import Dict, List, Union from urllib.parse import ParseResult, parse_qs, urlencode, urlparse, urlunparse from deprecated.sphinx import deprecated @@ -141,3 +141,12 @@ def __repr__(self): def __str__(self) -> str: return repr(self) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: Union["BenchmarkUri", str]) -> bool: + return str(self) == str(other) + + def __lt__(self, other: Union["BenchmarkUri", str]) -> bool: + return str(self) < str(other) diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index 1eb00e774..f95df1fdb 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -811,7 +811,7 @@ def _call_with_error( if self.service.opts.always_send_benchmark_on_reset: self._benchmark_in_use_proto = self._benchmark_in_use.proto else: - self._benchmark_in_use_proto.uri = self._benchmark_in_use.uri + self._benchmark_in_use_proto.uri = str(self._benchmark_in_use.uri) start_session_request = StartSessionRequest( benchmark=self._benchmark_in_use_proto, diff --git a/compiler_gym/envs/gcc/datasets/chstone.py b/compiler_gym/envs/gcc/datasets/chstone.py index c48ce2ae9..26c2756af 100644 --- a/compiler_gym/envs/gcc/datasets/chstone.py +++ b/compiler_gym/envs/gcc/datasets/chstone.py @@ -2,10 +2,12 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import os from pathlib import Path from typing import Iterable from compiler_gym.datasets import Benchmark, TarDatasetWithManifest +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.gcc.gcc import Gcc from compiler_gym.util.decorators import memoized_property from compiler_gym.util.filesystem import atomic_file_write @@ -82,13 +84,9 @@ def gcc(self): # expensive. return Gcc(bin=self.gcc_bin) - def benchmark(self, uri: str) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - benchmark_name = uri[len(self.name) + 1 :] - if not benchmark_name: - raise LookupError(f"No benchmark specified: {uri}") - # Most of the source files are named after the parent directory, but not # all. c_file_name = { @@ -96,8 +94,8 @@ def benchmark(self, uri: str) -> Benchmark: "motion": "mpeg2.c", "sha": "sha_driver.c", "jpeg": "main.c", - }.get(benchmark_name, f"{benchmark_name}.c") - source_dir_path = self.dataset_root / benchmark_name + }.get(uri.path[1:], f"{uri.path[1:]}.c") + source_dir_path = Path(os.path.normpath(f"{self.dataset_root}/{uri.path}")) source_path = source_dir_path / c_file_name preprocessed_path = source_dir_path / "src.c" @@ -112,7 +110,7 @@ def benchmark(self, uri: str) -> Benchmark: # TODO(github.com/facebookresearch/CompilerGym/issues/325): Send # over the unprocessed code to the service, have the service # preprocess. Until then, we do it client side with GCC having - # to fixed by an environment variable + # to be fixed by an environment variable. 
self.gcc( "-E", "-o", diff --git a/compiler_gym/envs/gcc/datasets/csmith.py b/compiler_gym/envs/gcc/datasets/csmith.py index 2041e4d60..cba4c646a 100644 --- a/compiler_gym/envs/gcc/datasets/csmith.py +++ b/compiler_gym/envs/gcc/datasets/csmith.py @@ -15,6 +15,7 @@ from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset from compiler_gym.datasets.benchmark import BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.gcc.gcc import Gcc from compiler_gym.util.decorators import memoized_property from compiler_gym.util.runfiles_path import runfiles_path @@ -139,8 +140,8 @@ def gcc(self): def benchmark_uris(self) -> Iterable[str]: return (f"{self.name}/{i}" for i in range(UINT_MAX)) - def benchmark(self, uri: str) -> CsmithBenchmark: - return self.benchmark_from_seed(int(uri.split("/")[-1])) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> CsmithBenchmark: + return self.benchmark_from_seed(int(uri.path[1:])) def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark: seed = random_state.integers(UINT_MAX) diff --git a/compiler_gym/envs/llvm/datasets/anghabench.py b/compiler_gym/envs/llvm/datasets/anghabench.py index ecee29f6a..38d0eba04 100644 --- a/compiler_gym/envs/llvm/datasets/anghabench.py +++ b/compiler_gym/envs/llvm/datasets/anghabench.py @@ -10,6 +10,7 @@ from compiler_gym.datasets import Benchmark, TarDatasetWithManifest from compiler_gym.datasets.benchmark import BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.util import thread_pool from compiler_gym.util.filesystem import atomic_file_write @@ -80,10 +81,10 @@ def __init__( deprecated=deprecated, ) - def benchmark(self, uri: str) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - benchmark_name = uri[len(self.name) + 1 :] + benchmark_name = uri.path[1:] if not benchmark_name: raise LookupError(f"No benchmark specified: {uri}") diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index ff58f85c7..6c66b0e7b 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -20,6 +20,7 @@ import fasteners from compiler_gym.datasets import Benchmark, TarDatasetWithManifest +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.service.proto import BenchmarkDynamicConfig, Command from compiler_gym.third_party import llvm from compiler_gym.util.download import download @@ -494,7 +495,8 @@ def validator( # Create the BenchmarkDynamicConfig object. cbench_data = site_data_path("llvm-v0/cbench-v1-runtime-data/runtime_data") - DYNAMIC_CONFIGS[benchmark].append( + uri = BenchmarkUri.from_string(benchmark) + DYNAMIC_CONFIGS[uri.path].append( BenchmarkDynamicConfig( build_cmd=Command( argument=["$CC", "$IN"] + linkopts, @@ -518,20 +520,6 @@ def validator( return True -class CBenchBenchmark(Benchmark): - """A cBench benchmmark.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - for val in VALIDATORS.get(self.uri, []): - self.add_validation_callback(val) - if DYNAMIC_CONFIGS[self.uri]: - # TODO(github.com/facebookresearch/CompilerGym/issues/370): Add - # support for multiple datasets. 
- config = DYNAMIC_CONFIGS[self.uri][-1] - self.proto.dynamic_config.MergeFrom(config) - - class CBenchDataset(TarDatasetWithManifest): def __init__(self, site_data_base: Path): platform = {"darwin": "macos"}.get(sys.platform, sys.platform) @@ -552,7 +540,7 @@ def __init__(self, site_data_base: Path): manifest_sha256="eeffd7593aeb696a160fd22e6b0c382198a65d0918b8440253ea458cfe927741", strip_prefix="cBench-v1", benchmark_file_suffix=".bc", - benchmark_class=CBenchBenchmark, + benchmark_class=Benchmark, site_data_base=site_data_base, sort_order=-1, validatable="Partially", @@ -564,6 +552,20 @@ def install(self): with fasteners.InterProcessLock(cache_path(".cbench-v1-runtime-data.LOCK")): download_cBench_runtime_data() + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + benchmark = super().benchmark_from_parsed_uri(uri) + + for val in VALIDATORS.get(str(uri), []): + self.add_validation_callback(val) + + if DYNAMIC_CONFIGS[uri.path]: + # TODO(github.com/facebookresearch/CompilerGym/issues/370): Add + # support for multiple datasets. + config = DYNAMIC_CONFIGS[uri.path][-1] + self.proto.dynamic_config.MergeFrom(config) + + return benchmark + class CBenchLegacyDataset2(TarDatasetWithManifest): def __init__( @@ -593,7 +595,6 @@ def __init__( benchmark_file_suffix=".bc", site_data_base=site_data_base, sort_order=sort_order, - benchmark_class=CBenchBenchmark, deprecated=deprecated, validatable="Partially", ) @@ -657,8 +658,8 @@ def __init__(self, site_data_base: Path): ] = defaultdict(list) -# A map from benchmark name to a list of BenchmarkDynamicConfig messages, one -# per dataset. +# A map from cBench benchmark path to a list of BenchmarkDynamicConfig messages, +# one per dataset. DYNAMIC_CONFIGS: Dict[str, List[BenchmarkDynamicConfig]] = defaultdict(list) diff --git a/compiler_gym/envs/llvm/datasets/chstone.py b/compiler_gym/envs/llvm/datasets/chstone.py index 2ef3ca97d..8a40af8a1 100644 --- a/compiler_gym/envs/llvm/datasets/chstone.py +++ b/compiler_gym/envs/llvm/datasets/chstone.py @@ -9,6 +9,7 @@ from compiler_gym.datasets import Benchmark, TarDatasetWithManifest from compiler_gym.datasets.benchmark import BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.util import thread_pool from compiler_gym.util.filesystem import atomic_file_write @@ -74,10 +75,10 @@ def __init__( def benchmark_uris(self) -> Iterable[str]: yield from URIS - def benchmark(self, uri: str) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - benchmark_name = uri[len(self.name) + 1 :] + benchmark_name = uri.path[1:] if not benchmark_name: raise LookupError(f"No benchmark specified: {uri}") diff --git a/compiler_gym/envs/llvm/datasets/clgen.py b/compiler_gym/envs/llvm/datasets/clgen.py index acb4720e0..9e3bd0f4d 100644 --- a/compiler_gym/envs/llvm/datasets/clgen.py +++ b/compiler_gym/envs/llvm/datasets/clgen.py @@ -15,6 +15,7 @@ from compiler_gym.datasets import Benchmark, BenchmarkInitError, TarDatasetWithManifest from compiler_gym.datasets.benchmark import BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.util.download import download from compiler_gym.util.filesystem import atomic_file_write @@ -117,10 +118,10 @@ def install(self): self._opencl_headers_installed_marker.touch() - def benchmark(self, uri: str) -> Benchmark: + def 
benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - benchmark_name = uri[len(self.name) + 1 :] + benchmark_name = uri.path[1:] if not benchmark_name: raise LookupError(f"No benchmark specified: {uri}") diff --git a/compiler_gym/envs/llvm/datasets/csmith.py b/compiler_gym/envs/llvm/datasets/csmith.py index c7adb5220..a19456acb 100644 --- a/compiler_gym/envs/llvm/datasets/csmith.py +++ b/compiler_gym/envs/llvm/datasets/csmith.py @@ -11,6 +11,7 @@ from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset from compiler_gym.datasets.benchmark import BenchmarkInitError, BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.service.proto import BenchmarkDynamicConfig, Command from compiler_gym.util.decorators import memoized_property @@ -149,8 +150,9 @@ def size(self) -> int: def benchmark_uris(self) -> Iterable[str]: return (f"{self.name}/{i}" for i in range(UINT_MAX)) - def benchmark(self, uri: str) -> CsmithBenchmark: - return self.benchmark_from_seed(int(uri.split("/")[-1])) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> CsmithBenchmark: + seed = int(uri.path[1:]) + return self.benchmark_from_seed(seed) def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark: seed = random_state.integers(UINT_MAX) diff --git a/compiler_gym/envs/llvm/datasets/llvm_stress.py b/compiler_gym/envs/llvm/datasets/llvm_stress.py index 949b17c49..2fa2a760b 100644 --- a/compiler_gym/envs/llvm/datasets/llvm_stress.py +++ b/compiler_gym/envs/llvm/datasets/llvm_stress.py @@ -10,6 +10,7 @@ from compiler_gym.datasets import Benchmark, Dataset from compiler_gym.datasets.benchmark import BenchmarkInitError +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.third_party import llvm # The maximum value for the --seed argument to llvm-stress. 
@@ -55,8 +56,9 @@ def size(self) -> int: def benchmark_uris(self) -> Iterable[str]: return (f"{self.name}/{i}" for i in range(UINT_MAX)) - def benchmark(self, uri: str) -> Benchmark: - return self.benchmark_from_seed(int(uri.split("/")[-1])) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + seed = int(uri.path[1:]) + return self.benchmark_from_seed(seed) def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark: seed = random_state.integers(UINT_MAX) diff --git a/compiler_gym/envs/llvm/datasets/poj104.py b/compiler_gym/envs/llvm/datasets/poj104.py index 140d73174..1c2037577 100644 --- a/compiler_gym/envs/llvm/datasets/poj104.py +++ b/compiler_gym/envs/llvm/datasets/poj104.py @@ -9,8 +9,11 @@ from pathlib import Path from typing import Optional +import numpy as np + from compiler_gym.datasets import Benchmark, BenchmarkInitError, TarDatasetWithManifest from compiler_gym.datasets.benchmark import BenchmarkWithSource +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.util import thread_pool from compiler_gym.util.download import download @@ -69,7 +72,7 @@ def __init__(self, site_data_base: Path, sort_order: int = 0): sort_order=sort_order, ) - def benchmark(self, uri: Optional[str] = None) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() if uri is None or len(uri) <= len(self.name) + 1: return self._get_benchmark_by_index(self.random.integers(self.size)) @@ -128,6 +131,12 @@ def benchmark(self, uri: Optional[str] = None) -> Benchmark: return BenchmarkWithSource.create(uri, bitcode_path, "source.cc", cc_file_path) + def random_benchmark( + self, random_state: Optional[np.random.Generator] = None + ) -> Benchmark: + random_state = random_state or np.random.default_rng() + return self._get_benchmark_by_index(random_state.integers(self.size)) + @staticmethod def preprocess_poj104_source(src: str) -> str: """Pre-process a POJ-104 C++ source file for compilation.""" diff --git a/compiler_gym/envs/loop_tool/__init__.py b/compiler_gym/envs/loop_tool/__init__.py index 4367093ae..247c73f31 100644 --- a/compiler_gym/envs/loop_tool/__init__.py +++ b/compiler_gym/envs/loop_tool/__init__.py @@ -7,6 +7,7 @@ from typing import Iterable from compiler_gym.datasets import Benchmark, Dataset, benchmark +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.spaces import Reward from compiler_gym.util.registration import register from compiler_gym.util.runfiles_path import runfiles_path @@ -61,8 +62,8 @@ def __init__(self, *args, **kwargs): def benchmark_uris(self) -> Iterable[str]: return (f"loop_tool-cuda-v0/{i}" for i in range(1, 1024 * 1024 * 8)) - def benchmark(self, uri: str) -> Benchmark: - return Benchmark(proto=benchmark.BenchmarkProto(uri=uri)) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + return Benchmark(proto=benchmark.BenchmarkProto(uri=str(uri))) class LoopToolCPUDataset(Dataset): @@ -76,8 +77,8 @@ def __init__(self, *args, **kwargs): def benchmark_uris(self) -> Iterable[str]: return (f"loop_tool-cpu-v0/{i}" for i in range(1, 1024 * 1024 * 8)) - def benchmark(self, uri: str) -> Benchmark: - return Benchmark(proto=benchmark.BenchmarkProto(uri=uri)) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + return Benchmark(proto=benchmark.BenchmarkProto(uri=str(uri))) register( diff --git a/examples/example_compiler_gym_service/__init__.py 
b/examples/example_compiler_gym_service/__init__.py index f86cbd3ae..c8522b61f 100644 --- a/examples/example_compiler_gym_service/__init__.py +++ b/examples/example_compiler_gym_service/__init__.py @@ -7,6 +7,7 @@ from typing import Iterable from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.spaces import Reward from compiler_gym.util.registration import register from compiler_gym.util.runfiles_path import runfiles_path @@ -60,10 +61,10 @@ def __init__(self, *args, **kwargs): description="An example dataset", ) self._benchmarks = { - "benchmark://example-v0/foo": Benchmark.from_file_contents( + "/foo": Benchmark.from_file_contents( "benchmark://example-v0/foo", "Ir data".encode("utf-8") ), - "benchmark://example-v0/bar": Benchmark.from_file_contents( + "/bar": Benchmark.from_file_contents( "benchmark://example-v0/bar", "Ir data".encode("utf-8") ), } @@ -71,8 +72,8 @@ def __init__(self, *args, **kwargs): def benchmark_uris(self) -> Iterable[str]: yield from self._benchmarks.keys() - def benchmark(self, uri: str) -> Benchmark: - if uri in self._benchmarks: + def benchmark_from_parsed_uris(self, uri: BenchmarkUri) -> Benchmark: + if uri.path in self._benchmarks: return self._benchmarks[uri] else: raise LookupError("Unknown program name") diff --git a/examples/example_compiler_gym_service/demo_without_bazel.py b/examples/example_compiler_gym_service/demo_without_bazel.py index f18aa7d2e..5c58c8d3d 100644 --- a/examples/example_compiler_gym_service/demo_without_bazel.py +++ b/examples/example_compiler_gym_service/demo_without_bazel.py @@ -16,6 +16,7 @@ import gym from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.spaces import Reward from compiler_gym.util.logging import init_logging from compiler_gym.util.registration import register @@ -77,9 +78,9 @@ def __init__(self, *args, **kwargs): def benchmark_uris(self) -> Iterable[str]: yield from self._benchmarks.keys() - def benchmark(self, uri: str) -> Benchmark: - if uri in self._benchmarks: - return self._benchmarks[uri] + def benchmark_from_parsed_uris(self, uri: BenchmarkUri) -> Benchmark: + if uri.path in self._benchmarks: + return self._benchmarks[uri.path] else: raise LookupError("Unknown program name") diff --git a/examples/example_unrolling_service/__init__.py b/examples/example_unrolling_service/__init__.py index 19aa30de2..4838558db 100644 --- a/examples/example_unrolling_service/__init__.py +++ b/examples/example_unrolling_service/__init__.py @@ -8,6 +8,7 @@ from typing import Iterable from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import get_system_includes from compiler_gym.spaces import Reward from compiler_gym.third_party import llvm @@ -89,11 +90,11 @@ def __init__(self, *args, **kwargs): ) self._benchmarks = { - "benchmark://unrolling-v0/offsets1": Benchmark.from_file_contents( + "/offsets1": Benchmark.from_file_contents( "benchmark://unrolling-v0/offsets1", self.preprocess(BENCHMARKS_PATH / "offsets1.c"), ), - "benchmark://unrolling-v0/conv2d": Benchmark.from_file_contents( + "/conv2d": Benchmark.from_file_contents( "benchmark://unrolling-v0/conv2d", self.preprocess(BENCHMARKS_PATH / "conv2d.c"), ), @@ -124,9 +125,9 @@ def preprocess(src: Path) -> bytes: def benchmark_uris(self) -> Iterable[str]: yield from self._benchmarks.keys() - def benchmark(self, uri: str) -> Benchmark: - if uri in 
self._benchmarks: - return self._benchmarks[uri] + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: + if uri.path in self._benchmarks: + return self._benchmarks[uri.path] else: raise LookupError("Unknown program name") diff --git a/examples/llvm_autotuning/benchmarks.py b/examples/llvm_autotuning/benchmarks.py index 88bcfb5a8..93175ea46 100644 --- a/examples/llvm_autotuning/benchmarks.py +++ b/examples/llvm_autotuning/benchmarks.py @@ -7,8 +7,7 @@ from pydantic import BaseModel, Field, root_validator, validator -from compiler_gym.datasets import Benchmark -from compiler_gym.datasets.uri import BENCHMARK_URI_RE, DATASET_NAME_PATTERN +from compiler_gym.datasets import Benchmark, BenchmarkUri from compiler_gym.envs.llvm import LlvmEnv @@ -17,7 +16,7 @@ class BenchmarksEntry(BaseModel): # === Start of fields list. === - dataset: str = Field(default=None, allow_mutation=False, regex=DATASET_NAME_PATTERN) + dataset: str = Field(default=None, allow_mutation=False) """The name of a dataset to iterate over. If set, benchmarks are produced by iterating over this dataset in order. If not set, the :code:`uris` list must be provided. @@ -62,9 +61,8 @@ def validate_uris(cls, value, *, values, **kwargs): del kwargs del values for uri in value: - assert BENCHMARK_URI_RE.match(uri) or uri.startswith( - "file:///" - ), f"Invalid benchmark URI: {uri}" + uri = BenchmarkUri.from_string(uri) + assert uri.scheme and uri.dataset, f"Invalid benchmark URI: {uri}" return list(value) def _benchmark_iterator( diff --git a/examples/llvm_rl/model/benchmarks.py b/examples/llvm_rl/model/benchmarks.py index 85c620e0c..b707564e7 100644 --- a/examples/llvm_rl/model/benchmarks.py +++ b/examples/llvm_rl/model/benchmarks.py @@ -7,8 +7,7 @@ from pydantic import BaseModel, Field, root_validator, validator -from compiler_gym.datasets import Benchmark -from compiler_gym.datasets.uri import BENCHMARK_URI_RE, DATASET_NAME_PATTERN +from compiler_gym.datasets import Benchmark, BenchmarkUri from compiler_gym.envs import CompilerEnv @@ -32,7 +31,7 @@ class Benchmarks(BaseModel): # === Start of fields list. === - dataset: str = Field(default=None, allow_mutation=False, regex=DATASET_NAME_PATTERN) + dataset: str = Field(default=None, allow_mutation=False) """The name of a dataset to iterate over. If set, benchmarks are produced by iterating over this dataset in order. If not set, the :code:`uris` list must be provided. 
@@ -76,7 +75,8 @@ def check_that_either_dataset_or_uris_is_set(cls, values): def validate_uris(cls, value, *, values, **kwargs): del kwargs for uri in value: - assert BENCHMARK_URI_RE.match(uri), f"Invalid benchmark URI: {uri}" + uri = BenchmarkUri.from_string(uri) + assert uri.scheme and uri.dataset, f"Invalid benchmark URI: {uri}" return list(value) def _benchmark_iterator( diff --git a/tests/datasets/dataset_test.py b/tests/datasets/dataset_test.py index c9a4c16bf..5c9e7a175 100644 --- a/tests/datasets/dataset_test.py +++ b/tests/datasets/dataset_test.py @@ -8,6 +8,7 @@ import pytest from compiler_gym.datasets.dataset import Dataset +from compiler_gym.datasets.uri import BenchmarkUri from tests.test_main import main pytest_plugins = ["tests.pytest_plugins.common"] @@ -201,11 +202,8 @@ def __init__(self, benchmarks=None): def benchmark_uris(self): return sorted(self._benchmarks) - def benchmark(self, uri): - if uri: - return self._benchmarks[uri] - else: - return next(iter(self._benchmarks.values())) + def benchmark_from_parsed_uri(self, uri: BenchmarkUri): + return self._benchmarks[str(uri)] @property def size(self): diff --git a/tests/datasets/datasets_test.py b/tests/datasets/datasets_test.py index da2b65244..54b9b9348 100644 --- a/tests/datasets/datasets_test.py +++ b/tests/datasets/datasets_test.py @@ -7,6 +7,7 @@ import pytest from compiler_gym.datasets.datasets import Datasets, round_robin_iterables +from compiler_gym.datasets.uri import BenchmarkUri from tests.test_main import main pytest_plugins = ["tests.pytest_plugins.common"] @@ -34,11 +35,11 @@ def benchmark_uris(self): def benchmarks(self): yield from self.benchmark_values - def benchmark(self, uri): + def benchmark_from_parsed_uri(self, uri: BenchmarkUri): for b in self.benchmark_values: - if b.uri == uri: + if b.uri == str(uri): return b - raise KeyError(uri) + raise KeyError(str(uri)) def random_benchmark(self, random_state=None): return random_state.choice(self.benchmark_values) diff --git a/tests/gcc/gcc_env_test.py b/tests/gcc/gcc_env_test.py index fecf82776..90f719a3e 100644 --- a/tests/gcc/gcc_env_test.py +++ b/tests/gcc/gcc_env_test.py @@ -116,7 +116,9 @@ def test_reward_before_reset(gcc_bin: str): def test_reset_invalid_benchmark(gcc_bin: str): """Test requesting a specific benchmark.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: - with pytest.raises(LookupError, match=r"'benchmark://chstone-v1"): + with pytest.raises( + LookupError, match=r"Dataset not found: benchmark://chstone-v1" + ): env.reset(benchmark="chstone-v1/flubbedydubfishface") diff --git a/tests/llvm/custom_benchmarks_test.py b/tests/llvm/custom_benchmarks_test.py index 0a15c4c34..1e2c32cef 100644 --- a/tests/llvm/custom_benchmarks_test.py +++ b/tests/llvm/custom_benchmarks_test.py @@ -12,7 +12,6 @@ import pytest from compiler_gym.datasets import Benchmark -from compiler_gym.datasets.uri import BENCHMARK_URI_RE from compiler_gym.envs import LlvmEnv, llvm from compiler_gym.service.proto import Benchmark as BenchmarkProto from compiler_gym.service.proto import File @@ -117,7 +116,8 @@ def test_make_benchmark_single_bitcode(env: LlvmEnv): benchmark = llvm.make_benchmark(EXAMPLE_BITCODE_FILE) assert benchmark == f"benchmark://file-v0{EXAMPLE_BITCODE_FILE}" - assert BENCHMARK_URI_RE.match(benchmark.uri) + assert benchmark.uri.scheme == "benchmark" + assert benchmark.uri.dataset == "file-v0" with open(EXAMPLE_BITCODE_FILE, "rb") as f: contents = f.read() @@ -134,7 +134,8 @@ def test_make_benchmark_single_ll(): """Test passing a single .ll file into 
make_benchmark().""" benchmark = llvm.make_benchmark(INVALID_IR_PATH) assert benchmark.uri.startswith("benchmark://user-v0/") - assert BENCHMARK_URI_RE.match(benchmark.uri) + assert benchmark.uri.scheme == "benchmark" + assert benchmark.uri.dataset == "user-v0" def test_make_benchmark_single_clang_job(env: LlvmEnv): From f9529154c106cae93e6197d1e2579d4a2172f028 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 17:34:57 +0000 Subject: [PATCH 083/142] [llvm] Add support for multiple runtime datasets to cBench. Issue #370. --- compiler_gym/envs/llvm/datasets/cbench.py | 20 +++++++++++++++----- tests/llvm/datasets/cbench_test.py | 17 +++++++++++++++++ 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index 6c66b0e7b..68db1f231 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -556,13 +556,23 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: benchmark = super().benchmark_from_parsed_uri(uri) for val in VALIDATORS.get(str(uri), []): - self.add_validation_callback(val) + benchmark.add_validation_callback(val) + # Parse the "dataset" parameter to determine the correct dynamic + # configuration to use. if DYNAMIC_CONFIGS[uri.path]: - # TODO(github.com/facebookresearch/CompilerGym/issues/370): Add - # support for multiple datasets. - config = DYNAMIC_CONFIGS[uri.path][-1] - self.proto.dynamic_config.MergeFrom(config) + cfgs = DYNAMIC_CONFIGS[uri.path] + dataset = uri.params.get("dataset", ["0"]) + + try: + dataset_index = int(dataset[-1]) + except (ValueError, TypeError) as e: + raise ValueError(f"Invalid dataset: {dataset[-1]}") from e + + if dataset_index < 0 or dataset_index >= len(cfgs): + raise ValueError(f"Invalid dataset: {dataset_index}") + + benchmark.proto.dynamic_config.MergeFrom(cfgs[dataset_index]) return benchmark diff --git a/tests/llvm/datasets/cbench_test.py b/tests/llvm/datasets/cbench_test.py index 13543ee0b..17736d365 100644 --- a/tests/llvm/datasets/cbench_test.py +++ b/tests/llvm/datasets/cbench_test.py @@ -84,5 +84,22 @@ def test_cbench_v1_deprecation(env: LlvmEnv): env.datasets.benchmark("benchmark://cBench-v1/crc32") +def test_cbench_v1_dataset_param(env: LlvmEnv): + a = env.datasets.benchmark("cbench-v1/qsort?dataset=0") + b = env.datasets.benchmark("cbench-v1/qsort?dataset=0") # same as a + c = env.datasets.benchmark("cbench-v1/qsort?dataset=1") + + assert a.proto.dynamic_config == b.proto.dynamic_config # sanity check + assert a.proto.dynamic_config != c.proto.dynamic_config # sanity check + + +def test_cbench_v1_dataset_out_of_range(env: LlvmEnv): + with pytest.raises(ValueError, match="Invalid dataset: 50"): + env.datasets.benchmark("cbench-v1/qsort?dataset=50") + + with pytest.raises(ValueError, match="Invalid dataset: abc"): + env.datasets.benchmark("cbench-v1/qsort?dataset=abc") + + if __name__ == "__main__": main() From e396261ca1156c9df70d80d730360a8a2be3aaab Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 18:05:33 +0000 Subject: [PATCH 084/142] [datasets] Add support for proto:// and file:// URIs. 
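A rough usage sketch of the two new handlers, mirroring the unit tests added
below (the /tmp paths and file contents here are purely illustrative):

    from compiler_gym.datasets import Datasets

    datasets = Datasets(datasets={})

    # A file:// benchmark wraps the raw bytes of an on-disk file.
    with open("/tmp/program.bc", "wb") as f:
        f.write(b"placeholder program data")
    benchmark = datasets.benchmark("file:///tmp/program.bc")
    assert benchmark.proto.program.contents == b"placeholder program data"

    # A proto:// benchmark deserializes a previously saved Benchmark
    # protocol buffer.
    with open("/tmp/benchmark.pb", "wb") as f:
        f.write(benchmark.proto.SerializeToString())
    assert datasets.benchmark("proto:///tmp/benchmark.pb").proto == benchmark.proto

Both schemes are resolved directly in Datasets.benchmark_from_parsed_uri(),
before any dataset lookup, so they work without registering a Dataset.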
--- compiler_gym/datasets/datasets.py | 21 ++++++++++++++++ tests/datasets/datasets_test.py | 40 +++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/compiler_gym/datasets/datasets.py b/compiler_gym/datasets/datasets.py index 28ee7307f..05ecf0e45 100644 --- a/compiler_gym/datasets/datasets.py +++ b/compiler_gym/datasets/datasets.py @@ -2,7 +2,9 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +import os from collections import deque +from pathlib import Path from typing import Dict, Iterable, List, Optional, Set, TypeVar import numpy as np @@ -10,6 +12,7 @@ from compiler_gym.datasets.benchmark import Benchmark from compiler_gym.datasets.dataset import Dataset from compiler_gym.datasets.uri import BenchmarkUri +from compiler_gym.service.proto import Benchmark as BenchmarkProto T = TypeVar("T") @@ -278,6 +281,24 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: :return: A :class:`Benchmark ` instance. """ + if uri.scheme == "proto": + path = Path(os.path.normpath(f"{uri.dataset}/{uri.path}")) + if not path.is_file(): + raise FileNotFoundError(str(path)) + + proto = BenchmarkProto() + with open(path, "rb") as f: + proto.ParseFromString(f.read()) + + return Benchmark(proto=proto) + + if uri.scheme == "file": + path = Path(os.path.normpath(f"{uri.dataset}/{uri.path}")) + if not path.is_file(): + raise FileNotFoundError(str(path)) + + return Benchmark.from_file(uri=uri, path=path) + dataset = self.dataset_from_parsed_uri(uri) return dataset.benchmark_from_parsed_uri(uri) diff --git a/tests/datasets/datasets_test.py b/tests/datasets/datasets_test.py index 54b9b9348..979270058 100644 --- a/tests/datasets/datasets_test.py +++ b/tests/datasets/datasets_test.py @@ -3,11 +3,14 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
"""Unit tests for //compiler_gym/datasets.""" +from pathlib import Path + import numpy as np import pytest from compiler_gym.datasets.datasets import Datasets, round_robin_iterables from compiler_gym.datasets.uri import BenchmarkUri +from compiler_gym.service.proto import Benchmark as BenchmarkProto from tests.test_main import main pytest_plugins = ["tests.pytest_plugins.common"] @@ -269,5 +272,42 @@ def test_random_benchmark(mocker, weighted: bool): assert next(iter(random_benchmarks)) == "benchmark://foo-v0/abc" +def test_dataset_proto_scheme(tmpdir): + """Test the proto:// scheme handler.""" + tmpdir = Path(tmpdir) + datasets = Datasets(datasets={}) + + proto = BenchmarkProto(uri="hello world") + with open(tmpdir / "file.pb", "wb") as f: + f.write(proto.SerializeToString()) + + benchmark = datasets.benchmark(f"proto://{tmpdir}/file.pb") + + assert benchmark.proto.uri == "hello world" + assert benchmark.uri == "benchmark://hello world" + + +def test_dataset_proto_scheme_file_not_found(tmpdir): + tmpdir = Path(tmpdir) + datasets = Datasets(datasets={}) + with pytest.raises(FileNotFoundError): + datasets.benchmark(f"proto://{tmpdir}/not_a_file") + + +def test_dataset_file_scheme(tmpdir): + """Test the file:// scheme handler.""" + tmpdir = Path(tmpdir) + datasets = Datasets(datasets={}) + + with open(tmpdir / "file.dat", "w") as f: + f.write("hello, world") + + benchmark = datasets.benchmark(f"file://{tmpdir}/file.dat") + + assert benchmark.proto.uri == f"file://{tmpdir}/file.dat" + assert benchmark.proto.program.contents == b"hello, world" + assert benchmark.uri == f"file://{tmpdir}/file.dat" + + if __name__ == "__main__": main() From 7c78fc6dfbaa0c686bdfa7cf880641c22b609a55 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 14:09:35 +0000 Subject: [PATCH 085/142] [datasets] Compatability fixes with new benchmark URI class. Issue #524. --- compiler_gym/bin/manual_env.py | 9 ++++++--- compiler_gym/envs/llvm/datasets/clgen.py | 3 ++- compiler_gym/envs/llvm/datasets/poj104.py | 5 ++--- compiler_gym/random_search.py | 8 +++++--- examples/llvm_rl/tests/benchmarks_test.py | 5 ----- tests/compiler_env_state_test.py | 14 -------------- tests/llvm/custom_benchmarks_test.py | 2 +- 7 files changed, 16 insertions(+), 30 deletions(-) diff --git a/compiler_gym/bin/manual_env.py b/compiler_gym/bin/manual_env.py index 14b82f00c..ffb72dee7 100644 --- a/compiler_gym/bin/manual_env.py +++ b/compiler_gym/bin/manual_env.py @@ -329,9 +329,12 @@ def postloop(self): def set_prompt(self): """Set the prompt - shows the benchmark name""" - benchmark_name = self.env.benchmark.uri - if benchmark_name.startswith("benchmark://"): - benchmark_name = benchmark_name[len("benchmark://") :] + uri = self.env.benchmark.uri + benchmark_name = ( + f"{uri.dataset}{uri.path}" + if uri.scheme == "benchmark" + else f"{uri.scheme}://{uri.dataset}{uri.path}" + ) prompt = f"compiler_gym:{benchmark_name}>" self.prompt = f"\n{emph(prompt)} " diff --git a/compiler_gym/envs/llvm/datasets/clgen.py b/compiler_gym/envs/llvm/datasets/clgen.py index 9e3bd0f4d..a41d0a180 100644 --- a/compiler_gym/envs/llvm/datasets/clgen.py +++ b/compiler_gym/envs/llvm/datasets/clgen.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. import io import logging +import os import shutil import subprocess import tarfile @@ -126,7 +127,7 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: raise LookupError(f"No benchmark specified: {uri}") # The absolute path of the file, without an extension. 
- path_stem = self.dataset_root / uri[len(self.name) + 1 :] + path_stem = os.path.normpath(f"{self.dataset_root}/{uri.path}") bc_path, cl_path = Path(f"{path_stem}.bc"), Path(f"{path_stem}.cl") diff --git a/compiler_gym/envs/llvm/datasets/poj104.py b/compiler_gym/envs/llvm/datasets/poj104.py index 1c2037577..90086fadc 100644 --- a/compiler_gym/envs/llvm/datasets/poj104.py +++ b/compiler_gym/envs/llvm/datasets/poj104.py @@ -3,6 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging +import os import subprocess import sys from concurrent.futures import as_completed @@ -74,11 +75,9 @@ def __init__(self, site_data_base: Path, sort_order: int = 0): def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: self.install() - if uri is None or len(uri) <= len(self.name) + 1: - return self._get_benchmark_by_index(self.random.integers(self.size)) # The absolute path of the file, without an extension. - path_stem = self.dataset_root / uri[len(self.name) + 1 :] + path_stem = os.path.normpath(f"{self.dataset_root}/{uri.path}") # If the file does not exist, compile it on-demand. bitcode_path = Path(f"{path_stem}.bc") diff --git a/compiler_gym/random_search.py b/compiler_gym/random_search.py index f80983cb6..7cec5c356 100644 --- a/compiler_gym/random_search.py +++ b/compiler_gym/random_search.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. """Simple parallelized random search.""" import json +import os from multiprocessing import cpu_count from pathlib import Path from threading import Thread @@ -155,8 +156,9 @@ def random_search( benchmark_uri = env.benchmark.uri if not outdir: - sanitized_benchmark_uri = "/".join(benchmark_uri.split("/")[-2:]) - outdir = create_logging_dir(f"random/{sanitized_benchmark_uri}") + outdir = create_logging_dir( + os.path.normpath(f"random/{benchmark_uri.scheme}/{benchmark_uri.path}") + ) outdir = Path(outdir) if not env.reward_space: @@ -177,7 +179,7 @@ def random_search( # Write a metadata file. 
metadata = { "env": env.spec.id if env.spec else "", - "benchmark": benchmark_uri, + "benchmark": str(benchmark_uri), "reward": reward_space_name, "patience": patience, } diff --git a/examples/llvm_rl/tests/benchmarks_test.py b/examples/llvm_rl/tests/benchmarks_test.py index 8d90a68e4..95e1da076 100644 --- a/examples/llvm_rl/tests/benchmarks_test.py +++ b/examples/llvm_rl/tests/benchmarks_test.py @@ -36,8 +36,3 @@ def test_validation_benchmarks_uris_list_yaml(): ) ) assert len(cfg.uris) == 1 - - -def test_benchmarks_uris_invalid_regex(): - with pytest.raises(ValidationError): - Benchmarks(uris=["bad-uri"]) diff --git a/tests/compiler_env_state_test.py b/tests/compiler_env_state_test.py index f566e7658..1d722d410 100644 --- a/tests/compiler_env_state_test.py +++ b/tests/compiler_env_state_test.py @@ -22,11 +22,6 @@ def test_state_from_dict_empty(): CompilerEnvState(**{}) -def test_state_invalid_benchmark_uri(): - with pytest.raises(PydanticValidationError, match="benchmark"): - CompilerEnvState(benchmark="invalid", walltime=100, reward=1.5, commandline="") - - def test_state_invalid_walltime(): with pytest.raises(PydanticValidationError, match="Walltime cannot be negative"): CompilerEnvState( @@ -293,15 +288,6 @@ def test_state_from_csv_invalid_format(): next(iter(reader)) -def test_state_from_csv_invalid_benchmark_uri(): - buf = StringIO( - "benchmark,reward,walltime,commandline\n" "invalid-uri,2.0,5.0,-a -b -c\n" - ) - reader = CompilerEnvStateReader(buf) - with pytest.raises(ValueError, match="string does not match regex"): - next(iter(reader)) - - def test_state_serialize_deserialize_equality(): original_state = CompilerEnvState( benchmark="benchmark://cbench-v0/foo", diff --git a/tests/llvm/custom_benchmarks_test.py b/tests/llvm/custom_benchmarks_test.py index 1e2c32cef..da7ed4a94 100644 --- a/tests/llvm/custom_benchmarks_test.py +++ b/tests/llvm/custom_benchmarks_test.py @@ -32,7 +32,7 @@ def test_reset_invalid_benchmark(env: LlvmEnv): invalid_benchmark = "an invalid benchmark" with pytest.raises( - ValueError, match=f"Invalid benchmark URI: 'benchmark://{invalid_benchmark}'" + LookupError, match=f"Dataset not found: benchmark://{invalid_benchmark}" ): env.reset(benchmark=invalid_benchmark) From ec3bc5538cc8166186babf62ccce4fdfeaf97df7 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 15:14:03 +0000 Subject: [PATCH 086/142] [datasets] Tidy up temporary file on error. --- compiler_gym/envs/llvm/datasets/poj104.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler_gym/envs/llvm/datasets/poj104.py b/compiler_gym/envs/llvm/datasets/poj104.py index 90086fadc..ed4d01a44 100644 --- a/compiler_gym/envs/llvm/datasets/poj104.py +++ b/compiler_gym/envs/llvm/datasets/poj104.py @@ -118,11 +118,14 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: if clang.returncode: compile_cmd = " ".join(compile_cmd) error = truncate(stderr.decode("utf-8"), max_lines=20, max_line_len=100) + if tmp_bitcode_path.is_file(): + tmp_bitcode_path.unlink() raise BenchmarkInitError( f"Compilation job failed!\n" f"Command: {compile_cmd}\n" f"Error: {error}" ) + if not bitcode_path.is_file(): raise BenchmarkInitError( f"Compilation job failed to produce output file!\nCommand: {compile_cmd}" From e44c921a8366cebd35f6f94f064bc3174cbc337d Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 15:14:24 +0000 Subject: [PATCH 087/142] [datasets] Log truncated cBench validation errors. 
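
For reference, a minimal sketch of the truncate-then-log pattern this patch
applies (the log_validation_failure() helper below is illustrative only; the
real call site is the flaky validation callback in cbench.py shown in the
diff that follows):

    import logging

    from compiler_gym.util.truncate import truncate

    logger = logging.getLogger(__name__)

    def log_validation_failure(error: Exception, attempt: int, retries: int):
        # Truncate the error to a single line of at most 50 characters so
        # that verbose validation failures do not flood the logs.
        logger.warning(
            "Validation callback failed (%s), attempt=%d/%d",
            truncate(str(error), max_line_len=50, max_lines=1),
            attempt,
            retries,
        )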
--- compiler_gym/envs/llvm/datasets/cbench.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index 68db1f231..962ff4f48 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -26,6 +26,7 @@ from compiler_gym.util.download import download from compiler_gym.util.runfiles_path import cache_path, site_data_path from compiler_gym.util.timer import Timer +from compiler_gym.util.truncate import truncate from compiler_gym.validation_result import ValidationError logger = logging.getLogger(__name__) @@ -419,7 +420,12 @@ def flaky_wrapped_cb(env: "LlvmEnv") -> Optional[ValidationError]: # noqa: F821 # Timeout errors can be raised by the environment in case of a # slow step / observation, and should be retried. pass - logger.warning("Validation callback failed, attempt=%d/%d", j, flakiness) + logger.warning( + "Validation callback failed (%s), attempt=%d/%d", + truncate(str(error), max_line_len=50, max_lines=1), + j, + flakiness, + ) return error return flaky_wrapped_cb From 551f39213182ed3f0a2ebc8e48506c1de28a837c Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 15:14:36 +0000 Subject: [PATCH 088/142] [rpc] Add an early exit path for unavailable local services. --- compiler_gym/service/connection.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/compiler_gym/service/connection.py b/compiler_gym/service/connection.py index af4f7072e..3a14e0ce1 100644 --- a/compiler_gym/service/connection.py +++ b/compiler_gym/service/connection.py @@ -217,6 +217,12 @@ def __call__( # For "unavailable" errors we retry with exponential # backoff. This is because this error can be caused by an # overloaded service, a flaky connection, etc. + + # Early exit in case we can detect that the service is down + # and so there is no use in retrying the RPC call. + if self.service_is_down(): + raise ServiceIsClosed("Service is offline") + attempt += 1 if attempt > max_retries: raise ServiceTransportError( @@ -268,6 +274,14 @@ def loglines(self) -> Iterable[str]: """ yield from () + def service_is_down(self) -> bool: + """Return true if the service is known to be dead. + + Subclasses can use this for fast checks that a service is down to avoid + retry loops. + """ + return False + def make_working_dir() -> Path: """Make a working directory for a service. The calling code is responsible @@ -456,6 +470,10 @@ def __init__( super().__init__(channel, url) + def service_is_down(self) -> bool: + """Return true if the service subprocess has terminated.""" + return self.process.poll() is not None + def loglines(self) -> Iterable[str]: """Fetch any available log lines from the service backend. @@ -486,6 +504,9 @@ def close(self): raise ServiceError( f"Service exited with returncode {self.process.returncode}" ) + except ServiceIsClosed: + # The service has already been closed, nothing to do. + pass except ProcessLookupError: logger.warning("Service process not found at %s", self.working_dir) except subprocess.TimeoutExpired: From d6b606fbc8a16812948229a4b857769072a1c138 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 16:02:33 +0000 Subject: [PATCH 089/142] Audit use of suprocess.Popen(). Add a new Popen() utility wrapper and go through every use of subprocess.Popen() in the codebase to ensure that it uses `with` statement context managers. 
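
As a rough usage sketch of the pattern this audit enforces (run_echo() is an
illustrative helper, not part of the codebase; the real wrapper is defined in
compiler_gym/util/commands.py in the diff below):

    import subprocess

    from compiler_gym.util.commands import Popen

    def run_echo(timeout: int = 60) -> str:
        # The with-statement guarantees that the child process is killed
        # and reaped when the block exits, even on error or timeout.
        with Popen(
            ["echo", "hello"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        ) as process:
            stdout, _ = process.communicate(timeout=timeout)
        return stdout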
--- compiler_gym/envs/compiler_env.py | 5 ++ compiler_gym/envs/gcc/datasets/csmith.py | 35 ++++---- compiler_gym/envs/llvm/compute_observation.py | 43 ++++------ compiler_gym/envs/llvm/datasets/cbench.py | 85 ++++++++----------- compiler_gym/envs/llvm/datasets/clgen.py | 18 ++-- compiler_gym/envs/llvm/datasets/csmith.py | 78 +++++++++-------- .../envs/llvm/datasets/llvm_stress.py | 35 ++++---- compiler_gym/envs/llvm/datasets/poj104.py | 20 +++-- compiler_gym/envs/llvm/llvm_benchmark.py | 34 ++++---- compiler_gym/util/commands.py | 61 +++++++++---- .../example_compiler_gym_service/env_tests.py | 21 ++--- .../example_unrolling_service/env_tests.py | 9 +- ...m_commandline_opt_equivalence_fuzz_test.py | 18 ++-- tests/util/BUILD | 9 ++ tests/util/commands_test.py | 28 ++++++ 15 files changed, 281 insertions(+), 218 deletions(-) create mode 100644 tests/util/commands_test.py diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index f95df1fdb..09eedc09e 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -28,6 +28,7 @@ ServiceTransportError, SessionNotFound, ) +from compiler_gym.service.connection import ServiceIsClosed from compiler_gym.service.proto import Action, AddBenchmarkRequest from compiler_gym.service.proto import Benchmark as BenchmarkProto from compiler_gym.service.proto import ( @@ -686,6 +687,10 @@ def close(self): # not kill it. if reply.remaining_sessions: close_service = False + except ServiceIsClosed: + # This error can be safely ignored as it means that the service + # is already offline. + pass except Exception as e: logger.warning( "Failed to end active compiler session on close(): %s (%s)", diff --git a/compiler_gym/envs/gcc/datasets/csmith.py b/compiler_gym/envs/gcc/datasets/csmith.py index cba4c646a..681a6a3a5 100644 --- a/compiler_gym/envs/gcc/datasets/csmith.py +++ b/compiler_gym/envs/gcc/datasets/csmith.py @@ -17,6 +17,7 @@ from compiler_gym.datasets.benchmark import BenchmarkWithSource from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.gcc.gcc import Gcc +from compiler_gym.util.commands import Popen from compiler_gym.util.decorators import memoized_property from compiler_gym.util.runfiles_path import runfiles_path from compiler_gym.util.shell_format import plural @@ -198,27 +199,27 @@ def benchmark_from_seed( # Run csmith with the given seed and pipe the output to clang to # assemble a bitcode. logger.debug("Exec csmith --seed %d", seed) - csmith = subprocess.Popen( + with Popen( [str(self.csmith_bin_path), "--seed", str(seed)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, - ) - - # Generate the C source. - src, stderr = csmith.communicate(timeout=300) - if csmith.returncode: - try: - stderr = "\n".join( - truncate(stderr.decode("utf-8"), max_line_len=200, max_lines=20) + ) as csmith: + # Generate the C source. + src, stderr = csmith.communicate(timeout=300) + + if csmith.returncode: + try: + stderr = "\n".join( + truncate(stderr.decode("utf-8"), max_line_len=200, max_lines=20) + ) + logger.warning("Csmith failed with seed %d: %s", seed, stderr) + except UnicodeDecodeError: + # Failed to interpret the stderr output, generate a generic + # error message. + logger.warning("Csmith failed with seed %d", seed) + return self.benchmark_from_seed( + seed, max_retries=max_retries, retry_count=retry_count + 1 ) - logger.warning("Csmith failed with seed %d: %s", seed, stderr) - except UnicodeDecodeError: - # Failed to interpret the stderr output, generate a generic - # error message. 
- logger.warning("Csmith failed with seed %d", seed) - return self.benchmark_from_seed( - seed, max_retries=max_retries, retry_count=retry_count + 1 - ) # Pre-process the source. with tempfile.TemporaryDirectory() as tmpdir: diff --git a/compiler_gym/envs/llvm/compute_observation.py b/compiler_gym/envs/llvm/compute_observation.py index c73998356..14c36c55e 100644 --- a/compiler_gym/envs/llvm/compute_observation.py +++ b/compiler_gym/envs/llvm/compute_observation.py @@ -4,13 +4,13 @@ # LICENSE file in the root directory of this source tree. """This module defines a utility function for computing LLVM observations.""" import subprocess -import sys from pathlib import Path from typing import List import google.protobuf.text_format from compiler_gym.service.proto import Observation +from compiler_gym.util.commands import Popen from compiler_gym.util.gym_type_hints import ObservationType from compiler_gym.util.runfiles_path import runfiles_path from compiler_gym.util.shell_format import plural @@ -74,37 +74,30 @@ def compute_observation( observation_space_name = pascal_case_to_enum(observation_space.id) - process = subprocess.Popen( - [str(_COMPUTE_OBSERVATION_BIN), observation_space_name, str(bitcode)], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - try: - stdout, stderr = process.communicate(timeout=timeout) + with Popen( + [str(_COMPUTE_OBSERVATION_BIN), observation_space_name, str(bitcode)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as process: + stdout, stderr = process.communicate(timeout=timeout) + + if process.returncode: + try: + stderr = stderr.decode("utf-8") + raise ValueError( + f"Failed to compute {observation_space.id} observation: {stderr}" + ) + except UnicodeDecodeError as e: + raise ValueError( + f"Failed to compute {observation_space.id} observation" + ) from e except subprocess.TimeoutExpired as e: - # kill() was added in Python 3.7. - if sys.version_info >= (3, 7, 0): - process.kill() - else: - process.terminate() - process.communicate(timeout=timeout) # Wait for shutdown to complete. 
raise TimeoutError( f"Failed to compute {observation_space.id} observation in " f"{timeout:.1f} {plural(int(round(timeout)), 'second', 'seconds')}" ) from e - if process.returncode: - try: - stderr = stderr.decode("utf-8") - raise ValueError( - f"Failed to compute {observation_space.id} observation: {stderr}" - ) - except UnicodeDecodeError as e: - raise ValueError( - f"Failed to compute {observation_space.id} observation" - ) from e - try: stdout = stdout.decode("utf-8") except UnicodeDecodeError as e: diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index 962ff4f48..f66d5f06c 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -23,6 +23,7 @@ from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.service.proto import BenchmarkDynamicConfig, Command from compiler_gym.third_party import llvm +from compiler_gym.util.commands import Popen from compiler_gym.util.download import download from compiler_gym.util.runfiles_path import cache_path, site_data_path from compiler_gym.util.timer import Timer @@ -137,22 +138,25 @@ def _compile_and_run_bitcode_file( error_data["compile_cmd"] = compile_cmd logger.debug("compile: %s", compile_cmd) assert not binary.is_file() - clang = subprocess.Popen( - compile_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - env={"PATH": f"{clang_path.parent}:{os.environ.get('PATH', '')}"}, - ) try: - output, _ = clang.communicate(timeout=compilation_timeout_seconds) + with Popen( + compile_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + env={"PATH": f"{clang_path.parent}:{os.environ.get('PATH', '')}"}, + ) as clang: + output, _ = clang.communicate(timeout=compilation_timeout_seconds) + if clang.returncode: + error_data["output"] = output + return BenchmarkExecutionResult( + walltime_seconds=timeout_seconds, + error=ValidationError( + type="Compilation failed", + data=error_data, + ), + ) except subprocess.TimeoutExpired: - # kill() was added in Python 3.7. - if sys.version_info >= (3, 7, 0): - clang.kill() - else: - clang.terminate() - clang.communicate(timeout=30) # Wait for shutdown to complete. error_data["timeout"] = compilation_timeout_seconds return BenchmarkExecutionResult( walltime_seconds=timeout_seconds, @@ -161,41 +165,24 @@ def _compile_and_run_bitcode_file( data=error_data, ), ) - if clang.returncode: - error_data["output"] = output - return BenchmarkExecutionResult( - walltime_seconds=timeout_seconds, - error=ValidationError( - type="Compilation failed", - data=error_data, - ), - ) assert binary.is_file() else: lli_path = llvm.lli_path() error_data["run_cmd"] = cmd.replace("$BIN", f"{lli_path.name} benchmark.bc") run_env["PATH"] = str(lli_path.parent) + logger.debug("exec: %s", error_data["run_cmd"]) try: - logger.debug("exec: %s", error_data["run_cmd"]) - process = subprocess.Popen( + with Timer() as timer, Popen( error_data["run_cmd"], shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=run_env, cwd=cwd, - ) - - with Timer() as timer: + ) as process: stdout, _ = process.communicate(timeout=timeout_seconds) except subprocess.TimeoutExpired: - # kill() was added in Python 3.7. - if sys.version_info >= (3, 7, 0): - process.kill() - else: - process.terminate() - process.communicate(timeout=30) # Wait for shutdown to complete. 
error_data["timeout_seconds"] = timeout_seconds return BenchmarkExecutionResult( walltime_seconds=timeout_seconds, @@ -390,24 +377,24 @@ def validator_cb(env: "LlvmEnv") -> Optional[ValidationError]: # noqa: F821 type="Output not generated", data={"path": path.name, "command": cmd}, ) - diff = subprocess.Popen( + with Popen( ["diff", str(path), f"{path}.gold_standard"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - ) - stdout, _ = diff.communicate() - if diff.returncode: - try: - stdout = stdout.decode("utf-8") - return ValidationError( - type="Wrong output (file)", - data={"path": path.name, "diff": stdout}, - ) - except UnicodeDecodeError: - return ValidationError( - type="Wrong output (file)", - data={"path": path.name, "diff": ""}, - ) + ) as diff: + stdout, _ = diff.communicate(timeout=300) + if diff.returncode: + try: + stdout = stdout.decode("utf-8") + return ValidationError( + type="Wrong output (file)", + data={"path": path.name, "diff": stdout}, + ) + except UnicodeDecodeError: + return ValidationError( + type="Wrong output (file)", + data={"path": path.name, "diff": ""}, + ) def flaky_wrapped_cb(env: "LlvmEnv") -> Optional[ValidationError]: # noqa: F821 """Wrap the validation callback in a flakiness retry loop.""" diff --git a/compiler_gym/envs/llvm/datasets/clgen.py b/compiler_gym/envs/llvm/datasets/clgen.py index a41d0a180..31b256f31 100644 --- a/compiler_gym/envs/llvm/datasets/clgen.py +++ b/compiler_gym/envs/llvm/datasets/clgen.py @@ -18,6 +18,7 @@ from compiler_gym.datasets.benchmark import BenchmarkWithSource from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation +from compiler_gym.util.commands import Popen, communicate from compiler_gym.util.download import download from compiler_gym.util.filesystem import atomic_file_write from compiler_gym.util.truncate import truncate @@ -154,13 +155,16 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: ], ).command(outpath=tmp_bc_path) logger.debug("Exec %s", compile_command) - clang = subprocess.Popen( - compile_command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - _, stderr = clang.communicate(timeout=300) + try: + with Popen( + compile_command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as clang: + _, stderr = communicate(clang, timeout=300) + except subprocess.TimeoutExpired: + raise BenchmarkInitError(f"Benchmark compilation timed out: {uri}") if clang.returncode: compile_command = " ".join(compile_command) diff --git a/compiler_gym/envs/llvm/datasets/csmith.py b/compiler_gym/envs/llvm/datasets/csmith.py index a19456acb..4fa94a207 100644 --- a/compiler_gym/envs/llvm/datasets/csmith.py +++ b/compiler_gym/envs/llvm/datasets/csmith.py @@ -14,6 +14,7 @@ from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.service.proto import BenchmarkDynamicConfig, Command +from compiler_gym.util.commands import Popen, communicate from compiler_gym.util.decorators import memoized_property from compiler_gym.util.runfiles_path import runfiles_path from compiler_gym.util.shell_format import plural @@ -183,43 +184,48 @@ def benchmark_from_seed( # Run csmith with the given seed and pipe the output to clang to # assemble a bitcode. 
logger.debug("Exec csmith --seed %d", seed) - csmith = subprocess.Popen( - [str(self.csmith_bin_path), "--seed", str(seed)], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - # Generate the C source. - src, stderr = csmith.communicate(timeout=300) - if csmith.returncode: - try: - stderr = "\n".join( - truncate(stderr.decode("utf-8"), max_line_len=200, max_lines=20) - ) - logger.warning("Csmith failed with seed %d: %s", seed, stderr) - except UnicodeDecodeError: - # Failed to interpret the stderr output, generate a generic - # error message. - logger.warning("Csmith failed with seed %d", seed) - return self.benchmark_from_seed( - seed, max_retries=max_retries, retry_count=retry_count + 1 - ) - - # Compile to IR. - clang = subprocess.Popen( - self.clang_compile_command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - stdout, _ = clang.communicate(src, timeout=300) - - if clang.returncode: - compile_cmd = " ".join(self.clang_compile_command) + try: + with Popen( + [str(self.csmith_bin_path), "--seed", str(seed)], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as csmith: + # Generate the C source. + src, stderr = communicate(csmith, timeout=300) + if csmith.returncode: + try: + stderr = "\n".join( + truncate( + stderr.decode("utf-8"), max_line_len=200, max_lines=20 + ) + ) + logger.warning("Csmith failed with seed %d: %s", seed, stderr) + except UnicodeDecodeError: + # Failed to interpret the stderr output, generate a generic + # error message. + logger.warning("Csmith failed with seed %d", seed) + return self.benchmark_from_seed( + seed, max_retries=max_retries, retry_count=retry_count + 1 + ) + + # Compile to IR. + with Popen( + self.clang_compile_command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) as clang: + stdout, _ = communicate(clang, input=src, timeout=300) + if clang.returncode: + compile_cmd = " ".join(self.clang_compile_command) + raise BenchmarkInitError( + f"Compilation job failed!\n" + f"Csmith seed: {seed}\n" + f"Command: {compile_cmd}\n" + ) + except subprocess.TimeoutExpired: raise BenchmarkInitError( - f"Compilation job failed!\n" - f"Csmith seed: {seed}\n" - f"Command: {compile_cmd}\n" + f"Benchmark generation using seed {seed} timed out" ) return self.benchmark_class.create(f"{self.name}/{seed}", stdout, src) diff --git a/compiler_gym/envs/llvm/datasets/llvm_stress.py b/compiler_gym/envs/llvm/datasets/llvm_stress.py index 2fa2a760b..daa0985ac 100644 --- a/compiler_gym/envs/llvm/datasets/llvm_stress.py +++ b/compiler_gym/envs/llvm/datasets/llvm_stress.py @@ -12,6 +12,7 @@ from compiler_gym.datasets.benchmark import BenchmarkInitError from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.third_party import llvm +from compiler_gym.util.commands import Popen # The maximum value for the --seed argument to llvm-stress. UINT_MAX = (2 ** 32) - 1 @@ -75,21 +76,23 @@ def benchmark_from_seed(self, seed: int) -> Benchmark: # Run llvm-stress with the given seed and pipe the output to llvm-as to # assemble a bitcode. 
- llvm_stress = subprocess.Popen( - [str(llvm.llvm_stress_path()), f"--seed={seed}"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - llvm_as = subprocess.Popen( - [str(llvm.llvm_as_path()), "-"], - stdin=llvm_stress.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - stdout, _ = llvm_as.communicate(timeout=60) - llvm_stress.communicate(timeout=60) - if llvm_stress.returncode or llvm_as.returncode: - raise BenchmarkInitError("Failed to generate benchmark") + try: + with Popen( + [str(llvm.llvm_stress_path()), f"--seed={seed}"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as llvm_stress: + with Popen( + [str(llvm.llvm_as_path()), "-"], + stdin=llvm_stress.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as llvm_as: + stdout, _ = llvm_as.communicate(timeout=60) + llvm_stress.communicate(timeout=60) + if llvm_stress.returncode or llvm_as.returncode: + raise BenchmarkInitError("Failed to generate benchmark") + except subprocess.TimeoutExpired: + raise BenchmarkInitError("Benchmark generation timed out") return Benchmark.from_file_contents(f"{self.name}/{seed}", stdout) diff --git a/compiler_gym/envs/llvm/datasets/poj104.py b/compiler_gym/envs/llvm/datasets/poj104.py index ed4d01a44..53e7bbf13 100644 --- a/compiler_gym/envs/llvm/datasets/poj104.py +++ b/compiler_gym/envs/llvm/datasets/poj104.py @@ -17,6 +17,7 @@ from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation from compiler_gym.util import thread_pool +from compiler_gym.util.commands import Popen from compiler_gym.util.download import download from compiler_gym.util.filesystem import atomic_file_write from compiler_gym.util.truncate import truncate @@ -107,13 +108,18 @@ def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: ], ).command(outpath=tmp_bitcode_path) logger.debug("Exec %s", compile_cmd) - clang = subprocess.Popen( - compile_cmd, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - _, stderr = clang.communicate(src.encode("utf-8"), timeout=300) + try: + with Popen( + compile_cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as clang: + _, stderr = clang.communicate( + input=src.encode("utf-8"), timeout=300 + ) + except subprocess.TimeoutExpired: + raise BenchmarkInitError(f"Benchmark compilation timed out: {uri}") if clang.returncode: compile_cmd = " ".join(compile_cmd) diff --git a/compiler_gym/envs/llvm/llvm_benchmark.py b/compiler_gym/envs/llvm/llvm_benchmark.py index 9e001b4f6..5a5406945 100644 --- a/compiler_gym/envs/llvm/llvm_benchmark.py +++ b/compiler_gym/envs/llvm/llvm_benchmark.py @@ -15,7 +15,7 @@ from compiler_gym.datasets import Benchmark, BenchmarkInitError from compiler_gym.third_party import llvm -from compiler_gym.util.commands import communicate, run_command +from compiler_gym.util.commands import Popen, run_command from compiler_gym.util.runfiles_path import transient_cache_path from compiler_gym.util.thread_pool import get_thread_pool_executor @@ -30,26 +30,26 @@ def get_compiler_includes(compiler: str) -> Iterable[Path]: # GNU assembler does not support piping to stdout. 
with tempfile.TemporaryDirectory() as d: try: - process = subprocess.Popen( + with Popen( [compiler, "-xc++", "-v", "-c", "-", "-o", str(Path(d) / "a.out")], stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True, - ) + ) as process: + _, stderr = process.communicate(input="", timeout=30) + if process.returncode: + raise OSError( + f"Failed to invoke {compiler}. " + f"Is there a working system compiler?\n" + f"Error: {stderr.strip()}" + ) except FileNotFoundError as e: raise OSError( f"Failed to invoke {compiler}. " f"Is there a working system compiler?\n" f"Error: {e}" ) from e - _, stderr = communicate(process, input="", timeout=30) - if process.returncode: - raise OSError( - f"Failed to invoke {compiler}. " - f"Is there a working system compiler?\n" - f"Error: {stderr.strip()}" - ) # Parse the compiler output that matches the conventional output format # used by clang and GCC: @@ -368,14 +368,14 @@ def _add_path(path: Path): llvm_link_cmd = [str(llvm.llvm_link_path()), "-o", "-"] + [ str(path) for path in bitcodes + clang_outs ] - llvm_link = subprocess.Popen( + with Popen( llvm_link_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - bitcode, stderr = communicate(llvm_link, timeout=timeout) - if llvm_link.returncode: - raise BenchmarkInitError( - f"Failed to link LLVM bitcodes with error: {stderr.decode('utf-8')}" - ) + ) as llvm_link: + bitcode, stderr = llvm_link.communicate(timeout=timeout) + if llvm_link.returncode: + raise BenchmarkInitError( + f"Failed to link LLVM bitcodes with error: {stderr.decode('utf-8')}" + ) timestamp = datetime.now().strftime("%Y%m%HT%H%M%S") uri = f"benchmark://user-v0/{timestamp}-{random.randrange(16**4):04x}" diff --git a/compiler_gym/util/commands.py b/compiler_gym/util/commands.py index e3377376e..1fa9effc0 100644 --- a/compiler_gym/util/commands.py +++ b/compiler_gym/util/commands.py @@ -5,28 +5,30 @@ import subprocess import sys +from contextlib import contextmanager from signal import Signals +from subprocess import Popen as _Popen from typing import List def run_command(cmd: List[str], timeout: int): - process = subprocess.Popen( + with Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True - ) - stdout, stderr = communicate(process, timeout=timeout) - if process.returncode: - returncode = process.returncode - try: - # Try and decode the name of a signal. Signal returncodes - # are negative. - returncode = f"{returncode} ({Signals(abs(returncode)).name})" - except ValueError: - pass - raise OSError( - f"Compilation job failed with returncode {returncode}\n" - f"Command: {' '.join(cmd)}\n" - f"Stderr: {stderr.strip()}" - ) + ) as process: + stdout, stderr = process.communicate(timeout=timeout) + if process.returncode: + returncode = process.returncode + try: + # Try and decode the name of a signal. Signal returncodes + # are negative. + returncode = f"{returncode} ({Signals(abs(returncode)).name})" + except ValueError: + pass + raise OSError( + f"Compilation job failed with returncode {returncode}\n" + f"Command: {' '.join(cmd)}\n" + f"Stderr: {stderr.strip()}" + ) return stdout @@ -40,5 +42,30 @@ def communicate(process, input=None, timeout=None): process.kill() else: process.terminate() - process.communicate(timeout=timeout) # Wait for shutdown to complete. + # Wait for shutdown to complete. + try: + process.communicate(timeout=timeout) + except subprocess.TimeoutExpired: + pass # Stubborn process won't die, nothing can be done. 
raise + + +@contextmanager +def Popen(*args, **kwargs): + """subprocess.Popen() with resilient process termination at end of scope.""" + with _Popen(*args, **kwargs) as process: + try: + yield process + finally: + # Process has not yet terminated, kill it. + if process.poll() is None: + # kill() was added in Python 3.7. + if sys.version_info >= (3, 7, 0): + process.kill() + else: + process.terminate() + # Wait for shutdown to complete. + try: + process.communicate(timeout=60) + except subprocess.TimeoutExpired: + pass # Stubborn process won't die, nothing can be done. diff --git a/examples/example_compiler_gym_service/env_tests.py b/examples/example_compiler_gym_service/env_tests.py index 3e4e53404..cbb1e543b 100644 --- a/examples/example_compiler_gym_service/env_tests.py +++ b/examples/example_compiler_gym_service/env_tests.py @@ -17,6 +17,7 @@ from compiler_gym.envs import CompilerEnv from compiler_gym.service import SessionNotFound from compiler_gym.spaces import Box, NamedDiscrete, Scalar, Sequence +from compiler_gym.util.commands import Popen from tests.test_main import main # Given that the C++ and Python service implementations have identical @@ -44,11 +45,11 @@ def test_invalid_arguments(bin: Path): """Test that running the binary with unrecognized arguments is an error.""" def run(cmd): - p = subprocess.Popen( + with Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True - ) - stdout, stderr = p.communicate(timeout=10) - return p.returncode, stdout, stderr + ) as p: + stdout, stderr = p.communicate(timeout=10) + return p.returncode, stdout, stderr returncode, _, stderr = run([str(bin), "foobar"]) assert "ERROR:" in stderr @@ -232,17 +233,13 @@ def test_fork(env: CompilerEnv): def test_force_working_dir(bin: Path, tmpdir): """Test that expected files are generated in the working directory.""" tmpdir = Path(tmpdir) / "subdir" - service = subprocess.Popen([str(bin), "--working_dir", str(tmpdir)]) - try: + with Popen([str(bin), "--working_dir", str(tmpdir)]): for _ in range(10): sleep(0.5) if (tmpdir / "pid.txt").is_file() and (tmpdir / "port.txt").is_file(): break else: pytest.fail(f"PID file not found in {tmpdir}: {list(tmpdir.iterdir())}") - finally: - service.terminate() - service.communicate(timeout=60) def unsafe_select_unused_port() -> int: @@ -278,8 +275,7 @@ def test_force_port(bin: Path, tmpdir): assert port_is_free(port) # Sanity check tmpdir = Path(tmpdir) - p = subprocess.Popen([str(bin), "--port", str(port), "--working_dir", str(tmpdir)]) - try: + with Popen([str(bin), "--port", str(port), "--working_dir", str(tmpdir)]): for _ in range(10): sleep(0.5) if (tmpdir / "pid.txt").is_file() and (tmpdir / "port.txt").is_file(): @@ -292,9 +288,6 @@ def test_force_port(bin: Path, tmpdir): assert actual_port == port assert not port_is_free(actual_port) - finally: - p.terminate() - p.communicate(timeout=60) if __name__ == "__main__": diff --git a/examples/example_unrolling_service/env_tests.py b/examples/example_unrolling_service/env_tests.py index dc5555c94..c1be88223 100644 --- a/examples/example_unrolling_service/env_tests.py +++ b/examples/example_unrolling_service/env_tests.py @@ -15,6 +15,7 @@ from compiler_gym.envs import CompilerEnv from compiler_gym.service import SessionNotFound from compiler_gym.spaces import Box, NamedDiscrete, Scalar, Sequence +from compiler_gym.util.commands import Popen from tests.test_main import main @@ -34,11 +35,11 @@ def test_invalid_arguments(bin: Path): """Test that running the binary with unrecognized arguments is 
an error.""" def run(cmd): - p = subprocess.Popen( + with Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True - ) - stdout, stderr = p.communicate(timeout=10) - return p.returncode, stdout, stderr + ) as p: + stdout, stderr = p.communicate(timeout=10) + return p.returncode, stdout, stderr returncode, _, stderr = run([str(bin), "foobar"]) assert "ERROR:" in stderr diff --git a/tests/fuzzing/llvm_commandline_opt_equivalence_fuzz_test.py b/tests/fuzzing/llvm_commandline_opt_equivalence_fuzz_test.py index 57b852b23..b8e633375 100644 --- a/tests/fuzzing/llvm_commandline_opt_equivalence_fuzz_test.py +++ b/tests/fuzzing/llvm_commandline_opt_equivalence_fuzz_test.py @@ -10,6 +10,7 @@ import pytest from compiler_gym.envs import LlvmEnv +from compiler_gym.util.commands import Popen from tests.pytest_plugins.random_util import apply_random_trajectory from tests.test_main import main @@ -52,19 +53,18 @@ def test_fuzz(env: LlvmEnv, tmpwd: Path, llvm_opt: Path, llvm_diff: Path): assert Path("output.ll").is_file() os.rename("output.ll", "opt.ll") - diff = subprocess.Popen( + with Popen( [llvm_diff, "opt.ll", "env.ll"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, - ) - stdout, stderr = diff.communicate(timeout=300) - - if diff.returncode: - pytest.fail( - f"Opt produced different output to CompilerGym " - f"(returncode: {diff.returncode}):\n{stdout}\n{stderr}" - ) + ) as diff: + stdout, stderr = diff.communicate(timeout=300) + if diff.returncode: + pytest.fail( + f"Opt produced different output to CompilerGym " + f"(returncode: {diff.returncode}):\n{stdout}\n{stderr}" + ) if __name__ == "__main__": diff --git a/tests/util/BUILD b/tests/util/BUILD index 808a016cf..d977ecef9 100644 --- a/tests/util/BUILD +++ b/tests/util/BUILD @@ -14,6 +14,15 @@ py_test( ], ) +py_test( + name = "commands_test", + srcs = ["commands_test.py"], + deps = [ + "//compiler_gym/util", + "//tests:test_main", + ], +) + py_test( name = "debug_util_test", srcs = ["debug_util_test.py"], diff --git a/tests/util/commands_test.py b/tests/util/commands_test.py new file mode 100644 index 000000000..11c214426 --- /dev/null +++ b/tests/util/commands_test.py @@ -0,0 +1,28 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""Unit tests for compiler_gym.util.commands.""" +import subprocess + +import pytest + +from compiler_gym.util.commands import Popen, communicate +from tests.test_main import main + + +def test_communicate_timeout(): + with pytest.raises(subprocess.TimeoutExpired): + with subprocess.Popen(["sleep", "60"]) as process: + communicate(process, timeout=1) + assert process.poll() is not None # Process is dead. + + +def test_popen(): + with Popen(["echo"]) as process: + communicate(process, timeout=60) + assert process.poll() is not None # Process is dead. + + +if __name__ == "__main__": + main() From 433d47e82e667afc579969786fdb7233fc5b137f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 16:05:57 +0000 Subject: [PATCH 090/142] [datasets] Fix error logging format. 
--- compiler_gym/envs/llvm/datasets/cbench.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/compiler_gym/envs/llvm/datasets/cbench.py b/compiler_gym/envs/llvm/datasets/cbench.py index f66d5f06c..d3bfb4426 100644 --- a/compiler_gym/envs/llvm/datasets/cbench.py +++ b/compiler_gym/envs/llvm/datasets/cbench.py @@ -27,7 +27,6 @@ from compiler_gym.util.download import download from compiler_gym.util.runfiles_path import cache_path, site_data_path from compiler_gym.util.timer import Timer -from compiler_gym.util.truncate import truncate from compiler_gym.validation_result import ValidationError logger = logging.getLogger(__name__) @@ -409,7 +408,7 @@ def flaky_wrapped_cb(env: "LlvmEnv") -> Optional[ValidationError]: # noqa: F821 pass logger.warning( "Validation callback failed (%s), attempt=%d/%d", - truncate(str(error), max_line_len=50, max_lines=1), + error.type, j, flakiness, ) From 10ece87d61c29d508e11478ca7d2bad7e1c759e2 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 16:09:21 +0000 Subject: [PATCH 091/142] [tests] Remove to xfail annotations. These should be addressed by the new Popen() usage. Issue #459. --- tests/gcc/datasets/csmith_test.py | 3 --- tests/gcc/gcc_env_test.py | 10 +--------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/tests/gcc/datasets/csmith_test.py b/tests/gcc/datasets/csmith_test.py index 230fbe68a..5c9a05f3d 100644 --- a/tests/gcc/datasets/csmith_test.py +++ b/tests/gcc/datasets/csmith_test.py @@ -43,9 +43,6 @@ def test_csmith_random_select(gcc_bin: str, index: int, tmpwd: Path): assert (tmpwd / "source.c").is_file() -@pytest.mark.xfail( - reason="github.com/facebookresearch/CompilerGym/issues/459", -) @with_gcc_support def test_random_benchmark(gcc_bin: str): with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: diff --git a/tests/gcc/gcc_env_test.py b/tests/gcc/gcc_env_test.py index 90f719a3e..d2295dd1a 100644 --- a/tests/gcc/gcc_env_test.py +++ b/tests/gcc/gcc_env_test.py @@ -14,7 +14,7 @@ from compiler_gym.service.connection import ServiceError from compiler_gym.spaces import Scalar, Sequence from tests.pytest_plugins.common import with_docker, without_docker -from tests.pytest_plugins.gcc import docker_is_available, with_gcc_support +from tests.pytest_plugins.gcc import with_gcc_support from tests.test_main import main pytest_plugins = ["tests.pytest_plugins.gcc"] @@ -35,10 +35,6 @@ def test_docker_default_action_space(): assert env.action_spaces[0].names[0] == "-O0" -@pytest.mark.xfail( - not docker_is_available(), - reason="github.com/facebookresearch/CompilerGym/issues/459", -) def test_gcc_bin(gcc_bin: str): """Test that the environment reports the service's reward spaces.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: @@ -46,10 +42,6 @@ def test_gcc_bin(gcc_bin: str): assert env.gcc_spec.gcc.bin == gcc_bin -@pytest.mark.xfail( - not docker_is_available(), - reason="github.com/facebookresearch/CompilerGym/issues/459", -) def test_observation_spaces_failing_because_of_bug(gcc_bin: str): """Test that the environment reports the service's observation spaces.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: From cd1b93e7df126d3a718334371cea4850f912b91f Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 16:21:55 +0000 Subject: [PATCH 092/142] [util] Fix the logging message for unknown slurm partition. 
--- compiler_gym/util/executor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler_gym/util/executor.py b/compiler_gym/util/executor.py index 9d48c6c2d..f7f1c6f83 100644 --- a/compiler_gym/util/executor.py +++ b/compiler_gym/util/executor.py @@ -108,7 +108,7 @@ def get_executor( gpus_per_node=self.gpus, slurm_partition=self.slurm_partition, ) - name = self.slurm_partition + name = self.slurm_partition or "slurm" # default value for logging elif self.type == self.Type.LOCAL: executor, name = ( LocalParallelExecutor( From 65f3c222fe64545c7faa79a17a5599e9df07b786 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 22 Dec 2021 17:10:09 +0000 Subject: [PATCH 093/142] Further BenchmarkUri compatability fixes. --- compiler_gym/compiler_env_state.py | 7 +++++++ compiler_gym/envs/compiler_env.py | 7 ++++++- examples/brute_force.py | 13 ++++++++----- examples/example_compiler_gym_service/__init__.py | 2 +- .../demo_without_bazel.py | 8 ++++---- examples/example_unrolling_service/__init__.py | 2 +- examples/llvm_rl/model/inference_result.py | 9 ++++++++- 7 files changed, 35 insertions(+), 13 deletions(-) diff --git a/compiler_gym/compiler_env_state.py b/compiler_gym/compiler_env_state.py index b8ec68a48..e3c4307aa 100644 --- a/compiler_gym/compiler_env_state.py +++ b/compiler_gym/compiler_env_state.py @@ -9,6 +9,7 @@ from pydantic import BaseModel, Field, validator +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.util.truncate import truncate @@ -48,6 +49,12 @@ def walltime_nonnegative(cls, v): assert v >= 0, "Walltime cannot be negative" return v + @validator("benchmark", pre=True) + def validate_benchmark(cls, value): + if isinstance(value, BenchmarkUri): + return str(value) + return value + @property def has_reward(self) -> bool: """Return whether the state has a reward value.""" diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index 09eedc09e..2cf5af4a7 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -20,6 +20,7 @@ from compiler_gym.compiler_env_state import CompilerEnvState from compiler_gym.datasets import Benchmark, Dataset, Datasets +from compiler_gym.datasets.uri import BenchmarkUri from compiler_gym.service import ( CompilerGymServiceConnection, ConnectionOpts, @@ -462,7 +463,7 @@ def benchmark(self) -> Benchmark: return self._benchmark_in_use @benchmark.setter - def benchmark(self, benchmark: Union[str, Benchmark]): + def benchmark(self, benchmark: Union[str, Benchmark, BenchmarkUri]): if self.in_episode: warnings.warn( "Changing the benchmark has no effect until reset() is called" @@ -474,6 +475,10 @@ def benchmark(self, benchmark: Union[str, Benchmark]): elif isinstance(benchmark, Benchmark): logger.debug("Setting benchmark: %s", benchmark.uri) self._next_benchmark = benchmark + elif isinstance(benchmark, BenchmarkUri): + benchmark_object = self.datasets.benchmark_from_parsed_uri(benchmark) + logger.debug("Setting benchmark by name: %s", benchmark_object) + self._next_benchmark = benchmark_object else: raise TypeError( f"Expected a Benchmark or str, received: '{type(benchmark).__name__}'" diff --git a/examples/brute_force.py b/examples/brute_force.py index 2f281d585..497df84db 100644 --- a/examples/brute_force.py +++ b/examples/brute_force.py @@ -25,6 +25,7 @@ import json import logging import math +import os import sys from pathlib import Path from queue import Queue @@ -180,7 +181,7 @@ def run_brute_force( reward_space_name = env.reward_space.id actions = 
[env.action_space.names.index(a) for a in action_names] - benchmark_uri = env.benchmark.uri + benchmark_uri = str(env.benchmark) meta = { "env": env.spec.id, @@ -307,10 +308,12 @@ def main(argv): with env_from_flags(benchmark) as env: env.reset() - sanitized_benchmark_uri = "/".join(str(env.benchmark).split("/")[-2:]) - logs_dir = Path( - FLAGS.output_dir or create_logging_dir(f"brute_force/{sanitized_benchmark_uri}") - ) + logs_dir = Path( + FLAGS.output_dir + or create_logging_dir( + f'brute_force/{os.path.normpath(f"random/{env.benchmark.uri.scheme}/{env.benchmark.uri.path}")}' + ) + ) run_brute_force( make_env=lambda: env_from_flags(benchmark_from_flags()), diff --git a/examples/example_compiler_gym_service/__init__.py b/examples/example_compiler_gym_service/__init__.py index c8522b61f..3349b9867 100644 --- a/examples/example_compiler_gym_service/__init__.py +++ b/examples/example_compiler_gym_service/__init__.py @@ -70,7 +70,7 @@ def __init__(self, *args, **kwargs): } def benchmark_uris(self) -> Iterable[str]: - yield from self._benchmarks.keys() + yield from (f"benchmark://example-v0{k}" for k in self._benchmarks.keys()) def benchmark_from_parsed_uris(self, uri: BenchmarkUri) -> Benchmark: if uri.path in self._benchmarks: diff --git a/examples/example_compiler_gym_service/demo_without_bazel.py b/examples/example_compiler_gym_service/demo_without_bazel.py index 5c58c8d3d..7d070fe1e 100644 --- a/examples/example_compiler_gym_service/demo_without_bazel.py +++ b/examples/example_compiler_gym_service/demo_without_bazel.py @@ -67,18 +67,18 @@ def __init__(self, *args, **kwargs): description="An example dataset", ) self._benchmarks = { - "benchmark://example-v0/foo": Benchmark.from_file_contents( + "/foo": Benchmark.from_file_contents( "benchmark://example-v0/foo", "Ir data".encode("utf-8") ), - "benchmark://example-v0/bar": Benchmark.from_file_contents( + "/bar": Benchmark.from_file_contents( "benchmark://example-v0/bar", "Ir data".encode("utf-8") ), } def benchmark_uris(self) -> Iterable[str]: - yield from self._benchmarks.keys() + yield from (f"benchmark://example-v0{k}" for k in self._benchmarks.keys()) - def benchmark_from_parsed_uris(self, uri: BenchmarkUri) -> Benchmark: + def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: if uri.path in self._benchmarks: return self._benchmarks[uri.path] else: diff --git a/examples/example_unrolling_service/__init__.py b/examples/example_unrolling_service/__init__.py index 4838558db..ca25f0e90 100644 --- a/examples/example_unrolling_service/__init__.py +++ b/examples/example_unrolling_service/__init__.py @@ -123,7 +123,7 @@ def preprocess(src: Path) -> bytes: ) def benchmark_uris(self) -> Iterable[str]: - yield from self._benchmarks.keys() + yield from (f"benchmark://unrolling-v0{k}" for k in self._benchmarks.keys()) def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark: if uri.path in self._benchmarks: diff --git a/examples/llvm_rl/model/inference_result.py b/examples/llvm_rl/model/inference_result.py index 1e5bca601..f0bac3ba5 100644 --- a/examples/llvm_rl/model/inference_result.py +++ b/examples/llvm_rl/model/inference_result.py @@ -6,11 +6,12 @@ from typing import List import numpy as np -from pydantic import BaseModel +from pydantic import BaseModel, validator from ray.rllib.agents.dqn import ApexTrainer, R2D2Trainer # noqa from ray.rllib.agents.impala import ImpalaTrainer # noqa from ray.rllib.agents.ppo import PPOTrainer # noqa +from compiler_gym.datasets import BenchmarkUri from compiler_gym.envs import 
CompilerEnv from compiler_gym.util.timer import Timer @@ -118,3 +119,9 @@ def from_agent( runtime_reduction=np.median(runtimes_o3 or [0]) / max(np.median(runtimes_final or [0]), 1), ) + + @validator("benchmark", pre=True) + def validate_benchmark(cls, value): + if isinstance(value, BenchmarkUri): + return str(value) + return value From 5ced7f73ee34e6387261e8488709cc69ac7c2753 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 23 Dec 2021 01:20:54 +0000 Subject: [PATCH 094/142] Revert "[tests] Remove to xfail annotations." This reverts commit 10ece87d61c29d508e11478ca7d2bad7e1c759e2. --- tests/gcc/datasets/csmith_test.py | 3 +++ tests/gcc/gcc_env_test.py | 10 +++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/gcc/datasets/csmith_test.py b/tests/gcc/datasets/csmith_test.py index 5c9a05f3d..230fbe68a 100644 --- a/tests/gcc/datasets/csmith_test.py +++ b/tests/gcc/datasets/csmith_test.py @@ -43,6 +43,9 @@ def test_csmith_random_select(gcc_bin: str, index: int, tmpwd: Path): assert (tmpwd / "source.c").is_file() +@pytest.mark.xfail( + reason="github.com/facebookresearch/CompilerGym/issues/459", +) @with_gcc_support def test_random_benchmark(gcc_bin: str): with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: diff --git a/tests/gcc/gcc_env_test.py b/tests/gcc/gcc_env_test.py index d2295dd1a..90f719a3e 100644 --- a/tests/gcc/gcc_env_test.py +++ b/tests/gcc/gcc_env_test.py @@ -14,7 +14,7 @@ from compiler_gym.service.connection import ServiceError from compiler_gym.spaces import Scalar, Sequence from tests.pytest_plugins.common import with_docker, without_docker -from tests.pytest_plugins.gcc import with_gcc_support +from tests.pytest_plugins.gcc import docker_is_available, with_gcc_support from tests.test_main import main pytest_plugins = ["tests.pytest_plugins.gcc"] @@ -35,6 +35,10 @@ def test_docker_default_action_space(): assert env.action_spaces[0].names[0] == "-O0" +@pytest.mark.xfail( + not docker_is_available(), + reason="github.com/facebookresearch/CompilerGym/issues/459", +) def test_gcc_bin(gcc_bin: str): """Test that the environment reports the service's reward spaces.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: @@ -42,6 +46,10 @@ def test_gcc_bin(gcc_bin: str): assert env.gcc_spec.gcc.bin == gcc_bin +@pytest.mark.xfail( + not docker_is_available(), + reason="github.com/facebookresearch/CompilerGym/issues/459", +) def test_observation_spaces_failing_because_of_bug(gcc_bin: str): """Test that the environment reports the service's observation spaces.""" with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: From 9a47271026909eff4a4e1257a7a56f009dbc161d Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 25 Dec 2021 15:15:45 -0500 Subject: [PATCH 095/142] Update CMake Instructions Add command to install .whl file + some other minor additions --- INSTALL.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index 4eb577d4f..aeb28c3dd 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -98,7 +98,7 @@ Darwin is not supported with CMake. 
### Dependency instructions for Ubuntu ```bash -sudo apt-get install lld-9 \ +sudo apt-get install g++ lld-9 \ autoconf libtool ninja-build ccache git \ ``` @@ -132,10 +132,13 @@ cmake -GNinja \ -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ # For faster rebuilds, can be removed -DCMAKE_EXE_LINKER_FLAGS_INIT="-fuse-ld=lld" -DCMAKE_MODULE_LINKER_FLAGS_INIT="-fuse-ld=lld" -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \ # For faster builds, can be removed -DPython3_FIND_VIRTUALENV=FIRST \ + -DCMAKE_BUILD_WITH_INSTALL_RPATH=true \ -S "" \ -B "" cmake --build "" + +pip install /py_pkg/dist/compiler_gym*.whl --force-reinstall ``` Additional optional configuration arguments: From 32a03a3b06dd684b017de9b10aa787724f2be360 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 25 Dec 2021 17:11:45 -0500 Subject: [PATCH 096/142] successful working example_unrolling_service without bazel --- examples/example_unrolling_service/example.py | 4 + .../example_without_bazel.py | 174 ++++++++++++++++++ 2 files changed, 178 insertions(+) create mode 100644 examples/example_unrolling_service/example_without_bazel.py diff --git a/examples/example_unrolling_service/example.py b/examples/example_unrolling_service/example.py index 5e58ba3d3..ba46b1796 100644 --- a/examples/example_unrolling_service/example.py +++ b/examples/example_unrolling_service/example.py @@ -2,7 +2,11 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. +"""This script demonstrates how the example services defined in this directory +can be used as gym environments. Usage: + $ bazel run -c opt //examples/example_unrolling_service:example +""" import compiler_gym import examples.example_unrolling_service as unrolling_service # noqa Register environments. diff --git a/examples/example_unrolling_service/example_without_bazel.py b/examples/example_unrolling_service/example_without_bazel.py new file mode 100644 index 000000000..d33b8ae7a --- /dev/null +++ b/examples/example_unrolling_service/example_without_bazel.py @@ -0,0 +1,174 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""This module demonstrates how to """ +import os +import subprocess +from pathlib import Path +from typing import Iterable + +import compiler_gym +from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.envs.llvm.llvm_benchmark import get_system_includes +from compiler_gym.spaces import Reward +from compiler_gym.third_party import llvm +from compiler_gym.util.registration import register + +UNROLLING_PY_SERVICE_BINARY: Path = Path( + "example_unrolling_service/service_py/example_service.py" +) +assert UNROLLING_PY_SERVICE_BINARY.is_file(), "Service script not found" + +BENCHMARKS_PATH: Path = Path("example_unrolling_service/benchmarks") + +NEURO_VECTORIZER_HEADER: Path = Path( + "../compiler_gym/third_party/neuro-vectorizer/header.h" +) + + +class RuntimeReward(Reward): + """An example reward that uses changes in the "runtime" observation value + to compute incremental reward. 
+ """ + + def __init__(self): + super().__init__( + id="runtime", + observation_spaces=["runtime"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_runtime = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["runtime"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_runtime - observations[0]) / self.baseline_runtime + + +class SizeReward(Reward): + """An example reward that uses changes in the "size" observation value + to compute incremental reward. + """ + + def __init__(self): + super().__init__( + id="size", + observation_spaces=["size"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_size = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["size"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_size - observations[0]) / self.baseline_size + + +class UnrollingDataset(Dataset): + def __init__(self, *args, **kwargs): + super().__init__( + name="benchmark://unrolling-v0", + license="MIT", + description="Unrolling example dataset", + ) + + self._benchmarks = { + "benchmark://unrolling-v0/offsets1": Benchmark.from_file_contents( + "benchmark://unrolling-v0/offsets1", + self.preprocess(os.path.join(BENCHMARKS_PATH, "offsets1.c")), + ), + "benchmark://unrolling-v0/conv2d": Benchmark.from_file_contents( + "benchmark://unrolling-v0/conv2d", + self.preprocess(os.path.join(BENCHMARKS_PATH, "conv2d.c")), + ), + } + + @staticmethod + def preprocess(src: Path) -> bytes: + """Front a C source through the compiler frontend.""" + # TODO(github.com/facebookresearch/CompilerGym/issues/325): We can skip + # this pre-processing, or do it on the service side, once support for + # multi-file benchmarks lands. + cmd = [ + str(llvm.clang_path()), + "-E", + "-o", + "-", + "-I", + str(NEURO_VECTORIZER_HEADER.parent), + src, + ] + for directory in get_system_includes(): + cmd += ["-isystem", str(directory)] + return subprocess.check_output( + cmd, + timeout=300, + ) + + def benchmark_uris(self) -> Iterable[str]: + yield from self._benchmarks.keys() + + def benchmark(self, uri: str) -> Benchmark: + if uri in self._benchmarks: + return self._benchmarks[uri] + else: + raise LookupError("Unknown program name") + + +# Register the unrolling example service on module import. After importing this module, +# the unrolling-py-v0 environment will be available to gym.make(...). 
+ +register( + id="unrolling-py-v0", + entry_point="compiler_gym.envs:CompilerEnv", + kwargs={ + "service": UNROLLING_PY_SERVICE_BINARY, + "rewards": [RuntimeReward(), SizeReward()], + "datasets": [UnrollingDataset()], + }, +) + +env = compiler_gym.make( + "unrolling-py-v0", + benchmark="unrolling-v0/offsets1", + observation_space="features", + reward_space="runtime", +) +compiler_gym.set_debug_level(4) # TODO: check why this has no effect + +observation = env.reset() +print("observation: ", observation) + +print() + +observation, reward, done, info = env.step(env.action_space.sample()) +print("observation: ", observation) +print("reward: ", reward) +print("done: ", done) +print("info: ", info) + +print() + +observation, reward, done, info = env.step(env.action_space.sample()) +print("observation: ", observation) +print("reward: ", reward) +print("done: ", done) +print("info: ", info) + +# TODO: implement write_bitcode(..) or write_ir(..) +# env.write_bitcode("/tmp/output.bc") From 5fa4e6881732bc2e6683fbcffb70f2679710105b Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 25 Dec 2021 19:05:00 -0500 Subject: [PATCH 097/142] sucessfully build loop_unroller using CMakeLists --- CMakeLists.txt | 4 +++ examples/CMakeLists.txt | 1 + .../example_unrolling_service/CMakeLists.txt | 1 + .../loop_unroller/CMakeLists.txt | 25 +++++++++++++++++++ 4 files changed, 31 insertions(+) create mode 100644 examples/CMakeLists.txt create mode 100644 examples/example_unrolling_service/CMakeLists.txt create mode 100644 examples/example_unrolling_service/loop_unroller/CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt index 4ed57c2bc..66e7eb10b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -27,6 +27,7 @@ list(APPEND CMAKE_MODULE_PATH ) set(COMPILER_GYM_BUILD_TESTS OFF CACHE BOOL "Enable Compiler Gym tests.") +set(COMPILER_GYM_BUILD_EXAMPLES OFF CACHE BOOL "Enable Comiler Gym examples.") include(cg_macros) include(cg_copts) @@ -66,3 +67,6 @@ if(COMPILER_GYM_BUILD_TESTS) enable_testing() add_subdirectory(tests) endif() +if(COMPILER_GYM_BUILD_EXAMPLES) + add_subdirectory(examples) +endif() diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt new file mode 100644 index 000000000..47460fb2d --- /dev/null +++ b/examples/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(example_unrolling_service) diff --git a/examples/example_unrolling_service/CMakeLists.txt b/examples/example_unrolling_service/CMakeLists.txt new file mode 100644 index 000000000..af19cb0ce --- /dev/null +++ b/examples/example_unrolling_service/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(loop_unroller) diff --git a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt new file mode 100644 index 000000000..dcb30b451 --- /dev/null +++ b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
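+#
+# Builds the standalone loop_unroller tool and links it against the LLVM
+# components resolved by llvm_map_components_to_libnames() below.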
+ +cg_add_all_subdirs() + + +llvm_map_components_to_libnames(_LLVM_LIBS analysis core irreader support passes) +cg_cc_binary( + NAME + loop_unroller + SRCS + "loop_unroller.cc" + COPTS + "-Wall" + "-fdiagnostics-color=always" + "-fno-rtti" + ABS_DEPS + ${_LLVM_LIBS} + INCLUDES + ${LLVM_INCLUDE_DIRS} + DEFINES + ${LLVM_DEFINITIONS} +) From 1ead9dfcc8d1bdbd8a22a7a33f08a778da35f04f Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sun, 26 Dec 2021 15:52:00 -0500 Subject: [PATCH 098/142] update instructions --- INSTALL.md | 6 ++++++ .../example_without_bazel.py | 16 +++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index 4eb577d4f..b27105875 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -145,6 +145,12 @@ Additional optional configuration arguments: -DCOMPILER_GYM_BUILD_TESTS=ON ``` +* Builds additional tools required by some examples. + + ```bash + -DCOMPILER_GYM_BUILD_EXAMPLES=ON + ``` + * For faster linking. ```bash diff --git a/examples/example_unrolling_service/example_without_bazel.py b/examples/example_unrolling_service/example_without_bazel.py index d33b8ae7a..5f0ef6c61 100644 --- a/examples/example_unrolling_service/example_without_bazel.py +++ b/examples/example_unrolling_service/example_without_bazel.py @@ -2,7 +2,21 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -"""This module demonstrates how to """ +"""This script demonstrates how the Python example service without needing +to use the bazel build system. + +Prerequisite: + # In the repo's INSTALL.md, follow the 'Building from source using CMake' instructions with `-DCOMPILER_GYM_BUILD_EXAMPLES=ON` added to the `cmake` command + # Then copy the `loop_unroller` binary + $ cd /examples + $ cp /examples/example_unrolling_service/loop_unroller/loop_unroller ./example_unrolling_service/loop_unroller/ + +Usage: + + $ python example_unrolling_service/examples_without_bazel.py + +It is equivalent in behavior to the example.py script in this directory. +""" import os import subprocess from pathlib import Path From 155c0982310071b14fdf1846fd183d8588083e0c Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sun, 26 Dec 2021 16:42:45 -0500 Subject: [PATCH 099/142] make CMakeLists.txt consistent with others in the repo --- examples/CMakeLists.txt | 7 ++++++- examples/example_unrolling_service/CMakeLists.txt | 7 ++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 47460fb2d..8c0906a57 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1 +1,6 @@ -add_subdirectory(example_unrolling_service) +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() diff --git a/examples/example_unrolling_service/CMakeLists.txt b/examples/example_unrolling_service/CMakeLists.txt index af19cb0ce..8c0906a57 100644 --- a/examples/example_unrolling_service/CMakeLists.txt +++ b/examples/example_unrolling_service/CMakeLists.txt @@ -1 +1,6 @@ -add_subdirectory(loop_unroller) +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +cg_add_all_subdirs() From 7a4a9d6872d96c98164d157cd255d26ed43d8222 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sun, 26 Dec 2021 16:53:20 -0500 Subject: [PATCH 100/142] update README file --- examples/example_unrolling_service/README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/examples/example_unrolling_service/README.md b/examples/example_unrolling_service/README.md index 628dae3ef..db1618534 100644 --- a/examples/example_unrolling_service/README.md +++ b/examples/example_unrolling_service/README.md @@ -28,3 +28,23 @@ Run `env_tests.py` unit tests: ```sh $ bazel test //examples/example_unrolling_service:env_tests ``` + +### Using the python service without bazel + +Because the python service contains no compiled code, it can be run directly as +a standalone script without using the bazel build system. From the root of the +CompilerGym repository, + +1. Build the `loop_unroller` custom tool that modifies the unrolling factor of each loop in a LLVM IR file: +Follow the [Building from source using CMake](../../INSTALL.md#building-from-source-with-cmake) instructions with `-DCOMPILER_GYM_BUILD_EXAMPLES=ON` added to the `cmake` command. + +2. Copy the `loop_unroller` binary: +```sh +cp /examples/example_unrolling_service/loop_unroller/loop_unroller ./examples/example_unrolling_service/loop_unroller/ +``` + +3. Run the example +```sh +$ cd examples +$ python3 example_compiler_gym_service/demo_without_bazel.py +``` From f588453595b03eff72fba4bbcfb0828bb7646eb2 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Tue, 28 Dec 2021 12:44:30 -0500 Subject: [PATCH 101/142] build examples in CI --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d6427a45..855faf3e2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -75,6 +75,7 @@ jobs: -DCMAKE_SHARED_LINKER_FLAGS_INIT="-fuse-ld=lld" \ -DPython3_FIND_VIRTUALENV=FIRST \ -DCOMPILER_GYM_BUILD_TESTS=ON \ + -DCOMPILER_GYM_BUILD_EXAMPLES=ON \ -S . 
\ -B ~/cmake_build cmake --build ~/cmake_build From a47003f024ec3973dc9bf55300e3518a4ed4aad4 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Tue, 28 Dec 2021 13:09:24 -0500 Subject: [PATCH 102/142] remove unnecessary flags --- examples/example_unrolling_service/loop_unroller/CMakeLists.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt index dcb30b451..e50751a3d 100644 --- a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt +++ b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt @@ -13,8 +13,6 @@ cg_cc_binary( SRCS "loop_unroller.cc" COPTS - "-Wall" - "-fdiagnostics-color=always" "-fno-rtti" ABS_DEPS ${_LLVM_LIBS} From fb041c1e50213145356a60824b046fadcfde65a4 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Thu, 30 Dec 2021 00:46:14 -0500 Subject: [PATCH 103/142] create symlink of loop_unroller in CMake --- examples/example_unrolling_service/README.md | 10 ++-------- .../example_unrolling_service/example_without_bazel.py | 3 --- .../loop_unroller/CMakeLists.txt | 3 +++ 3 files changed, 5 insertions(+), 11 deletions(-) diff --git a/examples/example_unrolling_service/README.md b/examples/example_unrolling_service/README.md index db1618534..f4871ceaa 100644 --- a/examples/example_unrolling_service/README.md +++ b/examples/example_unrolling_service/README.md @@ -32,18 +32,12 @@ $ bazel test //examples/example_unrolling_service:env_tests ### Using the python service without bazel Because the python service contains no compiled code, it can be run directly as -a standalone script without using the bazel build system. From the root of the -CompilerGym repository, +a standalone script without using the bazel build system. 1. Build the `loop_unroller` custom tool that modifies the unrolling factor of each loop in a LLVM IR file: Follow the [Building from source using CMake](../../INSTALL.md#building-from-source-with-cmake) instructions with `-DCOMPILER_GYM_BUILD_EXAMPLES=ON` added to the `cmake` command. -2. Copy the `loop_unroller` binary: -```sh -cp /examples/example_unrolling_service/loop_unroller/loop_unroller ./examples/example_unrolling_service/loop_unroller/ -``` - -3. Run the example +2. 
Run the example ```sh $ cd examples $ python3 example_compiler_gym_service/demo_without_bazel.py diff --git a/examples/example_unrolling_service/example_without_bazel.py b/examples/example_unrolling_service/example_without_bazel.py index 5f0ef6c61..a5bd71657 100644 --- a/examples/example_unrolling_service/example_without_bazel.py +++ b/examples/example_unrolling_service/example_without_bazel.py @@ -7,10 +7,7 @@ Prerequisite: # In the repo's INSTALL.md, follow the 'Building from source using CMake' instructions with `-DCOMPILER_GYM_BUILD_EXAMPLES=ON` added to the `cmake` command - # Then copy the `loop_unroller` binary $ cd /examples - $ cp /examples/example_unrolling_service/loop_unroller/loop_unroller ./example_unrolling_service/loop_unroller/ - Usage: $ python example_unrolling_service/examples_without_bazel.py diff --git a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt index e50751a3d..17f605535 100644 --- a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt +++ b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt @@ -21,3 +21,6 @@ cg_cc_binary( DEFINES ${LLVM_DEFINITIONS} ) + +ADD_CUSTOM_TARGET(link_target ALL + COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_BINARY_DIR}/examples/example_unrolling_service/loop_unroller/loop_unroller ${CMAKE_SOURCE_DIR}/examples/example_unrolling_service/loop_unroller/loop_unroller) From 49ea948bee1fc079cdbc2c093171aa34496dbc1f Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Tue, 23 Nov 2021 23:37:54 -0500 Subject: [PATCH 104/142] copied unrolling service --- examples/loop_optimizations_service/BUILD | 38 +++ examples/loop_optimizations_service/README.md | 30 ++ .../loop_optimizations_service/__init__.py | 145 +++++++++ .../benchmarks/BUILD | 12 + .../benchmarks/conv2d.c | 73 +++++ .../benchmarks/offsets1.c | 26 ++ .../loop_optimizations_service/env_tests.py | 218 +++++++++++++ .../loop_optimizations_service/example.py | 32 ++ .../loop_unroller/BUILD | 23 ++ .../loop_unroller/README.md | 6 + .../loop_unroller/loop_unroller.cc | 201 ++++++++++++ .../service_py/BUILD | 21 ++ .../service_py/example_service.py | 304 ++++++++++++++++++ .../service_py/utils.py | 61 ++++ 14 files changed, 1190 insertions(+) create mode 100644 examples/loop_optimizations_service/BUILD create mode 100644 examples/loop_optimizations_service/README.md create mode 100644 examples/loop_optimizations_service/__init__.py create mode 100644 examples/loop_optimizations_service/benchmarks/BUILD create mode 100644 examples/loop_optimizations_service/benchmarks/conv2d.c create mode 100644 examples/loop_optimizations_service/benchmarks/offsets1.c create mode 100644 examples/loop_optimizations_service/env_tests.py create mode 100644 examples/loop_optimizations_service/example.py create mode 100644 examples/loop_optimizations_service/loop_unroller/BUILD create mode 100644 examples/loop_optimizations_service/loop_unroller/README.md create mode 100644 examples/loop_optimizations_service/loop_unroller/loop_unroller.cc create mode 100644 examples/loop_optimizations_service/service_py/BUILD create mode 100755 examples/loop_optimizations_service/service_py/example_service.py create mode 100644 examples/loop_optimizations_service/service_py/utils.py diff --git a/examples/loop_optimizations_service/BUILD b/examples/loop_optimizations_service/BUILD new file mode 100644 index 000000000..010c0cd3c --- /dev/null +++ b/examples/loop_optimizations_service/BUILD @@ -0,0 +1,38 @@ +# Copyright 
(c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +py_library( + name = "loop_optimizations_service", + srcs = ["__init__.py"], + data = [ + "//examples/loop_optimizations_service/benchmarks", + "//examples/loop_optimizations_service/service_py:example-unrolling-service-py", + ], + visibility = ["//visibility:public"], + deps = [ + "//compiler_gym/envs/llvm", + "//compiler_gym/util", + ], +) + +py_test( + name = "env_tests", + srcs = ["env_tests.py"], + deps = [ + ":loop_optimizations_service", + "//compiler_gym", + "//tests:test_main", + ], +) + +py_binary( + name = "example", + srcs = ["example.py"], + deps = [ + ":loop_optimizations_service", + "//compiler_gym", + ], +) diff --git a/examples/loop_optimizations_service/README.md b/examples/loop_optimizations_service/README.md new file mode 100644 index 000000000..628dae3ef --- /dev/null +++ b/examples/loop_optimizations_service/README.md @@ -0,0 +1,30 @@ +# Unrolling CompilerGym Service Example + +This is an example of how to create your own CompilerGym environment. All paths listed below are relative to the path of this README file. + +* Actions: this environment focuses on the unrolling optimization. The actions are the different unrolling factors. + - The actions are listed in `action_spaces` struct in `service_py/example_service.py` + - The actions are implemented in `apply_action(...)` function in `service_py/example_service.py` +* Observations: the observations are: textual form of the LLVM IR, statistical features of different types of IR instructions, runtime execution, or code size + - The observations are listed in `observation_spaces` struct in `service_py/example_service.py`. + - The observations are implemented in `get_observation(...)` function in `service_py/example_service.py` +* Rewards: the rewards could be runtime or code size. + - The rewards are implemented in `__init__.py` and they reuse the runtime and code size observations mentioned above +* Benchmarks: this environment expects your benchmarks to follow the templates from the [Neruovectorizer repo](https://github.com/intel/neuro-vectorizer/tree/master/training_data) repo, that was in turn adapted from the [LLVM loop test suite](https://github.com/llvm/llvm-test-suite/blob/main/SingleSource/UnitTests/Vectorizer/gcc-loops.cpp). + - To implement your benchmark, you need to: include the `header.h` file, implement your benchmark in a custom function, then invoke it using `BENCH` macro inside the `main()` function. + - Following this template is necessary in order for the benchmark to measure the execution runtime and write it to stdout, which is in turn parsed by this environment to measure the runtime reward. + - You can view and add examples of benchmarks in `benchmarks` directory + - Also, when adding your own benchmark, you need to add it to the `UnrollingDataset` class in `__init__.py` + +## Usage + +Run `example.py` example: +```sh +$ bazel run //examples/example_unrolling_service:example +``` + +Run `env_tests.py` unit tests: + +```sh +$ bazel test //examples/example_unrolling_service:env_tests +``` diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py new file mode 100644 index 000000000..c89938af3 --- /dev/null +++ b/examples/loop_optimizations_service/__init__.py @@ -0,0 +1,145 @@ +# Copyright (c) Facebook, Inc. 
and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""This module demonstrates how to """ +import subprocess +from pathlib import Path +from typing import Iterable + +from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.envs.llvm.llvm_benchmark import get_system_includes +from compiler_gym.spaces import Reward +from compiler_gym.third_party import llvm +from compiler_gym.util.registration import register +from compiler_gym.util.runfiles_path import runfiles_path, site_data_path + +UNROLLING_PY_SERVICE_BINARY: Path = runfiles_path( + "examples/loop_optimizations_service/service_py/example-unrolling-service-py" +) + +BENCHMARKS_PATH: Path = runfiles_path("examples/loop_optimizations_service/benchmarks") + +NEURO_VECTORIZER_HEADER: Path = runfiles_path( + "compiler_gym/third_party/neuro-vectorizer/header.h" +) + + +class RuntimeReward(Reward): + """An example reward that uses changes in the "runtime" observation value + to compute incremental reward. + """ + + def __init__(self): + super().__init__( + id="runtime", + observation_spaces=["runtime"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_runtime = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["runtime"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_runtime - observations[0]) / self.baseline_runtime + + +class SizeReward(Reward): + """An example reward that uses changes in the "size" observation value + to compute incremental reward. + """ + + def __init__(self): + super().__init__( + id="size", + observation_spaces=["size"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_size = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["size"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_size - observations[0]) / self.baseline_size + + +class UnrollingDataset(Dataset): + def __init__(self, *args, **kwargs): + super().__init__( + name="benchmark://unrolling-v0", + license="MIT", + description="Unrolling example dataset", + site_data_base=site_data_path( + "example_dataset" + ), # TODO: what should we set this to? we are not using it + ) + + self._benchmarks = { + "benchmark://unrolling-v0/offsets1": Benchmark.from_file_contents( + "benchmark://unrolling-v0/offsets1", + self.preprocess(BENCHMARKS_PATH / "offsets1.c"), + ), + "benchmark://unrolling-v0/conv2d": Benchmark.from_file_contents( + "benchmark://unrolling-v0/conv2d", + self.preprocess(BENCHMARKS_PATH / "conv2d.c"), + ), + } + + @staticmethod + def preprocess(src: Path) -> bytes: + """Front a C source through the compiler frontend.""" + # TODO(github.com/facebookresearch/CompilerGym/issues/325): We can skip + # this pre-processing, or do it on the service side, once support for + # multi-file benchmarks lands. 
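+        # Run only the clang preprocessor (-E) over the source, adding the
+        # neuro-vectorizer header directory and the host system include
+        # directories, and return the preprocessed source as bytes.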
+ cmd = [ + str(llvm.clang_path()), + "-E", + "-o", + "-", + "-I", + str(NEURO_VECTORIZER_HEADER.parent), + src, + ] + for directory in get_system_includes(): + cmd += ["-isystem", str(directory)] + return subprocess.check_output( + cmd, + timeout=300, + ) + + def benchmark_uris(self) -> Iterable[str]: + yield from self._benchmarks.keys() + + def benchmark(self, uri: str) -> Benchmark: + if uri in self._benchmarks: + return self._benchmarks[uri] + else: + raise LookupError("Unknown program name") + + +# Register the unrolling example service on module import. After importing this module, +# the unrolling-py-v0 environment will be available to gym.make(...). + +register( + id="unrolling-py-v0", + entry_point="compiler_gym.envs:CompilerEnv", + kwargs={ + "service": UNROLLING_PY_SERVICE_BINARY, + "rewards": [RuntimeReward(), SizeReward()], + "datasets": [UnrollingDataset()], + }, +) diff --git a/examples/loop_optimizations_service/benchmarks/BUILD b/examples/loop_optimizations_service/benchmarks/BUILD new file mode 100644 index 000000000..f565b5020 --- /dev/null +++ b/examples/loop_optimizations_service/benchmarks/BUILD @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +filegroup( + name = "benchmarks", + srcs = glob(["*.c"]) + [ + "//compiler_gym/third_party/neuro-vectorizer:header", + ], + visibility = ["//visibility:public"], +) diff --git a/examples/loop_optimizations_service/benchmarks/conv2d.c b/examples/loop_optimizations_service/benchmarks/conv2d.c new file mode 100644 index 000000000..9848d9529 --- /dev/null +++ b/examples/loop_optimizations_service/benchmarks/conv2d.c @@ -0,0 +1,73 @@ +#include "header.h" + +// TODO: use templates instead of macros +#ifndef N +#define N 32 +#endif + +#ifndef Ih +#define Ih 3 +#endif + +#ifndef Iw +#define Iw 12 +#endif + +#ifndef Ic +#define Ic 12 +#endif + +#ifndef Oc +#define Oc 64 +#endif + +#ifndef Kh +#define Kh 3 +#endif + +#ifndef Kw +#define Kw 3 +#endif + +// TODO: include pad, stride, and dilation + +#define Oh Ih - Kh + 1 +#define Ow Iw - Kw + 1 + +float x[N][Ih][Iw][Ic]; +float w[Oc][Kh][Kw][Ic]; +float y[N][Oh][Ow][Oc]; + +__attribute__((noinline)) +//template +void conv2d(int* ret) { + // loop over output + for (int n = 0; n < N; n++) { + for (int oh = 0; oh < Oh; oh++) { + for (int ow = 0; ow < Ow; ow++) { + for (int oc = 0; oc < Oc; oc++) { + y[n][oh][ow][oc] = 0; +// loop over filter +#pragma unroll(Kh) + for (int kh = 0; kh < Kh; kh++) { + for (int kw = 0; kw < Kw; kw++) { + for (int ic = 0; ic < Iw; ic++) { + // TODO: include pad, stride, and dilation + y[n][oh][ow][oc] += w[oc][kh][kw][ic] * x[n][oh - kh + 1][ow - kw + 1][ic]; + } + } + } + } + } + } + } + *ret = y[N - 1][Oh - 1][Ow - 1][Oc - 1]; +} + +__attribute__((optnone)) int main(int argc, char* argv[]) { + int dummy = 0; + // TODO: initialize tensors + BENCH("conv2d", conv2d(&dummy), 100, dummy); + + return 0; +} diff --git a/examples/loop_optimizations_service/benchmarks/offsets1.c b/examples/loop_optimizations_service/benchmarks/offsets1.c new file mode 100644 index 000000000..b5206cabf --- /dev/null +++ b/examples/loop_optimizations_service/benchmarks/offsets1.c @@ -0,0 +1,26 @@ +#include "header.h" + +#ifndef N +#define N 1000000 +#endif + +#ifndef n +#define n 3 +#endif + +int A[N]; + +__attribute__((noinline)) void example1(int* ret) { + //#pragma unroll(n) + for (int i = 0; i < N - 3; i++) A[i] = A[i + 1] + A[i + 2] + 
A[i + 3]; + + *ret = A[N - 1]; +} + +__attribute__((optnone)) int main(int argc, char* argv[]) { + int dummy = 0; + // TODO: initialize tensors + BENCH("example1", example1(&dummy), 100, dummy); + + return 0; +} diff --git a/examples/loop_optimizations_service/env_tests.py b/examples/loop_optimizations_service/env_tests.py new file mode 100644 index 000000000..766f8a0e5 --- /dev/null +++ b/examples/loop_optimizations_service/env_tests.py @@ -0,0 +1,218 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""Tests for the unrolling CompilerGym service example.""" +import subprocess +from pathlib import Path + +import gym +import numpy as np +import pytest + +import compiler_gym +import examples.loop_optimizations_service as unrolling_service +from compiler_gym.envs import CompilerEnv +from compiler_gym.service import SessionNotFound +from compiler_gym.spaces import Box, NamedDiscrete, Scalar, Sequence +from tests.test_main import main + + +@pytest.fixture(scope="function") +def env() -> CompilerEnv: + """Text fixture that yields an environment.""" + with gym.make("unrolling-py-v0") as env_: + yield env_ + + +@pytest.fixture(scope="module") +def bin() -> Path: + return unrolling_service.UNROLLING_PY_SERVICE_BINARY + + +def test_invalid_arguments(bin: Path): + """Test that running the binary with unrecognized arguments is an error.""" + + def run(cmd): + p = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True + ) + stdout, stderr = p.communicate(timeout=10) + return p.returncode, stdout, stderr + + returncode, _, stderr = run([str(bin), "foobar"]) + assert "ERROR:" in stderr + assert "'foobar'" in stderr + assert returncode == 1 + + returncode, _, stderr = run([str(bin), "--foobar"]) + # C++ and python flag parsing library emit slightly different error + # messages. 
+ assert "ERROR:" in stderr or "FATAL" in stderr + assert "'foobar'" in stderr + assert returncode == 1 + + +def test_versions(env: CompilerEnv): + """Tests the GetVersion() RPC endpoint.""" + assert env.version == compiler_gym.__version__ + assert env.compiler_version == "1.0.0" + + +def test_action_space(env: CompilerEnv): + """Test that the environment reports the service's action spaces.""" + assert env.action_spaces == [ + NamedDiscrete( + name="unrolling", + items=[ + "-loop-unroll -unroll-count=2", + "-loop-unroll -unroll-count=4", + "-loop-unroll -unroll-count=8", + ], + ) + ] + + +def test_observation_spaces(env: CompilerEnv): + """Test that the environment reports the service's observation spaces.""" + env.reset() + assert env.observation.spaces.keys() == {"ir", "features", "runtime", "size"} + assert env.observation.spaces["ir"].space == Sequence( + name="ir", size_range=(0, None), dtype=str, opaque_data_format="" + ) + assert env.observation.spaces["features"].space == Box( + name="features", shape=(3,), low=0, high=1e5, dtype=int + ) + assert env.observation.spaces["runtime"].space == Scalar( + name="runtime", min=0, max=np.inf, dtype=float + ) + assert env.observation.spaces["size"].space == Scalar( + name="size", min=0, max=np.inf, dtype=float + ) + + +def test_reward_spaces(env: CompilerEnv): + """Test that the environment reports the service's reward spaces.""" + env.reset() + assert env.reward.spaces.keys() == {"runtime", "size"} + + +def test_step_before_reset(env: CompilerEnv): + """Taking a step() before reset() is illegal.""" + with pytest.raises(SessionNotFound, match=r"Must call reset\(\) before step\(\)"): + env.step(0) + + +def test_observation_before_reset(env: CompilerEnv): + """Taking an observation before reset() is illegal.""" + with pytest.raises(SessionNotFound, match=r"Must call reset\(\) before step\(\)"): + _ = env.observation["ir"] + + +def test_reward_before_reset(env: CompilerEnv): + """Taking a reward before reset() is illegal.""" + with pytest.raises(SessionNotFound, match=r"Must call reset\(\) before step\(\)"): + _ = env.reward["runtime"] + + +def test_reset_invalid_benchmark(env: CompilerEnv): + """Test requesting a specific benchmark.""" + with pytest.raises(LookupError) as ctx: + env.reset(benchmark="unrolling-v0/foobar") + assert str(ctx.value) == "Unknown program name" + + +def test_invalid_observation_space(env: CompilerEnv): + """Test error handling with invalid observation space.""" + with pytest.raises(LookupError): + env.observation_space = 100 + + +def test_invalid_reward_space(env: CompilerEnv): + """Test error handling with invalid reward space.""" + with pytest.raises(LookupError): + env.reward_space = 100 + + +def test_double_reset(env: CompilerEnv): + """Test that reset() can be called twice.""" + env.reset() + assert env.in_episode + env.reset() + assert env.in_episode + + +def test_Step_out_of_range(env: CompilerEnv): + """Test error handling with an invalid action.""" + env.reset() + with pytest.raises(ValueError) as ctx: + env.step(100) + assert str(ctx.value) == "Out-of-range" + + +def test_default_ir_observation(env: CompilerEnv): + """Test default observation space.""" + env.observation_space = "ir" + observation = env.reset() + assert len(observation) > 0 + + observation, reward, done, info = env.step(0) + assert not done, info + assert len(observation) > 0 + assert reward is None + + +def test_default_features_observation(env: CompilerEnv): + """Test default observation space.""" + env.observation_space = "features" + 
observation = env.reset() + assert isinstance(observation, np.ndarray) + assert observation.shape == (3,) + assert observation.dtype == np.int64 + assert all(obs >= 0 for obs in observation.tolist()) + + +def test_default_reward(env: CompilerEnv): + """Test default reward space.""" + env.reward_space = "runtime" + env.reset() + observation, reward, done, info = env.step(0) + assert not done, info + assert observation is None + assert reward is not None + + +def test_observations(env: CompilerEnv): + """Test observation spaces.""" + env.reset() + assert len(env.observation["ir"]) > 0 + np.testing.assert_array_less([-1, -1, -1], env.observation["features"]) + + +def test_rewards(env: CompilerEnv): + """Test reward spaces.""" + env.reset() + assert env.reward["runtime"] is not None + + +def test_benchmarks(env: CompilerEnv): + assert list(env.datasets.benchmark_uris()) == [ + "benchmark://unrolling-v0/offsets1", + "benchmark://unrolling-v0/conv2d", + ] + + +def test_fork(env: CompilerEnv): + env.reset() + env.step(0) + env.step(1) + other_env = env.fork() + try: + assert env.benchmark == other_env.benchmark + assert other_env.actions == [0, 1] + finally: + other_env.close() + + +if __name__ == "__main__": + main() diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py new file mode 100644 index 000000000..546111fb5 --- /dev/null +++ b/examples/loop_optimizations_service/example.py @@ -0,0 +1,32 @@ +import compiler_gym +import examples.loop_optimizations_service as unrolling_service # noqa Register environments. + +env = compiler_gym.make( + "unrolling-py-v0", + benchmark="unrolling-v0/offsets1", + observation_space="features", + reward_space="runtime", +) +compiler_gym.set_debug_level(4) # TODO: check why this has no effect + +observation = env.reset() +print("observation: ", observation) + +print() + +observation, reward, done, info = env.step(env.action_space.sample()) +print("observation: ", observation) +print("reward: ", reward) +print("done: ", done) +print("info: ", info) + +print() + +observation, reward, done, info = env.step(env.action_space.sample()) +print("observation: ", observation) +print("reward: ", reward) +print("done: ", done) +print("info: ", info) + +# TODO: implement write_bitcode(..) or write_ir(..) +# env.write_bitcode("/tmp/output.bc") diff --git a/examples/loop_optimizations_service/loop_unroller/BUILD b/examples/loop_optimizations_service/loop_unroller/BUILD new file mode 100644 index 000000000..3bec18c35 --- /dev/null +++ b/examples/loop_optimizations_service/loop_unroller/BUILD @@ -0,0 +1,23 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the LICENSE file +# in the root directory of this source tree. +# +# This package exposes the LLVM optimization pipeline as a CompilerGym service. +load("@rules_cc//cc:defs.bzl", "cc_binary") + +cc_binary( + name = "loop_unroller", + srcs = [ + "loop_unroller.cc", + ], + copts = [ + "-Wall", + "-fdiagnostics-color=always", + "-fno-rtti", + ], + visibility = ["//visibility:public"], + deps = [ + "@llvm//10.0.0", + ], +) diff --git a/examples/loop_optimizations_service/loop_unroller/README.md b/examples/loop_optimizations_service/loop_unroller/README.md new file mode 100644 index 000000000..5684e6580 --- /dev/null +++ b/examples/loop_optimizations_service/loop_unroller/README.md @@ -0,0 +1,6 @@ +LLVM's opt does not always enforce the unrolling options passed as cli arguments. 
Hence, we created our own exeutable with custom unrolling pass in examples/example_unrolling_service/loop_unroller that enforces the unrolling factors passed in its cli. + +To run the custom unroller: +``` +bazel run //examples/example_unrolling_service/loop_unroller:loop_unroller -- .ll --funroll-count= -S -o .ll +``` diff --git a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc new file mode 100644 index 000000000..29c87559a --- /dev/null +++ b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc @@ -0,0 +1,201 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. +#include +#include +#include +#include +#include + +#include "llvm/ADT/SetVector.h" +#include "llvm/ADT/SmallPtrSet.h" +#include "llvm/ADT/SmallVector.h" +#include "llvm/Analysis/LoopInfo.h" +#include "llvm/Bitcode/BitcodeWriterPass.h" +#include "llvm/IR/BasicBlock.h" +#include "llvm/IR/Function.h" +#include "llvm/IR/IRBuilder.h" +#include "llvm/IR/IRPrintingPasses.h" +#include "llvm/IR/InstIterator.h" +#include "llvm/IR/LegacyPassManager.h" +#include "llvm/IR/Module.h" +#include "llvm/IR/Verifier.h" +#include "llvm/IRReader/IRReader.h" +#include "llvm/InitializePasses.h" +#include "llvm/Pass.h" +#include "llvm/Support/CommandLine.h" +#include "llvm/Support/Debug.h" +#include "llvm/Support/FileSystem.h" +#include "llvm/Support/SourceMgr.h" +#include "llvm/Support/SystemUtils.h" +#include "llvm/Support/ToolOutputFile.h" +#include "llvm/Transforms/Scalar.h" +#include "llvm/Transforms/Utils/LoopUtils.h" + +using namespace llvm; + +namespace llvm { +/// Input LLVM module file name. +cl::opt InputFilename(cl::Positional, cl::desc("Specify input filename"), + cl::value_desc("filename"), cl::init("-")); +/// Output LLVM module file name. +cl::opt OutputFilename("o", cl::desc("Specify output filename"), + cl::value_desc("filename"), cl::init("-")); + +static cl::opt UnrollEnable("floop-unroll", cl::desc("Enable loop unrolling"), + cl::init(true)); + +static cl::opt UnrollCount( + "funroll-count", cl::desc("Use this unroll count for all loops including those with " + "unroll_count pragma values, for testing purposes")); + +// Force binary on terminals +static cl::opt Force("f", cl::desc("Enable binary output on terminals")); + +// Output assembly +static cl::opt OutputAssembly("S", cl::desc("Write output as LLVM assembly")); + +// Preserve use list order +static cl::opt PreserveBitcodeUseListOrder( + "preserve-bc-uselistorder", cl::desc("Preserve use-list order when writing LLVM bitcode."), + cl::init(true), cl::Hidden); + +static cl::opt PreserveAssemblyUseListOrder( + "preserve-ll-uselistorder", cl::desc("Preserve use-list order when writing LLVM assembly."), + cl::init(false), cl::Hidden); + +// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. +void initializeLoopCounterPass(PassRegistry& Registry); + +class LoopCounter : public llvm::FunctionPass { + public: + static char ID; + std::unordered_map counts; + + LoopCounter() : FunctionPass(ID) {} + + virtual void getAnalysisUsage(AnalysisUsage& AU) const override { + AU.addRequired(); + } + + bool runOnFunction(llvm::Function& F) override { + LoopInfo& LI = getAnalysis().getLoopInfo(); + auto Loops = LI.getLoopsInPreorder(); + + // Should really account for module, too. 
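+    // Record how many loops LoopInfo found in this function, keyed by function name.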
+ counts[F.getName().str()] = Loops.size(); + + return false; + } +}; + +// Initialise the pass. We have to declare the dependencies we use. +char LoopCounter::ID = 0; +INITIALIZE_PASS_BEGIN(LoopCounter, "count-loops", "Count loops", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) + +// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. +void initializeLoopUnrollConfiguratorPass(PassRegistry& Registry); + +class LoopUnrollConfigurator : public llvm::FunctionPass { + public: + static char ID; + + LoopUnrollConfigurator() : FunctionPass(ID) {} + + virtual void getAnalysisUsage(AnalysisUsage& AU) const override { + AU.addRequired(); + } + + bool runOnFunction(llvm::Function& F) override { + LoopInfo& LI = getAnalysis().getLoopInfo(); + auto Loops = LI.getLoopsInPreorder(); + + // Should really account for module, too. + for (auto ALoop : Loops) { + if (UnrollEnable) + addStringMetadataToLoop(ALoop, "llvm.loop.unroll.enable", UnrollEnable); + if (UnrollCount) + addStringMetadataToLoop(ALoop, "llvm.loop.unroll.count", UnrollCount); + } + + return false; + } +}; + +// Initialise the pass. We have to declare the dependencies we use. +char LoopUnrollConfigurator::ID = 1; +INITIALIZE_PASS_BEGIN(LoopUnrollConfigurator, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopUnrollConfigurator, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) + +/// Reads a module from a file. +/// On error, messages are written to stderr and null is returned. +/// +/// \param Context LLVM Context for the module. +/// \param Name Input file name. 
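+/// \returns The parsed module, or null if parsing failed.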
+static std::unique_ptr readModule(LLVMContext& Context, StringRef Name) { + SMDiagnostic Diag; + std::unique_ptr Module = parseIRFile(Name, Diag, Context); + + if (!Module) + Diag.print("llvm-counter", errs()); + + return Module; +} + +} // namespace llvm + +int main(int argc, char** argv) { + cl::ParseCommandLineOptions(argc, argv, + " LLVM-Counter\n\n" + " Count the loops in a bitcode file.\n"); + + LLVMContext Context; + SMDiagnostic Err; + SourceMgr SM; + std::error_code EC; + + std::unique_ptr Module = readModule(Context, InputFilename); + + if (!Module) + return 1; + + // Prepare output + ToolOutputFile Out(OutputFilename, EC, sys::fs::OF_None); + if (EC) { + Err = SMDiagnostic(OutputFilename, SourceMgr::DK_Error, + "Could not open output file: " + EC.message()); + Err.print(argv[0], errs()); + return 1; + } + + // Run the passes + initializeLoopCounterPass(*PassRegistry::getPassRegistry()); + legacy::PassManager PM; + LoopCounter* Counter = new LoopCounter(); + LoopUnrollConfigurator* UnrollConfigurator = new LoopUnrollConfigurator(); + PM.add(Counter); + PM.add(UnrollConfigurator); + PM.add(createLoopUnrollPass()); + // Passes to output the module + if (OutputAssembly) { + PM.add(createPrintModulePass(Out.os(), "", PreserveAssemblyUseListOrder)); + } else if (Force || !CheckBitcodeOutputToConsole(Out.os())) { + PM.add(createBitcodeWriterPass(Out.os(), PreserveBitcodeUseListOrder)); + } + PM.run(*Module); + + // Log loop stats + for (auto& x : Counter->counts) { + llvm::dbgs() << x.first << ": " << x.second << " loops" << '\n'; + } + + Out.keep(); + + return 0; +} diff --git a/examples/loop_optimizations_service/service_py/BUILD b/examples/loop_optimizations_service/service_py/BUILD new file mode 100644 index 000000000..f9095f0f0 --- /dev/null +++ b/examples/loop_optimizations_service/service_py/BUILD @@ -0,0 +1,21 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library") + +py_binary( + name = "example-unrolling-service-py", + srcs = ["example_service.py"], + data = [ + "//examples/loop_optimizations_service/loop_unroller", + ], + main = "example_service.py", + visibility = ["//visibility:public"], + deps = [ + "//compiler_gym/service", + "//compiler_gym/service/proto", + "//compiler_gym/service/runtime", + "//compiler_gym/third_party/llvm", + ], +) diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py new file mode 100755 index 000000000..e115c5de5 --- /dev/null +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -0,0 +1,304 @@ +#! /usr/bin/env python3 +# +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+"""An example CompilerGym service in python.""" +import logging +import os +import shutil +import subprocess +from pathlib import Path +from typing import Optional, Tuple + +import numpy as np +import utils + +import compiler_gym.third_party.llvm as llvm +from compiler_gym.service import CompilationSession +from compiler_gym.service.proto import ( + Action, + ActionSpace, + Benchmark, + ChoiceSpace, + NamedDiscreteSpace, + Observation, + ObservationSpace, + ScalarLimit, + ScalarRange, + ScalarRangeList, +) +from compiler_gym.service.runtime import create_and_run_compiler_gym_service +from compiler_gym.util.commands import run_command + + +class UnrollingCompilationSession(CompilationSession): + """Represents an instance of an interactive compilation session.""" + + compiler_version: str = "1.0.0" + + # The list of actions that are supported by this service. + action_spaces = [ + ActionSpace( + name="unrolling", + choice=[ + ChoiceSpace( + name="unroll_choice", + named_discrete_space=NamedDiscreteSpace( + value=[ + "-loop-unroll -unroll-count=2", + "-loop-unroll -unroll-count=4", + "-loop-unroll -unroll-count=8", + ], + ), + ) + ], + ) + ] + + # A list of observation spaces supported by this service. Each of these + # ObservationSpace protos describes an observation space. + observation_spaces = [ + ObservationSpace( + name="ir", + string_size_range=ScalarRange(min=ScalarLimit(value=0)), + deterministic=True, + platform_dependent=False, + default_value=Observation(string_value=""), + ), + ObservationSpace( + name="features", + int64_range_list=ScalarRangeList( + range=[ + ScalarRange(min=ScalarLimit(value=0), max=ScalarLimit(value=1e5)), + ScalarRange(min=ScalarLimit(value=0), max=ScalarLimit(value=1e5)), + ScalarRange(min=ScalarLimit(value=0), max=ScalarLimit(value=1e5)), + ] + ), + ), + ObservationSpace( + name="runtime", + scalar_double_range=ScalarRange(min=ScalarLimit(value=0)), + deterministic=False, + platform_dependent=True, + default_value=Observation( + scalar_double=0, + ), + ), + ObservationSpace( + name="size", + scalar_double_range=ScalarRange(min=ScalarLimit(value=0)), + deterministic=True, + platform_dependent=True, + default_value=Observation( + scalar_double=0, + ), + ), + ] + + def __init__( + self, + working_directory: Path, + action_space: ActionSpace, + benchmark: Benchmark, + use_custom_opt: bool = True, + ): + super().__init__(working_directory, action_space, benchmark) + logging.info("Started a compilation session for %s", benchmark.uri) + self._benchmark = benchmark + self._action_space = action_space + + # Resolve the paths to LLVM binaries once now. + self._clang = str(llvm.clang_path()) + self._llc = str(llvm.llc_path()) + self._llvm_diff = str(llvm.llvm_diff_path()) + self._opt = str(llvm.opt_path()) + # LLVM's opt does not always enforce the unrolling options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/example_unrolling_service/loop_unroller that enforces the unrolling factors passed in its cli. + # if self._use_custom_opt is true, use our custom exeutable, otherwise use LLVM's opt + self._use_custom_opt = use_custom_opt + + # Dump the benchmark source to disk. 
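+        # The raw source is written to benchmark.c and then lowered to LLVM IR
+        # (benchmark.ll) by the clang invocation below.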
+ self._src_path = str(self.working_dir / "benchmark.c") + with open(self.working_dir / "benchmark.c", "wb") as f: + f.write(benchmark.program.contents) + + self._llvm_path = str(self.working_dir / "benchmark.ll") + self._llvm_before_path = str(self.working_dir / "benchmark.previous.ll") + self._obj_path = str(self.working_dir / "benchmark.o") + self._exe_path = str(self.working_dir / "benchmark.exe") + + run_command( + [ + self._clang, + "-Xclang", + "-disable-O0-optnone", + "-emit-llvm", + "-S", + self._src_path, + "-o", + self._llvm_path, + ], + timeout=30, + ) + + def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], bool]: + num_choices = len(self._action_space.choice[0].named_discrete_space.value) + + if len(action.choice) != 1: + raise ValueError("Invalid choice count") + + # This is the index into the action space's values ("a", "b", "c") that + # the user selected, e.g. 0 -> "a", 1 -> "b", 2 -> "c". + choice_index = action.choice[0].named_discrete_value_index + if choice_index < 0 or choice_index >= num_choices: + raise ValueError("Out-of-range") + + args = self._action_space.choice[0].named_discrete_space.value[choice_index] + logging.info( + "Applying action %d, equivalent command-line arguments: '%s'", + choice_index, + args, + ) + args = args.split() + + # make a copy of the LLVM file to compare its contents after applying the action + shutil.copyfile(self._llvm_path, self._llvm_before_path) + + # apply action + if self._use_custom_opt: + # our custom unroller has an additional `f` at the beginning of each argument + for i, arg in enumerate(args): + # convert - to -f + arg = arg[0] + "f" + arg[1:] + args[i] = arg + run_command( + [ + "../loop_unroller/loop_unroller", + self._llvm_path, + *args, + "-S", + "-o", + self._llvm_path, + ], + timeout=30, + ) + else: + run_command( + [ + self._opt, + *args, + self._llvm_path, + "-S", + "-o", + self._llvm_path, + ], + timeout=30, + ) + + # compare the IR files to check if the action had an effect + try: + subprocess.check_call( + [self._llvm_diff, self._llvm_before_path, self._llvm_path], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + timeout=60, + ) + action_had_no_effect = True + except subprocess.CalledProcessError: + action_had_no_effect = False + + end_of_session = False # TODO: this needs investigation: for how long can we apply loop unrolling? e.g., detect if there are no more loops in the IR? 
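+        # The action space does not change during an episode, so no replacement
+        # action space is returned.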
+ new_action_space = None + return (end_of_session, new_action_space, action_had_no_effect) + + @property + def ir(self) -> str: + with open(self._llvm_path) as f: + return f.read() + + def get_observation(self, observation_space: ObservationSpace) -> Observation: + logging.info("Computing observation from space %s", observation_space.name) + if observation_space.name == "ir": + return Observation(string_value=self.ir) + elif observation_space.name == "features": + stats = utils.extract_statistics_from_ir(self.ir) + observation = Observation() + observation.int64_list.value[:] = list(stats.values()) + return observation + elif observation_space.name == "runtime": + # compile LLVM to object file + run_command( + [ + self._llc, + "-filetype=obj", + self._llvm_path, + "-o", + self._obj_path, + ], + timeout=30, + ) + + # build object file to binary + run_command( + [ + "clang", + self._obj_path, + "-O3", + "-o", + self._exe_path, + ], + timeout=30, + ) + + # TODO: add documentation that benchmarks need print out execution time + # Running 5 times and taking the average of middle 3 + exec_times = [] + for _ in range(5): + stdout = run_command( + [self._exe_path], + timeout=30, + ) + try: + exec_times.append(int(stdout)) + except ValueError: + raise ValueError( + f"Error in parsing execution time from output of command\n" + f"Please ensure that the source code of the benchmark measures execution time and prints to stdout\n" + f"Stdout of the program: {stdout}" + ) + exec_times = np.sort(exec_times) + avg_exec_time = np.mean(exec_times[1:4]) + return Observation(scalar_double=avg_exec_time) + elif observation_space.name == "size": + # compile LLVM to object file + run_command( + [ + self._llc, + "-filetype=obj", + self._llvm_path, + "-o", + self._obj_path, + ], + timeout=30, + ) + + # build object file to binary + run_command( + [ + "clang", + self._obj_path, + "-Oz", + "-o", + self._exe_path, + ], + timeout=30, + ) + binary_size = os.path.getsize(self._exe_path) + return Observation(scalar_double=binary_size) + else: + raise KeyError(observation_space.name) + + +if __name__ == "__main__": + create_and_run_compiler_gym_service(UnrollingCompilationSession) diff --git a/examples/loop_optimizations_service/service_py/utils.py b/examples/loop_optimizations_service/service_py/utils.py new file mode 100644 index 000000000..c3ab2ace3 --- /dev/null +++ b/examples/loop_optimizations_service/service_py/utils.py @@ -0,0 +1,61 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
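+"""Helpers for deriving simple statistical features from textual LLVM IR."""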
+ + +def extract_statistics_from_ir(ir: str): + stats = {"control_flow": 0, "arithmetic": 0, "memory": 0} + for line in ir.splitlines(): + tokens = line.split() + if len(tokens) > 0: + opcode = tokens[0] + if opcode in [ + "br", + "call", + "ret", + "switch", + "indirectbr", + "invoke", + "callbr", + "resume", + "catchswitch", + "catchret", + "cleanupret", + "unreachable", + ]: + stats["control_flow"] += 1 + elif opcode in [ + "fneg", + "add", + "fadd", + "sub", + "fsub", + "mul", + "fmul", + "udiv", + "sdiv", + "fdiv", + "urem", + "srem", + "frem", + "shl", + "lshr", + "ashr", + "and", + "or", + "xor", + ]: + stats["arithmetic"] += 1 + elif opcode in [ + "alloca", + "load", + "store", + "fence", + "cmpxchg", + "atomicrmw", + "getelementptr", + ]: + stats["memory"] += 1 + + return stats From 557c67ca194170b70d31d099d802ef2d8bd3e7c4 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Wed, 24 Nov 2021 00:06:48 -0500 Subject: [PATCH 105/142] add loop vectorization factor to action space --- examples/loop_optimizations_service/README.md | 4 +-- .../service_py/example_service.py | 26 ++++++++++++++++--- 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/examples/loop_optimizations_service/README.md b/examples/loop_optimizations_service/README.md index 628dae3ef..e5d8650dc 100644 --- a/examples/loop_optimizations_service/README.md +++ b/examples/loop_optimizations_service/README.md @@ -20,11 +20,11 @@ This is an example of how to create your own CompilerGym environment. All paths Run `example.py` example: ```sh -$ bazel run //examples/example_unrolling_service:example +$ bazel run //examples/loop_optimizations_service:example ``` Run `env_tests.py` unit tests: ```sh -$ bazel test //examples/example_unrolling_service:env_tests +$ bazel test //examples/loop_optimizations_service:env_tests ``` diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index e115c5de5..56d67b5be 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -41,18 +41,36 @@ class UnrollingCompilationSession(CompilationSession): # The list of actions that are supported by this service. 
action_spaces = [ ActionSpace( - name="unrolling", + name="loop-opt", choice=[ ChoiceSpace( - name="unroll_choice", + name="interleave", named_discrete_space=NamedDiscreteSpace( value=[ + "-disable-loop-unrolling", "-loop-unroll -unroll-count=2", "-loop-unroll -unroll-count=4", "-loop-unroll -unroll-count=8", + "-loop-unroll -unroll-count=16", + "-loop-unroll -unroll-count=32", + "-loop-unroll -unroll-count=64", ], ), - ) + ), + ChoiceSpace( + name="vectorize", + named_discrete_space=NamedDiscreteSpace( + value=[ + "-disable-loop-vectorize", + "-loop-vectorize -force-vector-width=2", + "-loop-vectorize -force-vector-width=4", + "-loop-vectorize -force-vector-width=8", + "-loop-vectorize -force-vector-width=16", + "-loop-vectorize -force-vector-width=32", + "-loop-vectorize -force-vector-width=64", + ], + ), + ), ], ) ] @@ -102,7 +120,7 @@ def __init__( working_directory: Path, action_space: ActionSpace, benchmark: Benchmark, - use_custom_opt: bool = True, + use_custom_opt: bool = False, ): super().__init__(working_directory, action_space, benchmark) logging.info("Started a compilation session for %s", benchmark.uri) From 915d4c0fc8f0857f06187fe8645a902c506326df Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Wed, 1 Dec 2021 08:50:00 -0500 Subject: [PATCH 106/142] add vectorize option --- .../loop_optimizations_service/example.py | 2 +- .../service_py/example_service.py | 29 +++++-------------- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 546111fb5..e54d8711b 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -14,7 +14,7 @@ print() -observation, reward, done, info = env.step(env.action_space.sample()) +observation, reward, done, info = env.step(0, 32) print("observation: ", observation) print("reward: ", reward) print("done: ", done) diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index 56d67b5be..6dfeeb7ea 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -22,7 +22,6 @@ ActionSpace, Benchmark, ChoiceSpace, - NamedDiscreteSpace, Observation, ObservationSpace, ScalarLimit, @@ -44,31 +43,17 @@ class UnrollingCompilationSession(CompilationSession): name="loop-opt", choice=[ ChoiceSpace( - name="interleave", - named_discrete_space=NamedDiscreteSpace( - value=[ - "-disable-loop-unrolling", - "-loop-unroll -unroll-count=2", - "-loop-unroll -unroll-count=4", - "-loop-unroll -unroll-count=8", - "-loop-unroll -unroll-count=16", - "-loop-unroll -unroll-count=32", - "-loop-unroll -unroll-count=64", - ], + name="unroll", + int64_range=ScalarRange( + min=ScalarLimit(value=0), + max=None, # no upper bound ), ), ChoiceSpace( name="vectorize", - named_discrete_space=NamedDiscreteSpace( - value=[ - "-disable-loop-vectorize", - "-loop-vectorize -force-vector-width=2", - "-loop-vectorize -force-vector-width=4", - "-loop-vectorize -force-vector-width=8", - "-loop-vectorize -force-vector-width=16", - "-loop-vectorize -force-vector-width=32", - "-loop-vectorize -force-vector-width=64", - ], + int64_range=ScalarRange( + min=ScalarLimit(value=0), + max=None, # no upper bound ), ), ], From 53c83e80752b46c1758d1e5ab1e942d9ce2edf81 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Thu, 9 Dec 2021 22:39:46 -0500 Subject: [PATCH 
107/142] close environment at end of script --- examples/example_unrolling_service/example.py | 2 ++ examples/loop_optimizations_service/example.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/examples/example_unrolling_service/example.py b/examples/example_unrolling_service/example.py index ba46b1796..ad1b55c60 100644 --- a/examples/example_unrolling_service/example.py +++ b/examples/example_unrolling_service/example.py @@ -37,5 +37,7 @@ print("done: ", done) print("info: ", info) +env.reset() + # TODO: implement write_bitcode(..) or write_ir(..) # env.write_bitcode("/tmp/output.bc") diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index e54d8711b..abd0444c2 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -28,5 +28,7 @@ print("done: ", done) print("info: ", info) +env.close() + # TODO: implement write_bitcode(..) or write_ir(..) # env.write_bitcode("/tmp/output.bc") From 0f186a50185f62c9ceea7e7f68916a57cebe3d78 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Fri, 10 Dec 2021 22:11:22 -0500 Subject: [PATCH 108/142] some update --- .../loop_optimizations_service/example.py | 2 +- .../service_py/example_service.py | 25 ++++++++++++++----- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index abd0444c2..326ef5952 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -14,7 +14,7 @@ print() -observation, reward, done, info = env.step(0, 32) +observation, reward, done, info = env.step({"unroll": 7, "vectorize": 32}) print("observation: ", observation) print("reward: ", reward) print("done: ", done) diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index 6dfeeb7ea..764ab3175 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -22,6 +22,7 @@ ActionSpace, Benchmark, ChoiceSpace, + NamedDiscreteSpace, Observation, ObservationSpace, ScalarLimit, @@ -44,16 +45,28 @@ class UnrollingCompilationSession(CompilationSession): choice=[ ChoiceSpace( name="unroll", - int64_range=ScalarRange( - min=ScalarLimit(value=0), - max=None, # no upper bound + named_discrete_space=NamedDiscreteSpace( + value=[ + "-loop-unroll=false", + "-loop-unroll -unroll-count=2", + "-loop-unroll -unroll-count=4", + "-loop-unroll -unroll-count=8", + "-loop-unroll -unroll-count=16", + "-loop-unroll -unroll-count=32", + ] ), ), ChoiceSpace( name="vectorize", - int64_range=ScalarRange( - min=ScalarLimit(value=0), - max=None, # no upper bound + named_discrete_space=NamedDiscreteSpace( + value=[ + "-loop-vectorize=false", + "-loop-vectorize -force-vector-width=2", + "-loop-vectorize -force-vector-width=4", + "-loop-vectorize -force-vector-width=8", + "-loop-vectorize -force-vector-width=16", + "-loop-vectorize -force-vector-width=32", + ] ), ), ], From 145d3606bf48582fc23ef5eb0e388f0d00125ef3 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 11 Dec 2021 00:00:22 -0500 Subject: [PATCH 109/142] trying to pass multiple actions --- examples/loop_optimizations_service/example.py | 5 ++++- .../loop_unroller/README.md | 4 ++-- .../service_py/example_service.py | 15 ++++++++------- 3 files changed, 14 insertions(+), 10 deletions(-) 
diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 326ef5952..021204056 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -14,7 +14,10 @@ print() -observation, reward, done, info = env.step({"unroll": 7, "vectorize": 32}) +# TODO: these methods are not working: +# - env.step(env.action_space.sample()) +# - env.step({"unroll": 0, "vectorize": 2}) +observation, reward, done, info = env.step({0: 4, 1: 3}) print("observation: ", observation) print("reward: ", reward) print("done: ", done) diff --git a/examples/loop_optimizations_service/loop_unroller/README.md b/examples/loop_optimizations_service/loop_unroller/README.md index 5684e6580..1b593d730 100644 --- a/examples/loop_optimizations_service/loop_unroller/README.md +++ b/examples/loop_optimizations_service/loop_unroller/README.md @@ -1,6 +1,6 @@ -LLVM's opt does not always enforce the unrolling options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/example_unrolling_service/loop_unroller that enforces the unrolling factors passed in its cli. +LLVM's opt does not always enforce the unrolling options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/loop_optimizations_service/loop_unroller that enforces the unrolling factors passed in its cli. To run the custom unroller: ``` -bazel run //examples/example_unrolling_service/loop_unroller:loop_unroller -- .ll --funroll-count= -S -o .ll +bazel run //examples/loop_optimizations_service/loop_unroller:loop_unroller -- .ll --funroll-count= -S -o .ll ``` diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index 764ab3175..93547458a 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -47,12 +47,12 @@ class UnrollingCompilationSession(CompilationSession): name="unroll", named_discrete_space=NamedDiscreteSpace( value=[ - "-loop-unroll=false", - "-loop-unroll -unroll-count=2", - "-loop-unroll -unroll-count=4", - "-loop-unroll -unroll-count=8", - "-loop-unroll -unroll-count=16", - "-loop-unroll -unroll-count=32", + "--disable-loop-unrolling", + "--loop-unroll --unroll-count=2", + "--loop-unroll --unroll-count=4", + "--loop-unroll --unroll-count=8", + "--loop-unroll --unroll-count=16", + "--loop-unroll --unroll-count=32", ] ), ), @@ -70,6 +70,7 @@ class UnrollingCompilationSession(CompilationSession): ), ), ], + named_choices=True, ) ] @@ -162,7 +163,7 @@ def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], boo num_choices = len(self._action_space.choice[0].named_discrete_space.value) if len(action.choice) != 1: - raise ValueError("Invalid choice count") + raise ValueError("Currently we support one choice at a time") # This is the index into the action space's values ("a", "b", "c") that # the user selected, e.g. 0 -> "a", 1 -> "b", 2 -> "c". 
From f95dd024b61d67c1212a16acbadaecd26ab3aeb2 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 11 Dec 2021 00:09:49 -0500 Subject: [PATCH 110/142] loop unroll and loop vectorize in one actionspace --- .../loop_optimizations_service/example.py | 15 +++++++------ .../service_py/example_service.py | 22 +++++-------------- 2 files changed, 14 insertions(+), 23 deletions(-) diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 021204056..79848ecb3 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -17,13 +17,14 @@ # TODO: these methods are not working: # - env.step(env.action_space.sample()) # - env.step({"unroll": 0, "vectorize": 2}) -observation, reward, done, info = env.step({0: 4, 1: 3}) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) - -print() +for i in range(env.action_space.n): + observation, reward, done, info = env.step(i) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) + + print() observation, reward, done, info = env.step(env.action_space.sample()) print("observation: ", observation) diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index 93547458a..da8507b9a 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -44,33 +44,23 @@ class UnrollingCompilationSession(CompilationSession): name="loop-opt", choice=[ ChoiceSpace( - name="unroll", + name="loop-opt", named_discrete_space=NamedDiscreteSpace( value=[ - "--disable-loop-unrolling", "--loop-unroll --unroll-count=2", "--loop-unroll --unroll-count=4", "--loop-unroll --unroll-count=8", "--loop-unroll --unroll-count=16", "--loop-unroll --unroll-count=32", - ] - ), - ), - ChoiceSpace( - name="vectorize", - named_discrete_space=NamedDiscreteSpace( - value=[ - "-loop-vectorize=false", - "-loop-vectorize -force-vector-width=2", - "-loop-vectorize -force-vector-width=4", - "-loop-vectorize -force-vector-width=8", - "-loop-vectorize -force-vector-width=16", - "-loop-vectorize -force-vector-width=32", + "--loop-vectorize -force-vector-width=2", + "--loop-vectorize -force-vector-width=4", + "--loop-vectorize -force-vector-width=8", + "--loop-vectorize -force-vector-width=16", + "--loop-vectorize -force-vector-width=32", ] ), ), ], - named_choices=True, ) ] From d67a0f5eaa7afd27ad7a35fe45121d3b26b0e126 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 11 Dec 2021 12:20:51 -0500 Subject: [PATCH 111/142] add vectorization to custom optimizer --- .../loop_unroller/loop_unroller.cc | 16 +++++++++++++++- .../service_py/example_service.py | 6 +++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc index 29c87559a..d2db1f305 100644 --- a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc +++ b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc @@ -11,6 +11,7 @@ #include "llvm/ADT/SetVector.h" #include "llvm/ADT/SmallPtrSet.h" #include "llvm/ADT/SmallVector.h" +#include "llvm/Analysis/LoopAccessAnalysis.h" #include "llvm/Analysis/LoopInfo.h" #include "llvm/Bitcode/BitcodeWriterPass.h" #include 
"llvm/IR/BasicBlock.h" @@ -32,6 +33,7 @@ #include "llvm/Support/ToolOutputFile.h" #include "llvm/Transforms/Scalar.h" #include "llvm/Transforms/Utils/LoopUtils.h" +#include "llvm/Transforms/Vectorize.h" using namespace llvm; @@ -43,13 +45,20 @@ cl::opt InputFilename(cl::Positional, cl::desc("Specify input filen cl::opt OutputFilename("o", cl::desc("Specify output filename"), cl::value_desc("filename"), cl::init("-")); +/// Loop Optimizations static cl::opt UnrollEnable("floop-unroll", cl::desc("Enable loop unrolling"), - cl::init(true)); + cl::init(false)); static cl::opt UnrollCount( "funroll-count", cl::desc("Use this unroll count for all loops including those with " "unroll_count pragma values, for testing purposes")); +static cl::opt VectorizeEnable("floop-vectorize", cl::desc("Enable loop vectorize"), + cl::init("false")); + +static cl::opt VectorizationFactor("fforce-vector-width", + cl::desc("Sets the SIMD width. Zero is autoselect.")); + // Force binary on terminals static cl::opt Force("f", cl::desc("Enable binary output on terminals")); @@ -119,6 +128,10 @@ class LoopUnrollConfigurator : public llvm::FunctionPass { addStringMetadataToLoop(ALoop, "llvm.loop.unroll.enable", UnrollEnable); if (UnrollCount) addStringMetadataToLoop(ALoop, "llvm.loop.unroll.count", UnrollCount); + if (VectorizeEnable) + addStringMetadataToLoop(ALoop, "llvm.loop.vectorize.enable", VectorizeEnable); + if (VectorizationFactor) + addStringMetadataToLoop(ALoop, "llvm.loop.vectorize.factor", VectorizationFactor); } return false; @@ -182,6 +195,7 @@ int main(int argc, char** argv) { PM.add(Counter); PM.add(UnrollConfigurator); PM.add(createLoopUnrollPass()); + PM.add(createLoopVectorizePass()); // Passes to output the module if (OutputAssembly) { PM.add(createPrintModulePass(Out.os(), "", PreserveAssemblyUseListOrder)); diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index da8507b9a..27d61c1ef 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -109,7 +109,7 @@ def __init__( working_directory: Path, action_space: ActionSpace, benchmark: Benchmark, - use_custom_opt: bool = False, + use_custom_opt: bool = True, ): super().__init__(working_directory, action_space, benchmark) logging.info("Started a compilation session for %s", benchmark.uri) @@ -176,8 +176,8 @@ def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], boo if self._use_custom_opt: # our custom unroller has an additional `f` at the beginning of each argument for i, arg in enumerate(args): - # convert - to -f - arg = arg[0] + "f" + arg[1:] + # convert -- to --f + arg = arg[0:2] + "f" + arg[2:] args[i] = arg run_command( [ From c9e0d751ecf8e9034a193ca40771e1574138086c Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 21:00:22 -0500 Subject: [PATCH 112/142] make our custom opt vectorize loops by copying from llvm's opt --- .../benchmarks/offsets1.c | 5 -- .../loop_unroller/loop_unroller.cc | 62 +++++++++++++++++-- 2 files changed, 57 insertions(+), 10 deletions(-) diff --git a/examples/loop_optimizations_service/benchmarks/offsets1.c b/examples/loop_optimizations_service/benchmarks/offsets1.c index b5206cabf..7a382d20b 100644 --- a/examples/loop_optimizations_service/benchmarks/offsets1.c +++ b/examples/loop_optimizations_service/benchmarks/offsets1.c @@ -4,14 +4,9 @@ #define N 1000000 #endif -#ifndef n -#define n 
3 -#endif - int A[N]; __attribute__((noinline)) void example1(int* ret) { - //#pragma unroll(n) for (int i = 0; i < N - 3; i++) A[i] = A[i + 1] + A[i + 2] + A[i + 3]; *ret = A[N - 1]; diff --git a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc index d2db1f305..6c96f2588 100644 --- a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc +++ b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc @@ -31,7 +31,9 @@ #include "llvm/Support/SourceMgr.h" #include "llvm/Support/SystemUtils.h" #include "llvm/Support/ToolOutputFile.h" +#include "llvm/Transforms/IPO/PassManagerBuilder.h" #include "llvm/Transforms/Scalar.h" +#include "llvm/Transforms/Utils/Debugify.h" #include "llvm/Transforms/Utils/LoopUtils.h" #include "llvm/Transforms/Vectorize.h" @@ -74,9 +76,55 @@ static cl::opt PreserveAssemblyUseListOrder( "preserve-ll-uselistorder", cl::desc("Preserve use-list order when writing LLVM assembly."), cl::init(false), cl::Hidden); +// added from opt.cpp +static cl::opt DebugifyEach( + "debugify-each", cl::desc("Start each pass with debugify and end it with check-debugify")); + // The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. void initializeLoopCounterPass(PassRegistry& Registry); +class OptCustomPassManager : public legacy::PassManager { + DebugifyStatsMap DIStatsMap; + + public: + using super = legacy::PassManager; + + void add(Pass* P) override { + // Wrap each pass with (-check)-debugify passes if requested, making + // exceptions for passes which shouldn't see -debugify instrumentation. + bool WrapWithDebugify = + DebugifyEach && !P->getAsImmutablePass() && !isIRPrintingPass(P) && !isBitcodeWriterPass(P); + if (!WrapWithDebugify) { + super::add(P); + return; + } + + // Apply -debugify/-check-debugify before/after each pass and collect + // debug info loss statistics. + PassKind Kind = P->getPassKind(); + StringRef Name = P->getPassName(); + + // TODO: Implement Debugify for LoopPass. 
+ switch (Kind) { + case PT_Function: + super::add(createDebugifyFunctionPass()); + super::add(P); + super::add(createCheckDebugifyFunctionPass(true, Name, &DIStatsMap)); + break; + case PT_Module: + super::add(createDebugifyModulePass()); + super::add(P); + super::add(createCheckDebugifyModulePass(true, Name, &DIStatsMap)); + break; + default: + super::add(P); + break; + } + } + + const DebugifyStatsMap& getDebugifyStatsMap() const { return DIStatsMap; } +}; + class LoopCounter : public llvm::FunctionPass { public: static char ID; @@ -131,7 +179,7 @@ class LoopUnrollConfigurator : public llvm::FunctionPass { if (VectorizeEnable) addStringMetadataToLoop(ALoop, "llvm.loop.vectorize.enable", VectorizeEnable); if (VectorizationFactor) - addStringMetadataToLoop(ALoop, "llvm.loop.vectorize.factor", VectorizationFactor); + addStringMetadataToLoop(ALoop, "llvm.loop.vectorize.width", VectorizationFactor); } return false; @@ -187,16 +235,20 @@ int main(int argc, char** argv) { return 1; } - // Run the passes initializeLoopCounterPass(*PassRegistry::getPassRegistry()); - legacy::PassManager PM; + OptCustomPassManager PM; LoopCounter* Counter = new LoopCounter(); LoopUnrollConfigurator* UnrollConfigurator = new LoopUnrollConfigurator(); PM.add(Counter); PM.add(UnrollConfigurator); PM.add(createLoopUnrollPass()); - PM.add(createLoopVectorizePass()); - // Passes to output the module + PM.add(createLICMPass()); + PM.add(createLoopVectorizePass(false, false)); + PassManagerBuilder Builder; + Builder.LoopVectorize = VectorizeEnable; + Builder.populateModulePassManager(PM); + + // PM to output the module if (OutputAssembly) { PM.add(createPrintModulePass(Out.os(), "", PreserveAssemblyUseListOrder)); } else if (Force || !CheckBitcodeOutputToConsole(Out.os())) { From f35346ec45a2021607c7171eda3f414b3752586d Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 21:07:58 -0500 Subject: [PATCH 113/142] elementwise add example that is guaranteed to be vectorizable --- .../loop_optimizations_service/__init__.py | 4 ++++ .../benchmarks/add.c | 22 +++++++++++++++++++ .../loop_optimizations_service/example.py | 19 ++++++++-------- 3 files changed, 35 insertions(+), 10 deletions(-) create mode 100644 examples/loop_optimizations_service/benchmarks/add.c diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py index c89938af3..1c4ba35f0 100644 --- a/examples/loop_optimizations_service/__init__.py +++ b/examples/loop_optimizations_service/__init__.py @@ -89,6 +89,10 @@ def __init__(self, *args, **kwargs): ) self._benchmarks = { + "benchmark://unrolling-v0/add": Benchmark.from_file_contents( + "benchmark://unrolling-v0/add", + self.preprocess(BENCHMARKS_PATH / "add.c"), + ), "benchmark://unrolling-v0/offsets1": Benchmark.from_file_contents( "benchmark://unrolling-v0/offsets1", self.preprocess(BENCHMARKS_PATH / "offsets1.c"), diff --git a/examples/loop_optimizations_service/benchmarks/add.c b/examples/loop_optimizations_service/benchmarks/add.c new file mode 100644 index 000000000..324713625 --- /dev/null +++ b/examples/loop_optimizations_service/benchmarks/add.c @@ -0,0 +1,22 @@ +#include "header.h" + +#ifndef N +#define N 1000000 +#endif + +int A[N]; +int B[N]; + +__attribute__((noinline)) void add(int* ret) { + for (int i = 0; i < N; i++) A[i] = A[i] + B[i]; + + *ret = A[N - 1]; +} + +__attribute__((optnone)) int main(int argc, char* argv[]) { + int dummy = 0; + // TODO: initialize tensors + BENCH("add", add(&dummy), 100, dummy); + + return 0; +} 
diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 79848ecb3..4ecc1dc38 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -3,8 +3,8 @@ env = compiler_gym.make( "unrolling-py-v0", - benchmark="unrolling-v0/offsets1", - observation_space="features", + benchmark="unrolling-v0/add", + observation_space="ir", reward_space="runtime", ) compiler_gym.set_debug_level(4) # TODO: check why this has no effect @@ -17,14 +17,13 @@ # TODO: these methods are not working: # - env.step(env.action_space.sample()) # - env.step({"unroll": 0, "vectorize": 2}) -for i in range(env.action_space.n): - observation, reward, done, info = env.step(i) - print("observation: ", observation) - print("reward: ", reward) - print("done: ", done) - print("info: ", info) - - print() +observation, reward, done, info = env.step(7) +print("observation: ", observation) +print("reward: ", reward) +print("done: ", done) +print("info: ", info) + +print() observation, reward, done, info = env.step(env.action_space.sample()) print("observation: ", observation) From 891ff1eedf5d79c51ef06179efc6a0967f696b0e Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 21:14:52 -0500 Subject: [PATCH 114/142] rename classes --- .../loop_unroller/loop_unroller.cc | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc index 6c96f2588..c1b544723 100644 --- a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc +++ b/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc @@ -154,13 +154,13 @@ INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) // The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. -void initializeLoopUnrollConfiguratorPass(PassRegistry& Registry); +void initializeLoopConfiguratorPassPass(PassRegistry& Registry); -class LoopUnrollConfigurator : public llvm::FunctionPass { +class LoopConfiguratorPass : public llvm::FunctionPass { public: static char ID; - LoopUnrollConfigurator() : FunctionPass(ID) {} + LoopConfiguratorPass() : FunctionPass(ID) {} virtual void getAnalysisUsage(AnalysisUsage& AU) const override { AU.addRequired(); @@ -187,11 +187,11 @@ class LoopUnrollConfigurator : public llvm::FunctionPass { }; // Initialise the pass. We have to declare the dependencies we use. -char LoopUnrollConfigurator::ID = 1; -INITIALIZE_PASS_BEGIN(LoopUnrollConfigurator, "unroll-loops-configurator", +char LoopConfiguratorPass::ID = 1; +INITIALIZE_PASS_BEGIN(LoopConfiguratorPass, "unroll-loops-configurator", "Configurates loop unrolling", false, false) INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) -INITIALIZE_PASS_END(LoopUnrollConfigurator, "unroll-loops-configurator", +INITIALIZE_PASS_END(LoopConfiguratorPass, "unroll-loops-configurator", "Configurates loop unrolling", false, false) /// Reads a module from a file. 
@@ -238,9 +238,9 @@ int main(int argc, char** argv) { initializeLoopCounterPass(*PassRegistry::getPassRegistry()); OptCustomPassManager PM; LoopCounter* Counter = new LoopCounter(); - LoopUnrollConfigurator* UnrollConfigurator = new LoopUnrollConfigurator(); + LoopConfiguratorPass* LoopConfigurator = new LoopConfiguratorPass(); PM.add(Counter); - PM.add(UnrollConfigurator); + PM.add(LoopConfigurator); PM.add(createLoopUnrollPass()); PM.add(createLICMPass()); PM.add(createLoopVectorizePass(false, false)); From fe9d00621fdd19a74a42b79bd7081f2df7c0e04e Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 22:30:25 -0500 Subject: [PATCH 115/142] rename loop_unroller to opt_loops --- .../{loop_unroller => opt_loops}/BUILD | 4 ++-- .../{loop_unroller => opt_loops}/README.md | 0 .../loop_unroller.cc => opt_loops/opt_loops.cc} | 0 examples/loop_optimizations_service/service_py/BUILD | 2 +- .../loop_optimizations_service/service_py/example_service.py | 2 +- 5 files changed, 4 insertions(+), 4 deletions(-) rename examples/loop_optimizations_service/{loop_unroller => opt_loops}/BUILD (90%) rename examples/loop_optimizations_service/{loop_unroller => opt_loops}/README.md (100%) rename examples/loop_optimizations_service/{loop_unroller/loop_unroller.cc => opt_loops/opt_loops.cc} (100%) diff --git a/examples/loop_optimizations_service/loop_unroller/BUILD b/examples/loop_optimizations_service/opt_loops/BUILD similarity index 90% rename from examples/loop_optimizations_service/loop_unroller/BUILD rename to examples/loop_optimizations_service/opt_loops/BUILD index 3bec18c35..b320e469b 100644 --- a/examples/loop_optimizations_service/loop_unroller/BUILD +++ b/examples/loop_optimizations_service/opt_loops/BUILD @@ -7,9 +7,9 @@ load("@rules_cc//cc:defs.bzl", "cc_binary") cc_binary( - name = "loop_unroller", + name = "opt_loops", srcs = [ - "loop_unroller.cc", + "opt_loops.cc", ], copts = [ "-Wall", diff --git a/examples/loop_optimizations_service/loop_unroller/README.md b/examples/loop_optimizations_service/opt_loops/README.md similarity index 100% rename from examples/loop_optimizations_service/loop_unroller/README.md rename to examples/loop_optimizations_service/opt_loops/README.md diff --git a/examples/loop_optimizations_service/loop_unroller/loop_unroller.cc b/examples/loop_optimizations_service/opt_loops/opt_loops.cc similarity index 100% rename from examples/loop_optimizations_service/loop_unroller/loop_unroller.cc rename to examples/loop_optimizations_service/opt_loops/opt_loops.cc diff --git a/examples/loop_optimizations_service/service_py/BUILD b/examples/loop_optimizations_service/service_py/BUILD index f9095f0f0..30f3d4732 100644 --- a/examples/loop_optimizations_service/service_py/BUILD +++ b/examples/loop_optimizations_service/service_py/BUILD @@ -8,7 +8,7 @@ py_binary( name = "example-unrolling-service-py", srcs = ["example_service.py"], data = [ - "//examples/loop_optimizations_service/loop_unroller", + "//examples/loop_optimizations_service/opt_loops", ], main = "example_service.py", visibility = ["//visibility:public"], diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/example_service.py index 27d61c1ef..00524a499 100755 --- a/examples/loop_optimizations_service/service_py/example_service.py +++ b/examples/loop_optimizations_service/service_py/example_service.py @@ -181,7 +181,7 @@ def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], boo args[i] = arg run_command( [ - 
"../loop_unroller/loop_unroller", + "../opt_loops/opt_loops", self._llvm_path, *args, "-S", From 2e9937b81120fa692ed5b7ef50fc5d9262fe48cb Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 22:40:15 -0500 Subject: [PATCH 116/142] rename example-unrolling to loops_opt --- examples/loop_optimizations_service/BUILD | 2 +- examples/loop_optimizations_service/__init__.py | 2 +- examples/loop_optimizations_service/service_py/BUILD | 6 +++--- .../service_py/{example_service.py => loops_opt_service.py} | 0 4 files changed, 5 insertions(+), 5 deletions(-) rename examples/loop_optimizations_service/service_py/{example_service.py => loops_opt_service.py} (100%) diff --git a/examples/loop_optimizations_service/BUILD b/examples/loop_optimizations_service/BUILD index 010c0cd3c..198994212 100644 --- a/examples/loop_optimizations_service/BUILD +++ b/examples/loop_optimizations_service/BUILD @@ -9,7 +9,7 @@ py_library( srcs = ["__init__.py"], data = [ "//examples/loop_optimizations_service/benchmarks", - "//examples/loop_optimizations_service/service_py:example-unrolling-service-py", + "//examples/loop_optimizations_service/service_py:loops-opt-service-py", ], visibility = ["//visibility:public"], deps = [ diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py index 1c4ba35f0..aad490f71 100644 --- a/examples/loop_optimizations_service/__init__.py +++ b/examples/loop_optimizations_service/__init__.py @@ -15,7 +15,7 @@ from compiler_gym.util.runfiles_path import runfiles_path, site_data_path UNROLLING_PY_SERVICE_BINARY: Path = runfiles_path( - "examples/loop_optimizations_service/service_py/example-unrolling-service-py" + "examples/loop_optimizations_service/service_py/loops-opt-service-py" ) BENCHMARKS_PATH: Path = runfiles_path("examples/loop_optimizations_service/benchmarks") diff --git a/examples/loop_optimizations_service/service_py/BUILD b/examples/loop_optimizations_service/service_py/BUILD index 30f3d4732..44f3a5b2c 100644 --- a/examples/loop_optimizations_service/service_py/BUILD +++ b/examples/loop_optimizations_service/service_py/BUILD @@ -5,12 +5,12 @@ load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library") py_binary( - name = "example-unrolling-service-py", - srcs = ["example_service.py"], + name = "loops-opt-service-py", + srcs = ["loops_opt_service.py"], data = [ "//examples/loop_optimizations_service/opt_loops", ], - main = "example_service.py", + main = "loops_opt_service.py", visibility = ["//visibility:public"], deps = [ "//compiler_gym/service", diff --git a/examples/loop_optimizations_service/service_py/example_service.py b/examples/loop_optimizations_service/service_py/loops_opt_service.py similarity index 100% rename from examples/loop_optimizations_service/service_py/example_service.py rename to examples/loop_optimizations_service/service_py/loops_opt_service.py From 3c7ea63fca1b23b03fb410f72088dc40f940b13d Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 13 Dec 2021 22:44:28 -0500 Subject: [PATCH 117/142] rename remaining unrolling variables to loops-opt --- .../loop_optimizations_service/__init__.py | 28 +++++++++---------- .../loop_optimizations_service/env_tests.py | 4 +-- .../loop_optimizations_service/example.py | 6 ++-- .../service_py/loops_opt_service.py | 11 ++++---- 4 files changed, 25 insertions(+), 24 deletions(-) diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py index aad490f71..6da47e447 100644 --- 
a/examples/loop_optimizations_service/__init__.py +++ b/examples/loop_optimizations_service/__init__.py @@ -14,7 +14,7 @@ from compiler_gym.util.registration import register from compiler_gym.util.runfiles_path import runfiles_path, site_data_path -UNROLLING_PY_SERVICE_BINARY: Path = runfiles_path( +LOOPS_OPT_PY_SERVICE_BINARY: Path = runfiles_path( "examples/loop_optimizations_service/service_py/loops-opt-service-py" ) @@ -77,28 +77,28 @@ def update(self, action, observations, observation_view): return float(self.baseline_size - observations[0]) / self.baseline_size -class UnrollingDataset(Dataset): +class LoopsDataset(Dataset): def __init__(self, *args, **kwargs): super().__init__( - name="benchmark://unrolling-v0", + name="benchmark://loops-opt-v0", license="MIT", - description="Unrolling example dataset", + description="Loops optimization dataset", site_data_base=site_data_path( "example_dataset" ), # TODO: what should we set this to? we are not using it ) self._benchmarks = { - "benchmark://unrolling-v0/add": Benchmark.from_file_contents( - "benchmark://unrolling-v0/add", + "benchmark://loops-opt-v0/add": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/add", self.preprocess(BENCHMARKS_PATH / "add.c"), ), - "benchmark://unrolling-v0/offsets1": Benchmark.from_file_contents( - "benchmark://unrolling-v0/offsets1", + "benchmark://loops-opt-v0/offsets1": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/offsets1", self.preprocess(BENCHMARKS_PATH / "offsets1.c"), ), - "benchmark://unrolling-v0/conv2d": Benchmark.from_file_contents( - "benchmark://unrolling-v0/conv2d", + "benchmark://loops-opt-v0/conv2d": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/conv2d", self.preprocess(BENCHMARKS_PATH / "conv2d.c"), ), } @@ -136,14 +136,14 @@ def benchmark(self, uri: str) -> Benchmark: # Register the unrolling example service on module import. After importing this module, -# the unrolling-py-v0 environment will be available to gym.make(...). +# the loops-opt-py-v0 environment will be available to gym.make(...). 
register( - id="unrolling-py-v0", + id="loops-opt-py-v0", entry_point="compiler_gym.envs:CompilerEnv", kwargs={ - "service": UNROLLING_PY_SERVICE_BINARY, + "service": LOOPS_OPT_PY_SERVICE_BINARY, "rewards": [RuntimeReward(), SizeReward()], - "datasets": [UnrollingDataset()], + "datasets": [LoopsDataset()], }, ) diff --git a/examples/loop_optimizations_service/env_tests.py b/examples/loop_optimizations_service/env_tests.py index 766f8a0e5..3adb275a1 100644 --- a/examples/loop_optimizations_service/env_tests.py +++ b/examples/loop_optimizations_service/env_tests.py @@ -11,7 +11,7 @@ import pytest import compiler_gym -import examples.loop_optimizations_service as unrolling_service +import examples.loop_optimizations_service as loop_optimizations_service from compiler_gym.envs import CompilerEnv from compiler_gym.service import SessionNotFound from compiler_gym.spaces import Box, NamedDiscrete, Scalar, Sequence @@ -27,7 +27,7 @@ def env() -> CompilerEnv: @pytest.fixture(scope="module") def bin() -> Path: - return unrolling_service.UNROLLING_PY_SERVICE_BINARY + return loop_optimizations_service.LOOPS_OPT_PY_SERVICE_BINARY def test_invalid_arguments(bin: Path): diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 4ecc1dc38..2adde486d 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -1,9 +1,9 @@ import compiler_gym -import examples.loop_optimizations_service as unrolling_service # noqa Register environments. +import examples.loop_optimizations_service as loop_optimizations_service # noqa Register environments. env = compiler_gym.make( - "unrolling-py-v0", - benchmark="unrolling-v0/add", + "loops-opt-py-v0", + benchmark="loops-opt-v0/add", observation_space="ir", reward_space="runtime", ) diff --git a/examples/loop_optimizations_service/service_py/loops_opt_service.py b/examples/loop_optimizations_service/service_py/loops_opt_service.py index 00524a499..37eaee0f1 100755 --- a/examples/loop_optimizations_service/service_py/loops_opt_service.py +++ b/examples/loop_optimizations_service/service_py/loops_opt_service.py @@ -33,7 +33,7 @@ from compiler_gym.util.commands import run_command -class UnrollingCompilationSession(CompilationSession): +class LoopsOptCompilationSession(CompilationSession): """Represents an instance of an interactive compilation session.""" compiler_version: str = "1.0.0" @@ -121,7 +121,8 @@ def __init__( self._llc = str(llvm.llc_path()) self._llvm_diff = str(llvm.llvm_diff_path()) self._opt = str(llvm.opt_path()) - # LLVM's opt does not always enforce the unrolling options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/example_unrolling_service/loop_unroller that enforces the unrolling factors passed in its cli. + # LLVM's opt does not always enforce the loop optimization options passed as cli arguments. + # Hence, we created our own exeutable with custom unrolling and vectorization pass in examples/loops_opt_service/opt_loops that enforces the unrolling and vectorization factors passed in its cli. 
# if self._use_custom_opt is true, use our custom exeutable, otherwise use LLVM's opt self._use_custom_opt = use_custom_opt @@ -174,7 +175,7 @@ def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], boo # apply action if self._use_custom_opt: - # our custom unroller has an additional `f` at the beginning of each argument + # our custom opt-loops has an additional `f` at the beginning of each argument for i, arg in enumerate(args): # convert -- to --f arg = arg[0:2] + "f" + arg[2:] @@ -215,7 +216,7 @@ def apply_action(self, action: Action) -> Tuple[bool, Optional[ActionSpace], boo except subprocess.CalledProcessError: action_had_no_effect = False - end_of_session = False # TODO: this needs investigation: for how long can we apply loop unrolling? e.g., detect if there are no more loops in the IR? + end_of_session = False # TODO: this needs investigation: for how long can we apply loop optimizations? e.g., detect if there are no more loops in the IR? or look at the metadata? new_action_space = None return (end_of_session, new_action_space, action_had_no_effect) @@ -308,4 +309,4 @@ def get_observation(self, observation_space: ObservationSpace) -> Observation: if __name__ == "__main__": - create_and_run_compiler_gym_service(UnrollingCompilationSession) + create_and_run_compiler_gym_service(LoopsOptCompilationSession) From d9ad3b0e3dbf91354db5bfd95933e6627c1b3051 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Tue, 21 Dec 2021 09:36:07 -0500 Subject: [PATCH 118/142] fix the way passes are defined as in PR #523 --- .../opt_loops/opt_loops.cc | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/examples/loop_optimizations_service/opt_loops/opt_loops.cc b/examples/loop_optimizations_service/opt_loops/opt_loops.cc index c1b544723..a3dab9b93 100644 --- a/examples/loop_optimizations_service/opt_loops/opt_loops.cc +++ b/examples/loop_optimizations_service/opt_loops/opt_loops.cc @@ -39,7 +39,7 @@ using namespace llvm; -namespace llvm { +namespace { /// Input LLVM module file name. cl::opt InputFilename(cl::Positional, cl::desc("Specify input filename"), cl::value_desc("filename"), cl::init("-")); @@ -80,9 +80,6 @@ static cl::opt PreserveAssemblyUseListOrder( static cl::opt DebugifyEach( "debugify-each", cl::desc("Start each pass with debugify and end it with check-debugify")); -// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. -void initializeLoopCounterPass(PassRegistry& Registry); - class OptCustomPassManager : public legacy::PassManager { DebugifyStatsMap DIStatsMap; @@ -147,14 +144,7 @@ class LoopCounter : public llvm::FunctionPass { } }; -// Initialise the pass. We have to declare the dependencies we use. char LoopCounter::ID = 0; -INITIALIZE_PASS_BEGIN(LoopCounter, "count-loops", "Count loops", false, false) -INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) -INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) - -// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. -void initializeLoopConfiguratorPassPass(PassRegistry& Registry); class LoopConfiguratorPass : public llvm::FunctionPass { public: @@ -186,13 +176,7 @@ class LoopConfiguratorPass : public llvm::FunctionPass { } }; -// Initialise the pass. We have to declare the dependencies we use. 
char LoopConfiguratorPass::ID = 1; -INITIALIZE_PASS_BEGIN(LoopConfiguratorPass, "unroll-loops-configurator", - "Configurates loop unrolling", false, false) -INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) -INITIALIZE_PASS_END(LoopConfiguratorPass, "unroll-loops-configurator", - "Configurates loop unrolling", false, false) /// Reads a module from a file. /// On error, messages are written to stderr and null is returned. @@ -209,8 +193,25 @@ static std::unique_ptr readModule(LLVMContext& Context, StringRef Name) return Module; } +} // namespace + +namespace llvm { +// The INITIALIZE_PASS_XXX macros put the initialiser in the llvm namespace. +void initializeLoopCounterPass(PassRegistry& Registry); +void initializeLoopConfiguratorPassPass(PassRegistry& Registry); } // namespace llvm +// Initialise the pass. We have to declare the dependencies we use. +INITIALIZE_PASS_BEGIN(LoopCounter, "count-loops", "Count loops", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopCounter, "count-loops", "Count loops", false, false) + +INITIALIZE_PASS_BEGIN(LoopConfiguratorPass, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) +INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass) +INITIALIZE_PASS_END(LoopConfiguratorPass, "unroll-loops-configurator", + "Configurates loop unrolling", false, false) + int main(int argc, char** argv) { cl::ParseCommandLineOptions(argc, argv, " LLVM-Counter\n\n" From ab5bd080900cfeb70bf0f64ccbe71b809cd6236a Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Fri, 24 Dec 2021 17:46:11 -0500 Subject: [PATCH 119/142] add comments for future planning --- .../loop_optimizations_service/example.py | 34 ++++++++++++++++--- .../opt_loops/opt_loops.cc | 16 +++++++++ 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 2adde486d..9df2481ff 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -6,6 +6,9 @@ benchmark="loops-opt-v0/add", observation_space="ir", reward_space="runtime", + # loop="outer_to_inner",#"inner_to_outer",#"all_loops", + # function="", + # call_site="", ) compiler_gym.set_debug_level(4) # TODO: check why this has no effect @@ -14,14 +17,35 @@ print() +# loops_config = env.describe_loops() + +# env.set_loop(loops_config(i)) + +# get_loop(1.3) + +# observations: +# - for ProGraML: add an attribute to statement nodes for which loop index they belong to (e.g, loop 1.3) +# - for AutoPhase: ask for features of a loop, or a loop and its children + # TODO: these methods are not working: # - env.step(env.action_space.sample()) # - env.step({"unroll": 0, "vectorize": 2}) -observation, reward, done, info = env.step(7) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + +# for loop in loops_config.loops().flatten(): +# while !done: +# observation, reward, done, info = env.step() +# # you can read observation , rewards, etc. every env.step() OR every env.next_loop() OR change all loops then step +# print("observation: ", observation) +# print("reward: ", reward) +# print("done: ", done) +# print("info: ", info) +# +# env.next_loop() +# +# for loop in loops_config.loops() +# for loop_1 in loop: +# ... 
+# # or use recursion print() diff --git a/examples/loop_optimizations_service/opt_loops/opt_loops.cc b/examples/loop_optimizations_service/opt_loops/opt_loops.cc index a3dab9b93..3f96621fb 100644 --- a/examples/loop_optimizations_service/opt_loops/opt_loops.cc +++ b/examples/loop_optimizations_service/opt_loops/opt_loops.cc @@ -80,6 +80,22 @@ static cl::opt PreserveAssemblyUseListOrder( static cl::opt DebugifyEach( "debugify-each", cl::desc("Start each pass with debugify and end it with check-debugify")); +// TODO: +// output loops configuration file (json), also provide any current annotations (e.g., h) +// input json file with opts for each loop +// option to insert meta data without running pass +// --annotate-only +// --run-dependencies= true|false +// --force +// python wrapper: pybind. APIs to get loops/modules/etc. as objects, and then APIs to modify +// loops = module.get_loops() +// for loop in loops: +// loop.set_metadata() loop.run_unrolling(factor), loop.get_observation() +// in the future: module.get_functions(), module.get_call_sites() +// run-vectorize-and-dependencies as well as run-vectorize (only) +// callback mechanism: e.g., loop unroller asks for unrolling factor, and we intercept with a +// callback function + class OptCustomPassManager : public legacy::PassManager { DebugifyStatsMap DIStatsMap; From 21eb5310675f1143a46b32359980db36d4d78be6 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 27 Dec 2021 11:53:10 -0500 Subject: [PATCH 120/142] moved comments to a separate document --- .../loop_optimizations_service/example.py | 35 ------------------- .../opt_loops/opt_loops.cc | 16 --------- 2 files changed, 51 deletions(-) diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 9df2481ff..9b9200ec2 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -6,9 +6,6 @@ benchmark="loops-opt-v0/add", observation_space="ir", reward_space="runtime", - # loop="outer_to_inner",#"inner_to_outer",#"all_loops", - # function="", - # call_site="", ) compiler_gym.set_debug_level(4) # TODO: check why this has no effect @@ -17,38 +14,6 @@ print() -# loops_config = env.describe_loops() - -# env.set_loop(loops_config(i)) - -# get_loop(1.3) - -# observations: -# - for ProGraML: add an attribute to statement nodes for which loop index they belong to (e.g, loop 1.3) -# - for AutoPhase: ask for features of a loop, or a loop and its children - -# TODO: these methods are not working: -# - env.step(env.action_space.sample()) -# - env.step({"unroll": 0, "vectorize": 2}) - -# for loop in loops_config.loops().flatten(): -# while !done: -# observation, reward, done, info = env.step() -# # you can read observation , rewards, etc. every env.step() OR every env.next_loop() OR change all loops then step -# print("observation: ", observation) -# print("reward: ", reward) -# print("done: ", done) -# print("info: ", info) -# -# env.next_loop() -# -# for loop in loops_config.loops() -# for loop_1 in loop: -# ... 
-# # or use recursion - -print() - observation, reward, done, info = env.step(env.action_space.sample()) print("observation: ", observation) print("reward: ", reward) diff --git a/examples/loop_optimizations_service/opt_loops/opt_loops.cc b/examples/loop_optimizations_service/opt_loops/opt_loops.cc index 3f96621fb..a3dab9b93 100644 --- a/examples/loop_optimizations_service/opt_loops/opt_loops.cc +++ b/examples/loop_optimizations_service/opt_loops/opt_loops.cc @@ -80,22 +80,6 @@ static cl::opt PreserveAssemblyUseListOrder( static cl::opt DebugifyEach( "debugify-each", cl::desc("Start each pass with debugify and end it with check-debugify")); -// TODO: -// output loops configuration file (json), also provide any current annotations (e.g., h) -// input json file with opts for each loop -// option to insert meta data without running pass -// --annotate-only -// --run-dependencies= true|false -// --force -// python wrapper: pybind. APIs to get loops/modules/etc. as objects, and then APIs to modify -// loops = module.get_loops() -// for loop in loops: -// loop.set_metadata() loop.run_unrolling(factor), loop.get_observation() -// in the future: module.get_functions(), module.get_call_sites() -// run-vectorize-and-dependencies as well as run-vectorize (only) -// callback mechanism: e.g., loop unroller asks for unrolling factor, and we intercept with a -// callback function - class OptCustomPassManager : public legacy::PassManager { DebugifyStatsMap DIStatsMap; From abbf0ad1c8c1aca08e71e55693e58ab96901b339 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Mon, 27 Dec 2021 15:45:24 -0500 Subject: [PATCH 121/142] update test script --- .../loop_optimizations_service/env_tests.py | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/examples/loop_optimizations_service/env_tests.py b/examples/loop_optimizations_service/env_tests.py index 3adb275a1..d7afea80f 100644 --- a/examples/loop_optimizations_service/env_tests.py +++ b/examples/loop_optimizations_service/env_tests.py @@ -21,7 +21,7 @@ @pytest.fixture(scope="function") def env() -> CompilerEnv: """Text fixture that yields an environment.""" - with gym.make("unrolling-py-v0") as env_: + with gym.make("loops-opt-py-v0") as env_: yield env_ @@ -63,11 +63,18 @@ def test_action_space(env: CompilerEnv): """Test that the environment reports the service's action spaces.""" assert env.action_spaces == [ NamedDiscrete( - name="unrolling", + name="loop-opt", items=[ - "-loop-unroll -unroll-count=2", - "-loop-unroll -unroll-count=4", - "-loop-unroll -unroll-count=8", + "--loop-unroll --unroll-count=2", + "--loop-unroll --unroll-count=4", + "--loop-unroll --unroll-count=8", + "--loop-unroll --unroll-count=16", + "--loop-unroll --unroll-count=32", + "--loop-vectorize -force-vector-width=2", + "--loop-vectorize -force-vector-width=4", + "--loop-vectorize -force-vector-width=8", + "--loop-vectorize -force-vector-width=16", + "--loop-vectorize -force-vector-width=32", ], ) ] @@ -118,7 +125,7 @@ def test_reward_before_reset(env: CompilerEnv): def test_reset_invalid_benchmark(env: CompilerEnv): """Test requesting a specific benchmark.""" with pytest.raises(LookupError) as ctx: - env.reset(benchmark="unrolling-v0/foobar") + env.reset(benchmark="loops-opt-v0/foobar") assert str(ctx.value) == "Unknown program name" @@ -197,8 +204,9 @@ def test_rewards(env: CompilerEnv): def test_benchmarks(env: CompilerEnv): assert list(env.datasets.benchmark_uris()) == [ - "benchmark://unrolling-v0/offsets1", - "benchmark://unrolling-v0/conv2d", + 
"benchmark://loops-opt-v0/add", + "benchmark://loops-opt-v0/offsets1", + "benchmark://loops-opt-v0/conv2d", ] From 6f547a35d09378da971a099860ffd6d62d2d7e8b Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 1 Jan 2022 14:31:33 -0500 Subject: [PATCH 122/142] address PR review --- .../example_unrolling_service/__init__.py | 2 +- examples/example_unrolling_service/example.py | 40 +++++++++---------- .../example_without_bazel.py | 38 +++++++++--------- examples/loop_optimizations_service/README.md | 16 ++++---- .../loop_optimizations_service/__init__.py | 2 +- .../benchmarks/BUILD | 5 ++- .../benchmarks/add.c | 4 ++ .../benchmarks/conv2d.c | 4 ++ .../benchmarks/offsets1.c | 4 ++ .../loop_optimizations_service/example.py | 28 ++++++------- .../opt_loops/README.md | 4 +- .../service_py/loops_opt_service.py | 2 +- .../service_py/utils.py | 3 +- 13 files changed, 84 insertions(+), 68 deletions(-) diff --git a/examples/example_unrolling_service/__init__.py b/examples/example_unrolling_service/__init__.py index 19aa30de2..d760e02f9 100644 --- a/examples/example_unrolling_service/__init__.py +++ b/examples/example_unrolling_service/__init__.py @@ -2,7 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -"""This module demonstrates how to """ +"""This module defines and registers the example gym environments.""" import subprocess from pathlib import Path from typing import Iterable diff --git a/examples/example_unrolling_service/example.py b/examples/example_unrolling_service/example.py index ad1b55c60..5067d545b 100644 --- a/examples/example_unrolling_service/example.py +++ b/examples/example_unrolling_service/example.py @@ -10,34 +10,34 @@ import compiler_gym import examples.example_unrolling_service as unrolling_service # noqa Register environments. -env = compiler_gym.make( +with compiler_gym.make( "unrolling-py-v0", benchmark="unrolling-v0/offsets1", observation_space="features", reward_space="runtime", -) -compiler_gym.set_debug_level(4) # TODO: check why this has no effect +) as env: + compiler_gym.set_debug_level(4) # TODO: check why this has no effect -observation = env.reset() -print("observation: ", observation) + observation = env.reset() + print("observation: ", observation) -print() + print() -observation, reward, done, info = env.step(env.action_space.sample()) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) -print() + print() -observation, reward, done, info = env.step(env.action_space.sample()) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) -env.reset() + env.reset() -# TODO: implement write_bitcode(..) or write_ir(..) -# env.write_bitcode("/tmp/output.bc") + # TODO: implement write_bitcode(..) or write_ir(..) 
+ # env.write_bitcode("/tmp/output.bc") diff --git a/examples/example_unrolling_service/example_without_bazel.py b/examples/example_unrolling_service/example_without_bazel.py index a5bd71657..5004f1aad 100644 --- a/examples/example_unrolling_service/example_without_bazel.py +++ b/examples/example_unrolling_service/example_without_bazel.py @@ -154,32 +154,32 @@ def benchmark(self, uri: str) -> Benchmark: }, ) -env = compiler_gym.make( +with compiler_gym.make( "unrolling-py-v0", benchmark="unrolling-v0/offsets1", observation_space="features", reward_space="runtime", -) -compiler_gym.set_debug_level(4) # TODO: check why this has no effect +) as env: + compiler_gym.set_debug_level(4) # TODO: check why this has no effect -observation = env.reset() -print("observation: ", observation) + observation = env.reset() + print("observation: ", observation) -print() + print() -observation, reward, done, info = env.step(env.action_space.sample()) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) -print() + print() -observation, reward, done, info = env.step(env.action_space.sample()) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) -# TODO: implement write_bitcode(..) or write_ir(..) -# env.write_bitcode("/tmp/output.bc") + # TODO: implement write_bitcode(..) or write_ir(..) + # env.write_bitcode("/tmp/output.bc") diff --git a/examples/loop_optimizations_service/README.md b/examples/loop_optimizations_service/README.md index e5d8650dc..2f2ccf2e8 100644 --- a/examples/loop_optimizations_service/README.md +++ b/examples/loop_optimizations_service/README.md @@ -1,20 +1,20 @@ -# Unrolling CompilerGym Service Example +# Loop Optimizations CompilerGym Service -This is an example of how to create your own CompilerGym environment. All paths listed below are relative to the path of this README file. +A CompilerGym environment dedicated to loop optimizations. All paths listed below are relative to the path of this README file. -* Actions: this environment focuses on the unrolling optimization. The actions are the different unrolling factors. - - The actions are listed in `action_spaces` struct in `service_py/example_service.py` - - The actions are implemented in `apply_action(...)` function in `service_py/example_service.py` +* Actions: this environment currently focuses on unrolling and vectorization optimizations. The plan is to extend that to other loop optimizations. The actions are the different vectorization factors and unrolling factors. + - The actions are listed in `action_spaces` struct in `service_py/loops_opt_service.py` + - The actions are implemented in `apply_action(...)` function in `service_py/loops_opt_service.py` * Observations: the observations are: textual form of the LLVM IR, statistical features of different types of IR instructions, runtime execution, or code size - - The observations are listed in `observation_spaces` struct in `service_py/example_service.py`. 
- - The observations are implemented in `get_observation(...)` function in `service_py/example_service.py` + - The observations are listed in `observation_spaces` struct in `service_py/loops_opt_service.py`. + - The observations are implemented in `get_observation(...)` function in `service_py/loops_opt_service.py` * Rewards: the rewards could be runtime or code size. - The rewards are implemented in `__init__.py` and they reuse the runtime and code size observations mentioned above * Benchmarks: this environment expects your benchmarks to follow the templates from the [Neruovectorizer repo](https://github.com/intel/neuro-vectorizer/tree/master/training_data) repo, that was in turn adapted from the [LLVM loop test suite](https://github.com/llvm/llvm-test-suite/blob/main/SingleSource/UnitTests/Vectorizer/gcc-loops.cpp). - To implement your benchmark, you need to: include the `header.h` file, implement your benchmark in a custom function, then invoke it using `BENCH` macro inside the `main()` function. - Following this template is necessary in order for the benchmark to measure the execution runtime and write it to stdout, which is in turn parsed by this environment to measure the runtime reward. - You can view and add examples of benchmarks in `benchmarks` directory - - Also, when adding your own benchmark, you need to add it to the `UnrollingDataset` class in `__init__.py` + - Also, when adding your own benchmark, you need to add it to the `LoopsDataset` class in `__init__.py` ## Usage diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py index 6da47e447..20f310277 100644 --- a/examples/loop_optimizations_service/__init__.py +++ b/examples/loop_optimizations_service/__init__.py @@ -2,7 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -"""This module demonstrates how to """ +"""This module registers the Loop Optimizations CompilerGym environment """ import subprocess from pathlib import Path from typing import Iterable diff --git a/examples/loop_optimizations_service/benchmarks/BUILD b/examples/loop_optimizations_service/benchmarks/BUILD index f565b5020..a985a0fef 100644 --- a/examples/loop_optimizations_service/benchmarks/BUILD +++ b/examples/loop_optimizations_service/benchmarks/BUILD @@ -5,7 +5,10 @@ filegroup( name = "benchmarks", - srcs = glob(["*.c"]) + [ + srcs = [ + "add.c", + "conv2d.c", + "offsets1.c", "//compiler_gym/third_party/neuro-vectorizer:header", ], visibility = ["//visibility:public"], diff --git a/examples/loop_optimizations_service/benchmarks/add.c b/examples/loop_optimizations_service/benchmarks/add.c index 324713625..e1ef82a43 100644 --- a/examples/loop_optimizations_service/benchmarks/add.c +++ b/examples/loop_optimizations_service/benchmarks/add.c @@ -1,3 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. #include "header.h" #ifndef N diff --git a/examples/loop_optimizations_service/benchmarks/conv2d.c b/examples/loop_optimizations_service/benchmarks/conv2d.c index 9848d9529..5366dc9b5 100644 --- a/examples/loop_optimizations_service/benchmarks/conv2d.c +++ b/examples/loop_optimizations_service/benchmarks/conv2d.c @@ -1,3 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
+// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. #include "header.h" // TODO: use templates instead of macros diff --git a/examples/loop_optimizations_service/benchmarks/offsets1.c b/examples/loop_optimizations_service/benchmarks/offsets1.c index 7a382d20b..a2894b48d 100644 --- a/examples/loop_optimizations_service/benchmarks/offsets1.c +++ b/examples/loop_optimizations_service/benchmarks/offsets1.c @@ -1,3 +1,7 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. #include "header.h" #ifndef N diff --git a/examples/loop_optimizations_service/example.py b/examples/loop_optimizations_service/example.py index 9b9200ec2..e426c66b4 100644 --- a/examples/loop_optimizations_service/example.py +++ b/examples/loop_optimizations_service/example.py @@ -1,26 +1,26 @@ import compiler_gym import examples.loop_optimizations_service as loop_optimizations_service # noqa Register environments. -env = compiler_gym.make( +with compiler_gym.make( "loops-opt-py-v0", benchmark="loops-opt-v0/add", observation_space="ir", reward_space="runtime", -) -compiler_gym.set_debug_level(4) # TODO: check why this has no effect +) as env: + compiler_gym.set_debug_level(4) # TODO: check why this has no effect -observation = env.reset() -print("observation: ", observation) + observation = env.reset() + print("observation: ", observation) -print() + print() -observation, reward, done, info = env.step(env.action_space.sample()) -print("observation: ", observation) -print("reward: ", reward) -print("done: ", done) -print("info: ", info) + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) -env.close() + env.close() -# TODO: implement write_bitcode(..) or write_ir(..) -# env.write_bitcode("/tmp/output.bc") + # TODO: implement write_bitcode(..) or write_ir(..) + # env.write_bitcode("/tmp/output.bc") diff --git a/examples/loop_optimizations_service/opt_loops/README.md b/examples/loop_optimizations_service/opt_loops/README.md index 1b593d730..5118c8d3d 100644 --- a/examples/loop_optimizations_service/opt_loops/README.md +++ b/examples/loop_optimizations_service/opt_loops/README.md @@ -1,6 +1,6 @@ -LLVM's opt does not always enforce the unrolling options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/loop_optimizations_service/loop_unroller that enforces the unrolling factors passed in its cli. +LLVM's opt does not always enforce the unrolling or vectorization options passed as cli arguments. Hence, we created our own exeutable with custom unrolling pass in examples/loop_optimizations_service/loop_unroller that enforces the unrolling factors passed in its cli. 
To run the custom unroller: ``` -bazel run //examples/loop_optimizations_service/loop_unroller:loop_unroller -- .ll --funroll-count= -S -o .ll +bazel run //examples/loop_optimizations_service/opt_loops:opt_loops -- .ll --funroll-count= --force-vector-width= -S -o .ll ``` diff --git a/examples/loop_optimizations_service/service_py/loops_opt_service.py b/examples/loop_optimizations_service/service_py/loops_opt_service.py index 37eaee0f1..2242bdda2 100755 --- a/examples/loop_optimizations_service/service_py/loops_opt_service.py +++ b/examples/loop_optimizations_service/service_py/loops_opt_service.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/examples/loop_optimizations_service/service_py/utils.py b/examples/loop_optimizations_service/service_py/utils.py index c3ab2ace3..be435022a 100644 --- a/examples/loop_optimizations_service/service_py/utils.py +++ b/examples/loop_optimizations_service/service_py/utils.py @@ -3,7 +3,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. - +# A prelminary, fragile approach to extract statistics +# TODO: replace with AutpPhase features def extract_statistics_from_ir(ir: str): stats = {"control_flow": 0, "arithmetic": 0, "memory": 0} for line in ir.splitlines(): From dd92406a9442da9909159ec0056f9543183694ce Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 1 Jan 2022 14:36:31 -0500 Subject: [PATCH 123/142] reove site_data_path --- examples/loop_optimizations_service/__init__.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/examples/loop_optimizations_service/__init__.py b/examples/loop_optimizations_service/__init__.py index 20f310277..82614f995 100644 --- a/examples/loop_optimizations_service/__init__.py +++ b/examples/loop_optimizations_service/__init__.py @@ -12,7 +12,7 @@ from compiler_gym.spaces import Reward from compiler_gym.third_party import llvm from compiler_gym.util.registration import register -from compiler_gym.util.runfiles_path import runfiles_path, site_data_path +from compiler_gym.util.runfiles_path import runfiles_path LOOPS_OPT_PY_SERVICE_BINARY: Path = runfiles_path( "examples/loop_optimizations_service/service_py/loops-opt-service-py" @@ -83,9 +83,6 @@ def __init__(self, *args, **kwargs): name="benchmark://loops-opt-v0", license="MIT", description="Loops optimization dataset", - site_data_base=site_data_path( - "example_dataset" - ), # TODO: what should we set this to? 
we are not using it ) self._benchmarks = { From 2821c78e869c52320b651a11b44d7f2801184410 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 1 Jan 2022 15:28:43 -0500 Subject: [PATCH 124/142] get loop opts to work without bazel --- .../loop_unroller/CMakeLists.txt | 2 +- .../loop_optimizations_service/CMakeLists.txt | 6 + .../example_without_bazel.py | 181 ++++++++++++++++++ 3 files changed, 188 insertions(+), 1 deletion(-) create mode 100644 examples/loop_optimizations_service/CMakeLists.txt create mode 100644 examples/loop_optimizations_service/example_without_bazel.py diff --git a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt index 17f605535..8953eb505 100644 --- a/examples/example_unrolling_service/loop_unroller/CMakeLists.txt +++ b/examples/example_unrolling_service/loop_unroller/CMakeLists.txt @@ -22,5 +22,5 @@ cg_cc_binary( ${LLVM_DEFINITIONS} ) -ADD_CUSTOM_TARGET(link_target ALL +ADD_CUSTOM_TARGET(link_loop_unroller_target ALL COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_BINARY_DIR}/examples/example_unrolling_service/loop_unroller/loop_unroller ${CMAKE_SOURCE_DIR}/examples/example_unrolling_service/loop_unroller/loop_unroller) diff --git a/examples/loop_optimizations_service/CMakeLists.txt b/examples/loop_optimizations_service/CMakeLists.txt new file mode 100644 index 000000000..8c0906a57 --- /dev/null +++ b/examples/loop_optimizations_service/CMakeLists.txt @@ -0,0 +1,6 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +cg_add_all_subdirs() diff --git a/examples/loop_optimizations_service/example_without_bazel.py b/examples/loop_optimizations_service/example_without_bazel.py new file mode 100644 index 000000000..f20ec789c --- /dev/null +++ b/examples/loop_optimizations_service/example_without_bazel.py @@ -0,0 +1,181 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""This script demonstrates how the Python example service without needing +to use the bazel build system. + +Prerequisite: + # In the repo's INSTALL.md, follow the 'Building from source using CMake' instructions with `-DCOMPILER_GYM_BUILD_EXAMPLES=ON` added to the `cmake` command + $ cd /examples +Usage: + + $ python example_unrolling_service/examples_without_bazel.py + +It is equivalent in behavior to the example.py script in this directory. +""" +import subprocess +from pathlib import Path +from typing import Iterable + +import compiler_gym +from compiler_gym.datasets import Benchmark, Dataset +from compiler_gym.envs.llvm.llvm_benchmark import get_system_includes +from compiler_gym.spaces import Reward +from compiler_gym.third_party import llvm +from compiler_gym.util.registration import register + +LOOPS_OPT_PY_SERVICE_BINARY: Path = Path( + "loop_optimizations_service/service_py/loops_opt_service.py" +) + +BENCHMARKS_PATH: Path = Path("loop_optimizations_service/benchmarks") + +NEURO_VECTORIZER_HEADER: Path = Path( + "../compiler_gym/third_party/neuro-vectorizer/header.h" +) + + +class RuntimeReward(Reward): + """An example reward that uses changes in the "runtime" observation value + to compute incremental reward. 
+ """ + + def __init__(self): + super().__init__( + id="runtime", + observation_spaces=["runtime"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_runtime = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["runtime"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_runtime - observations[0]) / self.baseline_runtime + + +class SizeReward(Reward): + """An example reward that uses changes in the "size" observation value + to compute incremental reward. + """ + + def __init__(self): + super().__init__( + id="size", + observation_spaces=["size"], + default_value=0, + default_negates_returns=True, + deterministic=False, + platform_dependent=True, + ) + self.baseline_size = 0 + + def reset(self, benchmark: str, observation_view): + del benchmark # unused + self.baseline_runtime = observation_view["size"] + + def update(self, action, observations, observation_view): + del action # unused + del observation_view # unused + return float(self.baseline_size - observations[0]) / self.baseline_size + + +class LoopsDataset(Dataset): + def __init__(self, *args, **kwargs): + super().__init__( + name="benchmark://loops-opt-v0", + license="MIT", + description="Loops optimization dataset", + ) + + self._benchmarks = { + "benchmark://loops-opt-v0/add": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/add", + self.preprocess(BENCHMARKS_PATH / "add.c"), + ), + "benchmark://loops-opt-v0/offsets1": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/offsets1", + self.preprocess(BENCHMARKS_PATH / "offsets1.c"), + ), + "benchmark://loops-opt-v0/conv2d": Benchmark.from_file_contents( + "benchmark://loops-opt-v0/conv2d", + self.preprocess(BENCHMARKS_PATH / "conv2d.c"), + ), + } + + @staticmethod + def preprocess(src: Path) -> bytes: + """Front a C source through the compiler frontend.""" + # TODO(github.com/facebookresearch/CompilerGym/issues/325): We can skip + # this pre-processing, or do it on the service side, once support for + # multi-file benchmarks lands. + cmd = [ + str(llvm.clang_path()), + "-E", + "-o", + "-", + "-I", + str(NEURO_VECTORIZER_HEADER.parent), + src, + ] + for directory in get_system_includes(): + cmd += ["-isystem", str(directory)] + return subprocess.check_output( + cmd, + timeout=300, + ) + + def benchmark_uris(self) -> Iterable[str]: + yield from self._benchmarks.keys() + + def benchmark(self, uri: str) -> Benchmark: + if uri in self._benchmarks: + return self._benchmarks[uri] + else: + raise LookupError("Unknown program name") + + +# Register the unrolling example service on module import. After importing this module, +# the loops-opt-py-v0 environment will be available to gym.make(...). 
+ +register( + id="loops-opt-py-v0", + entry_point="compiler_gym.envs:CompilerEnv", + kwargs={ + "service": LOOPS_OPT_PY_SERVICE_BINARY, + "rewards": [RuntimeReward(), SizeReward()], + "datasets": [LoopsDataset()], + }, +) + +with compiler_gym.make( + "loops-opt-py-v0", + benchmark="loops-opt-v0/add", + observation_space="ir", + reward_space="runtime", +) as env: + compiler_gym.set_debug_level(4) # TODO: check why this has no effect + + observation = env.reset() + print("observation: ", observation) + + print() + + observation, reward, done, info = env.step(env.action_space.sample()) + print("observation: ", observation) + print("reward: ", reward) + print("done: ", done) + print("info: ", info) + + env.close() + + # TODO: implement write_bitcode(..) or write_ir(..) + # env.write_bitcode("/tmp/output.bc") From c07f1ebc8f8287dca23996d4e3b915737098bb24 Mon Sep 17 00:00:00 2001 From: Mostafa Elhoushi Date: Sat, 1 Jan 2022 15:35:36 -0500 Subject: [PATCH 125/142] update docstring --- examples/loop_optimizations_service/example_without_bazel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/loop_optimizations_service/example_without_bazel.py b/examples/loop_optimizations_service/example_without_bazel.py index f20ec789c..e87781a2e 100644 --- a/examples/loop_optimizations_service/example_without_bazel.py +++ b/examples/loop_optimizations_service/example_without_bazel.py @@ -2,7 +2,7 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -"""This script demonstrates how the Python example service without needing +"""This script uses the loop optimizations service without needing to use the bazel build system. Prerequisite: @@ -10,7 +10,7 @@ $ cd /examples Usage: - $ python example_unrolling_service/examples_without_bazel.py + $ python loop_optimizations_service/examples_without_bazel.py It is equivalent in behavior to the example.py script in this directory. """ From 2f1746fd0427847a7d2eeef4d8180b05cfebd194 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 10 Jan 2022 17:22:18 +0000 Subject: [PATCH 126/142] [docs] Fix typo in gcc.rst. Credit to Qingwei Lan, #533. --- docs/source/envs/gcc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/envs/gcc.rst b/docs/source/envs/gcc.rst index 72f48d07f..93efd42cb 100644 --- a/docs/source/envs/gcc.rst +++ b/docs/source/envs/gcc.rst @@ -353,7 +353,7 @@ current state. So, for example, in GCC 11.2.0, the first option is the :code:`-O` option. This has 7 possible settings, other than missing: :code:`-O0`, :code:`-O1`, -:code:`-O2`, :code:`-O3`, :code:`-Ofast`, :code:`-Og`, amd :code:`-Os`. Since +:code:`-O2`, :code:`-O3`, :code:`-Ofast`, :code:`-Og`, and :code:`-Os`. Since this is fewer than ten, there is a corresponding action for each. Similarly, there are action for each of the normal GCC flags, like :code:`-fpeel-loops` and :code:`-fno-peel-loops`. Parameters often have more than ten options, so there From 1f62a1aec28f0371ae7d064ce9b28cae9b5a3f9c Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 10 Jan 2022 17:23:54 +0000 Subject: [PATCH 127/142] Fix a typo in comment string. 
--- .../service/runtime/CreateAndRunCompilerGymServiceImpl.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler_gym/service/runtime/CreateAndRunCompilerGymServiceImpl.h b/compiler_gym/service/runtime/CreateAndRunCompilerGymServiceImpl.h index 0370d9e71..51d6dbbb7 100644 --- a/compiler_gym/service/runtime/CreateAndRunCompilerGymServiceImpl.h +++ b/compiler_gym/service/runtime/CreateAndRunCompilerGymServiceImpl.h @@ -76,7 +76,7 @@ template FLAGS_working_dir = workingDirectory.string(); } - // Create amd set the logging directory. + // Create and set the logging directory. boost::filesystem::create_directories(workingDirectory / "logs"); FLAGS_log_dir = workingDirectory.string() + "/logs"; From b602fcfd32f433fd88a66a5b5310447c07ef6b99 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 10 Jan 2022 17:59:23 +0000 Subject: [PATCH 128/142] [docs] Remove "yes/no" prompts from install instructions. Add a "yes" flag to install instructions to prevent prompts. --- INSTALL.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index 82739ad2b..2f947b6bb 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -31,7 +31,7 @@ Now proceed to [All platforms](#all-platforms) below. On debian-based linux systems, install the required toolchain using: ```sh -sudo apt install clang-9 clang++-9 clang-format golang libjpeg-dev \ +sudo apt install -y clang-9 clang++-9 clang-format golang libjpeg-dev \ libtinfo5 m4 make patch zlib1g-dev tar bzip2 wget mkdir -pv ~/.local/bin wget https://github.com/bazelbuild/bazelisk/releases/download/v1.7.5/bazelisk-linux-amd64 -O ~/.local/bin/bazel @@ -52,9 +52,9 @@ We recommend using to manage the remaining build dependencies. First create a conda environment with the required dependencies: - conda create -n compiler_gym python=3.8 + conda create -y -n compiler_gym python=3.8 conda activate compiler_gym - conda install -c conda-forge cmake pandoc patchelf + conda install -y -c conda-forge cmake pandoc patchelf Then clone the CompilerGym source code using: From dafc54e584ace295c9378bd66bf5f2e4bf159d05 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Mon, 10 Jan 2022 18:00:20 +0000 Subject: [PATCH 129/142] [examples] Don't pin typing-extensions version. This causes an incompatibility with linters. Fixes #537. --- examples/requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/requirements.txt b/examples/requirements.txt index 2899a759c..49982b977 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -13,4 +13,3 @@ submitit>=1.2.0 tensorflow==2.6.1 torch>=1.6.0 typer[all]>=0.3.2 -typing-extensions~=3.7.4 # Pin version for tensorflow. From 6320eefa1ee5c263ae451609fea6773dab23f5cb Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 12 Jan 2022 17:30:59 +0000 Subject: [PATCH 130/142] [github] Add a label to all Issue templates. 
--- .github/ISSUE_TEMPLATE/--bug-report.md | 2 +- .github/ISSUE_TEMPLATE/--questions-help-support.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/--bug-report.md b/.github/ISSUE_TEMPLATE/--bug-report.md index 975923b8c..032469e71 100644 --- a/.github/ISSUE_TEMPLATE/--bug-report.md +++ b/.github/ISSUE_TEMPLATE/--bug-report.md @@ -2,7 +2,7 @@ name: "\U0001F41B Bug Report" about: Create a report to help us improve CompilerGym title: '' -labels: '' +labels: 'Bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/--questions-help-support.md b/.github/ISSUE_TEMPLATE/--questions-help-support.md index 2416c904b..8161d1154 100644 --- a/.github/ISSUE_TEMPLATE/--questions-help-support.md +++ b/.github/ISSUE_TEMPLATE/--questions-help-support.md @@ -2,7 +2,7 @@ name: "❓Questions/Help/Support" about: Do you need support? We have resources. title: '' -labels: '' +labels: 'Question' assignees: '' --- From 9719f7d6c77d83bb12cf97a9a1ce863ff5a91404 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 09:49:26 +0000 Subject: [PATCH 131/142] [llvm] Add a table of accepted file types to make_benchmark(). Fixes #528. --- compiler_gym/envs/llvm/llvm_benchmark.py | 55 ++++++++++++----------- compiler_gym/envs/llvm/llvm_env.py | 56 ++++++++++++++---------- 2 files changed, 62 insertions(+), 49 deletions(-) diff --git a/compiler_gym/envs/llvm/llvm_benchmark.py b/compiler_gym/envs/llvm/llvm_benchmark.py index 9e001b4f6..f8969b30a 100644 --- a/compiler_gym/envs/llvm/llvm_benchmark.py +++ b/compiler_gym/envs/llvm/llvm_benchmark.py @@ -183,10 +183,34 @@ def make_benchmark( ) -> Benchmark: """Create a benchmark for use by LLVM environments. - This function takes one or more inputs and uses them to create a benchmark - that can be passed to :meth:`compiler_gym.envs.LlvmEnv.reset`. + This function takes one or more inputs and uses them to create an LLVM + bitcode benchmark that can be passed to + :meth:`compiler_gym.envs.LlvmEnv.reset`. + + The following input types are supported: + + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | **File Suffix** | **Treated as** | **Converted using** | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.bc` | LLVM IR bitcode | No conversion required. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.ll` | LLVM IR text format | Assembled to bitcode using llvm-as. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.c`, :code:`.cc`, :code:`.cpp`, :code:`.cxx` | C / C++ source | Compiled to bitcode using clang and the given :code:`copt`. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ - For single-source C/C++ programs, you can pass the path of the source file: + .. note:: + + The LLVM IR format has no compatability guarantees between versions (see + `LLVM docs + `_). + You must ensure that any :code:`.bc` and :code:`.ll` files are + compatible with the LLVM version used by CompilerGym, which can be + reported using :func:`env.compiler_version + `. + + E.g. 
for single-source C/C++ programs, you can pass the path of the source + file: >>> benchmark = make_benchmark('my_app.c') >>> env = gym.make("llvm-v0") @@ -209,7 +233,7 @@ def make_benchmark( clang: >>> benchmark = make_benchmark( - ClangInvocation(['/path/to/my_app.c'], timeout=10) + ClangInvocation(['/path/to/my_app.c'], system_includes=False, timeout=10) ) For multi-file programs, pass a list of inputs that will be compiled @@ -219,28 +243,9 @@ def make_benchmark( 'main.c', 'lib.cpp', 'lib2.bc', + 'foo/input.bc' ]) - If you already have prepared bitcode files, those can be linked and used - directly: - - >>> benchmark = make_benchmark([ - 'bitcode1.bc', - 'bitcode2.bc', - ]) - - Text-format LLVM assembly can also be used: - - >>> benchmark = make_benchmark('module.ll') - - .. note:: - - LLVM bitcode compatibility is - `not guaranteed `_, - so you must ensure that any precompiled bitcodes are compatible with the - LLVM version used by CompilerGym, which can be queried using - :func:`env.compiler_version `. - :param inputs: An input, or list of inputs. :param copt: A list of command line options to pass to clang when compiling @@ -278,7 +283,7 @@ def _add_path(path: Path): if path.suffix == ".bc": bitcodes.append(path.absolute()) - elif path.suffix in {".c", ".cxx", ".cpp", ".cc"}: + elif path.suffix in {".c", ".cc", ".cpp", ".cxx"}: clang_jobs.append( ClangInvocation.from_c_file( path, copt=copt, system_includes=system_includes, timeout=timeout diff --git a/compiler_gym/envs/llvm/llvm_env.py b/compiler_gym/envs/llvm/llvm_env.py index f814b6d8f..a8bf29878 100644 --- a/compiler_gym/envs/llvm/llvm_env.py +++ b/compiler_gym/envs/llvm/llvm_env.py @@ -317,13 +317,36 @@ def make_benchmark( ) -> Benchmark: """Create a benchmark for use with this environment. - This function takes one or more inputs and uses them to create a - benchmark that can be passed to :meth:`compiler_gym.envs.LlvmEnv.reset`. + This function takes one or more inputs and uses them to create an LLVM + bitcode benchmark that can be passed to + :meth:`compiler_gym.envs.LlvmEnv.reset`. + + The following input types are supported: + + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | **File Suffix** | **Treated as** | **Converted using** | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.bc` | LLVM IR bitcode | No conversion required. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.ll` | LLVM IR text format | Assembled to bitcode using llvm-as. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ + | :code:`.c`, :code:`.cc`, :code:`.cpp`, :code:`.cxx` | C / C++ source | Compiled to bitcode using clang and the given :code:`copt`. | + +-----------------------------------------------------+---------------------+-------------------------------------------------------------+ - For single-source C/C++ programs, you can pass the path of the source + .. note:: + + The LLVM IR format has no compatability guarantees between versions (see + `LLVM docs + `_). + You must ensure that any :code:`.bc` and :code:`.ll` files are + compatible with the LLVM version used by CompilerGym, which can be + reported using :func:`env.compiler_version + `. + + E.g. 
for single-source C/C++ programs, you can pass the path of the source file: - >>> benchmark = make_benchmark('my_app.c') + >>> benchmark = env.make_benchmark('my_app.c') >>> env = gym.make("llvm-v0") >>> env.reset(benchmark=benchmark) @@ -336,42 +359,27 @@ def make_benchmark( Additional compile-time arguments to clang can be provided using the :code:`copt` argument: - >>> benchmark = make_benchmark('/path/to/my_app.cpp', copt=['-O2']) + >>> benchmark = env.make_benchmark('/path/to/my_app.cpp', copt=['-O2']) If you need more fine-grained control over the options, you can directly construct a :class:`ClangInvocation ` to pass a list of arguments to clang: - >>> benchmark = make_benchmark( - ClangInvocation(['/path/to/my_app.c'], timeout=10) + >>> benchmark = env.make_benchmark( + ClangInvocation(['/path/to/my_app.c'], system_includes=False, timeout=10) ) For multi-file programs, pass a list of inputs that will be compiled separately and then linked to a single module: - >>> benchmark = make_benchmark([ + >>> benchmark = env.make_benchmark([ 'main.c', 'lib.cpp', 'lib2.bc', + 'foo/input.bc' ]) - If you already have prepared bitcode files, those can be linked and used - directly: - - >>> benchmark = make_benchmark([ - 'bitcode1.bc', - 'bitcode2.bc', - ]) - - .. note:: - - LLVM bitcode compatibility is - `not guaranteed `_, - so you must ensure that any precompiled bitcodes are compatible with the - LLVM version used by CompilerGym, which can be queried using - :func:`LlvmEnv.compiler_version `. - :param inputs: An input, or list of inputs. :param copt: A list of command line options to pass to clang when From 038bd9d3f714cad740053999691a2f3bfed19bcb Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Tue, 21 Dec 2021 14:57:33 +0000 Subject: [PATCH 132/142] [Makefile] Build docs from source dir to prevent import error. Fixes #543. --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 46f0202db..eabaeff5a 100644 --- a/Makefile +++ b/Makefile @@ -278,10 +278,10 @@ doxygen-rst: cd docs && $(PYTHON) generate_cc_rst.py docs: gendocs bazel-build doxygen - PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-build -M html docs/source docs/build $(SPHINXOPTS) + cd docs/source && PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-build -M html . ../build $(SPHINXOPTS) livedocs: gendocs doxygen - PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-autobuild docs/source docs/build $(SPHINXOPTS) --pre-build 'make gendocs bazel-build doxygen' --watch compiler_gym + cd docs/source && PYTHONPATH=$(ROOT)/bazel-bin/package.runfiles/CompilerGym sphinx-autobuild . ../build $(SPHINXOPTS) --pre-build 'make -C ../.. gendocs bazel-build doxygen' --watch ../../compiler_gym .PHONY: doxygen doxygen-rst From bacedc6ec41db93dfef7b17b4458cf0d76c70a15 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 12:33:21 +0000 Subject: [PATCH 133/142] Remove deprecated datasets APIs and scripts. Removes the legacy dataset scripts and APIs that have been deprecated since v0.1.8. Please use the new dataset API: https://compilergym.com/compiler_gym/datasets.html#datasets The following has been removed: - The `compiler_gym.bin.datasets` script. - The properties: `CompilerEnv.available_datasets`, and `CompilerEnv.benchmarks`. - The `CompilerEnv.require_dataset()`, `CompilerEnv.require_datasets()`, `CompilerEnv.register_dataset()`, and `CompilerEnv.get_benchmark_validation_callback()` methods. 
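
For reference, a rough sketch of equivalent usage against the new dataset API
(illustrative only; it assumes an installed `llvm-v0` environment and the
`benchmark://cbench-v1/qsort` benchmark):

```
from itertools import islice

import compiler_gym

with compiler_gym.make("llvm-v0") as env:
    # Replaces env.available_datasets: iterate the datasets directly.
    for dataset in env.datasets:
        print(dataset.name)

    # Replaces env.benchmarks: enumerate benchmark URIs (possibly unbounded).
    for uri in islice(env.datasets.benchmark_uris(), 5):
        print(uri)

    # Replaces env.get_benchmark_validation_callback(): validate the current
    # episode state directly.
    env.reset(benchmark="benchmark://cbench-v1/qsort")
    print(env.validate())
```
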
--- compiler_gym/bin/BUILD | 14 --- compiler_gym/bin/CMakeLists.txt | 14 --- compiler_gym/bin/datasets.py | 191 ------------------------------ compiler_gym/envs/compiler_env.py | 116 ------------------ tests/bin/BUILD | 10 -- tests/bin/CMakeLists.txt | 11 -- tests/bin/datasets_bin_test.py | 30 ----- 7 files changed, 386 deletions(-) delete mode 100644 compiler_gym/bin/datasets.py delete mode 100644 tests/bin/datasets_bin_test.py diff --git a/compiler_gym/bin/BUILD b/compiler_gym/bin/BUILD index 27e05e66f..65cf3dc8b 100644 --- a/compiler_gym/bin/BUILD +++ b/compiler_gym/bin/BUILD @@ -8,7 +8,6 @@ py_library( name = "bin", visibility = ["//visibility:public"], deps = [ - ":datasets", ":manual_env", ":random_replay", ":random_search", @@ -17,19 +16,6 @@ py_library( ], ) -py_binary( - name = "datasets", - srcs = ["datasets.py"], - visibility = ["//visibility:public"], - deps = [ - ":service", - "//compiler_gym/datasets", - "//compiler_gym/envs", - "//compiler_gym/util", - "//compiler_gym/util/flags", - ], -) - py_binary( name = "manual_env", srcs = ["manual_env.py"], diff --git a/compiler_gym/bin/CMakeLists.txt b/compiler_gym/bin/CMakeLists.txt index 98c437f73..5a3a114bd 100644 --- a/compiler_gym/bin/CMakeLists.txt +++ b/compiler_gym/bin/CMakeLists.txt @@ -9,7 +9,6 @@ cg_py_library( NAME bin DEPS - ::datasets ::manual_env ::random_replay ::random_search @@ -18,19 +17,6 @@ cg_py_library( PUBLIC ) -cg_py_binary( - NAME - datasets - SRCS - "datasets.py" - DEPS - ::service - compiler_gym::datasets::datasets - compiler_gym::envs::envs - compiler_gym::util::util - compiler_gym::util::flags::flags -) - cg_py_binary( NAME manual_env diff --git a/compiler_gym/bin/datasets.py b/compiler_gym/bin/datasets.py deleted file mode 100644 index 582f31cc0..000000000 --- a/compiler_gym/bin/datasets.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright (c) Facebook, Inc. and its affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -"""Manage datasets of benchmarks. - -.. code-block:: - - $ python -m compiler_gym.bin.datasets --env= \ - [--download=] [--delete=] - - -Listing installed datasets --------------------------- - -If run with no arguments, this command shows an overview of the datasets that -are activate, inactive, and available to download. For example: - -.. code-block:: - - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 - - +-------------------+---------------------+-----------------+----------------+ - | Active Datasets | Description | #. Benchmarks | Size on disk | - +===================+=====================+=================+================+ - | cbench-v1 | Runnable C programs | 23 | 10.1 MB | - +-------------------+---------------------+-----------------+----------------+ - | Total | | 23 | 10.1 MB | - +-------------------+---------------------+-----------------+----------------+ - - -Downloading datasets --------------------- - -Use :code:`--download` to download a dataset from the list of available -datasets: - -.. code-block:: - - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --download=npb-v0 - -After downloading, the dataset will be activated and the benchmarks will be -available to use by the environment. - - >>> import compiler_gym - >>> import gym - >>> env = gym.make("llvm-v0") - >>> env.benchmark = "npb-v0" - -The flag :code:`--download_all` can be used to download every available dataset: - -.. 
code-block:: - - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --download_all - -Or use the :code:`file:///` URI to install a local archive file: - -.. code-block:: - - $ python -m compiler_gym.bin.benchmarks --env=llvm-v0 --download=file:////tmp/dataset.tar.bz2 - - -Activating and deactivating datasets ------------------------------------- - -Datasets have two states: active and inactive. An inactive dataset still exists -locally on the filesystem, but is excluded from use by CompilerGym environments. -This be useful if you have many datasets downloaded and you would to limit the -benchmarks that can be selected randomly by an environment. - -Activate or deactivate datasets using the :code:`--activate` and -:code:`--deactivate` flags, respectively: - -.. code-block:: - - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --activate=npb-v0,github-v0 --deactivate=cbench-v1 - -The :code:`--activate_all` and :code:`--deactivate_all` flags can be used as a -shortcut to activate or deactivate every downloaded: - -.. code-block:: - - # Activate all inactivate datasets: - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --activate_all - # Make all activate datasets inactive: - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --deactivate_all - -Deleting datasets ------------------ - -To remove a dataset from the filesystem, use :code:`--delete`: - -.. code-block:: - - $ python -m comiler_gym.bin.benchmarks --env=llvm-v0 --delete=npb-v0 - -Once deleted, a dataset must be downloaded before it can be used again. - -A :code:`--delete_all` flag can be used to delete all of the locally installed -datasets. -""" -import sys - -from absl import app, flags -from deprecated.sphinx import deprecated - -from compiler_gym.bin.service import summarize_datasets -from compiler_gym.datasets.dataset import activate, deactivate, delete -from compiler_gym.util.flags.env_from_flags import env_from_flags - -flags.DEFINE_list( - "download", - [], - "The name or URL of a dataset to download. Accepts a list of choices", -) -flags.DEFINE_list( - "activate", - [], - "The names of one or more inactive datasets to activate. Accepts a list of choices", -) -flags.DEFINE_list( - "deactivate", - [], - "The names of one or more active datasets to deactivate. Accepts a list of choices", -) -flags.DEFINE_list( - "delete", - [], - "The names of one or more inactive dataset to delete. Accepts a list of choices", -) -flags.DEFINE_boolean("download_all", False, "Download all available datasets") -flags.DEFINE_boolean("activate_all", False, "Activate all inactive datasets") -flags.DEFINE_boolean("deactivate_all", False, "Deactivate all active datasets") -FLAGS = flags.FLAGS - - -@deprecated( - version="0.1.8", - reason=( - "Command-line management of datasets is deprecated. Please use " - ":mod:`compiler_gym.bin.service` to print a tabular overview of the " - "available datasets. For management of datasets, use the " - ":class:`env.datasets ` property." 
- ), -) -def main(argv): - """Main entry point.""" - if len(argv) != 1: - raise app.UsageError(f"Unknown command line arguments: {argv[1:]}") - - with env_from_flags() as env: - invalidated_manifest = False - - for name_or_url in FLAGS.download: - env.datasets.install(name_or_url) - - if FLAGS.download_all: - for dataset in env.datasets: - dataset.install() - - for name in FLAGS.activate: - activate(env, name) - invalidated_manifest = True - - if FLAGS.activate_all: - invalidated_manifest = True - - for name in FLAGS.deactivate: - deactivate(env, name) - invalidated_manifest = True - - if FLAGS.deactivate_all: - invalidated_manifest = True - - for name in FLAGS.delete: - delete(env, name) - - if invalidated_manifest: - env.make_manifest_file() - - print( - summarize_datasets(env.datasets), - ) - - -if __name__ == "__main__": - try: - app.run(main) - except (ValueError, OSError) as e: - print(e, file=sys.stderr) - sys.exit(1) diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index 1eb00e774..a7646f837 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -328,18 +328,6 @@ def __init__( self.observation_space = observation_space self.reward_space = reward_space - @property - @deprecated( - version="0.1.8", - reason=( - "Use :meth:`env.datasets.datasets() ` instead. " - "`More information `_." - ), - ) - def available_datasets(self) -> Dict[str, Dataset]: - """A dictionary of datasets.""" - return {d.name: d for d in self.datasets} - @property @deprecated( version="0.2.1", @@ -1125,18 +1113,6 @@ def render( else: raise ValueError(f"Invalid mode: {mode}") - @property - @deprecated( - version="0.1.8", - reason=( - "Use :meth:`env.datasets.benchmarks() ` instead. " - "`More information `_." - ), - ) - def benchmarks(self) -> Iterable[str]: - """Enumerate a (possible unbounded) list of available benchmarks.""" - return self.datasets.benchmark_uris() - @property def _observation_view_type(self): """Returns the type for observation views. @@ -1153,76 +1129,6 @@ def _reward_view_type(self): """ return RewardView - @deprecated( - version="0.1.8", - reason=( - "Datasets are now installed automatically, there is no need to call :code:`require()`. " - "`More information `_." - ), - ) - def require_datasets(self, datasets: List[Union[str, Dataset]]) -> bool: - """Deprecated function for managing datasets. - - Datasets are now installed automatically. See :class:`env.datasets - `. - - :param datasets: A list of datasets to require. Each dataset is the name - of an available dataset, the URL of a dataset to download, or a - :class:`Dataset ` instance. - - :return: :code:`True` if one or more datasets were downloaded, or - :code:`False` if all datasets were already available. - """ - return False - - @deprecated( - version="0.1.8", - reason=( - "Use :meth:`env.datasets.require() ` instead. " - "`More information `_." - ), - ) - def require_dataset(self, dataset: Union[str, Dataset]) -> bool: - """Deprecated function for managing datasets. - - Datasets are now installed automatically. See :class:`env.datasets - `. - - :param dataset: The name of the dataset to download, the URL of the - dataset, or a :class:`Dataset ` - instance. - - :return: :code:`True` if the dataset was downloaded, or :code:`False` if - the dataset was already available. - """ - return False - - @deprecated( - version="0.1.8", - reason=( - "Use :meth:`env.datasets.add() ` instead. " - "`More information `_." 
- ), - ) - def register_dataset(self, dataset: Dataset) -> bool: - """Register a new dataset. - - Example usage: - - >>> my_dataset = Dataset(name="my-dataset-v0", ...) - >>> env = gym.make("llvm-v0") - >>> env.register_dataset(my_dataset) - >>> env.benchmark = "my-dataset-v0/1" - - :param dataset: A :class:`Dataset ` - instance describing the new dataset. - - :return: :code:`True` if the dataset was added, else :code:`False`. - - :raises ValueError: If a dataset with this name is already registered. - """ - return self.datasets.add(dataset) - def apply(self, state: CompilerEnvState) -> None: # noqa """Replay this state on the given an environment. @@ -1349,28 +1255,6 @@ def validate(self, state: Optional[CompilerEnvState] = None) -> ValidationResult **validation, ) - @deprecated( - version="0.1.8", - reason=( - "Use :meth:`env.validate() " - "` instead. " - "`More information `_." - ), - ) - def get_benchmark_validation_callback( - self, - ) -> Optional[Callable[["CompilerEnv"], Iterable[ValidationError]]]: - """Return a callback that validates benchmark semantics, if available.""" - - def composed(env): - for validation_cb in self.benchmark.validation_callbacks(): - errors = validation_cb(env) - if errors: - yield from errors - - if self.benchmark.validation_callbacks(): - return composed - def send_param(self, key: str, value: str) -> str: """Send a single parameter to the compiler service. diff --git a/tests/bin/BUILD b/tests/bin/BUILD index 73faeee90..a5a47480a 100644 --- a/tests/bin/BUILD +++ b/tests/bin/BUILD @@ -4,16 +4,6 @@ # LICENSE file in the root directory of this source tree. load("@rules_python//python:defs.bzl", "py_test") -py_test( - name = "datasets_bin_test", - srcs = ["datasets_bin_test.py"], - deps = [ - "//compiler_gym/bin:datasets", - "//tests:test_main", - "//tests/pytest_plugins:common", - ], -) - py_test( name = "manual_env_bin_test", srcs = ["manual_env_bin_test.py"], diff --git a/tests/bin/CMakeLists.txt b/tests/bin/CMakeLists.txt index 4e207d373..cc9481822 100644 --- a/tests/bin/CMakeLists.txt +++ b/tests/bin/CMakeLists.txt @@ -5,17 +5,6 @@ cg_add_all_subdirs() -cg_py_test( - NAME - datasets_bin_test - SRCS - "datasets_bin_test.py" - DEPS - compiler_gym::bin::datasets - tests::pytest_plugins::common - tests::test_main -) - cg_py_test( NAME manual_env_bin_test diff --git a/tests/bin/datasets_bin_test.py b/tests/bin/datasets_bin_test.py deleted file mode 100644 index 884eb61c6..000000000 --- a/tests/bin/datasets_bin_test.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) Facebook, Inc. and its affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
-"""End-to-end tests for //compiler_gym/bin:benchmarks.""" -import pytest - -from compiler_gym.bin.datasets import main -from compiler_gym.util.capture_output import capture_output -from tests.pytest_plugins.common import set_command_line_flags -from tests.test_main import main as _test_main - - -def run_main(*args): - set_command_line_flags(["argv"] + list(args)) - return main(["argv0"]) - - -def test_llvm_summary(): - with pytest.warns( - DeprecationWarning, match="Command-line management of datasets is deprecated" - ): - with capture_output() as out: - run_main("--env=llvm-v0") - - assert "cbench-v1" in out.stdout - - -if __name__ == "__main__": - _test_main() From 896abef7f5fab13b9ad9b13c9030c48af4b04342 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 12:35:33 +0000 Subject: [PATCH 134/142] [tests] Mark xfail tests because of unclosed sockets. Issue #459. --- tests/gcc/datasets/csmith_test.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/gcc/datasets/csmith_test.py b/tests/gcc/datasets/csmith_test.py index 230fbe68a..537e088f7 100644 --- a/tests/gcc/datasets/csmith_test.py +++ b/tests/gcc/datasets/csmith_test.py @@ -18,6 +18,9 @@ pytest_plugins = ["tests.pytest_plugins.common", "tests.pytest_plugins.gcc"] +@pytest.mark.xfail( + reason="github.com/facebookresearch/CompilerGym/issues/459", +) @with_gcc_support def test_csmith_size(gcc_bin: str): with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: @@ -27,6 +30,9 @@ def test_csmith_size(gcc_bin: str): assert len(csmith_dataset) == 0 +@pytest.mark.xfail( + reason="github.com/facebookresearch/CompilerGym/issues/459", +) @with_gcc_support @pytest.mark.parametrize("index", range(3) if is_ci() else range(10)) def test_csmith_random_select(gcc_bin: str, index: int, tmpwd: Path): @@ -62,6 +68,9 @@ def test_random_benchmark(gcc_bin: str): assert len(random_benchmarks) == num_benchmarks +@pytest.mark.xfail( + reason="github.com/facebookresearch/CompilerGym/issues/459", +) @with_gcc_support def test_csmith_from_seed_retry_count_exceeded(gcc_bin: str): with gym.make("gcc-v0", gcc_bin=gcc_bin) as env: From 236abf45a21360fc4e08756b31e0316d733a2e86 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 12:59:55 +0000 Subject: [PATCH 135/142] [datasets] Tidy up docstring. --- compiler_gym/datasets/uri.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/compiler_gym/datasets/uri.py b/compiler_gym/datasets/uri.py index 6dfb34e23..fe788c2e2 100644 --- a/compiler_gym/datasets/uri.py +++ b/compiler_gym/datasets/uri.py @@ -48,11 +48,14 @@ def resolve_uri_protocol(uri: str) -> str: class BenchmarkUri(BaseModel): - """A URI string used to look up a benchmark. + """A URI used to identify a benchmark, and optionally a set of parameters + for the benchmark. - A benchmark URI has the following format: + A URI has the following format: - :code:`scheme://dataset/path?params#fragment` + .. code-block:: + + scheme://dataset/path?params#fragment where: @@ -81,11 +84,11 @@ class BenchmarkUri(BaseModel): A benchmark URI may resolve to zero or more benchmarks, for example: - * :code:`csmith-v0` resolves to any benchmark from the + * :code:`benchmark://csmith-v0` resolves to any benchmark from the :code:`benchmark://csmith-v0` dataset. - * :code:`benchmark://cbench-v0/qsort` resolves to the path :code:`/qsort` - within the dataset :code:`benchmark://cbench-v0`. 
+ * :code:`cbench-v0/qsort` resolves to the path :code:`/qsort` + within the dataset :code:`benchmark://cbench-v0` using the default scheme. * :code:`benchmark://cbench-v0/qsort?debug=true` also resolves to the path :code:`/qsort` within the dataset :code:`benchmark://cbench-v0`, but with From d2b62d4bbc90a2c3eb2c79c4d4d32d9ae6c86e94 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 14:15:35 +0000 Subject: [PATCH 136/142] [env] Forbid shallow copy, support deep copy. This adds support for `deepcopy(env)`, which has identical semantics to `env.fork()`. Shallow copies are explicitly disallowed as environments cannot share state. Fixes #351. --- compiler_gym/envs/compiler_env.py | 9 ++++++++ tests/BUILD | 10 +++++++++ tests/CMakeLists.txt | 11 ++++++++++ tests/env_copy_test.py | 35 +++++++++++++++++++++++++++++++ 4 files changed, 65 insertions(+) create mode 100644 tests/env_copy_test.py diff --git a/compiler_gym/envs/compiler_env.py b/compiler_gym/envs/compiler_env.py index b732464dd..383287cce 100644 --- a/compiler_gym/envs/compiler_env.py +++ b/compiler_gym/envs/compiler_env.py @@ -1322,3 +1322,12 @@ def send_params(self, *params: Iterable[Tuple[str, str]]) -> List[str]: ) return list(reply.reply) + + def __copy__(self) -> "CompilerEnv": + raise TypeError( + "CompilerEnv instances do not support shallow copies. Use deepcopy()" + ) + + def __deepcopy__(self, memo) -> "CompilerEnv": + del memo # unused + return self.fork() diff --git a/tests/BUILD b/tests/BUILD index db70c3a5c..ab34bddae 100644 --- a/tests/BUILD +++ b/tests/BUILD @@ -26,6 +26,16 @@ py_test( ], ) +py_test( + name = "env_copy_test", + srcs = ["env_copy_test.py"], + deps = [ + "//compiler_gym/envs", + "//tests:test_main", + "//tests/pytest_plugins:llvm", + ], +) + py_test( name = "make_test", timeout = "short", diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 6d9b7bf3d..c803559fe 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -28,6 +28,17 @@ cg_py_test( tests::test_main ) +cg_py_test( + NAME + env_copy_test + SRCS + "env_copy_test.py" + DEPS + compiler_gym::envs::envs + tests::pytest_plugins::llvm + tests::test_main +) + cg_py_test( NAME make_test diff --git a/tests/env_copy_test.py b/tests/env_copy_test.py new file mode 100644 index 000000000..25b6ed0cc --- /dev/null +++ b/tests/env_copy_test.py @@ -0,0 +1,35 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""Tests for the copy() and deepcopy() operators on CompilerEnv.""" +from copy import copy, deepcopy + +import pytest + +from compiler_gym.envs.llvm import LlvmEnv +from tests.test_main import main + +pytest_plugins = ["tests.pytest_plugins.llvm"] + + +def test_forbidden_shallow_copy(env: LlvmEnv): + """Test that shallow copy operator is explicitly forbidden.""" + with pytest.raises( + TypeError, + match=r"^CompilerEnv instances do not support shallow copies. Use deepcopy\(\)", + ): + copy(env) + + +def test_deep_copy(env: LlvmEnv): + """Test that deep copy creates an independent copy.""" + env.reset() + with deepcopy(env) as cpy: + assert cpy.state == env.state + env.step(env.action_space.sample()) + assert cpy.state != env.state + + +if __name__ == "__main__": + main() From 62932d1be59a6cc9f63363d1e1afa2deabd1858a Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 14:26:05 +0000 Subject: [PATCH 137/142] [docs] Add docstring for compiler_gym.wrappers module. Fixes #394. 
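
The docstring notes that the base wrapper classes can be used to write your own
wrappers. As a rough illustration (the `StepCounter` name is hypothetical; only
the `CompilerEnvWrapper` base class from `compiler_gym.wrappers.core` is
assumed):

```
from compiler_gym.wrappers.core import CompilerEnvWrapper


class StepCounter(CompilerEnvWrapper):
    """Hypothetical wrapper that counts the steps taken in each episode."""

    def __init__(self, env):
        super().__init__(env)
        self.step_count = 0

    def reset(self, *args, **kwargs):
        # Start a new episode and reset the counter.
        self.step_count = 0
        return self.env.reset(*args, **kwargs)

    def step(self, action, observations=None, rewards=None):
        # Forward to the wrapped environment, keeping the extended signature.
        self.step_count += 1
        return self.env.step(action, observations=observations, rewards=rewards)
```
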
--- compiler_gym/wrappers/__init__.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/compiler_gym/wrappers/__init__.py b/compiler_gym/wrappers/__init__.py index ce9353cc8..f6023ae95 100644 --- a/compiler_gym/wrappers/__init__.py +++ b/compiler_gym/wrappers/__init__.py @@ -2,7 +2,30 @@ # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -"""The :code:`compiler_gym.wrappers` module provides. +"""The :code:`compiler_gym.wrappers` module provides a set of classes that can +be used to transform an environment in a modular way. + +For example: + + >>> env = compiler_gym.make("llvm-v0") + >>> env = TimeLimit(env, n=10) + >>> env = CycleOverBenchmarks( + ... env, + ... benchmarks=[ + ... "benchmark://cbench-v1/crc32", + ... "benchmark://cbench-v1/qsort", + ... ], + ... ) + +.. warning:: + + CompilerGym environments are incompatible with the `OpenAI Gym wrappers + `_. This is because + CompilerGym extends the environment API with additional arguments and + methods. You must use the wrappers from this module when wrapping + CompilerGym environments. We provide a set of base wrappers that are + equivalent to those in OpenAI Gym that you can use to write your own + wrappers. """ from compiler_gym.wrappers.commandline import ( CommandlineWithTerminalAction, From ae237a58800e789a4e47faf9cf7a3631c35b54b8 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 14:27:09 +0000 Subject: [PATCH 138/142] [docs] Improve docstring note about infinite sampling. --- compiler_gym/wrappers/datasets.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/compiler_gym/wrappers/datasets.py b/compiler_gym/wrappers/datasets.py index 22a21d73f..6a8429055 100644 --- a/compiler_gym/wrappers/datasets.py +++ b/compiler_gym/wrappers/datasets.py @@ -157,9 +157,11 @@ class RandomOrderBenchmarks(IterateOverBenchmarks): .. note:: Uniform random selection is provided by evaluating the input benchmarks - iterator into a list and sampling randomly from the list. This will not - work for random iteration over infinite or very large iterables of - benchmarks. + iterator into a list and sampling randomly from the list. For very large + and infinite iterables of benchmarks you must use the + :class:`IterateOverBenchmarks + ` wrapper with your own + random sampling iterator. """ def __init__( From cc260b25b74e2a3fd7dd454d1f750f9076909a43 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 20:31:59 +0000 Subject: [PATCH 139/142] [wrappers] Add a new wrapper to validate benchmark semantics on step(). 
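
A minimal usage sketch (illustrative only; it assumes the `llvm-v0` environment
and the `benchmark://cbench-v1/qsort` benchmark):

```
import compiler_gym
from compiler_gym.wrappers import ValidateBenchmarkAfterEveryStep

with compiler_gym.make("llvm-v0", benchmark="benchmark://cbench-v1/qsort") as base_env:
    # Terminate the episode with a penalty reward if the benchmark no longer
    # validates after an action.
    env = ValidateBenchmarkAfterEveryStep(base_env, reward_penalty=-1e3)
    env.reset()
    observation, reward, done, info = env.step(env.action_space.sample())
    if done and "error_details" in info:
        print("Validation failed:", info["error_details"])
```
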
--- compiler_gym/wrappers/BUILD | 1 + compiler_gym/wrappers/CMakeLists.txt | 1 + compiler_gym/wrappers/__init__.py | 3 ++ compiler_gym/wrappers/validation.py | 44 ++++++++++++++++++++++++++++ tests/wrappers/BUILD | 10 +++++++ tests/wrappers/validation_test.py | 41 ++++++++++++++++++++++++++ 6 files changed, 100 insertions(+) create mode 100644 compiler_gym/wrappers/validation.py create mode 100644 tests/wrappers/validation_test.py diff --git a/compiler_gym/wrappers/BUILD b/compiler_gym/wrappers/BUILD index 2963d90d3..f8b917ca4 100644 --- a/compiler_gym/wrappers/BUILD +++ b/compiler_gym/wrappers/BUILD @@ -13,6 +13,7 @@ py_library( "datasets.py", "llvm.py", "time_limit.py", + "validation.py", ], visibility = ["//visibility:public"], deps = [ diff --git a/compiler_gym/wrappers/CMakeLists.txt b/compiler_gym/wrappers/CMakeLists.txt index 326ba828e..49ddd19e8 100644 --- a/compiler_gym/wrappers/CMakeLists.txt +++ b/compiler_gym/wrappers/CMakeLists.txt @@ -15,6 +15,7 @@ cg_py_library( "datasets.py" "llvm.py" "time_limit.py" + "validation.py" DEPS compiler_gym::datasets::datasets compiler_gym::envs::envs diff --git a/compiler_gym/wrappers/__init__.py b/compiler_gym/wrappers/__init__.py index f6023ae95..a373a08eb 100644 --- a/compiler_gym/wrappers/__init__.py +++ b/compiler_gym/wrappers/__init__.py @@ -46,6 +46,8 @@ from compiler_gym.wrappers.llvm import RuntimePointEstimateReward from compiler_gym.wrappers.time_limit import TimeLimit +from .validation import ValidateBenchmarkAfterEveryStep + __all__ = [ "ActionWrapper", "CommandlineWithTerminalAction", @@ -59,4 +61,5 @@ "RewardWrapper", "RuntimePointEstimateReward", "TimeLimit", + "ValidateBenchmarkAfterEveryStep", ] diff --git a/compiler_gym/wrappers/validation.py b/compiler_gym/wrappers/validation.py new file mode 100644 index 000000000..a493187cf --- /dev/null +++ b/compiler_gym/wrappers/validation.py @@ -0,0 +1,44 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +from compiler_gym.envs import CompilerEnv +from compiler_gym.wrappers.core import CompilerEnvWrapper + + +class ValidateBenchmarkAfterEveryStep(CompilerEnvWrapper): + """Run the benchmark validation routine after every step of the environment + and end the episode with a penalty reward if validation fails. + """ + + def __init__( + self, + env: CompilerEnv, + reward_penalty: float = -1e3, + ): + """Constructor. + + :param env: The environment to wrap. + + :param reward_penalty: The reward value that is returned by + :code:`step()` if validation fails. + """ + super().__init__(env) + self.reward_penalty = reward_penalty + + def step(self, action, observations=None, rewards=None): + observation, reward, done, info = self.env.step( + action, observations=observations, rewards=rewards + ) + + # Early exit if environment reaches terminal state. + if done: + return observation, reward, done, info + + try: + # Try and get an error from the validation callback. + info["error_details"] = next(self.env.benchmark.ivalidate(self.env)) + return observation, self.reward_penalty, True, info + except StopIteration: + # No error, we're good. 
+ return observation, reward, done, info diff --git a/tests/wrappers/BUILD b/tests/wrappers/BUILD index f2a42de18..6b81ff3be 100644 --- a/tests/wrappers/BUILD +++ b/tests/wrappers/BUILD @@ -59,3 +59,13 @@ py_test( "//tests/pytest_plugins:llvm", ], ) + +py_test( + name = "validation_test", + srcs = ["validation_test.py"], + deps = [ + "//compiler_gym/wrappers", + "//tests:test_main", + "//tests/pytest_plugins:llvm", + ], +) diff --git a/tests/wrappers/validation_test.py b/tests/wrappers/validation_test.py new file mode 100644 index 000000000..09c465193 --- /dev/null +++ b/tests/wrappers/validation_test.py @@ -0,0 +1,41 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +"""Unit tests for compiler_gym.wrappers.llvm.""" +import pytest + +from compiler_gym.envs.llvm import LlvmEnv +from compiler_gym.wrappers import ValidateBenchmarkAfterEveryStep +from tests.test_main import main + +pytest_plugins = ["tests.pytest_plugins.llvm"] + + +def test_ValidateBenchmarkAfterEveryStep_valid(env: LlvmEnv): + env.reset() + + type(env.benchmark).ivalidate = lambda *_: iter(()) + + env = ValidateBenchmarkAfterEveryStep(env, reward_penalty=-5) + _, reward, done, info = env.step(0) + assert reward != -5 + assert not done + assert "error_details" not in info + + +@pytest.mark.parametrize("reward_penalty", [-5, 10]) +def test_ValidateBenchmarkAfterEveryStep_invalid(env: LlvmEnv, reward_penalty): + env.reset() + + type(env.benchmark).ivalidate = lambda *_: iter(["Oh no!"]) + + env = ValidateBenchmarkAfterEveryStep(env, reward_penalty=reward_penalty) + _, reward, done, info = env.step(0) + assert reward == reward_penalty + assert done + assert info["error_details"] == "Oh no!" + + +if __name__ == "__main__": + main() From e52d74c6e60309b389793a3dc6fb19e166fd0646 Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Wed, 19 Jan 2022 14:39:29 +0000 Subject: [PATCH 140/142] Release v0.2.2 Amongst the highlights of this release are support for building with CMake and a new compiler environment based on loop unrolling. Many thanks to @sogartar, @mostafaelhoushi, @KyleHerndon, and @yqtianust for code contributions! - Added support for building CompilerGym from source on Linux using **CMake**. The new build system coexists with the bazel build and enables customization over the CMake configuration used to build the LLVM environment. See INSTALL.md for details. Credit: @sogartar, @KyleHerndon. - Added an environment for loop optimizations in LLVM. This new example environment provides control over loop unrolling factors and demonstrates how to build a standalone LLVM binary using the new CMake build system. Credit: @mostafaelhoushi. - Added a new BenchmarkUri class and API for parsing URIs. This enables benchmarks to have optional parameters that can be used by the backend services to modify their behavior. - [llvm] Enabled runtime reward to be calculated on systems where /dev/shm does not permit executables. - [llvm] Added a new benchmark://mibench-v1 dataset and deprecated benchmark://mibench-v0. If you are using mibench-v0, please update to the new version. - [llvm] Enabled all 20 of the cBench runtime datasets to be used by the benchmark://cbench-v1 dataset. - Added support for building CompilerGym from source on macOS Monterey. - Made the site_data_base argument of the Dataset class constructor optional. - Removed the legacy dataset scripts and APIs that were deprecated in v0.1.8. 
Please use the new dataset API. The following has been removed: - The compiler_gym.bin.datasets script. - The properties: CompilerEnv.available_datasets, and CompilerEnv.benchmarks. - The CompilerEnv.require_dataset(), CompilerEnv.require_datasets(), CompilerEnv.register_dataset(), and CompilerEnv.get_benchmark_validation_callback() methods. - Numerous other bug fixes and improvements. --- CHANGELOG.md | 54 ++++++++++++++++++++++++++++++++++++++++++++ CITATION.cff | 4 ++-- VERSION | 2 +- www/requirements.txt | 2 +- 4 files changed, 58 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fcfd70b6d..caa5e6d44 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,57 @@ +## Release 0.2.2 (2022-01-19) + +Amongst the highlights of this release are support for building with CMake and a +new compiler environment based on loop unrolling. Many thanks to @sogartar, +@mostafaelhoushi, @KyleHerndon, and @yqtianust for code contributions! + +- Added support for building CompilerGym from source on Linux using **CMake** + ([#498](https://github.com/facebookresearch/CompilerGym/pull/498), + [#478](https://github.com/facebookresearch/CompilerGym/pull/478)). The new + build system coexists with the bazel build and enables customization over the + CMake configuration used to build the LLVM environment. See + [INSTALL.md](https://github.com/facebookresearch/CompilerGym/blob/development/INSTALL.md#building-from-source-with-cmake) + for details. Credit: @sogartar, @KyleHerndon. +- Added an environment for loop optimizations in LLVM + ([#530](https://github.com/facebookresearch/CompilerGym/pull/530), + [#529](https://github.com/facebookresearch/CompilerGym/pull/529), + [#517](https://github.com/facebookresearch/CompilerGym/pull/517)). This new + example environment provides control over loop unrolling factors and + demonstrates how to build a standalone LLVM binary using the new CMake build + system. Credit: @mostafaelhoushi. +- Added a new `BenchmarkUri` class and API for parsing URIs + ([#525](https://github.com/facebookresearch/CompilerGym/pull/525)). This + enables benchmarks to have optional parameters that can be used by the backend + services to modify their behavior. +- **[llvm]** Enabled runtime reward to be calculated on systems where `/dev/shm` + does not permit executables + ([#510](https://github.com/facebookresearch/CompilerGym/pull/510)). +- **[llvm]** Added a new `benchmark://mibench-v1` dataset and deprecated + `benchmark://mibench-v0` + ([#511](https://github.com/facebookresearch/CompilerGym/pull/511)). If you are + using `mibench-v0`, please update to the new version. +- **[llvm]** Enabled all 20 of the cBench runtime datasets to be used by the + `benchmark://cbench-v1` dataset + ([#525](https://github.com/facebookresearch/CompilerGym/pull/525)). +- Made the `site_data_base` argument of the `Dataset` class constructor optional + ([#518](https://github.com/facebookresearch/CompilerGym/pull/518)). +- Added support for building CompilerGym from source on macOS Monterey + ([#494](https://github.com/facebookresearch/CompilerGym/issues/494)). +- Removed the legacy dataset scripts and APIs that were deprecated in v0.1.8. + Please use the [new dataset + API](https://compilergym.com/compiler_gym/datasets.html#datasets). The + following has been removed: + - The `compiler_gym.bin.datasets` script. + - The properties: `CompilerEnv.available_datasets`, and + `CompilerEnv.benchmarks`. 
+ - The `CompilerEnv.require_dataset()`, `CompilerEnv.require_datasets()`, + `CompilerEnv.register_dataset()`, and + `CompilerEnv.get_benchmark_validation_callback()` methods. +- Numerous other bug fixes and improvements. + +**Full Change Log**: +[v0.2.1...v0.2.2](https://github.com/facebookresearch/CompilerGym/compare/v0.2.1...v0.2.2) + + ## Release 0.2.1 (2021-11-17) Highlights of this release include: diff --git a/CITATION.cff b/CITATION.cff index 86152b1e1..91cc1efd8 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -31,8 +31,8 @@ authors: - family-names: "Leather" given-names: "Hugh" title: "CompilerGym" -version: 0.2.1 -date-released: 2021-11-17 +version: 0.2.2 +date-released: 2022-01-19 url: "https://github.com/facebookresearch/CompilerGym" preferred-citation: type: article diff --git a/VERSION b/VERSION index 0c62199f1..ee1372d33 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.2.1 +0.2.2 diff --git a/www/requirements.txt b/www/requirements.txt index b3dd04497..72e502b6c 100644 --- a/www/requirements.txt +++ b/www/requirements.txt @@ -1,3 +1,3 @@ -compiler_gym==0.2.1 +compiler_gym==0.2.2 Flask==2.0.1 Flask-Cors==3.0.10 From 9aba08dbbdc563dcd98fca25d1f835d77a18b44e Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 20 Jan 2022 14:42:28 +0000 Subject: [PATCH 141/142] [ci] Don't nume the build cache. --- .github/workflows/ci.yaml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 855faf3e2..ed3bb61d5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -32,9 +32,6 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies - - name: Nuke the cache - run: make distclean - - name: Build Python wheel run: make bdist_wheel bdist_wheel-linux-rename env: @@ -110,9 +107,6 @@ jobs: - name: Install build dependencies uses: ./.github/actions/install-build-dependencies - - name: Nuke the cache - run: make distclean - - name: Build Python wheel run: make bdist_wheel env: From c1fabd7cc29a2d79cba242687ecefe0a4df5b9aa Mon Sep 17 00:00:00 2001 From: Chris Cummins Date: Thu, 20 Jan 2022 15:10:03 +0000 Subject: [PATCH 142/142] [ci] Don't persist the cache. --- .github/workflows/ci.yaml | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ed3bb61d5..1d45aa652 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,15 +14,6 @@ jobs: build-linux: runs-on: ubuntu-latest steps: - - name: Persist the bazel cache - uses: actions/cache@v2 - with: - path: ~/.cache/bazel/_bazel_runner - key: bazel-${{ runner.os }}-${{ hashFiles('WORKSPACE') }}-${{ hashFiles('**/BUILD') }} - restore-keys: | - bazel-${{ runner.os }}-${{ hashFiles('WORKSPACE') }}- - bazel-${{ runner.os }}- - - uses: actions/checkout@v2 - uses: actions/setup-python@v2 @@ -89,15 +80,6 @@ jobs: build-macos: runs-on: macos-latest steps: - - name: Persist the bazel cache - uses: actions/cache@v2 - with: - path: /private/var/tmp/_bazel_runner - key: bazel-${{ runner.os }}-${{ hashFiles('WORKSPACE') }}-${{ hashFiles('**/BUILD') }} - restore-keys: | - bazel-${{ runner.os }}-${{ hashFiles('WORKSPACE') }}- - bazel-${{ runner.os }}- - - uses: actions/checkout@v2 - uses: actions/setup-python@v2