From 8c69fa13cab52c178d343348a9a14717173be520 Mon Sep 17 00:00:00 2001 From: erwincoumans Date: Thu, 17 Nov 2016 15:15:52 -0800 Subject: [PATCH] add pybullet getCameraImage, replacing renderImage, cleaner API: always pass in width, hight and viewMatrix, projectionMatrix, optionally lightDir added helper methods computeViewMatrix, computeViewMatrixFromYawPitchRoll, computeProjectionMatrix, computeProjectionMatrixFOV see Bullet/examples/pybullet/testrender.py + testrender_np.py for example use add missing base_link.stl for husky.urdf --- data/husky/meshes/base_link.stl | Bin 0 -> 60484 bytes examples/SharedMemory/PhysicsClientC_API.cpp | 377 +++++++++-------- examples/SharedMemory/PhysicsClientC_API.h | 24 +- .../PhysicsServerCommandProcessor.cpp | 4 + examples/SharedMemory/SharedMemoryCommands.h | 2 + .../TinyRendererVisualShapeConverter.cpp | 40 +- .../TinyRendererVisualShapeConverter.h | 3 +- examples/pybullet/pybullet.c | 397 ++++++++++++++++-- examples/pybullet/test.py | 2 +- examples/pybullet/testrender.py | 37 +- examples/pybullet/testrender_np.py | 39 +- 11 files changed, 664 insertions(+), 261 deletions(-) create mode 100644 data/husky/meshes/base_link.stl diff --git a/data/husky/meshes/base_link.stl b/data/husky/meshes/base_link.stl new file mode 100644 index 0000000000000000000000000000000000000000..db63bd20f234bd73aa0a8fd8f243b96480296e26 GIT binary patch literal 60484 zcmb6CWtbGl_dX6c!4~)6u(&U}3%fI2g zcXua&V8Q;Us%DzI`Q~?B&$BO}uRiClQ+29NRn6({x~mQ8)4Sh*&Ygn0^y}R#xN^_V zKXvNduSA)U@)d#$_aE3HxKt72|F6HDi43E}rMp`+DAsCLY>PNhq>)&ip@BzUmns^r zu7^$AAR-r45NqhZVZ59;+5B;Pt+)}LW32JrHnHRr%d^r)xaZPZbuDr}s^B36BLvfk zp7Yn6(#8+zVl0$=jo6(rU}H(K&g}Zw5xxxXhsatnsulCsiczho+!OIwl3`>%*q!z7 zIKuR7*9cj6spUrp+QGiqkTp7xJm+S^h*h-;NDv{t5b%gKrKLTEGBBbuA<@w4;1WNpM z51?%yN`1qqdFu1B|3KJU&OV=M)GM90ja%73iQiYshO+KTelm;&dX$(nO1Qr!Vq)~2l=3>z|9-T{7k4nFF1>e=6sf7@d`i2GA{q5s?@0$H z`>zFEr<6Vn+;<{S@*fC=)`$sy8~zqDs?}t9Eqn7u{X>&wNrCJCi;#6EE!hq=`ed8@ 
z2z6g0#`tZZ1QB#CdsBHtBaZ(!!q)P)&)<$NcQc4)i^o;C+#s5dt*v8wU4sM>^QLu~4j7Y>5T8HCaFied+h-Wl zx(_lgqfuhd$A&Q{$+zP0;YeO@ zY(asN0NOA#Le9JIKhK~ZESqhD81`L%>zi_U86xC$K#O(;_WEcTO-nBnhpY6m(oJr~ zP=biSjA|G&$wpARrDDtMt~@-$&kR?~88mClwYF-Mw9bq-(2p0Hcf7ioo`LK|JU+YCF~)wGeF>9c~-t{#Ic1~EA)tAoL{qs&8d1M^nUBU93_ZAFX)!bSuQ%wxG>p+a-~o9 zF4$di(kA=bS*!6GFUnnvib0eh0&DB^F4m-S>&+@Al9N63f??E;j<&LIoaPz%AvHrS ztf3RJ{&kF1WmfIDxY857C_x0))-d8v#PH=I4J!qo0K_{+FD zSiQgwtO>S>`dqh<<`^5H#xDAVMvU^PjBWIxigQkJ9^sF3+gGTCXv3g5=M?Ah{y4W0 zsP+F_t!R{}TKVI{uAyQ4HToL;N{zLi))*jXQEh`-DE%Y%b25U_Bf8~srp|YMR2WJS zK_k&ds~F@IgD!sz+BHE0w#hIk206uGxIYH%`XT~rYZwcc$5@*OMpfwOYGGonuqN0~ zhOxvLYSr2{#T8ZmdxjE3px^8m+~kizyM?HQXj*GWMe*-*jq)`vnVkg>juuUh$Fi)g z5}8kDMvLb^jb#7cOli(aK3s&;{o^e%&~N#hPloZ1l~%fc{i7UP)HPb%m_3yZ4NYR+ z?iDS5F7h+`rGG+mRP=D+qiZuqc0#;-9L0+bENoU@n8krWtrK&j#mqxfm?)Ww5a#CL z{9M~R-cIfJn^>!7 zhLNjzUtVcQLeqG^T43F;J&6`YvQK5nzklU^RXs+WqITSTa>Y$+E$_k)U0)P;9TYvrvMF!bL`k=G3Oqn-5USL;L>B()^mh zyjyIb1A$tR9Y>4xt4)(K&|V&#;IDJFZqEGkM=}a6CbbemP?#R zpac>9+mBN=R`-`9@k+`_YwnVr?)jH~WQahmkAuf58$V9YMTpCBt*o$F3%u=~CE_SS zgzF#O@>eB-330E>DKU2X?)W(!x;qf4^>ooFRjXPv3lrk>3roBhyUmrUaW4k~wX(e) zscKd2ZV^J1s29dg6ddhscView2_i~ej8!)JOv_J*sxNM{4xvlk?TUAHAW#b<*f6#y zufjWRJnx;qt^h{~BAj!{qM3vE;0}d7A@e3MM4%SVM268V=@4EcTb&&RCPq6DsO6kX z3RjBeSHfxwG=mt`qx7YjqNjxot~k*$e3FZGndvAtNTZbyT637 zQuh;@ST5FCSIa-J<$C3==c<-)!#q8>?mgg=BEEj9&YmNtOFsC_zNNMPn4A&XTi)_-kZ2>zfDFyq9A_EtDXlP}^~;RuN;~5~6M9M%MTW zk6r((n%;pxt%GgGDI2%rJ`!Th+u_!#>XYKPb>3m31QEHjXd6f7%8{6+&M+&@oSbeW zPZfp;)N17$t85I~n1v9Bd=;$4u0fu@orL&sP*;aBst51N+>G$kpMhLX>K-A`ESy7NQNqQ!#|4+Yo7`O_)}o z7S_r+`z-f$@J($JWvwfdkD&w+81shF_;83A*mw+USiQd`_X*WYG-P9F|A7%KcLVai z-ZRXvIY$f~JWKT6*3Ci*A~2c_V`ZiqR=SzXSnK)89SGFINTZ!)y*$>DkObC&Z!pXMTnfL|`-<#>F%B zt+y8zu?q`7I1s2M_YcyB-evS0QostjFjb`eGnAtQ5g5(1;tUm{=8%i5Q?14h1Zv6s zgS4UdB^yS!bJtFAOPpEXnWF>|82^UhdsUhh-`AG$8v`8()ROxLX+!Tzs(%~CPZq2# z?r$m1QGy6(ocB1(_<{LnC`4ea(5e;`D!Tr7%G2WYFtu_PWwB~a#d0yy3}Z{^ zBGD}OGgpbadaVs0P)kOaysy{VAI?QsJ&Vlse4?E`N)Ul@V;F^Lt!3LkyPEjd+CW*T 
zC1XY2*K6&wBT-hFaon3QMIM0?L|~-RFZvq8tgt!UJ|VkNrMv;;)lgiJk5bMp49UT2$UcKM|w}!scoSI5g04gKWT(>i!0vt9=S2-K3%CGYEX@H<)uhpxTJ8u{13fCLdTR^)xX4mPB9ux^!P zR*@Zg9Slei;f%r7v<~jLxQHcQtk=N+0<~nU$oqO7>`Lq43t9*79oOq%K!ONo3|5I0 zA}y_hfBM(K00OmStjPO%9qc}?ojXOMTcUt}9Slei;f%q4|CDA!X&tQYUk3vS)RNI9 z@9TAN)uXa}L`Wr3drnEUss$v7z_>Atm?V{XM$dWk@}Kz~2-Lz~-iDE6ZGZmi<`8kW z6=x_x1V)-+Y@j^c55->i@^#AQK%ka0C7Z7wHZL}5!$-J2I1s3XzZvLPYS}Jk_dY%N zxn)PhsbaC>-rbyRONCU9j1Hm=<8@L)^cdcdm-sb}g}=|RTr7$5J}U;ZiW%a)d72L4 zmz&3mBn`q?`_KQnu^pJ>py$DBs+yNt6^ZD)HHM=E5$F*byT6U|Enm~oTDt1Kz+Z}! zs7>LNDcaj8mFdi|tbAIQ{nTNwHLi0-M-~SWSX&xbj}0-TY*%Ym@ly;XSVO0cY!g0s z8zh(#aq>otg+4(W=tIM3v1x-ZSF+8X-l<|7-bDnqkJgVQG;W8^4ck3-vk$e<9{P=* z_7URO!`b2vjoa+QzC|rW(yfM*ja@H|-R|_%Vc1(ac6a45d>^Ou;?aeK1A$uj)PbJ9?nz{pxZQ)Fe|lJr-L@4+ zvJEtLQ47(ObuRMRmo$F^zUP;87G^dPAzL0O$uPDq7|b$cj`yzdkKLaJPG$Z5V>i$a zd^$j*{drZhe$xUGWoYc81Y3?Cp*&o>GNSFWFpKXlVM#A0p{FGKX}scd3TN(hV%>)9 z$d!TCk~@_gPcslL`zBCZ!|1mnt7!F0FKc}4dNpAAI;>GxbRyA z%?a@%n0ylV-68oD;Kyf~S)Gs~W~LWW4g_l58#9W{q9<`FvPi^V(^|3l3&uz6%Q}>| zZ9JMi8P<^XeO<_`Ib<|5_K#rhm_p{wX`|R@dRF)@gG5v>{g~a2$P(Y8Q%62@w9I<6AJ4)@?H&#AToOV8h zXDoR&F88Mhj#_B%-R80EF4eZ#nrwuqc_BStmOPm^YC}Vg5=1y_8iDPz74}X-#*S^p#%{fUyfEb>f}5| zh}I7(^Wt+4hNpX1z=1%mMscH+jVkOAA%aIGF*W1$2Qw?>XoHagckLWtOj z5#nx(P$9?lprE~z2VBnFa0tRqVy(axod26RXFr4rVW9ko5NgWMb?h*&#W zdv{!hLS*A?vwXhUsS|ql9g1-vP|G>m57x@!>OEzH=jJCT0=1lRUcjtoJ~|V;xrAn%hq>9>R7K2wJ4dr=bU#fW^`xP2N7ks? zUe~h)kq#T<$Bt&@>n3IWTRZI;M()oa*`N6r`|h`E!_Tc7!?x8AVyk=QH4%ZeHH^tQ zvhYmv?z;NktIN^)KANw>j9?Z|^Hq}H$Ff;et8P!_?2~XnDSox#s<_;d;T*Nl-fuL+ z$(9#bE)jn{OT$0SX(Vo@s>@M=2xqO%{`-opuQepXHKsj}UOAerejm(^Ju7IUJ*+Lg zFWM`HH46DFqUM9aj&`7Tu_VK&bh8v+bLZ!sFVi~Pfi-lt{B^BiJnD9MM6*d%7)lT! 
zXQD~eay`Q>OcliwAI#t?nyst@fm$K;$FZs8-RW873^#&ixS|(t#5bfF4kd_?Gm*5R zXSkncHRK(CyWn~CDZK-MTIUvxQ8p&6md^*LW?0Jpxu3xO^XYyTN)X|`Hc}z3|1IBu z7(6nGdAUwXFTWIHp#%|f?vl0AbK?IRJ{4yN7IW`D)xm*4t>z&y%ErI#Z;_2lNAp@O zYE|@foL;|f-?E|s|z@pHn$93_ZIeJWNVV!n~@Gfe1O&@7E8Z}>3E)?!(xh4E;= z*Qeg!4VpEAJs{IgPFv-YNFb-gg{EBBACk?MMH$3JM4=s*3A@a#gvm1OntOb!H= zo^ZuUevn1c$V*nN2ZGNfCLfP|1|qt?`P#%GRe%6Tt{sHfm$*)<$WEsg`3gW9Cr?R zC-$Cbq685*dJN-CNR;*W`sMDMuXWT0%0ev}oASPn+T|HTE#Is+?mLIesi+M|5b<#M z2!*KDU+!d#E+wpYO>4X6G;wp3AOc4Z0Y>I!#ggrC-`p7GK%ka$T+RIHwz;M42Jg7m z!yO3Js?%GKl0{GRQLT3R>hMfc7n}RP&%#lH2yg0fsvR?jr6)v^yZ!jHawB~cx-DcV zK?IIt%BuY`oFD3*#T+CKnutIxx!Oq|>UFT;FAaFY=LgKBb<$h7_Mw(@w6D#~c)3^W zS+>Xw4g_l9dPVP%d8e^Di=T*0Et*>>L4VI-wh?NPo7o=II73X~uMqktmnaer$> z#S?CEW1a(nTDZa)M!DiqR?=o$y$4=}Fq9y|86U~bDClh3mP)kOdysvk!g*?;Pf`w1SPJavrB#4l)B5mju=j#{@AW%z2 zn!K;~qF={gK!OMvE7FEuqw-S>_N5s7*&l-e1Zv4hllS#r^y?T5NDv`oMcUA7R7Z-z zJ`{uQn>q#q2-K31ChzOrt4T4Kiem7ue{>86B#4l)B5mk3>b*P4xz(#7)g-P(sE0=3Y#VN7WsS6(8#zWF;)f(Wc3?RV!7R8RXB z)tt2h>x+3NEXgqHrMxQI)ANt=h1xUBEvzUzhK-F4Vp%R`Hb0db&34yG$};7Vx!1a_ zQy!UyDM10BLquIW#!K}-MEGD)CGaS?n&|)r#;Q4o3;r&}n0K#eTfx$H@`CvA+rQ3EDs(8pc!lYG6X9&7QN* zEQfayf$gKGOLKOy--c!ln^vY5M;quXL>tDAdAnF<*R-%ggg^-*uziLRH+L6na%Jnz zR$tk`k%4G>hh^b;5wx)#PnRx|VIE&<)uCs1+5KZzKD*1P#_riwOU2E8{rJ1@O%oB) zMnDUnY#7FkxC3m<@g6+E@4u+B!9fp*|i2lCY~{jBdI8@eh_SjJF-EywoJ?~hMec*ccc z*3qsWv6RMZQhKgZmc}bSH*#icw_Y5?u3jE!oh*`up%$WL4+d&W&(-Ss#J#z_tUjGe z`%r>4#F7kSQRRNTqL>_UZm1r+0iU1^^r2yFJJg@gzTMK*%ly^hT|_wB@$#2ltYV`1 zJ0JMRRlo-J8v4yJp3UCH#L&5++rL7f7TTlupwa&C&HsfJBm_zjfgYjEcFPmy>(x=# zpG`me9@iKzh8JkUTHI*Q`kkC0y3B0Mwq5GL)^9v2Dr9NGR@S*q^KNYR1HSaHqO2*G zU;08e9%bt?HDT$S-!@;LoWNq|HD-Tb?!Zz-k7u%^lH=ObDG3+;8s!@LDaJ}NXR8Yl zs8!|hQFdxaV>Z6Bi);*?lU#l~XobGsABMF;MAv^Nh-k9We_Lm=QD{{!*ZP({t>3r( z!BB#TZvRYR-#2LHN5s{A9x83*sJAe*L!vFBAC>jy>}FT(R%4YH9wi*>E*3ILUDp^T zGpSc^2FdqitX|=1BM^aFHyR%m*-A8Foqyxh4pFkdLM)oLbq7ii@${6o(K4%C)iT%E z8gZ9wJYT;!9{rAJ^a#Bj{i08te6#<4f(+$Rf(WM#%)=o9wSFHmPPM#E!x~hpab-$X zkiObJy5x>F$+xiX1*vaKZFC(fFsp6)ldnRLi7<&6UiR>IMD*z~ 
zNR_)sSnf{GV%2q~x6y>yRXBG)m;3^sfBBFg!GHH@nR>8or*1ZvTUQHW(pOF%o$_8bz1BN1zYCFxoP`U6`& zv8W!eLqF6uv7f%QW8>BCTd*vw36}H)A+Jz^2((AL*V64=`REr**GI!V!(xw$fs>lE zZBK8SDRzz*Z$~y~fA?+A{(N>+T&&%k{TUKLEuXR0aFux;!_AXB-EwX}(z&^s!*M=G zv|-$Ck)Xo+BaysP>h=sJh@kPtWJz+hqn!S~*W)_TGv{j)?z+%=S(=@t)~gFS%`Ans zh!yjiv&U&xxC|rD<$SKA)Q;#rv%*n=h?UPLD)dR3?QCDElMqNtS>= z&RXSu@gZUv*_gMxj0Q7C>z<-EIJ`g?fAEZ$ak__1l9!GR6OrEg{X5<&h48! zR8okuJ3``7f(Z1XvmIDpYzNg!wdrHGx@5zBJdr1uYV}8x`L30@x3Jr*o3l$fSGmv& z`{o~I)226PH`d6NQ~$;CYsofjlQALtU;)j$MP|jQy5FPKS&mS7{~L-;`i^B*DbGx@ zao}lA7qvqn`V^b!MjL3Ga*?;+G9S>|E?awoW@qQv4frb6ix=Baf(UApLNCXe znosyRBn-9Co^$L<8#Gg^nc*ufRYSRw{a*yFY6?LTTMK*ezY+Upln%poVBK-N>2}Mv zy#HQUJRT9K&ot$j6ejjX>*hcE))g_w$sEdeE2;%UXzC?U~HN zauJOy5#=)aq_=XX$|u|(>T*P&7REn)yE{#EG~Yy(G&N%pF2-$? zQMbeRkUI(3!Xl+ClprEq@nbBcXCoGNP$J4?uWD^xn_@?XsrfnjWJ=CStU}U8tk#Rp z41Gwa+}xJHYvr^tf9);yvSm2`y<1X8 z4njA@f(U2JOU=!1$*8@a zuBropTCybR1#M%+l>An=Kla2AqS1m9MEqL*m_nrY$Z@6r^u8TEifHtq6M+&$IBle& z+I+XDMfu7!cF|YpQS<`6XZh^7uj}4utIqOuCiWB}q&+znbj$y&R+Lq&HQ1{6xHdxx zBG5LCcB++Z%^MnD*mAUoZ8D4%e#8VqpyX@Bms<6vFjOi=wP@Cyq07c@uWCJ2M~tbc}U6*N_d`W18dX z+IrrQB|2$RpZ{2Ij&Jm;#H?nPXa@qdqUs%ErP?%RwSQxT_`7^AUz>3*v(dyD2LiQ{ zCpgafhcsi&UP#2NRE2!QuW)nd2s(8UB#5ZE@)#Smq8SS-DEFd8Clv4jD-?J;0}=9t!-0<|8i{xJNYSth4~BALbPFo zG)TaIs6CMtJ>QVyY>D&L?#jp5&$Sz~Mg4D4t&X?K;oGqLd-HLh7zYBi68Mg@3zwU) zA|oXtZ_8tB;fNkQ*~soLtO?pfk5Cq8*}trQg?2ny$$woaL4?yrj%kJXx=EGzq0^({ z5rJAbdgzOhb!WxlW!pw~YyFRf z#Ga8y<8i#A7NQL!dT|F+lpW+MI6ub9$d9w3H1{3%nU2x!Y{%(mG5qPOc=zUAXPy-j{t>TyT_fnLz_uFNx}+nQw?*2DmW!?y9FN@H*Libq_v-^FljIhN}v zNf~)v`WyGNk1-r2h(LSP=cQ^`^_rwIC;Ttis!;I6g=d@Bu-f+o-W$TN~LYWV&`lj#O{b1R;SSW?z3}( zEtDW)SFNq=*yN_n9G`)*2H&@@Vet{q+=IFXTPQ(9^|V`APqP^tRxg|o1DDmX#pV$LXSFcv zh+28_Qr8j_+)Qhs1QBSDYL%=tKR>O2_nW9h9On{@-~&aD zv2V{bV|S}npxGz1vdJH04L3)9F2+%Uh_cg;vEHwnGLa~h5Wh4udH7%D&C}0|ag-op zeCp$DQoknbLh{mtXz_1z_Q$inyyc-7fpy0kqHQ{tv-2eOcXU_IyR32`Pz(FXFp4*P z&t8lv$Tzpo=;%R2;OH@o@{Mn>J))jDDjpayw7+x`!@cQzrN4C_w~{1$xWk#0*iTU?(do 
zcnd=WYB@*AijOs|OarsI{+*o5s#kognne#E{?>(B&UVb&UV!JVG{!w9Qxywa8`z`d zNmt5rBHnb%%@Zye;hV6shGS+x1lpz(Pfi!MuFRh0TQ!bzlmvE8Ly|_ASVO~@+q{64 z<$}k1u2mI|5=5X!XwQ(;y)}@n#hM@jZ5zhBSB!Ufw%Rphcu|fm$8xbG+Fxa@%0DK_ z=^iyHzy13|`Ueqc&oG*g9?pw*%jXO3deq#IYYQuos4Sgv|?k5sr~&7$2TL-!ZXLJ3eRPzXJ01o1Oc1vxc1QBSDzOH#&s=}q#{j9Y2mouy{J_{+5b&EJYqA|PHqZiF^7cbq3 z%e}ar)u6&V2LiRQeTI>5Y)03`oke_;55!oV=u}v_16Y>fLLQhA@3%6d%uq1Sd#$aw+szj&RT7#mC=mD=?tNiX|0{t(}>)Yoe0#z=U;}===;uA{|%#= zck@nx5=1zklI^|N#oF}robbLmFD7O1Zv?^IeH$ID+BLCJ@{j?1{@`bkR$XY6r#Of$qhn4Blpq2##dK0(<&P|KL0`5iPcxaVwaY>+d^?4n z4^}I04Q2*=cCMtgxyUh5eQ+Z-XYU>JV}|3R^Oz>AP?LuyzX+&Y3%h zhsZ3aEkVS3$6GH6=DDn%&y$;{-HR%L5sp4VU$t#FQT$N88GB#6 z6Xp6QP~Jz*>+2{}gb37Xm1&ar--U)uT$69soKKV9`n}A3mh;z=9JR3JPQ;Pcw?)3{ z;k2{GG6XwRcabZ&t@D4g_j# z?>k<1K?LpL)LN@^$kV$OZshswA0>ru}Z3S!LyW8B4o755rnvJDe?#r?Kn$V}AL*G$njQHy$~yVeF)`*!L-t{x zX*NB6T#Q`NgpK@>5)yR(OYXH)w)y7lqeT^PW>Wi)ZEMy1 z?uK}`s)xY01@O(Y;Ecz_sF3FDR)3in9b3M?*&;z)m0k2)JZay76-nNjty}q~GL12_n#k^m{NdkZ?xIHBvC5Uj?HvTYU_&*J2`N|b@+Q4?8 z7Yrkee$k(u_~Xv(Ef#wm5@?6hSF-K?jBqHHi*3T%()S-d|8Ua*bYQf zCN;DnzgHsIvwcDa3$q6Ji=OgKLgp;jshnl;*^T)6?dd!TrloTrP;17_En;ko#%$rI zXvzUxcv_7we0<)$y?kB=0<|c^D!$}i7kO&%w?&?~m%q*}bFa1p5#`cu6(cFXd**wY z-)%av2LCtYzWZrtu!RytP^MUX$ss4~TZ2cIyyXsWu5!q>1Q9zIZ52b)(>#Av=BZOp zslih=Fw9GZgDsRGf->IXOD;U9Lk+&YUkWp8Qv}u4;bRQGsJ@qZXp+`T6VdVrH6dd~)TLjx`EL40@fO#O-`5_FXT?V>4uO z_zK(PM1(fFA`-u5{9~SP96g8#tRcP0u_LeLOO%rZZLVaY7ZHK|L?SUfs6l3L0 zFww*jj$WWVtQzeyb1NfI?_%T?N)Uk~pOPP$|Hsxm?QBiYxt5^>5!nBRu_^B~c5zu3 zYss_K3?+zg_D9!(wRqUvX|5?Pa#@I&xDnrDNd_=}9$85RsZj zqRg;eE;W@#;y*cySoJ!V_5FF7Ti^SRsadpM%1{%%>qOMvQNU`K^>BFY;3^hM5P`Od z8K_=vzQ>&9OEEulnT9u;&5$F+m;tm;M)Ixh$Tf-RSt!U*b ze8SaoSYd|^YzLz0eTILct>=5k_-+PWb&M;F56m-C%OA&BdovyK4iDe%K?x$9Im@hc zzFmzKD_so|g}D)dT9{ijjNbG#v1X_7Y80d7zYsF3Y8dIN_OljJKJj;*%TQSZ`JO($ ze@A%@bzgpaKw~$$qg9mhiMcZzWGF!dTQQ@rS)VdA?YEceI)k=Q_@#@qP->{rb#gG}193_aLyq~h6-}tR@W{LSe zXH(YeD+Fpe-|Vm3HXU~l4i^upRwzLPW$}~^{pRx8(0qK>j#K8tfmIv`)WUCCXf>D^ 
z#b@<;>05gEo{17fP!>Eb52wF^I838G|H*+YC1uZ1f(Xh1Duj$1!$?69HG#5&jdX_ImLLLSg<^12l=WS% zQD%)0#o@+!9gh{+G>01t>OE3?&>U{85#9e&ak$-hGR!Jcccr(F=5S;0W=|E9G>02o zTjXcq=Wt`^=~{ER-F+No{WhquZ_Xmc;btS~RA5_+&dg-%4^0tqio>nW=HXW9mUrCI zt@ld~H@jAJhAG>=dxR*UIo#|T(wiKWhFR^~N4OG~QygwANtKb}56$6bBd{cSUSD5p z-Oz--&;Ke8H@oiFpG32Y*{6!P#JfbNDYGZk4$a~AGw)*Uf4?yN8_nTn+n`gSSu^ra z7UFP2G@ahsbAVNA{#vt$=5VvM=xkIQ?W`5c7G*to*~!qya;*2zJMLN66o;EFK?I#cuWaaZ=qH_x;B5}S@q}m&HyeRkbVj^F=riKS zHT2EaGqr_j99-%tSXuEvWwY& zl^kxi7M&4pqp^LoI(vp%L&n{TU$k6txUtN$!$h#=aI?!Ljv2eQio?yC-MWqEMoY!v zW=rTaZoB1l`nO#YJw@)_)haT(y65y+#o=a`i!~(17;JgXm7?rV-+6*Ghnp=y1o1qv zf5{7)!|i+Td6B+yjN8xQW+PCG&eLZ($h$U&8zT-kub;!smLP)ID%5?O!;KS%o7>Of zW=jx3Y*(sQHisK04mXdV!_7vZ7I9Cg`!Z-H)P2n>v)@;N|LPj#8lXAcYzZQWOHA3&9B!lQ7w1jxb@Fu89BwuOwTOE{ z-PgP_=Q2imci&0sEl%$OkvJ`;fT%n#0XTpccl8;&9s`zpfeG=Zxf6v;B@XuuY1?O`Y7*BIRVs z;ieFB#NkLpwBm46hypRk%A*bJb0>m0+!R9Q8PEo5A)4Nr%v@Sjjf~_4TBnyBZnodC zR?gYyREZ8|jh0b7$KkvTC5XV7r&G)ihp;3~#)t^bUBd>KXsGrd7~66;An)rv!{zF8 zSk*zZ*u!m#!_Af;!WrjRGS%SwXD$<$G%d?9epA$)t~lIm2_ocfKqB-mV@jST{9WZ_{EzL5yT+Cv!WrkY&(!DNUtA;x zFH+n!HUhQe{z2N%yNt1e3-FESr?P)Ehnp=y1V%HR**{FM_{fXmK~2TsW+PBb?jNKL zy)TI$-OigM!7X-8bJy4sL^x-LU9U=u76;mj+?u<_Mxd75KS&#TUo!FAFw0k{Hk+Xn;)P2q2mZVN!D^a5bEO)TtaI+vn7ad#^8n;!+7~UgS|g#4mTTtS~6Co4ZYS5*wc#lEp52BY7RGB zf(T~}((AFj^6Fcjv6{opMxd6A6=_4Swb30tNt_IBLmQk@xjF*rdiB7EbHn1^+r2kRZYtgT!UZiOW>@ zxlHY{P|F!nhiM&r6Ow>;_OF8h8>l6tOWxP(-~w6)8_+r!>t6>05=6*Yk@sybQ%+o_ z+|OldOAz6V!E9&i)A{X-#N!2e9SoF(S~6DTeZ3BzpmlHrt%Gs?bub`71jY?z2WcIQ z8*)*U)*NnjS*RsrMV6%3!A7Inc^`bf$%<}JT&A`J5zZJa@Vd0vu&=Es<6j2@WuX@S zj;1&JAC|T57puh9%vBt2%7$Da5#fxeyh$rtcXpijmCzh+HUhQqmp6T{wzj`D?8Xqb zx2586vwyiG!Wn}zC=b`7*b6g9SHlj5A+C9B$$A)FD5Io1N3cY@IW!MjUSHY^o~{YfBC{ z8-ZF*8^qzJPJ#Molvy4nm>0v^Dh@Z*4nK#RYKP38AsX$`S;bRV`Ko=>-g-eiSeT6< z_96BrdoJ;#=D}j69^???6c5&z4{^S?-nQ0Xsoy#f#EE2U;co`YY*!iLE7h`xbtO)5 zxUnqva*EBwgN0g%rmq?+e>Us1YGAdgnNG5d*}vitLF`I)NiE&PN4C1EZ}z9c@m)2Co1Oi@mZL`uquq*eCULm&*_y-6&Z-b=6YEKt 
zB0q;4WLUzVmKF0l4CXC6C=NGUf(YVvvTIAM(+>^Swrp44u7u)nvn5z}XBH>#m=Ep( zNvFj7IoxcYpbhjPaa3*EU=~ZV+0{#PxY^!Cg!7m1xFj)Fo1t^JAJ!ahc5AT@&~J*> z%Bq(*d-xDywZgtdEkrBUH!Eb>@a++r!_DqB^bgjM&KDiJ%4gBoO`$p5?5w=B(WUiB z`puy@+}KEZ>M&D{-KZeL95}U?)uVvoaI+Dzh5;>nPC?%^tWMz;{`xC|C#s*a! zsm3m9A(~FPDf!t=`A#H6*hLAp9G_Frd&q6eusKV^c+NeF!;J;gQ<7bp!_9t9;mo~mtJhE* zyfTolyRA6fY%SRb0krI!KyB#^vQ=5x!CAd{l~Ibr&6Z#diNnnq6-3jL0~hJI5VZdMN(?H4tN zn_Vl^LVLtSLpIDGyKf(%IoxdjAj0V@Ug(Xl;5RG1bv1{Z_=ldveLkc(+{EwnB*xF- zCeG2d=5V|JJhP}0Qp6XeIoxano$G6B(FvGBK8e#DZi%L~68V;mk3Xe3+(fRZhQiO` zCbI4yAzYfnO=PBLg_^@{KrQ8X3Ql8ejplF@cc~qk!)@rf5Nk%7YvFl5D-JhX3+>T)&muF`R&%(u zxj<*C2PJd&)*NoO1QE_!1)Y8%o-J?V{y}rN*}lSB(OJxPZHYtrXhAD^!-THEn#0Ym z71o5#c!qWmhnqzlZV|&Zhnp=y1f4c5a!@<;Y13y)4zoN3N=K~I9BwuOwdjmzae#c) zRdKkLsao60zhRgwgXVCvC5WIi;*|}3Mtu7Rm91^v55}d?9BwuOwdm|HT2^AU%e z%g^CvOAtY4#48*6jQC*_BiQv8pjK6ak#nt9B#G*5p+hpvSD+$ zapG|E_&MBc1ZvTV?+T$$e9zu|F!$B>dWL8YHyeRkbRM?4Z*#bD;&5~MIoxasBIv|- zWka9%9=3_`N;NimDrycl8-ZFFkBY;M6Nj72&*5fADkA7ycV$C!xLt0R&zzq+q1(^l zW+PC`Ioi+G%HkP5WrNGl;btRHi%!#pIA6u8`7WOcF8@YzxY-EQa?T|qlI`-nF}9eE zHHVv6(okX(*iH8}hug5?9Yw*Iy`F6g6o;E_gHHbz-RmY5{WOQ0ZO<@%{rpjED74s&*Box* zZv7y!R&%)72x6$POQLU=a%8a*&AaRQQFFN2*6EyW(bWhRahk(T9H3fh4!5)eN?E(< zE6_BW!_C&BGs10q#1Wxdt~uOFJxgO1o7;$WNv$~CYzZQqwW|02l{h|QNPITpa2vjI zw7C2}SRBzDZnix-Ti>oNjjNt9;#AmQ@$EH-n=L`_Vo8Qk?Pe(}+vph;{?;6BcDY!0 zI$IxFP8@F5_}k&}Z6_%XH(P=TITH!$bDP7>A`UlCan0doBT$P@;}^TgyEccLRqW!8 zhb_>$h|aK7yuP2nJ4t__iYY0P8@FW zehxQVf(YWbQ1>;5TWnW47^qGncOA{)W=jx33^g$O%w98qwaJ>(>(Lx;HUhPX5yPI9 z=v&LP$Gz=mm5R@zIoxasBIrDQn0+pUE_Nr3+#5GYbGUs$fELE1d}1g6V)e-U`8nLwwV%UHT{}43)Hj60eWfIR4mTTtC20%NhP)!4FuN>bGE>)n4mWizZGS-! 
zhue14BE~7D<>zqw55)AN*Fw=cB8cHiAvA~E$%!qjpmZZW-8F}s=ujrHngI}jwkdNL z-OyqwK6vkH4mTTtS~AXMNjlEcm6{{+JecmOs5#th2_n!VbedKArQ$`wJnoU2!_7vZ zmYf-+4Lvht9QcHFUcSNi_VW*ttI?JqLdLm7=r~WlyeL1pc$VqsaI+j9O{K=5XWx+&<)9uQ}Xo2_kUx&=;T~QT+D$<=&&3!_7vZ zmW*0yLr3l2jG>%3+`RiVhnp=y1o1qn`%v)o8?n&xn`C5XUrOy{2eGThqIJ&SLK=5VtSs3q4y>0P}JCi|sGR0D zChvQZP;57+gp3twL$6UiC1-U^0IU1`wzvV^H4LyVrzIIInd2o*AS$+-wOVoO{s;5qbH? z_$}f$&EaMvPz!&nD-JhC9B$0d;ieGsw>~1Ad(nuS---%d+}4F+io?xDpqBIZ;HCCi z%~DA>vQ*_1hntN+EzAcf4mXQ9+=QRQ&Ccl|!ufm9b0)ntBhx7FIL+Z^BTx&oUy8%c zA`Umx&*5fg?ht{|Wf+s%4zUW{%f_Z(QygwK0<~n+${y6eIf=tfadU*XKPlPL>|6$B zxt!UkcDwRM(24KX=dfXt!_CeDVt&t=ha(QRIQe|=_D{_vhntN+EwoKvhP98A?+#i6 zOGY|sg$S&n;&4+>`}`bkcK!-;R!$q~QeI^}>G?+$&EaNeBZz%S?2HW(lEdv|snOzP zound*;=!8JBA$iJBBrLZio?xDpcZCZh%4pFelh)44=YQ84Uz{-T&*1>x@rzLJHvu# z!}xe1ft5K+11qWKaI=3eVYyfmJq0M-nwLx(@6H>dc(BCVt-+$J=E1Vtf%zHYGCk6V zUwKn3zP09XvvV2Pa?G|E#>^j23$eB%FHan9_?wJan?y;<`$!HqXTD_OzMf(oJ-b_T z{+>gE2;z3KYfERCUcb!RmF~)G=h!1T+-wQf-DzWP#eUXwF*!a?bGX^wMH|lFoKG6} zvL@ZzY-V;&(f?`whi}$9q`)*DDS;F_@m+bmC6bGX?)K^vG)CT_q( z{jEuRT6$V&4maDoh;X)}&g@;H^1j^_&ilu1!0*^==r?*YN~8Vs(7D?-euY3Sv`1gQ z&}g@A{%?B$|7Z{R2N6zRJxF-Mw_GK}6lfyk>06zNk?qpV#$pz6xGg27 zw&_`#h>?oJt!MfJW`|c%ywCBMzT$Mw)x%6pL^9%Vdqb>m+vhbFFExjokR@pjw>Qa0 zdx*o07hkj0g$QCqu(jwENs(x0V=+l_xamBO&ii1k5J9JasoH7|w*Vr=kj0@W`3j-z zX~dN8FK?5%rElvk45P2&*>bAyWzFHH%Jp-&scX&Q)}T~SxMWx9H#&{vaI+EgjjFBn zu<=n=u|yM5OL4d*JJDCZS;JTF@a;eeBIxT=h1MKyHKI4i6NeigP-Ia&`W?~e5yR;F zOK-)=dRB9|sj_4bBEo4S;H!^8MayAXs6}76s**H^+u+Tm6(?&2vxMYu6TcR0BK|F@ zINXHnXFrFVx~{1>-0t?=rPyevFU#OS&=;VpTtA1Ky4GJWpD13!MI3H?iRN&#ZQ#hD zlmBdcboxb#7OvmOSEV(Fn=L^E_CK8pyfc$(hqp;7$$e(qpz{G0TGpM;@l)3h4ma-S zaAUHyehxQvE!!c#qLueGhg+w|C&Of)rz?I^GP2ncY&qJd?BJj6)VCS2n#0XTpq8^% z8B1*1F2Cl8Y@F564tXY^tcmm}j!?tM+Pk_M?RP@+J4QPq=zDI3mLva5CRJYHD8Z3I z?NbN`hnvdQ(pi(L9exfsb?xVHQ`dTC2#iEOhnp%(j$JI**@FSZzxx+R4mZ0jM9??N zsw4-8o2nHe=%h-8mbG$@tAGvpzx>`=deqP1W|xaStM%nv^6Ro)p=ce^@(XEMl7qvI zUu?Nba=6*$qL%zxS|S`AZrsn|rVw%@Vok6lT`M_CRQ(-(~A%BOUXv}`(<2c1Iv{sxyq9K9BzNt 
ziXaX*tB~e!QxaKsY?C~lR3h{#1rw@OQiv6M6o;EFK?M3x_lNuiDX-9r@_Ytau55=q z4?IZmV){5t-LC7gIHdo6?eA_+$Q2v3q)RaT7V*tQsk5mXk3d z%kp!$F?sFha8uX%gpJj+OSxu{jUnq5hnqt9Io#B>w1KuM_gcTDul%<$R$|TJCJv=- zE_M=!+q1SuMZGc2#VO6#9B%4b_S*luKjhiL0so-ioTEhdVXPJZCe2oqAVNm4 zEJ<^?Sqb987d?-$s%j25WkZfQ)WQ);=Ve6x9w*yjPJiKu4@AgQN~8_V;TFGVbA{bc zi2Zcjem9ndTFxGPTyRSSakyEFiNg&gh>)jkNE>o2ILEF$7ez|^9B!&jehxQvUyt2@ zuPX7ilEclG(D@ZMg4(369UN{dK2Qto$=Oxf)?-)NkQo;F|5sYdo}a_*D?}g$sVrLy zd+`6-kcg858%Pc}g^(@8y3?s4%7z?ovMz=p-wXeBp5kz`5vb)HiTiG}ij(b_lH=WW zl;A9ZwzUoUuUw~acK&*vm+K(z^d%-%Ap(-G5NdtYh-Lnhhu`~89+rL&tRDe1){x$( zzSP?V|U*xo;c+zi9ju48MEgY#S_Pg zC(iBXiL)h$z~5AK5?iXuR-`9eL`C9>!{1Rj`y{AyO#HmGnP{MR;%aAgSz+^@czY60 z93oIlo+&)bYAQ5O+~a>EtOHqJd6N=P93oJQn3_brsZE6DiQAf_g0uElC(gEk2((A%Yb*0fL)>QMv*)|Y?o;Hlgqj`Al z*%2(kX~h$#5b|VRY?C~8YuC;%vWTf1nTP z9mwBnSp6>^^pw#&aS9#)+&4w@#Mu%=@Ri5J>J`m|=7}>_7BG8{;J*K_t@DnqqS)j3 zA|fKvLRUaaLQxbIkR&%dBPB+VBDpDOl<-0k@F+zg$RmUnjx<5R0|;p7(rlCn_wEc* z1jGcTqo{PAsDSW5n(}@#b2Ira=jETAbAP|@PTASHd%v?gpXJ1f!w4$W9>!C@NE3z= z7oF40J<#H5FF)7pRa_iF1>R*k&Lc0kmu-HTDLyouIP14z7OLsprOu^f&NGw6r6b)r z&Xzb|ZEja6c6ADh1)38#C%uk)#e2!SK%6*KU=~rkMERpBqN(P@{q|y^h#fmncE}z; z8+4ca20%6Th~v~-azi{3n<<+&zY)L)D*R=XdAEryJGreq-E6`pRA3gKS;sl|zyX%F zy+42R-TM^D$o3WX5B7rNe0r`5-)DG&y`E&Z3?d}(wsQk?`j8Kb2MD6>2wKrB-EYP8^?KX@VPLIB_Kr+T*vl45Go=8fs1)uQ@a|;cOqxi7SbK z0((KtJLVoy{l}W99fFI?@YiZVMld1Rc6TVkiL))oIRO5)GwpATmlPMnRP zB9ZnHUhdL-)lKaqyl`<6zh3i7I6#~@jG#j8NPLxMAH#_&9G1uzjr=)0$#CLq1Qqy; zjCRfbs59@Kz9w*HzvjeQ?-jGuUbFM-q=*Z87ur`I?#NgCx;W4zSWjXE6^%$SmlQWT zseNn16@vWx>7NC1h%AQ@RA3G1snT*8vR>zyga?T%hqDh}^V^9F^6dUJaYK{kMzwQg zeCs%`29f13f(o@)@uI7#!jR<#b$8{h>apHILzc42TP+CwOKQ?wc1SRTr%<~!%8@Q=hT$;ZT# z6XInLBFkYG*4?j|wX2~#zPO&aWXN)M?4pABYWBKuoR*KSaz|as;ZHxJ$#Qm-;Mv4} zqZQr*GugkR`|wFIniFSdYE<~oIdS6XS+xg3w+tsvU+24t6Ng#;T8-ZjBNN-^g=Y~b z4(ncg_Y&d6K|5~se@MQPKh~XSIB_)EvWd67uPMKG&in2P!-=zz;=4)3iNhMw zESes}GY^Ckiii`35mfm5O3iTUm$v$kH9>{HR%Nd;Iri|1P=DgYVaxHjcqDpnyLJcJ z@cz2t!G;s3+Y#Z!*>i*C5$SKjXnFnhMsCtT&508|?hcAY#EHX^k82^cMo%6?e&d>f 
z!P7J+&MJr>CiV%@seZwa$#hhWw>I#vAspU$)s^udYc zH?<9=$k_ulC(cGtf#uQOR8dcSxjSPh?`t@5cI@Ix4DDgeN{tJOvzimP`G-?+r#{N$ z55{RuoK;{JanNl0=(&uE)j}sXHgl&KPMq%D2q%uI@4*Ns&h`TB(me5a+}KWM!nMw5 zPMnP>I$N>A-}1fPKHj+K`jmtSC(hQlSP|jGL9NzwtQHDHPj_czYEGPuphE4%tjbfh zRpU?nncqK+`@(SIY~&7CO{>l7Qctz7^S6I_mCk5RoK>JFhmIaO=ErNx zs+DJY3k@gEDlp5xV)9AtrQXPs6WoS|6Q>nw?FJQoSMAHW<-G3~E%W9WPMlR>7Ov1a zPToUR`RYHnyMNuQsc%}L)`U=j{yvR%qKpmyr4p-dIB`~iS-8gLI2~W=%j`9uY3_)qd9R_fmyiP>o|ow zV|iZQ7#34UbKnwMH&_8z0>>7v=j8qW0lxt z!-=yB%u?f6T_1)McaE-)f}&x98%~^!ph8_MN@3PHYtl6spuN@8vNb2pM($K7ht+W6 z$gS1iPZ77475lB|?~+@qKR2r~GcUi)E3SNpr5aA0ji3VEXd)1|`&+b{KSazioH(n% zEPMinxB!W5c!O9+>?KazS|SY(&Q2B)P8^$^mm(sZI9B3vUdoE&H!=@;%W^d*&PIsA zWEJ>?2i0nBfd6ykKJOZF;xK{=@?lwtTRX8J!1LSO&lR$Pi_^rt%@il@ z{ym-e@EsN0ABhu(WndPOcI>JwP0~+2Et|!@mC%Yfao8u=SHxOl@3l!2w=^ftBi|?I z;ki$}PTmqKFe{__6t?wncOf+=?nI?1{_B%x#qD{T6K9WwE%z&OUpmRwCdSD{h7)HM z*bY?FTS9L{vol+>Wy9{zFwDYQVVlT#EFWYS>JOD~WsmVGFw5UpGs@f}FLkWW58nNd z#FpcT!Ct4m?%OuxFPsXpw!1Yajurd6*jj%(h!e+&6X!-aaaMs@#1GTooQ4y3D6tVA z_eZbrH--~uBdDNzIQ_ZtkmvSqELq%oH`8$9bQ#KH#pBXFoK_eoHM}5N_N-YYaKv!p zYy?|Qexf}R4Xa5F`cp-K!eN$IderwaTx+EOI&ZkqjqJ-(*DY zGO)hb4u8utXzZTfF}mOsja@tkcw8)xyzmR-+}sr-cxmFqp@Mu-T@&TpqT1A{_}&L2 zsPOmIYN}O->0MioGj6RtF1Ew3IN2$QPaQKPv|&ws-+YC=fal+FuF@LI-~Vh1tuvfB zJ4d0y-&ZSptn^Ozswirb_eu;Ww&7@Zag#W4=|x3s$+8qN#&F_Tv8(DhoulS?2fn4Z z>J2B(Mo>XSEjFV{P#8|!!bv^(*4Sfgj2tNNi3EJQtxmN<_T^*g;*{pZU2FD+SLcn) zaF>a>5+kS}CzTcZrH-?C>rY;j`s2Mmh7)HasPNaSMr>8DX`dIpUL&;kY899@WKkiD zT0?W9=EOC9s=8OJ?~9(|#9;&#wpxYIB|0m3)#2rf}*wI#KGJi9CLv^(K$=}Sv7;>yo9qFs%ekk@LJY*{UF(( zKy%{kvGCkruRBgc;(k`^YJb`Ot7-v^U@u^Kbni2{36GxCR`wgLIdQfORQOxIA-y6m zwIf9?*;hw#;;aI*hz4eR#BqAPcSU@)uoq9*ra5tT#Nj#jxBU38)nqXKdHzY_=K+kM z0{f5{pw)9^`5F(f_bMGyoH$zss`0v^x9Cy_xid~K-+0dq&55&D6uFLiMpc|RY?GNw zURd{aNO9tP5m1oJY1Nt&$NQ~s;np;qI30;_;%xu;6|q-x`I+Qd;g&lzC(a%Vd(>C9 zBzf$4$6HKfIgFr!$a1X2JwKe5B$o~= z@7>PQWH}o_1+li+-kQl`oF>byKkD)=J)iR)C$ik$s3OMtrHDpEmRovnAdz?QyxPv!veJsb>0JBg{?By$Ov$V?nWinsx 
z`&Pp-hP_Vn)y8XV&(DozrE1lEeT8lEEAqM@WnYtf*Mi7$7(oTr&~g4MXvim2s4Iql zuE}z?7g2%di5$p5eY~Q(b9ny$>4@UQVPE+b@dYtFvGz9`dl*ifJr*jkY{y|;ACl|b z_ua~d6KBi7TKQYvzi+g>)}>Fv?N%K)Mo@u0V!l6$XHm=|(wi;M-;O}|Y3@AwU1%P0 z;;;;C2de2=ga796l)^lBb2-h4v!et@J35}46UWb0-<7a9O>^R`1BnX1XIblJF2A^R zdFWDU&55%L%tCLGd{Oeje;Dv~Nc90m{-45d;>b_rUC2+2aN@LUpq{|SXZ*<5(4VXK z0%-4qoZfsj`H5eWUxE=-_@C12)2#+~$WOfgjON7IV__EgeoXl#h7-4sh-~Nn9Va53 zIIF-c@__W`=1q&f`xd$*iTaj3Q*+{M1Qq1_=`zey#e4QI^jN)A(fyETS(M?#X@z==1{LJ{=~|g5u=l@s+S~c$o8bs2&MGhqpSdJHPLq(AbaA`4 zW~1iBSp{a{8xgcBL;mic=eN7%3@1)2)LR>tH3NZ xDwRKHyzu=r+NXavOw=%(I2%C)xr6$1b=}aNK3!1>m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES; } +void b3RequestCameraImageSetLightDirection(b3SharedMemoryCommandHandle commandHandle, const float lightDirection[3]) +{ + struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle; + b3Assert(command); + b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA); + for (int i = 0; i<3; i++) + { + command->m_requestPixelDataArguments.m_lightDirection[i] = lightDirection[i]; + } + command->m_updateFlags |= REQUEST_PIXEL_ARGS_SET_LIGHT_DIRECTION; +} + + +void b3ComputeViewMatrixFromPositions(const float cameraPosition[3], const float cameraTargetPosition[3], const float cameraUp[3], float viewMatrix[16]) +{ + b3Vector3 eye = b3MakeVector3(cameraPosition[0], cameraPosition[1], cameraPosition[2]); + b3Vector3 center = b3MakeVector3(cameraTargetPosition[0], cameraTargetPosition[1], cameraTargetPosition[2]); + b3Vector3 up = b3MakeVector3(cameraUp[0], cameraUp[1], cameraUp[2]); + b3Vector3 f = (center - eye).normalized(); + b3Vector3 u = up.normalized(); + b3Vector3 s = (f.cross(u)).normalized(); + u = s.cross(f); + + viewMatrix[0 * 4 + 0] = s.x; + viewMatrix[1 * 4 + 0] = s.y; + viewMatrix[2 * 4 + 0] = s.z; + + viewMatrix[0 * 4 + 1] = u.x; + viewMatrix[1 * 4 + 1] = u.y; + viewMatrix[2 * 4 + 1] = u.z; + + viewMatrix[0 * 4 + 2] = -f.x; + viewMatrix[1 * 4 + 2] = -f.y; + viewMatrix[2 * 4 + 2] = -f.z; + + 
viewMatrix[0 * 4 + 3] = 0.f; + viewMatrix[1 * 4 + 3] = 0.f; + viewMatrix[2 * 4 + 3] = 0.f; + + viewMatrix[3 * 4 + 0] = -s.dot(eye); + viewMatrix[3 * 4 + 1] = -u.dot(eye); + viewMatrix[3 * 4 + 2] = f.dot(eye); + viewMatrix[3 * 4 + 3] = 1.f; +} + + +void b3ComputeViewMatrixFromYawPitchRoll(const float cameraTargetPosition[3], float distance, float yaw, float pitch, float roll, int upAxis, float viewMatrix[16]) +{ + b3Vector3 camUpVector; + b3Vector3 camForward; + b3Vector3 camPos; + b3Vector3 camTargetPos = b3MakeVector3(cameraTargetPosition[0], cameraTargetPosition[1], cameraTargetPosition[2]); + b3Vector3 eyePos = b3MakeVector3(0, 0, 0); + + int forwardAxis(-1); + + { + + switch (upAxis) + { + + case 1: + { + + + forwardAxis = 0; + eyePos[forwardAxis] = -distance; + camForward = b3MakeVector3(eyePos[0], eyePos[1], eyePos[2]); + if (camForward.length2() < B3_EPSILON) + { + camForward.setValue(1.f, 0.f, 0.f); + } + else + { + camForward.normalize(); + } + b3Scalar rollRad = roll * b3Scalar(0.01745329251994329547); + b3Quaternion rollRot(camForward, rollRad); + + camUpVector = b3QuatRotate(rollRot, b3MakeVector3(0, 1, 0)); + //gLightPos = b3MakeVector3(-50.f,100,30); + break; + } + case 2: + { + + + forwardAxis = 1; + eyePos[forwardAxis] = -distance; + camForward = b3MakeVector3(eyePos[0], eyePos[1], eyePos[2]); + if (camForward.length2() < B3_EPSILON) + { + camForward.setValue(1.f, 0.f, 0.f); + } + else + { + camForward.normalize(); + } + + b3Scalar rollRad = roll * b3Scalar(0.01745329251994329547); + b3Quaternion rollRot(camForward, rollRad); + + camUpVector = b3QuatRotate(rollRot, b3MakeVector3(0, 0, 1)); + //gLightPos = b3MakeVector3(-50.f,30,100); + break; + } + default: + { + //b3Assert(0); + return; + } + }; + } + + + b3Scalar yawRad = yaw * b3Scalar(0.01745329251994329547);// rads per deg + b3Scalar pitchRad = pitch * b3Scalar(0.01745329251994329547);// rads per deg + + b3Quaternion pitchRot(camUpVector, pitchRad); + + b3Vector3 right = 
camUpVector.cross(camForward); + b3Quaternion yawRot(right, -yawRad); + + eyePos = b3Matrix3x3(pitchRot) * b3Matrix3x3(yawRot) * eyePos; + camPos = eyePos; + camPos += camTargetPos; + + float camPosf[4] = { camPos[0],camPos[1],camPos[2],0 }; + float camPosTargetf[4] = { camTargetPos[0],camTargetPos[1],camTargetPos[2],0 }; + float camUpf[4] = { camUpVector[0],camUpVector[1],camUpVector[2],0 }; + + b3ComputeViewMatrixFromPositions(camPosf, camPosTargetf, camUpf,viewMatrix); + +} + + +void b3ComputeProjectionMatrix(float left, float right, float bottom, float top, float nearVal, float farVal, float projectionMatrix[16]) +{ + projectionMatrix[0 * 4 + 0] = (float(2) * nearVal) / (right - left); + projectionMatrix[0 * 4 + 1] = float(0); + projectionMatrix[0 * 4 + 2] = float(0); + projectionMatrix[0 * 4 + 3] = float(0); + + projectionMatrix[1 * 4 + 0] = float(0); + projectionMatrix[1 * 4 + 1] = (float(2) * nearVal) / (top - bottom); + projectionMatrix[1 * 4 + 2] = float(0); + projectionMatrix[1 * 4 + 3] = float(0); + + projectionMatrix[2 * 4 + 0] = (right + left) / (right - left); + projectionMatrix[2 * 4 + 1] = (top + bottom) / (top - bottom); + projectionMatrix[2 * 4 + 2] = -(farVal + nearVal) / (farVal - nearVal); + projectionMatrix[2 * 4 + 3] = float(-1); + + projectionMatrix[3 * 4 + 0] = float(0); + projectionMatrix[3 * 4 + 1] = float(0); + projectionMatrix[3 * 4 + 2] = -(float(2) * farVal * nearVal) / (farVal - nearVal); + projectionMatrix[3 * 4 + 3] = float(0); +} + + +void b3ComputeProjectionMatrixFOV(float fov, float aspect, float nearVal, float farVal, float projectionMatrix[16]) +{ + float yScale = 1.0 / tan((3.141592538 / 180.0) * fov / 2); + float xScale = yScale / aspect; + + projectionMatrix[0 * 4 + 0] = xScale; + projectionMatrix[0 * 4 + 1] = float(0); + projectionMatrix[0 * 4 + 2] = float(0); + projectionMatrix[0 * 4 + 3] = float(0); + + projectionMatrix[1 * 4 + 0] = float(0); + projectionMatrix[1 * 4 + 1] = yScale; + projectionMatrix[1 * 4 + 2] = 
float(0); + projectionMatrix[1 * 4 + 3] = float(0); + + projectionMatrix[2 * 4 + 0] = 0; + projectionMatrix[2 * 4 + 1] = 0; + projectionMatrix[2 * 4 + 2] = (nearVal + farVal) / (nearVal - farVal); + projectionMatrix[2 * 4 + 3] = float(-1); + + projectionMatrix[3 * 4 + 0] = float(0); + projectionMatrix[3 * 4 + 1] = float(0); + projectionMatrix[3 * 4 + 2] = (float(2) * farVal * nearVal) / (nearVal - farVal); + projectionMatrix[3 * 4 + 3] = float(0); +} + + void b3RequestCameraImageSetViewMatrix2(b3SharedMemoryCommandHandle commandHandle, const float cameraTargetPosition[3], float distance, float yaw, float pitch, float roll, int upAxis) { struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle; b3Assert(command); b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA); - b3Vector3 camUpVector; - b3Vector3 camForward; - b3Vector3 camPos; - b3Vector3 camTargetPos = b3MakeVector3(cameraTargetPosition[0],cameraTargetPosition[1],cameraTargetPosition[2]); - b3Vector3 eyePos = b3MakeVector3(0,0,0); - - int forwardAxis(-1); - - { - - switch (upAxis) - { - - case 1: - { - - - forwardAxis = 0; - eyePos[forwardAxis] = -distance; - camForward = b3MakeVector3(eyePos[0],eyePos[1],eyePos[2]); - if (camForward.length2() < B3_EPSILON) - { - camForward.setValue(1.f,0.f,0.f); - } else - { - camForward.normalize(); - } - b3Scalar rollRad = roll * b3Scalar(0.01745329251994329547); - b3Quaternion rollRot(camForward,rollRad); - - camUpVector = b3QuatRotate(rollRot,b3MakeVector3(0,1,0)); - //gLightPos = b3MakeVector3(-50.f,100,30); - break; - } - case 2: - { - - - forwardAxis = 1; - eyePos[forwardAxis] = -distance; - camForward = b3MakeVector3(eyePos[0],eyePos[1],eyePos[2]); - if (camForward.length2() < B3_EPSILON) - { - camForward.setValue(1.f,0.f,0.f); - } else - { - camForward.normalize(); - } - - b3Scalar rollRad = roll * b3Scalar(0.01745329251994329547); - b3Quaternion rollRot(camForward,rollRad); - - camUpVector = 
b3QuatRotate(rollRot,b3MakeVector3(0,0,1)); - //gLightPos = b3MakeVector3(-50.f,30,100); - break; - } - default: - { - //b3Assert(0); - return; - } - }; - } - - - b3Scalar yawRad = yaw * b3Scalar(0.01745329251994329547);// rads per deg - b3Scalar pitchRad = pitch * b3Scalar(0.01745329251994329547);// rads per deg - - b3Quaternion pitchRot(camUpVector,pitchRad); - - b3Vector3 right = camUpVector.cross(camForward); - b3Quaternion yawRot(right,-yawRad); - - - - eyePos = b3Matrix3x3(pitchRot) * b3Matrix3x3(yawRot) * eyePos; - camPos = eyePos; - camPos += camTargetPos; - - float camPosf[4] = {camPos[0],camPos[1],camPos[2],0}; - float camPosTargetf[4] = {camTargetPos[0],camTargetPos[1],camTargetPos[2],0}; - float camUpf[4] = {camUpVector[0],camUpVector[1],camUpVector[2],0}; - - b3RequestCameraImageSetViewMatrix(commandHandle,camPosf,camPosTargetf,camUpf); + + b3ComputeViewMatrixFromYawPitchRoll(cameraTargetPosition, distance, yaw, pitch, roll, upAxis, command->m_requestPixelDataArguments.m_viewMatrix); + command->m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES; } + + + + void b3RequestCameraImageSetViewMatrix(b3SharedMemoryCommandHandle commandHandle, const float cameraPosition[3], const float cameraTargetPosition[3], const float cameraUp[3]) { - b3Vector3 eye = b3MakeVector3(cameraPosition[0], cameraPosition[1], cameraPosition[2]); - b3Vector3 center = b3MakeVector3(cameraTargetPosition[0], cameraTargetPosition[1], cameraTargetPosition[2]); - b3Vector3 up = b3MakeVector3(cameraUp[0], cameraUp[1], cameraUp[2]); - b3Vector3 f = (center - eye).normalized(); - b3Vector3 u = up.normalized(); - b3Vector3 s = (f.cross(u)).normalized(); - u = s.cross(f); - - float viewMatrix[16]; - - viewMatrix[0*4+0] = s.x; - viewMatrix[1*4+0] = s.y; - viewMatrix[2*4+0] = s.z; - - viewMatrix[0*4+1] = u.x; - viewMatrix[1*4+1] = u.y; - viewMatrix[2*4+1] = u.z; - - viewMatrix[0*4+2] =-f.x; - viewMatrix[1*4+2] =-f.y; - viewMatrix[2*4+2] =-f.z; - - viewMatrix[0*4+3] = 0.f; - 
viewMatrix[1*4+3] = 0.f; - viewMatrix[2*4+3] = 0.f; - - viewMatrix[3*4+0] = -s.dot(eye); - viewMatrix[3*4+1] = -u.dot(eye); - viewMatrix[3*4+2] = f.dot(eye); - viewMatrix[3*4+3] = 1.f; + float viewMatrix[16]; + b3ComputeViewMatrixFromPositions(cameraPosition, cameraTargetPosition, cameraUp, viewMatrix); struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle; b3Assert(command); b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA); - for (int i=0;i<16;i++) - { - command->m_requestPixelDataArguments.m_viewMatrix[i] = viewMatrix[i]; - } + + b3ComputeViewMatrixFromPositions(cameraPosition, cameraTargetPosition, cameraUp, command->m_requestPixelDataArguments.m_viewMatrix); + command->m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES; } void b3RequestCameraImageSetProjectionMatrix(b3SharedMemoryCommandHandle commandHandle, float left, float right, float bottom, float top, float nearVal, float farVal) { - float frustum[16]; - - frustum[0*4+0] = (float(2) * nearVal) / (right - left); - frustum[0*4+1] = float(0); - frustum[0*4+2] = float(0); - frustum[0*4+3] = float(0); - - frustum[1*4+0] = float(0); - frustum[1*4+1] = (float(2) * nearVal) / (top - bottom); - frustum[1*4+2] = float(0); - frustum[1*4+3] = float(0); - - frustum[2*4+0] = (right + left) / (right - left); - frustum[2*4+1] = (top + bottom) / (top - bottom); - frustum[2*4+2] = -(farVal + nearVal) / (farVal - nearVal); - frustum[2*4+3] = float(-1); - - frustum[3*4+0] = float(0); - frustum[3*4+1] = float(0); - frustum[3*4+2] = -(float(2) * farVal * nearVal) / (farVal - nearVal); - frustum[3*4+3] = float(0); struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle; b3Assert(command); b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA); - for (int i=0;i<16;i++) - { - command->m_requestPixelDataArguments.m_projectionMatrix[i] = frustum[i]; - } + + b3ComputeProjectionMatrix(left, right, bottom, top, nearVal, farVal, 
command->m_requestPixelDataArguments.m_projectionMatrix); + command->m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES; } void b3RequestCameraImageSetFOVProjectionMatrix(b3SharedMemoryCommandHandle commandHandle, float fov, float aspect, float nearVal, float farVal) { - float yScale = 1.0 / tan((3.141592538 / 180.0) * fov / 2); - float xScale = yScale / aspect; - - float frustum[16]; - - frustum[0*4+0] = xScale; - frustum[0*4+1] = float(0); - frustum[0*4+2] = float(0); - frustum[0*4+3] = float(0); - - frustum[1*4+0] = float(0); - frustum[1*4+1] = yScale; - frustum[1*4+2] = float(0); - frustum[1*4+3] = float(0); - - frustum[2*4+0] = 0; - frustum[2*4+1] = 0; - frustum[2*4+2] = (nearVal + farVal) / (nearVal - farVal); - frustum[2*4+3] = float(-1); - - frustum[3*4+0] = float(0); - frustum[3*4+1] = float(0); - frustum[3*4+2] = (float(2) * farVal * nearVal) / (nearVal - farVal); - frustum[3*4+3] = float(0); + struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle; b3Assert(command); b3Assert(command->m_type == CMD_REQUEST_CAMERA_IMAGE_DATA); - for (int i=0;i<16;i++) - { - command->m_requestPixelDataArguments.m_projectionMatrix[i] = frustum[i]; - } + + b3ComputeProjectionMatrixFOV(fov, aspect, nearVal, farVal, command->m_requestPixelDataArguments.m_projectionMatrix); + command->m_updateFlags |= REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES; } diff --git a/examples/SharedMemory/PhysicsClientC_API.h b/examples/SharedMemory/PhysicsClientC_API.h index 18eba1c0a..529e50e94 100644 --- a/examples/SharedMemory/PhysicsClientC_API.h +++ b/examples/SharedMemory/PhysicsClientC_API.h @@ -95,14 +95,30 @@ int b3GetDebugItemUniqueId(b3SharedMemoryStatusHandle statusHandle); ///request an image from a simulated camera, using a software renderer. 
b3SharedMemoryCommandHandle b3InitRequestCameraImage(b3PhysicsClientHandle physClient); void b3RequestCameraImageSetCameraMatrices(b3SharedMemoryCommandHandle command, float viewMatrix[16], float projectionMatrix[16]); -void b3RequestCameraImageSetViewMatrix(b3SharedMemoryCommandHandle command, const float cameraPosition[3], const float cameraTargetPosition[3], const float cameraUp[3]); -void b3RequestCameraImageSetViewMatrix2(b3SharedMemoryCommandHandle commandHandle, const float cameraTargetPosition[3], float distance, float yaw, float pitch, float roll, int upAxis); -void b3RequestCameraImageSetProjectionMatrix(b3SharedMemoryCommandHandle command, float left, float right, float bottom, float top, float nearVal, float farVal); -void b3RequestCameraImageSetFOVProjectionMatrix(b3SharedMemoryCommandHandle command, float fov, float aspect, float nearVal, float farVal); void b3RequestCameraImageSetPixelResolution(b3SharedMemoryCommandHandle command, int width, int height ); +void b3RequestCameraImageSetLightDirection(b3SharedMemoryCommandHandle commandHandle, const float lightDirection[3]); void b3RequestCameraImageSelectRenderer(b3SharedMemoryCommandHandle commandHandle, int renderer); void b3GetCameraImageData(b3PhysicsClientHandle physClient, struct b3CameraImageData* imageData); +///compute a view matrix, helper function for b3RequestCameraImageSetCameraMatrices +void b3ComputeViewMatrixFromPositions(const float cameraPosition[3], const float cameraTargetPosition[3], const float cameraUp[3], float viewMatrix[16]); +void b3ComputeViewMatrixFromYawPitchRoll(const float cameraTargetPosition[3], float distance, float yaw, float pitch, float roll, int upAxis, float viewMatrix[16]); + +///compute a projection matrix, helper function for b3RequestCameraImageSetCameraMatrices +void b3ComputeProjectionMatrix(float left, float right, float bottom, float top, float nearVal, float farVal, float projectionMatrix[16]); +void b3ComputeProjectionMatrixFOV(float fov, float aspect, 
float nearVal, float farVal, float projectionMatrix[16]); + + +/* obsolete, please use b3ComputeViewProjectionMatrices */ +void b3RequestCameraImageSetViewMatrix(b3SharedMemoryCommandHandle command, const float cameraPosition[3], const float cameraTargetPosition[3], const float cameraUp[3]); +/* obsolete, please use b3ComputeViewProjectionMatrices */ +void b3RequestCameraImageSetViewMatrix2(b3SharedMemoryCommandHandle commandHandle, const float cameraTargetPosition[3], float distance, float yaw, float pitch, float roll, int upAxis); +/* obsolete, please use b3ComputeViewProjectionMatrices */ +void b3RequestCameraImageSetProjectionMatrix(b3SharedMemoryCommandHandle command, float left, float right, float bottom, float top, float nearVal, float farVal); +/* obsolete, please use b3ComputeViewProjectionMatrices */ +void b3RequestCameraImageSetFOVProjectionMatrix(b3SharedMemoryCommandHandle command, float fov, float aspect, float nearVal, float farVal); + + ///request an contact point information b3SharedMemoryCommandHandle b3InitRequestContactPointInformation(b3PhysicsClientHandle physClient); void b3SetContactFilterBodyA(b3SharedMemoryCommandHandle commandHandle, int bodyUniqueIdA); diff --git a/examples/SharedMemory/PhysicsServerCommandProcessor.cpp b/examples/SharedMemory/PhysicsServerCommandProcessor.cpp index 9791ab288..8bb278673 100644 --- a/examples/SharedMemory/PhysicsServerCommandProcessor.cpp +++ b/examples/SharedMemory/PhysicsServerCommandProcessor.cpp @@ -1416,6 +1416,10 @@ bool PhysicsServerCommandProcessor::processCommand(const struct SharedMemoryComm { // printf("-------------------------------\nRendering\n"); + if ((clientCmd.m_updateFlags & REQUEST_PIXEL_ARGS_SET_LIGHT_DIRECTION) != 0) + { + m_data->m_visualConverter.setLightDirection(clientCmd.m_requestPixelDataArguments.m_lightDirection[0], clientCmd.m_requestPixelDataArguments.m_lightDirection[1], clientCmd.m_requestPixelDataArguments.m_lightDirection[2]); + } if ((clientCmd.m_updateFlags & 
REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES)!=0) { diff --git a/examples/SharedMemory/SharedMemoryCommands.h b/examples/SharedMemory/SharedMemoryCommands.h index 715bdd11d..40b62f518 100644 --- a/examples/SharedMemory/SharedMemoryCommands.h +++ b/examples/SharedMemory/SharedMemoryCommands.h @@ -138,12 +138,14 @@ struct RequestPixelDataArgs int m_startPixelIndex; int m_pixelWidth; int m_pixelHeight; + float m_lightDirection[3]; }; enum EnumRequestPixelDataUpdateFlags { REQUEST_PIXEL_ARGS_HAS_CAMERA_MATRICES=1, REQUEST_PIXEL_ARGS_SET_PIXEL_WIDTH_HEIGHT=4, + REQUEST_PIXEL_ARGS_SET_LIGHT_DIRECTION=8, //don't exceed (1<<15), because this enum is shared with EnumRenderer in SharedMemoryPublic.h }; diff --git a/examples/SharedMemory/TinyRendererVisualShapeConverter.cpp b/examples/SharedMemory/TinyRendererVisualShapeConverter.cpp index 643c7c1c0..676ca65d3 100644 --- a/examples/SharedMemory/TinyRendererVisualShapeConverter.cpp +++ b/examples/SharedMemory/TinyRendererVisualShapeConverter.cpp @@ -73,14 +73,17 @@ struct TinyRendererVisualShapeConverterInternalData b3AlignedObjectArray m_textures; b3AlignedObjectArray m_depthBuffer; b3AlignedObjectArray m_segmentationMaskBuffer; - + btVector3 m_lightDirection; + bool m_hasLightDirection; SimpleCamera m_camera; + TinyRendererVisualShapeConverterInternalData() :m_upAxis(2), m_swWidth(START_WIDTH), m_swHeight(START_HEIGHT), - m_rgbColorBuffer(START_WIDTH,START_HEIGHT,TGAImage::RGB) + m_rgbColorBuffer(START_WIDTH,START_HEIGHT,TGAImage::RGB), + m_hasLightDirection(false) { m_depthBuffer.resize(m_swWidth*m_swHeight); m_segmentationMaskBuffer.resize(m_swWidth*m_swHeight,-1); @@ -108,7 +111,11 @@ TinyRendererVisualShapeConverter::~TinyRendererVisualShapeConverter() delete m_data; } - +void TinyRendererVisualShapeConverter::setLightDirection(float x, float y, float z) +{ + m_data->m_lightDirection.setValue(x, y, z); + m_data->m_hasLightDirection = true; +} void convertURDFToVisualShape(const UrdfVisual* visual, const char* urdfPathPrefix, 
const btTransform& visualTransform, btAlignedObjectArray& verticesOut, btAlignedObjectArray& indicesOut, btAlignedObjectArray& texturesOut, b3VisualShapeData& visualShapeOut) @@ -677,16 +684,23 @@ void TinyRendererVisualShapeConverter::render(const float viewMat[16], const flo btVector3 lightDirWorld(-5,200,-40); - switch (m_data->m_upAxis) - { - case 1: - lightDirWorld = btVector3(-50.f,100,30); - break; - case 2: - lightDirWorld = btVector3(-50.f,30,100); - break; - default:{} - }; + if (m_data->m_hasLightDirection) + { + lightDirWorld = m_data->m_lightDirection; + } + else + { + switch (m_data->m_upAxis) + { + case 1: + lightDirWorld = btVector3(-50.f, 100, 30); + break; + case 2: + lightDirWorld = btVector3(-50.f, 30, 100); + break; + default: {} + }; + } lightDirWorld.normalize(); diff --git a/examples/SharedMemory/TinyRendererVisualShapeConverter.h b/examples/SharedMemory/TinyRendererVisualShapeConverter.h index 48c5c1079..1a4a02f76 100644 --- a/examples/SharedMemory/TinyRendererVisualShapeConverter.h +++ b/examples/SharedMemory/TinyRendererVisualShapeConverter.h @@ -32,7 +32,8 @@ struct TinyRendererVisualShapeConverter : public LinkVisualShapesConverter void getWidthAndHeight(int& width, int& height); void setWidthAndHeight(int width, int height); - + void setLightDirection(float x, float y, float z); + void copyCameraImageData(unsigned char* pixelsRGBA, int rgbaBufferSizeInPixels, float* depthBuffer, int depthBufferSizeInPixels,int* segmentationMaskBuffer, int segmentationMaskSizeInPixels, int startPixelIndex, int* widthPtr, int* heightPtr, int* numPixelsCopied); void render(); diff --git a/examples/pybullet/pybullet.c b/examples/pybullet/pybullet.c index 47aaee9d9..17e782b8b 100644 --- a/examples/pybullet/pybullet.c +++ b/examples/pybullet/pybullet.c @@ -1377,15 +1377,18 @@ static int pybullet_internalSetMatrix(PyObject* objMat, float matrix[16]) { PyObject* seq; seq = PySequence_Fast(objMat, "expected a sequence"); - len = PySequence_Size(objMat); - if 
(len == 16) { - for (i = 0; i < len; i++) { - matrix[i] = pybullet_internalGetFloatFromSequence(seq, i); - } - Py_DECREF(seq); - return 1; + if (seq) + { + len = PySequence_Size(objMat); + if (len == 16) { + for (i = 0; i < len; i++) { + matrix[i] = pybullet_internalGetFloatFromSequence(seq, i); + } + Py_DECREF(seq); + return 1; + } + Py_DECREF(seq); } - Py_DECREF(seq); return 0; } @@ -1397,20 +1400,24 @@ static int pybullet_internalSetMatrix(PyObject* objMat, float matrix[16]) { // vector - float[3] which will be set by values from objMat static int pybullet_internalSetVector(PyObject* objVec, float vector[3]) { int i, len; - PyObject* seq; + PyObject* seq=0; if (objVec==NULL) return 0; seq = PySequence_Fast(objVec, "expected a sequence"); - len = PySequence_Size(objVec); - if (len == 3) { - for (i = 0; i < len; i++) { - vector[i] = pybullet_internalGetFloatFromSequence(seq, i); - } - Py_DECREF(seq); - return 1; + if (seq) + { + + len = PySequence_Size(objVec); + if (len == 3) { + for (i = 0; i < len; i++) { + vector[i] = pybullet_internalGetFloatFromSequence(seq, i); + } + Py_DECREF(seq); + return 1; + } + Py_DECREF(seq); } - Py_DECREF(seq); return 0; } @@ -1422,15 +1429,18 @@ static int pybullet_internalSetVectord(PyObject* obVec, double vector[3]) { return 0; seq = PySequence_Fast(obVec, "expected a sequence"); - len = PySequence_Size(obVec); - if (len == 3) { - for (i = 0; i < len; i++) { - vector[i] = pybullet_internalGetFloatFromSequence(seq, i); - } - Py_DECREF(seq); - return 1; - } - Py_DECREF(seq); + if (seq) + { + len = PySequence_Size(obVec); + if (len == 3) { + for (i = 0; i < len; i++) { + vector[i] = pybullet_internalGetFloatFromSequence(seq, i); + } + Py_DECREF(seq); + return 1; + } + Py_DECREF(seq); + } return 0; } @@ -2196,6 +2206,302 @@ static PyObject* pybullet_getContactPointData(PyObject* self, PyObject* args, Py return Py_None; } + + +/// Render an image from the current timestep of the simulation, width, height are required, other args are 
optional +// getCameraImage(w, h, view[16], projection[16], lightpos[3]) +static PyObject* pybullet_getCameraImage(PyObject* self, PyObject* args, PyObject *keywds) +{ + /// request an image from a simulated camera, using a software renderer. + struct b3CameraImageData imageData; + PyObject* objViewMat = 0, *objProjMat = 0, *lightDirObj = 0; + int width, height; + int size = PySequence_Size(args); + float viewMatrix[16]; + float projectionMatrix[16]; + float lightDir[3]; + // inialize cmd + b3SharedMemoryCommandHandle command; + + if (0 == sm) + { + PyErr_SetString(SpamError, "Not connected to physics server."); + return NULL; + } + + command = b3InitRequestCameraImage(sm); + + // set camera resolution, optionally view, projection matrix, light direction + static char *kwlist[] = { "width", "height", "viewMatrix", "projectionMatrix", "lightDirection",NULL }; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "ii|OOO", kwlist, &width, &height, &objViewMat, &objProjMat, &lightDirObj)) + { + return NULL; + } + b3RequestCameraImageSetPixelResolution(command, width, height); + + // set camera matrices only if set matrix function succeeds + if (pybullet_internalSetMatrix(objViewMat, viewMatrix) && (pybullet_internalSetMatrix(objProjMat, projectionMatrix))) + { + b3RequestCameraImageSetCameraMatrices(command, viewMatrix, projectionMatrix); + } + //set light pos only if function succeeds + if (pybullet_internalSetVector(lightDirObj, lightDir)) + { + b3RequestCameraImageSetLightDirection(command, lightDir); + } + + + if (b3CanSubmitCommand(sm)) + { + b3SharedMemoryStatusHandle statusHandle; + int statusType; + + // b3RequestCameraImageSelectRenderer(command,ER_BULLET_HARDWARE_OPENGL); + + statusHandle = b3SubmitClientCommandAndWaitStatus(sm, command); + statusType = b3GetStatusType(statusHandle); + if (statusType == CMD_CAMERA_IMAGE_COMPLETED) { + PyObject* item2; + PyObject* pyResultList; // store 4 elements in this result: width, + // height, rgbData, depth + +#ifdef 
PYBULLET_USE_NUMPY + PyObject* pyRGB; + PyObject* pyDep; + PyObject* pySeg; + + int i, j, p; + + b3GetCameraImageData(sm, &imageData); + // TODO(hellojas): error handling if image size is 0 + pyResultList = PyTuple_New(5); + PyTuple_SetItem(pyResultList, 0, PyInt_FromLong(imageData.m_pixelWidth)); + PyTuple_SetItem(pyResultList, 1, PyInt_FromLong(imageData.m_pixelHeight)); + + int bytesPerPixel = 4; // Red, Green, Blue, and Alpha each 8 bit values + + npy_intp rgb_dims[3] = { imageData.m_pixelHeight, imageData.m_pixelWidth, + bytesPerPixel }; + npy_intp dep_dims[2] = { imageData.m_pixelHeight, imageData.m_pixelWidth }; + npy_intp seg_dims[2] = { imageData.m_pixelHeight, imageData.m_pixelWidth }; + + pyRGB = PyArray_SimpleNew(3, rgb_dims, NPY_UINT8); + pyDep = PyArray_SimpleNew(2, dep_dims, NPY_FLOAT32); + pySeg = PyArray_SimpleNew(2, seg_dims, NPY_INT32); + + memcpy(PyArray_DATA(pyRGB), imageData.m_rgbColorData, + imageData.m_pixelHeight * imageData.m_pixelWidth * bytesPerPixel); + memcpy(PyArray_DATA(pyDep), imageData.m_depthValues, + imageData.m_pixelHeight * imageData.m_pixelWidth * sizeof(float)); + memcpy(PyArray_DATA(pySeg), imageData.m_segmentationMaskValues, + imageData.m_pixelHeight * imageData.m_pixelWidth * sizeof(int)); + + PyTuple_SetItem(pyResultList, 2, pyRGB); + PyTuple_SetItem(pyResultList, 3, pyDep); + PyTuple_SetItem(pyResultList, 4, pySeg); +#else//PYBULLET_USE_NUMPY + PyObject* pylistRGB; + PyObject* pylistDep; + PyObject* pylistSeg; + + int i, j, p; + + b3GetCameraImageData(sm, &imageData); + // TODO(hellojas): error handling if image size is 0 + pyResultList = PyTuple_New(5); + PyTuple_SetItem(pyResultList, 0, PyInt_FromLong(imageData.m_pixelWidth)); + PyTuple_SetItem(pyResultList, 1, PyInt_FromLong(imageData.m_pixelHeight)); + + { + PyObject* item; + int bytesPerPixel = 4; // Red, Green, Blue, and Alpha each 8 bit values + int num = + bytesPerPixel * imageData.m_pixelWidth * imageData.m_pixelHeight; + pylistRGB = PyTuple_New(num); + pylistDep = 
PyTuple_New(imageData.m_pixelWidth * imageData.m_pixelHeight); + pylistSeg = + PyTuple_New(imageData.m_pixelWidth * imageData.m_pixelHeight); + for (i = 0; i < imageData.m_pixelWidth; i++) { + for (j = 0; j < imageData.m_pixelHeight; j++) { + // TODO(hellojas): validate depth values make sense + int depIndex = i + j * imageData.m_pixelWidth; + { + item = PyFloat_FromDouble(imageData.m_depthValues[depIndex]); + PyTuple_SetItem(pylistDep, depIndex, item); + } + { + item2 = + PyLong_FromLong(imageData.m_segmentationMaskValues[depIndex]); + PyTuple_SetItem(pylistSeg, depIndex, item2); + } + + for (p = 0; p < bytesPerPixel; p++) { + int pixelIndex = + bytesPerPixel * (i + j * imageData.m_pixelWidth) + p; + item = PyInt_FromLong(imageData.m_rgbColorData[pixelIndex]); + PyTuple_SetItem(pylistRGB, pixelIndex, item); + } + } + } + } + + PyTuple_SetItem(pyResultList, 2, pylistRGB); + PyTuple_SetItem(pyResultList, 3, pylistDep); + PyTuple_SetItem(pyResultList, 4, pylistSeg); + return pyResultList; +#endif//PYBULLET_USE_NUMPY + + return pyResultList; + } + } + + Py_INCREF(Py_None); + return Py_None; + +} + + + +static PyObject* pybullet_computeViewMatrix(PyObject* self, PyObject* args, PyObject *keywds) +{ + PyObject* camEyeObj = 0; + PyObject* camTargetPositionObj = 0; + PyObject* camUpVectorObj = 0; + float camEye[3]; + float camTargetPosition[3]; + float camUpVector[3]; + + // set camera resolution, optionally view, projection matrix, light position + static char *kwlist[] = { "cameraEyePosition", "cameraTargetPosition", "cameraUpVector",NULL }; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "OOO", kwlist, &camEyeObj, &camTargetPositionObj, &camUpVectorObj)) + { + return NULL; + } + + if (pybullet_internalSetVector(camEyeObj, camEye) && + pybullet_internalSetVector(camTargetPositionObj, camTargetPosition) && + pybullet_internalSetVector(camUpVectorObj, camUpVector)) + { + float viewMatrix[16]; + PyObject* pyResultList=0; + int i; + 
b3ComputeViewMatrixFromPositions(camEye, camTargetPosition, camUpVector, viewMatrix); + + pyResultList = PyTuple_New(16); + for (i = 0; i < 16; i++) + { + PyObject* item = PyFloat_FromDouble(viewMatrix[i]); + PyTuple_SetItem(pyResultList, i, item); + } + return pyResultList; + } + + PyErr_SetString(SpamError, "Error in computeViewMatrix."); + return NULL; +} + +///compute a view matrix, helper function for b3RequestCameraImageSetCameraMatrices +static PyObject* pybullet_computeViewMatrixFromYawPitchRoll(PyObject* self, PyObject* args, PyObject *keywds) +{ + PyObject* cameraTargetPositionObj = 0; + float cameraTargetPosition[3]; + float distance, yaw, pitch, roll; + int upAxisIndex; + float viewMatrix[16]; + PyObject* pyResultList = 0; + int i; + + // set camera resolution, optionally view, projection matrix, light position + static char *kwlist[] = { "cameraTargetPosition", "distance", "yaw", "pitch", "roll", "upAxisIndex" ,NULL }; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "Offffi", kwlist, &cameraTargetPositionObj, &distance,&yaw,&pitch,&roll, &upAxisIndex)) + { + return NULL; + } + + if (!pybullet_internalSetVector(cameraTargetPositionObj, cameraTargetPosition)) + { + PyErr_SetString(SpamError, "Cannot convert cameraTargetPosition."); + return NULL; + } + + b3ComputeViewMatrixFromYawPitchRoll(cameraTargetPosition, distance, yaw, pitch, roll, upAxisIndex, viewMatrix); + + pyResultList = PyTuple_New(16); + for (i = 0; i < 16; i++) + { + PyObject* item = PyFloat_FromDouble(viewMatrix[i]); + PyTuple_SetItem(pyResultList, i, item); + } + return pyResultList; + + +} + +///compute a projection matrix, helper function for b3RequestCameraImageSetCameraMatrices +static PyObject* pybullet_computeProjectionMatrix(PyObject* self, PyObject* args, PyObject *keywds) +{ + PyObject* pyResultList = 0; + float left; + float right; + float bottom; + float top; + float nearVal; + float farVal; + float projectionMatrix[16]; + int i; + + // set camera resolution, optionally 
view, projection matrix, light position + static char *kwlist[] = { "left", "right", "bottom", "top", "nearVal", "farVal" ,NULL }; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "ffffff", kwlist, &left, &right, &bottom, &top, &nearVal, &farVal)) + { + return NULL; + } + + + b3ComputeProjectionMatrix(left, right, bottom, top, nearVal, farVal, projectionMatrix); + + pyResultList = PyTuple_New(16); + for (i = 0; i < 16; i++) + { + PyObject* item = PyFloat_FromDouble(projectionMatrix[i]); + PyTuple_SetItem(pyResultList, i, item); + } + return pyResultList; + +} + +static PyObject* pybullet_computeProjectionMatrixFOV(PyObject* self, PyObject* args, PyObject *keywds) +{ + float fov, aspect, nearVal, farVal; + PyObject* pyResultList = 0; + float projectionMatrix[16]; + int i; + + static char *kwlist[] = { "fov","aspect","nearVal","farVal",NULL }; + + if (!PyArg_ParseTupleAndKeywords(args, keywds, "ffff", kwlist, &fov, &aspect, &nearVal, &farVal)) + { + return NULL; + } + + b3ComputeProjectionMatrixFOV(fov, aspect, nearVal, farVal, projectionMatrix); + + pyResultList = PyTuple_New(16); + for (i = 0; i < 16; i++) + { + PyObject* item = PyFloat_FromDouble(projectionMatrix[i]); + PyTuple_SetItem(pyResultList, i, item); + } + return pyResultList; + +} + + // Render an image from the current timestep of the simulation // // Examples: @@ -2219,7 +2525,7 @@ static PyObject* pybullet_getContactPointData(PyObject* self, PyObject* args, Py // TODO(hellojas): fix image is cut off at head // TODO(hellojas): should we add check to give minimum image resolution // to see object based on camera position? -static PyObject* pybullet_renderImage(PyObject* self, PyObject* args) { +static PyObject* pybullet_renderImageObsolete(PyObject* self, PyObject* args) { /// request an image from a simulated camera, using a software renderer. 
struct b3CameraImageData imageData; PyObject* objViewMat, *objProjMat; @@ -2277,7 +2583,8 @@ static PyObject* pybullet_renderImage(PyObject* self, PyObject* args) { b3RequestCameraImageSetPixelResolution(command, width, height); if (pybullet_internalSetVector(objCameraPos, cameraPos) && pybullet_internalSetVector(objTargetPos, targetPos) && - pybullet_internalSetVector(objCameraUp, cameraUp)) { + pybullet_internalSetVector(objCameraUp, cameraUp)) + { b3RequestCameraImageSetViewMatrix(command, cameraPos, targetPos, cameraUp); } else { @@ -3020,15 +3327,35 @@ static PyMethodDef SpamMethods[] = { "[x,y,z] in Cartesian coordinates, flag to select TORQUE_IN_LINK_FRAME or " "TORQUE_IN_WORLD_FRAME coordinates"}, - {"renderImage", pybullet_renderImage, METH_VARARGS, - "Render an image (given the pixel resolution width, height, camera view " - "matrix, projection matrix, near, and far values), and return the " - "8-8-8bit RGB pixel data and floating point depth values" -#ifdef PYBULLET_USE_NUMPY - " as NumPy arrays" -#endif + {"renderImage", pybullet_renderImageObsolete, METH_VARARGS, + "obsolete, please use getCameraImage and getViewProjectionMatrices instead" }, + { "getCameraImage",(PyCFunction)pybullet_getCameraImage, METH_VARARGS| METH_KEYWORDS, + "Render an image (given the pixel resolution width, height, camera viewMatrix " + ", projectionMatrix and lightDirection), and return the " + "8-8-8bit RGB pixel data and floating point depth values" +#ifdef PYBULLET_USE_NUMPY + " as NumPy arrays" +#endif + }, + + { "computeViewMatrix", (PyCFunction)pybullet_computeViewMatrix, METH_VARARGS | METH_KEYWORDS, + "Compute a camera viewmatrix from camera eye, target position and up vector " + }, + + { "computeViewMatrixFromYawPitchRoll",(PyCFunction)pybullet_computeViewMatrixFromYawPitchRoll, METH_VARARGS | METH_KEYWORDS, + "Compute a camera viewmatrix from camera eye, target position and up vector " + }, + + { "computeProjectionMatrix", 
(PyCFunction)pybullet_computeProjectionMatrix, METH_VARARGS | METH_KEYWORDS, + "Compute a camera projection matrix from screen left/right/bottom/top/near/far values" + }, + + { "computeProjectionMatrixFOV", (PyCFunction)pybullet_computeProjectionMatrixFOV, METH_VARARGS | METH_KEYWORDS, + "Compute a camera projection matrix from fov, aspect ratio, near, far values" + }, + {"getContactPoints", (PyCFunction)pybullet_getContactPointData, METH_VARARGS | METH_KEYWORDS, "Return existing contact points after the stepSimulation command. " "Optional arguments one or two object unique " diff --git a/examples/pybullet/test.py b/examples/pybullet/test.py index 1a184985f..762026246 100644 --- a/examples/pybullet/test.py +++ b/examples/pybullet/test.py @@ -3,7 +3,7 @@ import time #choose connection method: GUI, DIRECT, SHARED_MEMORY pybullet.connect(pybullet.GUI) - +pybullet.loadURDF("plane.urdf",0,0,-1) #load URDF, given a relative or absolute file+path obj = pybullet.loadURDF("r2d2.urdf") diff --git a/examples/pybullet/testrender.py b/examples/pybullet/testrender.py index da96bbbc5..5935c388f 100644 --- a/examples/pybullet/testrender.py +++ b/examples/pybullet/testrender.py @@ -5,7 +5,7 @@ import pybullet pybullet.connect(pybullet.GUI) pybullet.loadURDF("r2d2.urdf") -camTargetPos = [0,0,0] +camTargetPos = [0.,0.,0.] 
cameraUp = [0,0,1] cameraPos = [1,1,1] yaw = 40 @@ -18,29 +18,28 @@ pixelWidth = 320 pixelHeight = 240 nearPlane = 0.01 farPlane = 1000 - +lightDirection = [0,1,0] fov = 60 #img_arr = pybullet.renderImage(pixelWidth, pixelHeight) #renderImage(w, h, view[16], projection[16]) #img_arr = pybullet.renderImage(pixelWidth, pixelHeight, cameraPos, camTargetPos, cameraUp, nearPlane, farPlane) for pitch in range (0,360,10) : - img_arr = pybullet.renderImage(pixelWidth, pixelHeight, camTargetPos, camDistance, yaw, pitch, roll, upAxisIndex, nearPlane, farPlane, fov) - - w=img_arr[0] #width of the image, in pixels - h=img_arr[1] #height of the image, in pixels - rgb=img_arr[2] #color data RGB - dep=img_arr[3] #depth data - - #print 'width = %d height = %d' % (w,h) - - # reshape creates np array - np_img_arr = np.reshape(rgb, (h, w, 4)) - np_img_arr = np_img_arr*(1./255.) - - #show - plt.imshow(np_img_arr,interpolation='none') - #plt.show() - plt.pause(0.01) + viewMatrix = pybullet.computeViewMatrixFromYawPitchRoll(camTargetPos, camDistance, yaw, pitch, roll, upAxisIndex) + aspect = float(pixelWidth) / pixelHeight; + projectionMatrix = pybullet.computeProjectionMatrixFOV(fov, aspect, nearPlane, farPlane); + img_arr = pybullet.getCameraImage(pixelWidth, pixelHeight, viewMatrix,projectionMatrix, lightDirection) + w=img_arr[0] + h=img_arr[1] + rgb=img_arr[2] + dep=img_arr[3] + #print 'width = %d height = %d' % (w,h) + # reshape creates np array + np_img_arr = np.reshape(rgb, (h, w, 4)) + np_img_arr = np_img_arr*(1./255.) 
+show + plt.imshow(np_img_arr,interpolation='none') + + plt.pause(0.01) pybullet.resetSimulation() diff --git a/examples/pybullet/testrender_np.py b/examples/pybullet/testrender_np.py index 56439dde6..b8fca10eb 100644 --- a/examples/pybullet/testrender_np.py +++ b/examples/pybullet/testrender_np.py @@ -1,3 +1,6 @@ + +#make sure to compile pybullet with PYBULLET_USE_NUMPY enabled, otherwise use testrender.py (slower but compatible without numpy) + import numpy as np import matplotlib.pyplot as plt import pybullet @@ -23,28 +26,30 @@ farPlane = 1000 fov = 60 main_start = time.time() -#img_arr = pybullet.renderImage(pixelWidth, pixelHeight) -#renderImage(w, h, view[16], projection[16]) -#img_arr = pybullet.renderImage(pixelWidth, pixelHeight, cameraPos, camTargetPos, cameraUp, nearPlane, farPlane) for pitch in range (0,360,10) : - start = time.time() - img_arr = pybullet.renderImage(pixelWidth, pixelHeight, camTargetPos, camDistance, yaw, pitch, roll, upAxisIndex, nearPlane, farPlane, fov) - stop = time.time() - print "renderImage %f" % (stop - start) + start = time.time() + viewMatrix = pybullet.computeViewMatrixFromYawPitchRoll(camTargetPos, camDistance, yaw, pitch, roll, upAxisIndex) + aspect = float(pixelWidth) / pixelHeight; + projectionMatrix = pybullet.computeProjectionMatrixFOV(fov, aspect, nearPlane, farPlane); + img_arr = pybullet.getCameraImage(pixelWidth, pixelHeight, viewMatrix,projectionMatrix, [0,1,0]) + stop = time.time() + print ("renderImage %f" % (stop - start)) - w=img_arr[0] #width of the image, in pixels - h=img_arr[1] #height of the image, in pixels - rgb=img_arr[2] #color data RGB - dep=img_arr[3] #depth data + w=img_arr[0] #width of the image, in pixels + h=img_arr[1] #height of the image, in pixels + rgb=img_arr[2] #color data RGB + dep=img_arr[3] #depth data - #print 'width = %d height = %d' % (w,h) + print ('width = %d height = %d' % (w,h)) - #show - plt.imshow(rgb,interpolation='none') - #plt.show() - plt.pause(0.01) + #note that sending the data 
to matplotlib is really slow + #show + plt.imshow(rgb,interpolation='none') + #plt.show() + plt.pause(0.01) main_stop = time.time() -print "Total time %f" % (main_stop - main_start) + +print ("Total time %f" % (main_stop - main_start)) pybullet.resetSimulation()