From 1621fe82ab31adbc2c31e2b7e3ab746409c84efe Mon Sep 17 00:00:00 2001
From: Alcolado Nuthall <gn00217@cvplws235.eps.surrey.ac.uk>
Date: Wed, 26 Jul 2023 09:27:32 +0100
Subject: [PATCH] fix: issue with overwriting and feat: pointcloud updates

---
 generated_materials/camera_path_depth.json    |  42 +++++++++
 generated_materials/camera_path_rgb.json      |  42 +++++++++
 .../__pycache__/nerf_render.cpython-310.pyc   | Bin 4246 -> 4156 bytes
 .../pcd_publisher_node.cpython-310.pyc        | Bin 2918 -> 2871 bytes
 .../__pycache__/render_lidar.cpython-310.pyc  | Bin 0 -> 3303 bytes
 .../sim_depth_generator.cpython-310.pyc       | Bin 2850 -> 2870 bytes
 .../sim_rgb_generator.cpython-310.pyc         | Bin 2842 -> 2860 bytes
 nerf_sim/nerf_render.py                       |   7 +-
 nerf_sim/pcd_publisher_node.py                |  50 ++++-------
 nerf_sim/render_lidar.py                      |  82 ++++++++++++++++++
 nerf_sim/sim_depth_generator.py               |   5 +-
 nerf_sim/sim_rgb_generator.py                 |   5 +-
 12 files changed, 192 insertions(+), 41 deletions(-)
 create mode 100644 generated_materials/camera_path_depth.json
 create mode 100644 generated_materials/camera_path_rgb.json
 create mode 100644 nerf_sim/__pycache__/render_lidar.cpython-310.pyc
 create mode 100644 nerf_sim/render_lidar.py

diff --git a/generated_materials/camera_path_depth.json b/generated_materials/camera_path_depth.json
new file mode 100644
index 0000000..1c48c13
--- /dev/null
+++ b/generated_materials/camera_path_depth.json
@@ -0,0 +1,42 @@
+{
+    "camera_type": "perspective",
+    "render_height": 480,
+    "render_width": 640,
+    "camera_path": [
+        {
+            "fov": 70,
+            "aspect": 1.33333333333,
+            "camera_to_world": [
+                [
+                    -0.7712574369763269,
+                    0.1513242754712088,
+                    -0.6182741540464238,
+                    0.37396875264637797
+                ],
+                [
+                    -0.6360692717618674,
+                    -0.21990707861025355,
+                    0.7396328537169178,
+                    0.009873900166690274
+                ],
+                [
+                    -0.024038457293159843,
+                    0.9637125299746899,
+                    0.26585769152075983,
+                    -0.07500160249766082
+                ],
+                [
+                    0.0,
+                    0.0,
+                    0.0,
+                    1.0
+                ]
+            ]
+        }
+    ],
+    "fps": 24,
+    "seconds": 4,
+    "smoothness_value": 0.5,
+    "is_cycle": "false",
+    "crop": null
+}
\ No newline at end of file
diff --git a/generated_materials/camera_path_rgb.json b/generated_materials/camera_path_rgb.json
new file mode 100644
index 0000000..1c48c13
--- /dev/null
+++ b/generated_materials/camera_path_rgb.json
@@ -0,0 +1,42 @@
+{
+    "camera_type": "perspective",
+    "render_height": 480,
+    "render_width": 640,
+    "camera_path": [
+        {
+            "fov": 70,
+            "aspect": 1.33333333333,
+            "camera_to_world": [
+                [
+                    -0.7712574369763269,
+                    0.1513242754712088,
+                    -0.6182741540464238,
+                    0.37396875264637797
+                ],
+                [
+                    -0.6360692717618674,
+                    -0.21990707861025355,
+                    0.7396328537169178,
+                    0.009873900166690274
+                ],
+                [
+                    -0.024038457293159843,
+                    0.9637125299746899,
+                    0.26585769152075983,
+                    -0.07500160249766082
+                ],
+                [
+                    0.0,
+                    0.0,
+                    0.0,
+                    1.0
+                ]
+            ]
+        }
+    ],
+    "fps": 24,
+    "seconds": 4,
+    "smoothness_value": 0.5,
+    "is_cycle": "false",
+    "crop": null
+}
\ No newline at end of file
diff --git a/nerf_sim/__pycache__/nerf_render.cpython-310.pyc b/nerf_sim/__pycache__/nerf_render.cpython-310.pyc
index 67c8120d4b5521aa01e2737672e9544f2e1be06d..7b6193e84355ea083960aac2af291f6d80411378 100644
GIT binary patch
delta 534
zcmYjM!D<vi5UrY-p6!|0Uf0cLvr&kNU?d)5yodx3f=2~~pyH}5OR5vIF|(m(K#*>7
z*qj0(hZez$=+TpiUK9_x`3ti65q^R-34&d`$Lp?s-Syrq{FqlGrBdMa?AI~f`K-Q4
z{OW(oz12<Vwe*y3&w>_o_9jq^ruLyXy$SjZwa2&-(DZ4aRflHwMXy704VJq8AbIES
zrx#YP@UtZlj?qFN%fP~r)i9)h!o%=s<f?OKhXibl{KwS!Je!lk1Z*|;{M+n$qFEMo
zCO^q8pL^9cIwx(4tLuXuZ3g|_LHT@nSd4Oy_0C?%_x`Pfg<{-h6t9{1Z*XCWHW;G|
zc%Fdpbe1FKqB49R+2{j0@dyYau5pNv%WF`~y+nkn9y6?<Xck;d;A*9?g5MIhN)nf~
z6&qT?-$+JGl}gTot=X7W%E%aXhNAc|bJ9wGTz`_Cq@6^gU7h<ywYrd)HJ;WyWSX@9
zi`n1UdZBZU?YGs9#!eM$nJGqzP5(*V!WsWZ9W*<YSybk*D2;jGj~a`Yy1Y-JiFK|f
Xwkpd>$zWcZyWCv5XnJ3MH@@Rvg)eqz

delta 588
zcmZ8cTWb?R6h5=JY<4qAOq-->p;{l5inu7EQu-h$;)76%mttBBOU@)FnrxU^D3n<$
zrcYwL4fv+$PY|EHppX8C{sh7Q;912N&*A&{&YUx6zWI9W^$Jfo4}jw3ukZNu1OMc+
zZ{~-;e^pfQ$dt8fq9R<ZtwSH$b7Ef9TR`>PI^aCk51`%XLyuwOZ>MKr6BiELb`$-Z
zu-Y<@if?Os)owfzGVV;`^!}NlO2)5v(;N+!*Ur<kDIl9N4IM_x`lGDW)-a^Ap@TL$
zGkeF$@(DL%8AG7Ot{DZ{^}TD+ZXzG3N!%5YOw_r~Kqliz$!;_jve!|AQPdYC<5Xa0
zL@WpKLlymR$NUVIxhE}hBR6u|G}Z7z*dwXRL;*7!;Ysr%Z0zSV|H3E130DZ}84($V
z%t~|P7BaNZp4rcNW~cT$N89gN=4=6iLpik>jQz8qmUbC~Jns^;gH8)F7a{XfS36U>
zq4SS>X+c}s(Qs<$({c}8;^Go$Pv<HigoF}-qc;n)f-acbrS0(0NhV?}H_4p&Qd$dr
zd5fgn%IgNdR7yyw(?lMtnH_$aEtog_Nj;yJjhtMfLtCX%-Z2-;C)lF7S8i6S%(oVc
Q@&+|mkIU_kkISFgZ!HLl)Bpeg

diff --git a/nerf_sim/__pycache__/pcd_publisher_node.cpython-310.pyc b/nerf_sim/__pycache__/pcd_publisher_node.cpython-310.pyc
index 4cea66041ff744a63bec567f794f74aae6112038..c3bece276be9d80d65c3276c56c3ef7c5e6b36ec 100644
GIT binary patch
delta 1493
zcmY*ZOKT%X5bo-Es5KgC{g9mv364nuUa`awi@+Y7WgR;kNQ~F)<01%?v3hKeb~H0|
zk03H5bdX_p13nlXvj_V(B#>+JClcr%5ZGU^w<OgqJBeqgn(BIW*Vk3^=khPhjjUex
z2tIFr|CKNK=Z*K__4(Z|##Z9>Yn>VpPPm=;{d%WvWJnsFTd0F%t~37?;SP6C33nrV
zVs#dHg|k!AS>!&ia_<y6&1i|&o>Q?KH75k);fbME{f*vouLa89%Xoy}+T$qYk=TuQ
zwjOJ>y`RbR-@Y3I`ngCJlH5U7kO{c}4wnF=Bp0->dN!xry0l6XIH+-p3b#wrW2nP#
zP(`^4r0^!_AJjN`LR!wATFARR$Oi{WERQ1*q<9Qvq3(@FL)m#Iqn5Ac9687%p;!>a
zX<P)sRMO2yi(L_gMHI-vf$WMnFXAi}wvL;}bVZb6!fu!(2VwWQ@^5(QI+QC+vARwf
zC5O{B>*=_LhIu5Z_xE7|nwASbJz3h{-r2uC=KA$9x)i;xADz=d+D0-MVa1412|e1g
z47Uw;j;u=<(UKw#h!T3a9430j$}xg;;c3`3um&W$r>uUMw`yV$6Dvy$QpFILSeL8d
zdj&%UC(6yUI4z{AnQzcdvH=$rj1aeUSmg@imL-~cZS$}QXZClrlVP{XCn)Oj-$RlN
z*$TMOKp0L|ZU8XDcY0uN7{pABst34jC}lTFk|3C+G<lcw4!1Sa_~`RU-As)5bj~xn
z8$fECz6aB6KcG;M9)kD^bd!y+)h?WpUP8}(MG9-&Lj+Ct!5JM{rFFqB$;f&^Uce#M
z9eK)%Dz_)FLnbh?3$Irzfjeg~axkr5;^ZLQbzXUwaK>3_za+fcBr|?(X_C_7-dPoU
zwid47rLS(A)|0E@PcNM&tzr*3ycp)u^mQeJ`qXFb<ts=LY;RNJ)y?|({jKRU_9fhz
z{>Z+)CqBg5S`AUhk3i({mLBdt+IrgFP&Ca|<uJ*@qP-#3w0%R{oluByIQ^`;Hg1@;
zJPz}y<%$pV%FjdW=pr~2VLzHh=^e!9n%&dP9O+XnncG-JNqfjU$XHW1`W$7{w_WP~
z<-0beK!G9Y*M&LF-wGYoo&Hfh250)f`*{CrT$?8Q-;L|^%I=48`V6JnFi{sgV2%%B
zUt4jOwLJ)0u-95vmK;h&Gbz5A{_NcyYfNxv3m)|4kz7Y69%|JR4)tLm<No?=c!MP7
z2#T5K`J*W7M}-*v7i0bgPCDr4L$Rgf-7JsNHdj{G=BD#ZIp##-BXqc`;laP6sf2Cz
hGd@V7`}#j9SCKKq1(Aa5mr(|Y-J;iKz6bW-{{Rw+Sr`BS

delta 1458
zcmZ8g&u`;I6rS<#xK5g;zXWa3!m{jIm71z$xlooumqh{^3avO)S`FTe(>k?dGcy%U
zNfe}1S}uDi{s)o^5@!w^xpU{hT(}|i1pfiNaVTZ6^*+zMnfd(Q_ul-t@cTj~Etg#c
z&)F|O(Z%XzWf@;wuD*MKWA|{1B8p@0u>3xvIyEj3H9}*kA6BSItqXKGO<ihJ=K>#A
z!x>sS(b#ra9U=^;_6zf;t`v2{$I@t|^zzTo2L{Y7FddAa0|+!kBTVra!#S;wh1S+b
zMi1tFO_(FAr?u@d?jd3er-#~hyEKHIJv4H~RNGT=jdfTpQ?n1>QKbjio*rx9s{+@|
zs75Vn39~&*?Ikog(hIUViuRH3G-NGnQj(oMi6ed-GSW$Dc)nf#GG!+`3z}h_rF?@#
zEbY`eYt}m<YY`qLbrEtwl0bwk3SwT*(x@XyGfq$GS{8Su4y;#7)}xu?8Nbai%&yiC
z0R4Gs^3yD2(jg@3M1l~hr<+u2Q7Wq}3?QHzKB;vwX$L%DEa*zBSmDyl82k}<j)N@B
zf5Te?uL&nbNJNwXbEWu@W)R0ug64@dCvN8`Op{Qsu3SC}1!1WOM3i<^CM-H5EtV$a
zta~oCERy(B-;f3m<Cb&^kpgkdOo(546LQEPO6m7lmI!I)Uue6760A;x@fv`K4b9Ul
znulw)fvfl><9vQ^=i%P9)@#?vH<dKkj~c7q*a74B0HB&+1PSPYHgwfcRTix164cg;
zzEQ>08Uv{(LFSuG#UQ8tBVrLZn^$i8pJMx%t-zeU@-N(0qeTE8jH@Pf^om*2&X=AG
z`*~%3OI5KGBq1T+8cUXvfGy@P8?*im^DX5p=v8WEun9f#wCamY&mS8r`0M;DW91k+
z*zQ42BWH~I`cw22w={~WcHxK;)ra^Y8sfeo-1bxtQ{x%#n*z1Ho=Htmq)e?B5Zp++
zXMBUGGlwR4y)uV-IxRgbfmZGhu6bL_->_3Wn}26-E)=n<2&KCPS3xw>Bzy2l{)hb@
zUd``2kKd984MgBq*sRKBUG20Vef04=2b&vAB_!=uoCacZLz+}{p*8L+rxkED$$xh4
zem18L&H(t<q(EL4un?|Ii-CIVky_qSu&v-t1w~Y^t5G4|9T+KE{u+QYXJHRl@()Y5
z@SQv^#aPdOExo$8tG263|F5#rNP?&ns^jWndYXk@?3f;A`!H2SRO^KJxZ&5N&bwS{
zDQAs**IgZ`q@~9NB?&*`YXIy6<uW^`N!FbRV5M2u*`!RdGgW7%;w{+OT{XJtha0t>
ktt6$Vari*>JzoZJFuZVS)az&tu4>Lb$6a?k7aM>54Zt33_W%F@

diff --git a/nerf_sim/__pycache__/render_lidar.cpython-310.pyc b/nerf_sim/__pycache__/render_lidar.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4b60e6de2f8acc2f8db6c5157b35032574502dfa
GIT binary patch
literal 3303
zcma)8OK%*<5uTp+&MqIKD3R1F)XUN);X{&b$PO(l79+>7!YGpE00SF?;dFC3t9hws
zR?!l>Cn-lA@(*^AlafD>qklk7z8L6ZauSl8fgFq&&euI$npPAbyXflb>Z+dZuBxv(
zpPKSC{Fc7@g8sFsX@8f4!#@Lqn`q^~K&VDZqWN`Ce4@s>ucOx!BQ<?9wS0@nc_Xn?
z$9GcKchiR7NIl<6n|?EG`K@%ypHezzGM&!&Gm0n4tbatUH|Ni(e)658{yfH3ax6XW
zA5Rzjg>=zhR5o^^rAz*j;+^C~deT3scsDtfp7u|tXZ$m2+(>?uF8j-h_mZ>eIsaUG
z-ansS@Gn5tq^)O~f00hnY4mS=sL>fZ`%I&=Pj&wiCHJ-Vk(W5Tw$;`}gN9WYC1F_#
zYbRz2Z9Brf6(&izoiJf;#!)4lJ8{K$m<adRMHS~6IO~&MfqCmehOuz-$BZX=NZUl1
zcf)G8Z3yc&1eGv8$|_;s-wg{UXYEf|R^}X>#~z1CP_n95U>n}u&D;0G6l-!ZjYW5I
zp6-vKJ5yV2PPU5W8~4Kg2fd796CT)Fw-oMe-YctKT1N7KlV1aj42oN5<)1((`Ai>b
zLqheTJ|F{aQ5)zdwV^=`YCh8{Z3G$Q#@N`@o|2(St%;o4)S>Ra-0EmUdth#ApI;g}
z0}K7Rp*yfgdS!IY${N`NXH3vL19wd3wL=p7K;lT9jzJsLd#;ZO(H{Nu(dEh*VV$vb
zRTEz7{PQ<|IrIBJ-}s{Q)gO!Zyx(5D`EnVjhzQ8!r-QN3?xTHu9HgTSw9lO(QKune
z@(2BqeqVc`z0if3!big4A&tYVgg{?i7bNCOFjM54jkVw9{9##y5nJPVc{PZ6o~>>7
z;)JecjPC?xoPN)6mQluoB&H!>E&9USNaC`J^0c@im;74p?YOpgvBO{+RsoGUi)19N
zZX|gWCglw<6N~?OD!w?`9zP+u(+5Mf?`1hW3!)~^$&tLDco+1g<i4?^9u41Je}OQh
z>~S2ic0=e{A?%QcS%;x73Y<h(RnDVbVOJ_v`AOIcBg#u*lyN4k5()DVrbdd9k%mu1
zGs}Z$H_S4Yl%h#lQiVaiq{kA(xjZUjGu(^QUbR~alZX9MG!Ev3vp@B{2l%r5>XHAt
zv3ZC~c{rDtq(v6>MPlkMk^edTU7~-ho8G_m?>vWJhgu}>e)5y?VWO!#*akBolq_k;
z0j-N@=@N|fC2d|?KrU2z*BF^&ZD@j%v22W-u{Lti8e?rt24>*x&*-Bjq_57l1}5Uy
z*b)ZsYzvFB0>CgOoSes<828~FgaK|gzea`IVR_0bjsUuw$60kN$$L~dy(&)PsxPKL
zMbPB@8m)UcG4Dx`GNPg<tgM$J^6WwymeRrS5_}ltNzTi*B|O}Duw5gDllQ8k2Q?Er
zEBP;Q6T&DW`s(!`-M@YosAVI$pG#0KTOcl(C!T(SoPh)Q9SD8^IT&jO+D)`_6@+QB
zy7>A7fRAr5lUYj|v#Gv;$|AwVcK|XrdRHN|BbxVAR^5p=e|~REEGW3dArV;pGANL4
z(QYq$_}Xky{{1mNTH#|)fn@=yq>sUM4XRV)x$&7flyXp@AV8ySZtd-@J<gLg&Po;{
z{?|T!XRTsowSp^5!(#32x87NQb2|!G*!3N{a^(u6E8+TE+bio=-@F<|SFWzFuU}J;
z5W-9UpLUdIJ8@^VpC+OaXFCiyf~3^>7Pc-L^5a38Q`WXP3b`hlvcv?n5ub&mR&<NF
zU`d?8QkW%6b_5At^`sjFaTZrWa1Cb4eIC(~K_=+@w|n!4?s|2i`h(P&hMkM1uJ%hX
z5>_c-b*NYRQ<*NfT9gc(>d$ptEeZfij5)LhWMmCYYK--vEp@2bnP>s}59VAoXC2Hn
zDtmyG8@Pi8-U=GEpW8zZ?sHHWJbblTH3r_G`BWdaVCl&OBwE$#PIafdGo#r-3p!JS
zsqT^P9ChZk(NWCJcaL?CBXJ=O=G1s$w5W8JRH<>d_ywGdUzFrLNN45Nsek<K?Gx`S
zBt_Moj=~fOj=Q9QPdphigbLDV2lVB4uxOv)GVb^rl1TjFKLG(|$_Ie3IV)ILaS3Al
zo|=&UO;q{_Q^K=ovb3oBL4@pO{5`2dDt<#6ye|jM9iFE_!a8Bp7nVv({-K=Es~juj
z9FLOCs*jIZ5AgK3&S#|QrqpN@c^NAxZclSrCWT!?k+9^K*ZGE&ktc%m`OhS4hMa}M
zE8-_C2_(-yg_!?Rl8+?$SZP(SK@NlWvUf3Co&oWQtxp@C-XgQA&4YTnr_Yj8fb6}Q
z!@&nTloo_*hMNuo;RP}UdWqy)LGY*-CiRSpeeTE|7=S!669gzgc~uviAm9%$#WP6`
z%6iQ$OWChLuAr4Ns6A~Km80b-Y5Q{RLRI!`lrfs8C~A|p7c03eDO`dZN@Tx?vksR<
zh0E%~ZAs*=xLlaamlyXygj3+nk;L0|D%&|K?KN6E*bRB7Km;-6CoU0IG`iuF{lz8N
z@+nDV5-Nv^lkyRQhaQddRaqa(RfUg=RnQ<VAJR}k2H;4GJYy&itFji9^>ef2N93w=
zAmZ#Hn|jsy2@o8do*$KZ3y1XTj8IFKbzmQp>&X{bodB{lsEd7_AB4M6yZ45Cp_DRh
dTvQg9%#sVFwO`m}rD@?m3&}ijb@%9;_aF2zW1#>5

literal 0
HcmV?d00001

diff --git a/nerf_sim/__pycache__/sim_depth_generator.cpython-310.pyc b/nerf_sim/__pycache__/sim_depth_generator.cpython-310.pyc
index b2cda0000a9ab877dc1bc2beee547aa3b5f2a29b..2bf63e627516f78f0525992fe760c9869edf6d43 100644
GIT binary patch
delta 231
zcmZ1^woQyTpO=@50SI_+97qY?$UB#rj}yoP2{{9CvC-rm%*TaN#B140_-oixB$}D}
zd22apI41Y9NJ>|hGcq!OU?dRNFk}fX5K56uXRKjg$h3JsiytGS&tyI}N0#`M)Pl*r
zY&#h_C$qDgva%E;mSjw}W>;dgn;ge(%*n#U!^p!Z^IvH44EBGFfs?0mEMe4{Y{NO3
zF>dk&PFc+ukUKyQ;$dWC;$q@p<zXyJ0dj9~#K-3*X6D7mYcl$2iWkLC7USB(29ha?
RntX%Hncba(kBgDzKL8k{HQWFI

delta 222
zcmdlcwn&UOpO=@50SE#=>`yV?$UB#rj|0d92{{9CvCiZj%*Xjk_*2AFB$}D}d288g
z*e6e9k(4YiXJljm!AKykVaO6(Ae17R&bW|q^HCN*Mn<>EB5aP6quI7Ia!eLxH=XRq
zuE=OJxq#i6orj5sQRctE<TdR77y~A+=2*h0Jvo4LGFvRrx+2cWAGw6}SaK6H^P)l4
zgWSQx$i~FQ#KFqLSd<Lp-r|Ul2g%3BYcl$2iWkLA*5ca329ha?ocxB%nca<pkBgDz
FKLB7pGz0(u

diff --git a/nerf_sim/__pycache__/sim_rgb_generator.cpython-310.pyc b/nerf_sim/__pycache__/sim_rgb_generator.cpython-310.pyc
index ac3c028df3e5426c0c50f9664ec583fa1895d07a..c6dc5064dc764c4c96e1a82dd5af22683aa27947 100644
GIT binary patch
delta 234
zcmbOwwnmIMpO=@50SG=`KagU(k#{;XA19Cp5^@IOVx!5Mn2+nEh}W{0@Yk@XNHjC`
z^VV|Ia4Zn0VOz+^$Pmtu!oUKA&CHApc^rle#g&s6vWT(NurFlVypzR`k<n{12b&{H
zd{KJRWKXuOjGUAIvzfB86eN~pOg3g$Vziqa#%|2X!o<VK!zlA#Xz~R1e~f{X`#F{{
z>P$A_oXi+E`2?q|W(>$FAUE+avN3Tnaj^0*7Nr2Ww>aYCa}zW3;^Q?L{WQgk;wSTQ
X?O_AS6h%$Gz~#*D&cVmU$nqZm$G<oj

delta 212
zcmZ1@HcN~*pO=@50SG3%-=8ACk#{;X9|w>J5^@IOVx7sGn2+<7@TZ8UNHjC`^VYK0
zuutx0k(4YiXJljm!AKykVaO6(Ae17R&bW|q^L`dTMn<>Ed~A-BgW0w+a!lrAH=XRl
zuE=OJIgQ<zorj5sQRctE<VEcN7y~9R<XFO}J=u+OGGpxITb#0*(I87fw(~HuF>x_*
yu<|e#B?Gy)IO5}T6EpMT<24!mG{uYJCM$65VFSq&MNWRi<;?EJ!N<kO@*e<588K)8

diff --git a/nerf_sim/nerf_render.py b/nerf_sim/nerf_render.py
index 5a4f746..44406d4 100644
--- a/nerf_sim/nerf_render.py
+++ b/nerf_sim/nerf_render.py
@@ -96,21 +96,20 @@ class RenderCameraPath():
     load_config: Path
     output_path: Path = Path("renders/output.mp4")
     output_format: Literal["images", "video"] = "images"
-    camera_path_filename: Path = Path("camera_path.json")
     rendered_output_names: List[str] = []
     
     def __init__(self):
         self.load_config = Path('/vol/research/K9/test-colmap/65904bca-e8fd-11ed-a05b-0242ac120003/nerfacto/65904bca-e8fd-11ed-a05b-0242ac120003/config.yml')
-        self.camera_path_filename = Path('/workspace/ros2_iron/src/nerf_sim/generated_materials/camera_path.json')
         _, self.pipeline, _, _ = eval_setup(
             self.load_config,
             eval_num_rays_per_chunk=self.eval_num_rays_per_chunk,
             test_mode="inference",
         )
 
-    def run(self, render_output_names: List[str] = []):
+    def run(self, render_output_names: List[str] = [], camera_path_filename=Path("renders/camera_path.json")):
         pipeline = self.pipeline
-        with open(self.camera_path_filename, "r", encoding="utf-8") as f:
+        camera_path_filename = Path(camera_path_filename)
+        with open(camera_path_filename, "r", encoding="utf-8") as f:
                 camera_path = json.load(f)
         seconds = camera_path["seconds"]
         crop_data = None # we're not cropping the image
diff --git a/nerf_sim/pcd_publisher_node.py b/nerf_sim/pcd_publisher_node.py
index 711fceb..4b102b5 100644
--- a/nerf_sim/pcd_publisher_node.py
+++ b/nerf_sim/pcd_publisher_node.py
@@ -6,6 +6,8 @@ import rclpy
 from rclpy.node import Node
 import sensor_msgs.msg as sensor_msgs
 import std_msgs.msg as std_msgs
+from nerf_sim.render_lidar import RenderLiDAR
+from geometry_msgs.msg import Pose
 
 import numpy as np
 import open3d as o3d
@@ -14,42 +16,21 @@ class PCDPublisher(Node):
 
     def __init__(self):
         super().__init__('pcd_publisher_node')
-
-        pcd_path = '/workspace/ros2_iron/src/nerf_sim/test_materials/point_cloud.ply'
-
-        # I use Open3D to read point clouds and meshes. It's a great library!
-        pcd = o3d.io.read_point_cloud(pcd_path)
-        # I then convert it into a numpy array.
-        self.points = np.asarray(pcd.points)
-        print(self.points.shape)
-        
-        # I create a publisher that publishes sensor_msgs.PointCloud2 to the 
-        # topic 'pcd'. The value '10' refers to the history_depth, which I 
-        # believe is related to the ROS1 concept of queue size. 
-        # Read more here: 
-        # http://wiki.ros.org/rospy/Overview/Publishers%20and%20Subscribers
         self.pcd_publisher = self.create_publisher(sensor_msgs.PointCloud2, 'pcd', 10)
-        timer_period = 1/30.0
-        self.timer = self.create_timer(timer_period, self.timer_callback)
-
-        # This rotation matrix is used for visualization purposes. It rotates
-        # the point cloud on each timer callback. 
-        self.R = o3d.geometry.get_rotation_matrix_from_xyz([0, 0, np.pi/48])
-
-              
-                
-    def timer_callback(self):
-        # For visualization purposes, I rotate the point cloud with self.R 
-        # to make it spin. 
-        self.points = self.points
-        # Here I use the point_cloud() function to convert the numpy array 
-        # into a sensor_msgs.PointCloud2 object. The second argument is the 
-        # name of the frame the point cloud will be represented in. The default
-        # (fixed) frame in RViz is called 'map'
+        
+        # subscribe to pose coming in from topic
+        self.subscription = self.create_subscription(Pose, 'pose', self.listener_callback, 10)
+        self.subscription
+        
+        self.render = RenderLiDAR()
+
+    def listener_callback(self, data):
+        pcd = self.render.run([data.position.x, data.position.y, data.position.z])
+        self.points = pcd
         self.pcd = point_cloud(self.points, 'map')
-        # Then I publish the PointCloud2 object 
         self.pcd_publisher.publish(self.pcd)
 
+
 def point_cloud(points, parent_frame):
     """ Creates a point cloud message.
     Args:
@@ -72,9 +53,12 @@ def point_cloud(points, parent_frame):
     # which desribes the size of each individual point.
     ros_dtype = sensor_msgs.PointField.FLOAT32
     dtype = np.float32
+
     itemsize = np.dtype(dtype).itemsize # A 32-bit float takes 4 bytes.
 
-    data = points.astype(dtype).tobytes() 
+    data = np.array(points, dtype=np.float32)
+
+    data = data.tobytes() 
 
     # The fields specify what the bytes represents. The first 4 bytes 
     # represents the x-coordinate, the next 4 the y-coordinate, etc.
diff --git a/nerf_sim/render_lidar.py b/nerf_sim/render_lidar.py
new file mode 100644
index 0000000..4b83785
--- /dev/null
+++ b/nerf_sim/render_lidar.py
@@ -0,0 +1,82 @@
+import torch
+import math
+import random
+from dataclasses import dataclass, field
+from typing import Callable, Dict, Literal, Optional, Tuple, Union, overload
+
+from pathlib import Path
+import open3d as o3d
+import argparse
+import torch
+import numpy as np
+from jaxtyping import Float, Int, Shaped
+from torch import Tensor
+from nerfstudio.utils.eval_utils import eval_setup
+from nerfstudio.model_components.ray_samplers import PDFSampler, UniformSampler
+
+from nerfstudio.utils.tensor_dataclass import TensorDataclass
+from nerfstudio.cameras.rays import RayBundle, RaySamples, Frustums
+
+def generat_direction():
+    min, max = -30.67, 10.67 # view range degrees
+    no_channels = 32
+    delta_range = (max - min) / no_channels # delta degrees
+    
+    elevations = [math.radians(i) for i in np.arange(min, max, delta_range)]
+    azimuths = [math.radians(i) for i in np.arange(0, 360, 1)]
+
+    rays = []
+    for elevation in elevations:
+        for azimuth in azimuths:
+            rays.append(torch.tensor([
+                        math.cos(elevation) * math.sin(azimuth),
+                        math.cos(elevation) * math.cos(azimuth),
+                        math.sin(elevation)
+                    ], device='cuda:0'))
+
+    return torch.stack(rays)    
+
+def generate_pointcloud(ray_bundle, outputs):
+    rgbs = outputs['rgb']
+    points = ray_bundle.origins + ray_bundle.directions * outputs['depth']
+    
+    pcd = o3d.geometry.PointCloud()
+    pcd.points = o3d.utility.Vector3dVector(points.double().cpu().numpy())
+    pcd.colors = o3d.utility.Vector3dVector(rgbs.detach().double().cpu().numpy())
+    
+    return pcd
+
+class RenderLiDAR():
+    eval_num_rays_per_chunk: Optional[int] = None
+    def __init__(self):
+        self.load_config = Path('/vol/research/K9/test-colmap/65904bca-e8fd-11ed-a05b-0242ac120003/nerfacto/65904bca-e8fd-11ed-a05b-0242ac120003/config.yml')
+        _, self.pipeline, _, _ = eval_setup(
+            self.load_config,
+            eval_num_rays_per_chunk=self.eval_num_rays_per_chunk,
+            test_mode="inference",
+        )
+    def run(self, origin):
+        cuda0 = torch.device('cuda:0')
+    
+        directions = generat_direction()
+        
+        #TODO: change origin depending on pose coming in
+        origin = torch.tensor(origin, device=cuda0)
+        
+        x,y = directions.shape
+        origins = origin.repeat(x,1)
+        
+        area = torch.tensor([0.0001], device=cuda0)
+        pixel_area = area.repeat(x, 1)
+        
+        camera_indices = torch.tensor([0], device=cuda0)
+    
+        ray_bundle = RayBundle(origins, directions, pixel_area, camera_indices=camera_indices, nears=None, fars=None)
+        outputs = self.pipeline.model(ray_bundle)
+        
+        pcd = generate_pointcloud(ray_bundle, outputs)
+        
+        torch.cuda.empty_cache()
+        tpcd = o3d.t.geometry.PointCloud.from_legacy(pcd)
+        tpcd.point.colors = (tpcd.point.colors * 255).to(o3d.core.Dtype.UInt8)  # type: ignore
+        return o3d.core.Tensor.numpy(tpcd.point.positions)
\ No newline at end of file
diff --git a/nerf_sim/sim_depth_generator.py b/nerf_sim/sim_depth_generator.py
index 30e067e..6595cb7 100644
--- a/nerf_sim/sim_depth_generator.py
+++ b/nerf_sim/sim_depth_generator.py
@@ -69,10 +69,11 @@ class MinimalPublisher(Node):
         nerf_info['camera_path'][0]['camera_to_world'] = matrix.tolist()
         json_object = json.dumps(nerf_info, indent=4)
         
-        with open("/workspace/ros2_iron/src/nerf_sim/generated_materials/camera_path.json", "w") as outfile:
+        path = "/workspace/ros2_iron/src/nerf_sim/generated_materials/camera_path_depth.json"
+        with open(path, "w") as outfile:
             outfile.write(json_object)
 
-        self.img_depth = self.render.run(['depth'])
+        self.img_depth = self.render.run(['depth'], path)
         self.img_depth *= 255
         self.img_depth = cv2.cvtColor(self.img_depth, cv2.COLOR_RGB2BGR)
 
diff --git a/nerf_sim/sim_rgb_generator.py b/nerf_sim/sim_rgb_generator.py
index b90c2ed..9078c68 100644
--- a/nerf_sim/sim_rgb_generator.py
+++ b/nerf_sim/sim_rgb_generator.py
@@ -69,10 +69,11 @@ class MinimalPublisher(Node):
         nerf_info['camera_path'][0]['camera_to_world'] = matrix.tolist()
         json_object = json.dumps(nerf_info, indent=4)
         
-        with open("/workspace/ros2_iron/src/nerf_sim/generated_materials/camera_path.json", "w") as outfile:
+        path = "/workspace/ros2_iron/src/nerf_sim/generated_materials/camera_path_rgb.json"
+        with open(path, "w") as outfile:
             outfile.write(json_object)
 
-        self.img_rgb = self.render.run(['rgb'])
+        self.img_rgb = self.render.run(['rgb'], path)
         self.img_rgb *= 255
         self.img_rgb = cv2.cvtColor(self.img_rgb, cv2.COLOR_RGB2BGR)
 
-- 
GitLab