From 11ab9975a0a5128bb0b06621d3c2eb06312a14df Mon Sep 17 00:00:00 2001
From: s-kim
Date: Thu, 17 Nov 2022 20:55:26 +0900
Subject: [PATCH 1/4] add hsr_command.sh

---
 hsr_command.sh | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 hsr_command.sh

diff --git a/hsr_command.sh b/hsr_command.sh
new file mode 100644
index 0000000..84a38da
--- /dev/null
+++ b/hsr_command.sh
@@ -0,0 +1,12 @@
+#! /bin/bash
+docker run --rm --net=host -it detic_ros:latest \
+    /bin/bash -i -c \
+    'source ~/.bashrc; \
+    rossetip; rossetmaster 133.11.216.222; \
+    roslaunch detic_ros sample_detection.launch \
+    debug:=true \
+    vocabulary:=custom \
+    custom_vocabulary:=box \
+    input_image:=/hsrb/hand_camera/color/image_raw\
+    input_depth:=/hsrb/hand_camera/aligned_depth_to_color/image_raw\
+    input_camera_info:=/hsrb/hand_camera/aligned_depth_to_color/camera_info'

From 321ac0c995a398fbea543d69dc94e04adb4aaa21 Mon Sep 17 00:00:00 2001
From: s-kim
Date: Fri, 18 Nov 2022 00:34:32 +0900
Subject: [PATCH 2/4] add hsr_command.sh

---
 hsr_command.sh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/hsr_command.sh b/hsr_command.sh
index 84a38da..eb4d171 100644
--- a/hsr_command.sh
+++ b/hsr_command.sh
@@ -1,10 +1,11 @@
 #! /bin/bash
-docker run --rm --net=host -it detic_ros:latest \
+docker run --rm --net=host -it --gpus 1 detic_ros:latest \
     /bin/bash -i -c \
     'source ~/.bashrc; \
     rossetip; rossetmaster 133.11.216.222; \
     roslaunch detic_ros sample_detection.launch \
     debug:=true \
+    confidence_threshold:=0.3\
     vocabulary:=custom \
     custom_vocabulary:=box \
     input_image:=/hsrb/hand_camera/color/image_raw\

From ccd33738539c80db3bd761ea2ee98356a604e822 Mon Sep 17 00:00:00 2001
From: soonhyo
Date: Fri, 18 Nov 2022 01:11:24 +0900
Subject: [PATCH 3/4] add use_pca as param in launch file

---
 hsr_command.sh | 1 +
 launch/sample_detection.launch | 4 +++-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/hsr_command.sh b/hsr_command.sh
index eb4d171..d55cc66 100644
--- a/hsr_command.sh
+++ b/hsr_command.sh
@@ -6,6 +6,7 @@ docker run --rm --net=host -it --gpus 1 detic_ros:latest \
     /bin/bash -i -c \
     'source ~/.bashrc; \
     rossetip; rossetmaster 133.11.216.222; \
     roslaunch detic_ros sample_detection.launch \
     debug:=true \
     confidence_threshold:=0.3\
+    use_pca:=true\
     vocabulary:=custom \
     custom_vocabulary:=box \
     input_image:=/hsrb/hand_camera/color/image_raw\

diff --git a/launch/sample_detection.launch b/launch/sample_detection.launch
index cd6c71c..f37495d 100644
--- a/launch/sample_detection.launch
+++ b/launch/sample_detection.launch
[The XML markup of this launch diff was stripped during extraction; only the hunk markers and the embedded rosparam YAML lines survive.]
@@ -8,6 +8,8 @@
+
+
@@ -85,11 +87,11 @@
       align_boxes: true
       align_boxes_with_plane: false
       force_to_flip_z_axis: false
-      use_pca: false
       target_frame_id: base_footprint
       approximate_sync: true
       queue_size: 100
+
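Patches 1-3 start the detic_ros container against the HSR's hand camera and tune the detector (GPU access, confidence_threshold, use_pca, a custom vocabulary). A quick way to check that the pipeline launched by hsr_command.sh is actually detecting something is to listen to the node's SegmentationInfo output. The sketch below is illustrative only and not part of the patch series; in particular the topic name /detic_segmentor/segmentation_info is an assumption, so substitute whatever name sample_detection.launch gives the Detic node.

#!/usr/bin/env python3
# Illustrative monitor, not part of the patch series. The topic name below is
# an assumption; point it at the ~segmentation_info topic of the Detic node
# started by sample_detection.launch.
import rospy
from detic_ros.msg import SegmentationInfo


def callback(msg: SegmentationInfo) -> None:
    # detected_classes and scores are index-aligned; the message also carries
    # the segmentation image and its header
    for name, score in zip(msg.detected_classes, msg.scores):
        rospy.loginfo('%s: %.2f', name, score)


if __name__ == '__main__':
    rospy.init_node('detection_monitor')
    rospy.Subscriber('/detic_segmentor/segmentation_info',  # assumed node name
                     SegmentationInfo, callback, queue_size=1)
    rospy.spin()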
From 44b55864a1b8552fe08ad3097e17bb9e8904c560 Mon Sep 17 00:00:00 2001
From: soonhyo
Date: Thu, 13 Apr 2023 02:40:26 +0900
Subject: [PATCH 4/4] add target segmentation

---
 #ycb_objects_list_photo.txt# | 1 +
 commands/#hsr_command_head_ycb.sh# | 15 ++
 commands/hsr_command.sh~ | 14 ++
 .../hsr_command_hand.sh | 3 +-
 commands/hsr_command_hand_ycb.sh | 14 ++
 commands/hsr_command_hand_ycb.sh~ | 14 ++
 commands/hsr_command_hand_ycb_five.sh | 14 ++
 commands/hsr_command_hand_ycb_local.sh | 16 ++
 commands/hsr_command_hand_ycb_local.sh~ | 16 ++
 commands/hsr_command_hand_ycb_photo.sh | 14 ++
 commands/hsr_command_hand_ycb_photo.sh~ | 14 ++
 commands/hsr_command_hand_ycb_photo_local.sh | 14 ++
 commands/hsr_command_hand_ycb_photo_local.sh~ | 14 ++
 commands/hsr_command_head.sh | 14 ++
 commands/hsr_command_head_ycb.sh | 14 ++
 commands/seg_command.sh | 14 ++
 commands/seg_command.sh~ | 14 ++
 commands/ycb_objects_list.txt | 1 +
 commands/ycb_objects_list_photo.txt | 1 +
 commands/ycb_objects_with_categories.txt | 14 ++
 example/#masked_image_publisher.py# | 74 +++++++++
 example/#sub_segimg.py# | 75 +++++++++
 example/masked_image_publisher.py | 66 ++++----
 example/model/best_model.pth | Bin 0 -> 13939 bytes
 example/seg_linera_probe.py | 71 +++++++++
 example/seg_linera_probe.py~ | 71 +++++++++
 example/seg_target.py | 67 ++++++++
 example/sub_segimg.py | 75 +++++++++
 example/sub_segimg.py~ | 74 +++++++++
 launch/sample_detection_fg.launch~ | 99 ++++++++++++
 launch/sample_detection_m.launch | 112 ++++++++++++++
 launch/sample_detection_m.launch~ | 99 ++++++++++++
 launch/sample_detection_seg.launch | 104 +++++++++++++
 masked_image_publisher.py~ | 74 +++++++++
 node_script/#node.py# | 119 +++++++++++++++
 node_script/#wrapper.py# | 143 ++++++++++++++++++
 node_script/node.py | 17 ++-
 node_script/node_config.py | 2 +-
 38 files changed, 1534 insertions(+), 43 deletions(-)
 create mode 100644 #ycb_objects_list_photo.txt#
 create mode 100644 commands/#hsr_command_head_ycb.sh#
 create mode 100644 commands/hsr_command.sh~
 rename hsr_command.sh => commands/hsr_command_hand.sh (89%)
 create mode 100644 commands/hsr_command_hand_ycb.sh
 create mode 100644 commands/hsr_command_hand_ycb.sh~
 create mode 100644 commands/hsr_command_hand_ycb_five.sh
 create mode 100644 commands/hsr_command_hand_ycb_local.sh
 create mode 100644 commands/hsr_command_hand_ycb_local.sh~
 create mode 100644 commands/hsr_command_hand_ycb_photo.sh
 create mode 100644 commands/hsr_command_hand_ycb_photo.sh~
 create mode 100644 commands/hsr_command_hand_ycb_photo_local.sh
 create mode 100644 commands/hsr_command_hand_ycb_photo_local.sh~
 create mode 100644 commands/hsr_command_head.sh
 create mode 100644 commands/hsr_command_head_ycb.sh
 create mode 100644 commands/seg_command.sh
 create mode 100644 commands/seg_command.sh~
 create mode 100644 commands/ycb_objects_list.txt
 create mode 100644 commands/ycb_objects_list_photo.txt
 create mode 100644 commands/ycb_objects_with_categories.txt
 create mode 100755 example/#masked_image_publisher.py#
 create mode 100755 example/#sub_segimg.py#
 create mode 100644 example/model/best_model.pth
 create mode 100755 example/seg_linera_probe.py
 create mode 100755 example/seg_linera_probe.py~
 create mode 100755 example/seg_target.py
 create mode 100755 example/sub_segimg.py
 create mode 100755 example/sub_segimg.py~
 create mode 100644 launch/sample_detection_fg.launch~
 create mode 100644 launch/sample_detection_m.launch
 create mode 100644 launch/sample_detection_m.launch~
 create mode 100644 launch/sample_detection_seg.launch
 create mode 100755 masked_image_publisher.py~
 create mode 100755 node_script/#node.py#
 create mode 100644 node_script/#wrapper.py#

diff --git a/#ycb_objects_list_photo.txt# b/#ycb_objects_list_photo.txt#
new file mode 100644
index 0000000..7084064
--- /dev/null
+++ b/#ycb_objects_list_photo.txt#
@@ -0,0 +1 @@
+,chips can, coffee can, cracker box, box of sugar, tomato soup can, mustard container, tuna fish can, chocolate pudding box, gelatin box, potted meat can, lemon, apple, pear, orange, banana, peach, strawberries, plum, pitcher, bleach cleanser, glass cleaner, plastic wine glass, enamel-coated metal bowl, metal mug, abrasive sponge, cooking skillet with glass lid, metal plate, knife, spoon, fork, spatula, white table cloth, a power drill and wood block, middle row, scissors, a padlock and keys, markers, an adjustable wrench, Phillips- and flat-head screwdrivers, wood screws, nails, plastic bolts and nuts, and a hammer, spring clamps, mini soccer ball, softball, baseball, tennis ball, racquetball, golf ball, plastic chain, washers, foam brick, dice, marbles, rope, stacking blocks, credit card blank, Box and blocks test, 9 hole peg test, timer, Lego Dublo, magazine, Rubick’s cube, t-shirt, airplane toy

diff --git a/commands/#hsr_command_head_ycb.sh# b/commands/#hsr_command_head_ycb.sh#
new file mode 100644
index 0000000..05e2792
--- /dev/null
+++ b/commands/#hsr_command_head_ycb.sh#
@@ -0,0 +1,15 @@
+#! /bin/bash
+value=$(
[The rest of this hunk, together with the diffs for the remaining commands/* wrappers, the example/* scripts, and the binary example/model/best_model.pth, was overwritten by unrecoverable binary patch data and is omitted.]
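Judging from the file list and from the surviving custom_vocabulary:= argument in hsr_command.sh, the lost commands/*_ycb* wrappers feed one of the committed YCB object lists to Detic as its custom vocabulary. The helper below is a purely hypothetical sketch, not a reconstruction of the lost scripts: it only shows how such a comma-separated list could be collapsed into the single string a custom_vocabulary:=... launch argument expects. The file path and the clean-up rules are assumptions.

#!/usr/bin/env python3
# Hypothetical helper, not part of the patch: turn a committed object list such
# as commands/ycb_objects_list.txt (one comma-separated line) into the string
# passed as the custom_vocabulary launch argument.
import sys


def load_vocabulary(path: str) -> str:
    with open(path) as f:
        # drop empty entries (the committed list starts with a leading comma)
        names = [name.strip() for name in f.read().split(',') if name.strip()]
    return ','.join(names)


if __name__ == '__main__':
    path = sys.argv[1] if len(sys.argv) > 1 else 'commands/ycb_objects_list.txt'
    # quoting matters when this is interpolated into a roslaunch argument,
    # because the YCB names contain spaces
    print(load_vocabulary(path))

Whether the real wrappers do this in shell (the surviving value=$( line suggests command substitution) cannot be recovered from the corrupted hunk.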
[The four launch-file diffs below lost their XML markup during extraction; only the diff headers, hunk markers, and the embedded rosparam YAML lines survive. The first block is the tail of the diff for launch/sample_detection_fg.launch~, whose header was also destroyed.]
+        multi: true
+        tolerance: 0.03
+        min_size: 10
+        downsample_enable: true
+        approximate_sync: true
+        queue_size: 100
+        align_boxes: true
+        align_boxes_with_plane: false
+        force_to_flip_z_axis: false
+        target_frame_id: base_footprint
+        approximate_sync: true
+        queue_size: 100

diff --git a/launch/sample_detection_m.launch b/launch/sample_detection_m.launch
new file mode 100644
index 0000000..50e0cdf
--- /dev/null
+++ b/launch/sample_detection_m.launch
@@ -0,0 +1,112 @@
+o
+        multi: true
+        tolerance: 0.03
+        min_size: 10
+        downsample_enable: true
+        approximate_sync: true
+        queue_size: 100
+        align_boxes: true
+        align_boxes_with_plane: false
+        force_to_flip_z_axis: false
+        target_frame_id: base_footprint
+        approximate_sync: true
+        queue_size: 100

diff --git a/launch/sample_detection_m.launch~ b/launch/sample_detection_m.launch~
new file mode 100644
index 0000000..f37495d
--- /dev/null
+++ b/launch/sample_detection_m.launch~
@@ -0,0 +1,99 @@
+        multi: true
+        tolerance: 0.03
+        min_size: 10
+        downsample_enable: true
+        approximate_sync: true
+        queue_size: 100
+        align_boxes: true
+        align_boxes_with_plane: false
+        force_to_flip_z_axis: false
+        target_frame_id: base_footprint
+        approximate_sync: true
+        queue_size: 100

diff --git a/launch/sample_detection_seg.launch b/launch/sample_detection_seg.launch
new file mode 100644
index 0000000..de6f80b
--- /dev/null
+++ b/launch/sample_detection_seg.launch
@@ -0,0 +1,104 @@
+        multi: true
+        tolerance: 0.03
+        min_size: 10
+        downsample_enable: true
+        approximate_sync: true
+        queue_size: 100
+        align_boxes: true
+        align_boxes_with_plane: false
+        force_to_flip_z_axis: false
+        target_frame_id: camera_link
+        approximate_sync: true
queue_size: 100 + + + + + + + diff --git a/masked_image_publisher.py~ b/masked_image_publisher.py~ new file mode 100755 index 0000000..c7eba8d --- /dev/null +++ b/masked_image_publisher.py~ @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +import copy + +import message_filters +import numpy as np +import rospy +from cv_bridge import CvBridge +from sensor_msgs.msg import Image + +from msg import SegmentationInfo + + +class SampleNode: + + def __init__(self, mask_class_name='bottle'): + sub_image = message_filters.Subscriber(rospy.get_param('~in_image'), Image) + sub_info = message_filters.Subscriber(rospy.get_param('~seginfo'), SegmentationInfo) + sub_list = [sub_image, sub_info] + ts = message_filters.ApproximateTimeSynchronizer(sub_list, 100, 10.0) + ts.registerCallback(self.callback) + + self.pub = rospy.Publisher(rospy.get_param('~out_image'), Image, + queue_size=1) + self.class_name = mask_class_name + + def callback(self, msg_image, msg_info: SegmentationInfo): + rospy.loginfo('rec messages') + + # simply republish if class name is 'background' + if (self.class_name == 'background'): + self.pub.publish(msg_image) + return + + # find label number corresponding to desired object class name + try: + label_index = msg_info.detected_classes.index(self.class_name) + confidence_score = msg_info.scores[label_index] + rospy.loginfo('specified object class {} is detected with score {}'.format( + self.class_name, confidence_score)) + except ValueError: + rospy.logdebug('class not found: {}'.format(self.class_name)) + return + + seg_img = msg_info.segmentation + assert msg_image.width == seg_img.width + assert msg_image.height == seg_img.height + bridge = CvBridge() + img = bridge.imgmsg_to_cv2(msg_image, desired_encoding='passthrough') + + # Add 1 to label_index to account for the background + mask_indexes = np.where(img == label_index + 1) + + masked_img = copy.deepcopy(img) + masked_img[mask_indexes] = 0 # filled by black + + msg_out = bridge.cv2_to_imgmsg(masked_img, encoding="rgb8") + self.pub.publish(msg_out) + + +if __name__ == '__main__': + import sys + argv = [x for x in sys.argv if not x.startswith('_')] # remove roslaunch args + if len(argv) == 1: + class_name = 'background' + elif len(argv) == 2: + class_name = argv[1] + else: + print('Usage: masked_image_publisher.py [class_name]') + sys.exit(1) + + rospy.init_node('mask_image_publisher', anonymous=True) + SampleNode(mask_class_name=class_name) + rospy.spin() + pass diff --git a/node_script/#node.py# b/node_script/#node.py# new file mode 100755 index 0000000..ec42b77 --- /dev/null +++ b/node_script/#node.py# @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 +from typing import Optional + +import rospy +from jsk_recognition_msgs.msg import LabelArray, VectorArray +from node_config import NodeConfig +from rospy import Publisher, Subscriber +from sensor_msgs.msg import Image +from wrapper import DeticWrapper + +from detic_ros.msg import SegmentationInfo +from detic_ros.srv import DeticSeg, DeticSegRequest, DeticSegResponse + + +class DeticRosNode: + detic_wrapper: DeticWrapper + sub: Subscriber + # some debug image publisher + pub_debug_image: Optional[Publisher] + pub_debug_segmentation_image: Optional[Publisher] + + # used when you set use_jsk_msgs = True + pub_segimg: Optional[Publisher] + pub_labels: Optional[Publisher] + pub_score: Optional[Publisher] + + # otherwise, the following publisher will be used + pub_info: Optional[Publisher] + + def __init__(self, node_config: Optional[NodeConfig] = None): + if node_config is None: + node_config = 
NodeConfig.from_rosparam() + + self.detic_wrapper = DeticWrapper(node_config) + self.srv_handler = rospy.Service('~segment_image', DeticSeg, self.callback_srv) + + if node_config.enable_pubsub: + # As for large buff_size please see: + # https://answers.ros.org/question/220502/image-subscriber-lag-despite-queue-1/?answer=220505?answer=220505#post-id-22050://answers.ros.org/question/220502/image-subscriber-lag-despite-queue-1/?answer=220505?answer=220505#post-id-220505 + self.sub = rospy.Subscriber('~input_image', Image, self.callback_image, queue_size=1, buff_size=2**24) + if node_config.use_jsk_msgs: + self.pub_segimg = rospy.Publisher('~segmentation', Image, queue_size=1) + self.pub_labels = rospy.Publisher('~detected_classes', LabelArray, queue_size=1) + self.pub_score = rospy.Publisher('~score', VectorArray, queue_size=1) + self.pub_info = rospy.Publisher('~segmentation_info', SegmentationInfo, queue_size=1) + # else: + # self.pub_info = rospy.Publisher('~segmentation_info', SegmentationInfo, + # queue_size=1) + + if node_config.out_debug_img: + self.pub_debug_image = rospy.Publisher('~debug_image', Image, queue_size=1) + else: + self.pub_debug_image = None + if node_config.out_debug_segimg: + self.pub_debug_segmentation_image = rospy.Publisher('~debug_segmentation_image', + Image, queue_size=10) + else: + self.pub_debug_segmentation_image = None + + rospy.loginfo('initialized node') + + def callback_image(self, msg: Image): + # Inference + raw_result = self.detic_wrapper.infer(msg) + + # Publish main topics + if self.detic_wrapper.node_config.use_jsk_msgs: + # assertion for mypy + assert self.pub_segimg is not None + assert self.pub_labels is not None + assert self.pub_score is not None + assert self.pub_info is not None + + seg_img = raw_result.get_ros_segmentaion_image() + labels = raw_result.get_label_array() + scores = raw_result.get_score_array() + seg_info = raw_result.get_segmentation_info() + self.pub_segimg.publish(seg_img) + self.pub_labels.publish(labels) + self.pub_score.publish(scores) + self.pub_info.publish(seg_info) + # else: + # assert self.pub_info is not None + # seg_info = raw_result.get_segmentation_info() + # self.pub_info.publish(seg_info) + + # Publish optional topics + + if self.pub_debug_image is not None: + debug_img = raw_result.get_ros_debug_image() + self.pub_debug_image.publish(debug_img) + + if self.pub_debug_segmentation_image is not None: + debug_seg_img = raw_result.get_ros_debug_segmentation_img() + self.pub_debug_segmentation_image.publish(debug_seg_img) + + # Print debug info + if self.detic_wrapper.node_config.verbose: + time_elapsed_total = (rospy.Time.now() - msg.header.stamp).to_sec() + rospy.loginfo('total elapsed time in callback {}'.format(time_elapsed_total)) + + def callback_srv(self, req: DeticSegRequest) -> DeticSegResponse: + msg = req.image + raw_result = self.detic_wrapper.infer(msg) + seginfo = raw_result.get_segmentation_info() + + resp = DeticSegResponse() + resp.seg_info = seginfo + + if raw_result.visualization is not None: + debug_image = raw_result.get_ros_debug_segmentation_img() + resp.debug_image = debug_image + return resp + + +if __name__ == '__main__': + rospy.init_node('detic_node', anonymous=True) + node = DeticRosNode() + rospy.spin() diff --git a/node_script/#wrapper.py# b/node_script/#wrapper.py# new file mode 100644 index 0000000..857eb4d --- /dev/null +++ b/node_script/#wrapper.py# @@ -0,0 +1,143 @@ +import os +from dataclasses import dataclass +from typing import List, Optional + +import detic +import numpy as np 
+import rospkg +import rospy +import torch +from cv_bridge import CvBridge +from detectron2.utils.visualizer import VisImage +from detic.predictor import VisualizationDemo +from jsk_recognition_msgs.msg import Label, LabelArray, VectorArray +from node_config import NodeConfig +from sensor_msgs.msg import Image +from std_msgs.msg import Header + +from detic_ros.msg import SegmentationInfo + +_cv_bridge = CvBridge() + + +@dataclass(frozen=True) +class InferenceRawResult: + segmentation_raw_image: np.ndarray + class_indices: List[int] + scores: List[float] + visualization: Optional[VisImage] + header: Header + detected_class_names: List[str] + + def get_ros_segmentaion_image(self) -> Image: + seg_img = _cv_bridge.cv2_to_imgmsg(self.segmentation_raw_image, encoding="32SC1") + seg_img.header = self.header + return seg_img + + def get_ros_debug_image(self) -> Image: + message = "you didn't configure the wrapper so that it computes the debug images" + assert self.visualization is not None, message + debug_img = _cv_bridge.cv2_to_imgmsg( + self.visualization.get_image(), encoding="rgb8") + debug_img.header = self.header + return debug_img + + def get_ros_debug_segmentation_img(self) -> Image: + human_friendly_scaling = 255 // self.segmentation_raw_image.max() + new_data = (self.segmentation_raw_image * human_friendly_scaling).astype(np.uint8) + debug_seg_img = _cv_bridge.cv2_to_imgmsg(new_data, encoding="mono8") + debug_seg_img.header = self.header + return debug_seg_img + + def get_label_array(self) -> LabelArray: + labels = [Label(id=i + 1, name=name) + for i, name + in zip(self.class_indices, self.detected_class_names)] + lab_arr = LabelArray(header=self.header, labels=labels) + return lab_arr + + def get_score_array(self) -> VectorArray: + vec_arr = VectorArray(header=self.header, vector_dim=len(self.scores), data=self.scores) + return vec_arr + + def get_segmentation_info(self) -> SegmentationInfo: + seg_img = self.get_ros_segmentaion_image() + seg_info = SegmentationInfo(detected_classes=self.detected_class_names, + scores=self.scores, + segmentation=seg_img, + header=self.header) + return seg_info + + +class DeticWrapper: + predictor: VisualizationDemo + node_config: NodeConfig + class_names: List[str] + + class DummyArgs: + vocabulary: str + + def __init__(self, vocabulary, custom_vocabulary): + assert vocabulary in ['lvis', 'openimages', 'objects365', 'coco', 'custom'] + self.vocabulary = vocabulary + self.custom_vocabulary = custom_vocabulary + + def __init__(self, node_config: NodeConfig): + self._adhoc_hack_metadata_path() + detectron_cfg = node_config.to_detectron_config() + dummy_args = self.DummyArgs(node_config.vocabulary, node_config.custom_vocabulary) + + self.predictor = VisualizationDemo(detectron_cfg, dummy_args) + self.node_config = node_config + self.class_names = self.predictor.metadata.get("thing_classes", None) + + @staticmethod + def _adhoc_hack_metadata_path(): + # because original BUILDIN_CLASSIFIER is somehow position dependent + rospack = rospkg.RosPack() + pack_path = rospack.get_path('detic_ros') + path_dict = detic.predictor.BUILDIN_CLASSIFIER + for key in path_dict.keys(): + path_dict[key] = os.path.join(pack_path, path_dict[key]) + + def infer(self, msg: Image) -> InferenceRawResult: + # Segmentation image, detected classes, detection scores, visualization image + img = _cv_bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8') + + if self.node_config.verbose: + time_start = rospy.Time.now() + + if self.node_config.out_debug_img: + predictions, visualized_output = 
self.predictor.run_on_image(img) + else: + predictions = self.predictor.predictor(img) + visualized_output = None + instances = predictions['instances'].to(torch.device("cpu")) + + if self.node_config.verbose: + time_elapsed = (rospy.Time.now() - time_start).to_sec() + rospy.loginfo('elapsed time to inference {}'.format(time_elapsed)) + rospy.loginfo('detected {} classes'.format(len(instances))) + + # Initialize segmentation data + data = np.zeros((img.shape[0], img.shape[1]), dtype=np.int32) + + # largest to smallest order to reduce occlusion. + sorted_index = np.argsort([-mask.sum() for mask in instances.pred_masks]) + for i in sorted_index: + mask = instances.pred_masks[i] + # label 0 is reserved for background label, so starting from 1 + data[mask] = (i + 1) + + # Get class and score arrays + class_indices = instances.pred_classes.tolist() + detected_classes_names = [self.class_names[i] for i in class_indices] + scores = instances.scores.tolist() + result = InferenceRawResult( + data, + class_indices, + scores, + visualized_output, + msg.header, + detected_classes_names) + return result diff --git a/node_script/node.py b/node_script/node.py index 9a33812..ec42b77 100755 --- a/node_script/node.py +++ b/node_script/node.py @@ -42,9 +42,10 @@ def __init__(self, node_config: Optional[NodeConfig] = None): self.pub_segimg = rospy.Publisher('~segmentation', Image, queue_size=1) self.pub_labels = rospy.Publisher('~detected_classes', LabelArray, queue_size=1) self.pub_score = rospy.Publisher('~score', VectorArray, queue_size=1) - else: - self.pub_info = rospy.Publisher('~segmentation_info', SegmentationInfo, - queue_size=1) + self.pub_info = rospy.Publisher('~segmentation_info', SegmentationInfo, queue_size=1) + # else: + # self.pub_info = rospy.Publisher('~segmentation_info', SegmentationInfo, + # queue_size=1) if node_config.out_debug_img: self.pub_debug_image = rospy.Publisher('~debug_image', Image, queue_size=1) @@ -68,16 +69,20 @@ def callback_image(self, msg: Image): assert self.pub_segimg is not None assert self.pub_labels is not None assert self.pub_score is not None + assert self.pub_info is not None + seg_img = raw_result.get_ros_segmentaion_image() labels = raw_result.get_label_array() scores = raw_result.get_score_array() + seg_info = raw_result.get_segmentation_info() self.pub_segimg.publish(seg_img) self.pub_labels.publish(labels) self.pub_score.publish(scores) - else: - assert self.pub_info is not None - seg_info = raw_result.get_segmentation_info() self.pub_info.publish(seg_info) + # else: + # assert self.pub_info is not None + # seg_info = raw_result.get_segmentation_info() + # self.pub_info.publish(seg_info) # Publish optional topics diff --git a/node_script/node_config.py b/node_script/node_config.py index 645f397..d1f197e 100644 --- a/node_script/node_config.py +++ b/node_script/node_config.py @@ -86,7 +86,7 @@ def from_rosparam(cls): rospy.get_param('~model_type', 'swin'), rospy.get_param('~enable_pubsub', True), rospy.get_param('~out_debug_img', True), - rospy.get_param('~out_debug_segimg', False), + rospy.get_param('~out_debug_segimg', True), rospy.get_param('~verbose', True), rospy.get_param('~use_jsk_msgs', False), rospy.get_param('~confidence_threshold', 0.5),
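Taken together, node_script/wrapper.py and node_script/node.py fix the output convention that example/masked_image_publisher.py already relies on: the published segmentation image is 32SC1, label 0 is the background, instance i is written as i + 1, and detected_classes/scores are aligned with those instance indices. A downstream consumer can therefore recover a per-class mask from a SegmentationInfo message as in the sketch below; the helper name is illustrative and not part of the patch.

#!/usr/bin/env python3
# Sketch of a SegmentationInfo consumer. It only uses conventions visible in
# this patch: label 0 = background, instance i is stored as i + 1 in the
# 32SC1 segmentation image, and detected_classes/scores are index-aligned.
import numpy as np
from cv_bridge import CvBridge
from detic_ros.msg import SegmentationInfo

_bridge = CvBridge()


def mask_for_class(info: SegmentationInfo, class_name: str) -> np.ndarray:
    """Boolean mask of the first detected instance of class_name."""
    label_index = info.detected_classes.index(class_name)  # ValueError if absent
    seg = _bridge.imgmsg_to_cv2(info.segmentation, desired_encoding='passthrough')
    return seg == (label_index + 1)  # +1: label 0 is reserved for background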