diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml new file mode 100644 index 0000000..7541ee7 --- /dev/null +++ b/.github/workflows/pypi.yml @@ -0,0 +1,24 @@ +name: pypi +on: [push] +jobs: + upload-if-tagged-commit: + runs-on: ubuntu-latest + steps: + - name: Checkout 🛎️Ivy Demo Utils + uses: actions/checkout@v2 + with: + path: ivy_demo_utils + persist-credentials: false + + - name: Install Dependencies + run: | + pip3 install --upgrade pip + pip3 install twine setuptools wheel + + - name: Upload to Pypi + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + env: + PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + cd ivy_demo_utils + bash deploy_pypi.sh diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3631ced --- /dev/null +++ b/.gitignore @@ -0,0 +1,11 @@ +*.egg-info/ +.idea +.run +__pycache__/ +.pytest_cache/ +build/ +dist/ +autogenerated_source/ +log/ +chkpt/ +saved_model/ diff --git a/LICENCE b/LICENCE new file mode 100644 index 0000000..4e8b92a --- /dev/null +++ b/LICENCE @@ -0,0 +1,203 @@ +Copyright 2021 The Ivy Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..04d2709 --- /dev/null +++ b/README.rst @@ -0,0 +1,44 @@ +.. raw:: html + +

+    [centered project logo: docs/partial_source/logos/logo.png]
+
+**A set of utilities for creating visual demos for Ivy libraries.**
+
+|
+| **Supported Frameworks:**
+
+.. raw:: html
+
+    [supported framework logos: JAX, TensorFlow, PyTorch, MXNet, NumPy]
diff --git a/deploy_pypi.sh b/deploy_pypi.sh new file mode 100755 index 0000000..0a43dbe --- /dev/null +++ b/deploy_pypi.sh @@ -0,0 +1,2 @@ +python3 setup.py sdist bdist_wheel +python3 -m twine upload dist/* -u "__token__" -p "$PYPI_PASSWORD" --verbose diff --git a/docs/partial_source/logos/logo.png b/docs/partial_source/logos/logo.png new file mode 100644 index 0000000..0c8b8f2 Binary files /dev/null and b/docs/partial_source/logos/logo.png differ diff --git a/docs/partial_source/logos/logo.svg b/docs/partial_source/logos/logo.svg new file mode 100644 index 0000000..eae5056 --- /dev/null +++ b/docs/partial_source/logos/logo.svg @@ -0,0 +1,150 @@ + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + IVY + DEMO UTILS + diff --git a/docs/partial_source/logos/supported/empty.png b/docs/partial_source/logos/supported/empty.png new file mode 100644 index 0000000..ca5bcd1 Binary files /dev/null and b/docs/partial_source/logos/supported/empty.png differ diff --git a/docs/partial_source/logos/supported/jax_logo.png b/docs/partial_source/logos/supported/jax_logo.png new file mode 100644 index 0000000..7df24dd Binary files /dev/null and b/docs/partial_source/logos/supported/jax_logo.png differ diff --git a/docs/partial_source/logos/supported/mxnet_logo.png b/docs/partial_source/logos/supported/mxnet_logo.png new file mode 100644 index 0000000..a55e79c Binary files /dev/null and b/docs/partial_source/logos/supported/mxnet_logo.png differ diff --git a/docs/partial_source/logos/supported/numpy_logo.png b/docs/partial_source/logos/supported/numpy_logo.png new file mode 100644 index 0000000..3041f6d Binary files /dev/null and b/docs/partial_source/logos/supported/numpy_logo.png differ diff --git a/docs/partial_source/logos/supported/pytorch_logo.png b/docs/partial_source/logos/supported/pytorch_logo.png new file mode 100644 index 0000000..7d4d2a3 Binary files /dev/null and b/docs/partial_source/logos/supported/pytorch_logo.png differ diff --git a/docs/partial_source/logos/supported/tensorflow_logo.png b/docs/partial_source/logos/supported/tensorflow_logo.png new file mode 100644 index 0000000..8f94bc9 Binary files /dev/null and b/docs/partial_source/logos/supported/tensorflow_logo.png differ diff --git a/ivy_demo_utils/__init__.py b/ivy_demo_utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ivy_demo_utils/framework_utils.py b/ivy_demo_utils/framework_utils.py new file mode 100644 index 0000000..6fdc9a7 --- /dev/null +++ b/ivy_demo_utils/framework_utils.py @@ -0,0 +1,90 @@ +# global +import logging +import numpy as np + + +# Framework Getters # +# ------------------# + +def try_import_ivy_jax(warn=False): + try: + import ivy.jax + return ivy.jax + except (ImportError, ModuleNotFoundError) as e: + if not warn: + return + logging.warning('{}\n\nEither jax or jaxlib appear to not be installed, ' + 'ivy.jax can therefore not be imported.\n'.format(e)) + + +def try_import_ivy_tf(warn=False): + try: + import ivy.tensorflow + return ivy.tensorflow + except (ImportError, ModuleNotFoundError) as e: + if not warn: + return + logging.warning('{}\n\ntensorflow does not appear to be installed, ' + 'ivy.tensorflow can therefore not be imported.\n'.format(e)) + + +def try_import_ivy_torch(warn=False): + try: + import ivy.torch + return ivy.torch + except (ImportError, ModuleNotFoundError) as e: + if not warn: + return + logging.warning('{}\n\ntorch does not appear to be installed, ' + 'ivy.torch can therefore not be imported.\n'.format(e)) + + +def try_import_ivy_mxnd(warn=False): + 
try: + import ivy.mxnd + return ivy.mxnd + except (ImportError, ModuleNotFoundError) as e: + if not warn: + return + logging.warning('{}\n\nmxnet does not appear to be installed, ' + 'ivy.mxnd can therefore not be imported.\n'.format(e)) + + +def try_import_ivy_numpy(warn=False): + try: + import ivy.numpy + return ivy.numpy + except (ImportError, ModuleNotFoundError) as e: + if not warn: + return + logging.warning('{}\n\nnumpy does not appear to be installed, ' + 'ivy.numpy can therefore not be imported.\n'.format(e)) + + +# Framework Selection # +# --------------------# + +FW_DICT = {'jax': try_import_ivy_jax, + 'tensorflow': try_import_ivy_tf, + 'torch': try_import_ivy_torch, + 'mxnd': try_import_ivy_mxnd, + 'numpy': try_import_ivy_numpy} + + +def get_framework_from_str(f_str): + return FW_DICT[f_str](warn=True) + + +def choose_random_framework(excluded=None): + excluded = list() if excluded is None else excluded + while True: + if len(excluded) == 5: + raise Exception('Unable to select framework, all backends are either excluded or not installed.') + f_key = np.random.choice([f_srt for f_srt in list(FW_DICT.keys()) if f_srt not in excluded]) + f = get_framework_from_str(f_key) + if f is None: + excluded.append(f_key) + continue + else: + print('\nselected framework: {}\n'.format(f_key)) + return f diff --git a/ivy_demo_utils/ivy_scene/__init__.py b/ivy_demo_utils/ivy_scene/__init__.py new file mode 100644 index 0000000..d2bd24e --- /dev/null +++ b/ivy_demo_utils/ivy_scene/__init__.py @@ -0,0 +1,2 @@ +from . import scene_utils +from .scene_utils import * diff --git a/ivy_demo_utils/ivy_scene/no_sim/__init__.py b/ivy_demo_utils/ivy_scene/no_sim/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_0.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_0.npy new file mode 100644 index 0000000..5d1746f Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_0.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_1.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_1.npy new file mode 100644 index 0000000..5d1746f Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_1.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_10.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_10.npy new file mode 100644 index 0000000..da0c466 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_10.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_2.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_2.npy new file mode 100644 index 0000000..b0fbf20 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_2.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_3.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_3.npy new file mode 100644 index 0000000..3cebba5 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_3.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_4.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_4.npy new file mode 100644 index 0000000..3cebba5 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_4.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_5.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_5.npy new file mode 100644 index 0000000..50b5331 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_5.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_6.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_6.npy new file mode 100644 index 0000000..ab1dba9 Binary files /dev/null and 
b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_6.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_7.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_7.npy new file mode 100644 index 0000000..331004b Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_7.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_8.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_8.npy new file mode 100644 index 0000000..e24c50d Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_8.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_9.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_9.npy new file mode 100644 index 0000000..5592556 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_bbx_9.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_0.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_0.npy new file mode 100644 index 0000000..2887ddb Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_0.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_1.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_1.npy new file mode 100644 index 0000000..021f3fd Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_1.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_10.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_10.npy new file mode 100644 index 0000000..82a1ff2 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_10.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_2.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_2.npy new file mode 100644 index 0000000..c540acf Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_2.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_3.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_3.npy new file mode 100644 index 0000000..340108e Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_3.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_4.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_4.npy new file mode 100644 index 0000000..fb61961 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_4.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_5.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_5.npy new file mode 100644 index 0000000..3d83acd Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_5.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_6.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_6.npy new file mode 100644 index 0000000..bdf8cb8 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_6.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_7.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_7.npy new file mode 100644 index 0000000..75bb525 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_7.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_8.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_8.npy new file mode 100644 index 0000000..16c94cc Binary files /dev/null and b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_8.npy differ diff --git a/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_9.npy b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_9.npy new file mode 100644 index 0000000..94fc963 Binary files /dev/null and 
b/ivy_demo_utils/ivy_scene/no_sim/obj_inv_ext_mat_9.npy differ diff --git a/ivy_demo_utils/ivy_scene/scene.ttt b/ivy_demo_utils/ivy_scene/scene.ttt new file mode 100644 index 0000000..5f021d4 Binary files /dev/null and b/ivy_demo_utils/ivy_scene/scene.ttt differ diff --git a/ivy_demo_utils/ivy_scene/scene_utils.py b/ivy_demo_utils/ivy_scene/scene_utils.py new file mode 100644 index 0000000..f19f4b9 --- /dev/null +++ b/ivy_demo_utils/ivy_scene/scene_utils.py @@ -0,0 +1,294 @@ +# global +import os +import ivy +import math +import ivy_mech +import ivy_vision +import numpy as np +from ivy_vision.containers import PrimitiveScene + +# pyrep +try: + from pyrep import PyRep + from pyrep.objects.dummy import Dummy + from pyrep.objects.shape import Shape + from pyrep.robots.arms.arm import Arm + from pyrep.const import PrimitiveShape + from pyrep.objects.camera import Camera + from pyrep.objects.vision_sensor import VisionSensor + from pyrep.objects.cartesian_path import CartesianPath + from pyrep.sensors.spherical_vision_sensor import SphericalVisionSensor +except ImportError: + print('\nPyRep appears to not be installed. For demos with an interactive simulator, please install PyRep.\n') + PyRep, Dummy, Shape, Arm, PrimitiveShape, Camera, VisionSensor, CartesianPath, SphericalVisionSensor =\ + tuple([None]*9) + + +class SimObj: + + def __init__(self, pr_obj, f): + self._pr_obj = pr_obj + self._f = f + + def get_pos(self): + return self._f.array(self._pr_obj.get_position(), 'float32') + + def set_pos(self, pos): + return self._pr_obj.set_position(self._f.to_numpy(pos)) + + def set_rot_mat(self, rot_mat): + inv_ext_mat = np.concatenate((self._f.to_numpy(rot_mat), + np.reshape(self._pr_obj.get_position(), (3, 1))), -1) + self._pr_obj.set_matrix(inv_ext_mat.reshape((-1,)).tolist()) + + def get_inv_ext_mat(self): + return self._f.reshape(self._f.array(self._pr_obj.get_matrix(), 'float32'), (3, 4)) + + def get_ext_mat(self): + return self._f.inv(ivy_mech.make_transformation_homogeneous(self.get_inv_ext_mat()))[0:3, :] + + +class SimCam(SimObj): + + def __init__(self, pr_obj, f): + super().__init__(pr_obj, f) + self._img_dims = pr_obj.get_resolution() + if isinstance(pr_obj, VisionSensor): + pp_offsets = self._f.array([item/2 - 0.5 for item in self._img_dims], 'float32') + persp_angles = self._f.array([pr_obj.get_perspective_angle() * math.pi/180]*2, 'float32') + intrinsics = ivy_vision.persp_angles_and_pp_offsets_to_intrinsics_object( + persp_angles, pp_offsets, self._img_dims) + self.calib_mat = intrinsics.calib_mats + self.inv_calib_mat = intrinsics.inv_calib_mats + + def cap(self): + self._pr_obj.handle_explicitly() + return self._f.expand_dims(self._f.array(self._pr_obj.capture_depth(True).tolist()), -1),\ + self._f.array(self._pr_obj.capture_rgb().tolist()) + + +# noinspection PyProtectedMember +class BaseSimulator: + + def __init__(self, interactive, try_use_sim, f): + self._interactive = interactive + self._try_use_sim = try_use_sim + self._f = f + if PyRep is not None and try_use_sim: + self.with_pyrep = True + self._pyrep_init() + else: + self.with_pyrep = False + + def _user_prompt(self, str_in): + if self._interactive: + input(str_in) + else: + print(str_in) + + def _pyrep_init(self): + + # pyrep + self._pyrep = PyRep() + scene_filepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'scene.ttt') + self._pyrep.launch(scene_filepath, headless=False, responsive_ui=True) + + # target + self._target = Dummy('target') + + # default camera + self._default_camera = Camera('DefaultCamera') 
+ + # default vision sensor + self._default_vision_sensor = VisionSensor('DefaultVisionSensor') + + # vision sensors + self._vision_sensor_0 = VisionSensor('vision_sensor_0') + self._vision_sensor_1 = VisionSensor('vision_sensor_1') + self._vision_sensor_2 = VisionSensor('vision_sensor_2') + self._vision_sensor_3 = VisionSensor('vision_sensor_3') + self._vision_sensor_4 = VisionSensor('vision_sensor_4') + self._vision_sensor_5 = VisionSensor('vision_sensor_5') + self._vision_sensors = [self._vision_sensor_0, self._vision_sensor_1, self._vision_sensor_2, + self._vision_sensor_3, self._vision_sensor_4, self._vision_sensor_5] + + # vision sensor bodies + self._vision_sensor_body_0 = Shape('vision_sensor_body_0') + self._vision_sensor_body_1 = Shape('vision_sensor_body_1') + self._vision_sensor_body_2 = Shape('vision_sensor_body_2') + self._vision_sensor_body_3 = Shape('vision_sensor_body_3') + self._vision_sensor_body_4 = Shape('vision_sensor_body_4') + self._vision_sensor_body_5 = Shape('vision_sensor_body_5') + self._vision_sensor_bodies =\ + [self._vision_sensor_body_0, self._vision_sensor_body_1, self._vision_sensor_body_2, + self._vision_sensor_body_3, self._vision_sensor_body_4, self._vision_sensor_body_5] + + # vision sensor rays + self._vision_sensor_rays_0 = [Shape('ray{}vs0'.format(i)) for i in range(4)] + self._vision_sensor_rays_1 = [Shape('ray{}vs1'.format(i)) for i in range(4)] + self._vision_sensor_rays_2 = [Shape('ray{}vs2'.format(i)) for i in range(4)] + self._vision_sensor_rays_3 = [Shape('ray{}vs3'.format(i)) for i in range(4)] + self._vision_sensor_rays_4 = [Shape('ray{}vs4'.format(i)) for i in range(4)] + self._vision_sensor_rays_5 = [Shape('ray{}vs5'.format(i)) for i in range(4)] + self._vision_sensor_rays = [self._vision_sensor_rays_0, self._vision_sensor_rays_1, self._vision_sensor_rays_2, + self._vision_sensor_rays_3, self._vision_sensor_rays_4, self._vision_sensor_rays_5] + + # objects + self._dining_chair_0 = Shape('diningChair0') + self._dining_chair_1 = Shape('diningChair1') + self._dining_table = Shape('diningTable_visible') + self._high_table_0 = Shape('highTable0') + self._high_table_1 = Shape('highTable1') + self._plant = Shape('indoorPlant_visible') + self._sofa = Shape('sofa') + self._swivel_chair = Shape('swivelChair') + self._rack = Shape('_rack') + self._cupboard = Shape('cupboard') + self._box = Shape('Cuboid') + self._objects = [self._dining_chair_0, self._dining_chair_1, self._dining_table, self._high_table_0, + self._high_table_1, self._plant, self._sofa, self._swivel_chair, self._rack, self._cupboard, + self._box] + + # spherical vision sensor + self._spherical_vision_sensor = SphericalVisionSensor('sphericalVisionRGBAndDepth') + + # drone + self._drone = Shape('Quadricopter') + + # robot + self._robot = Arm(0, 'Mico', 6) + self._robot_base = Dummy('Mico_dh_base') + self._robot_target = Arm(0, 'MicoTarget', 6) + + # spline paths + self._spline_paths = list() + + # primitive scene + self._with_primitive_scene_vis = False + + def _update_path_visualization_pyrep(self, multi_spline_points, multi_spline_sdf_vals): + if len(self._spline_paths) > 0: + for spline_path_segs in self._spline_paths: + for spline_path_seg in spline_path_segs: + spline_path_seg.remove() + self._spline_paths.clear() + for spline_points, sdf_vals in zip(multi_spline_points, multi_spline_sdf_vals): + sdf_flags_0 = sdf_vals[1:, 0] > 0 + sdf_flags_1 = sdf_vals[:-1, 0] > 0 + sdf_borders = sdf_flags_1 != sdf_flags_0 + borders_indices = self._f.indices_where(sdf_borders) + if 
borders_indices.shape[0] != 0: + to_concat = (self._f.array([0], 'int32'), self._f.cast(borders_indices, 'int32')[:, 0], + self._f.array([-1], 'int32')) + else: + to_concat = (self._f.array([0], 'int32'), self._f.array([-1], 'int32')) + border_indices = self._f.concatenate(to_concat, 0) + num_groups = border_indices.shape[0] - 1 + spline_path = list() + for i in range(num_groups): + border_idx_i = int(self._f.to_numpy(border_indices[i]).item()) + border_idx_ip1 = int(self._f.to_numpy(border_indices[i + 1]).item()) + if i < num_groups - 1: + control_group = spline_points[border_idx_i:border_idx_ip1] + sdf_group = sdf_vals[border_idx_i:border_idx_ip1] + else: + control_group = spline_points[border_idx_i:] + sdf_group = sdf_vals[border_idx_i:] + num_points = control_group.shape[0] + orientation_zeros = np.zeros((num_points, 3)) + color = (0.2, 0.8, 0.2) if sdf_group[-1] > 0 else (0.8, 0.2, 0.2) + control_poses = np.concatenate((ivy.to_numpy(control_group), orientation_zeros), -1) + spline_path_seg = CartesianPath.create(show_orientation=False, show_position=False, line_size=8, + path_color=color) + spline_path_seg.insert_control_points(control_poses.tolist()) + spline_path.append(spline_path_seg) + self._spline_paths.append(spline_path) + + def depth_to_xyz(self, depth, inv_ext_mat, inv_calib_mat, img_dims): + uniform_pixel_coords = ivy_vision.create_uniform_pixel_coords_image(img_dims, f=self._f) + pixel_coords = uniform_pixel_coords * depth + cam_coords = ivy_vision.pixel_to_cam_coords(pixel_coords, inv_calib_mat, [], img_dims, f=self._f) + return ivy_vision.cam_to_world_coords(cam_coords, inv_ext_mat)[..., 0:3] + + def get_pix_coords(self): + return ivy_vision.create_uniform_pixel_coords_image([360, 720], f=self._f)[..., 0:2] + + def setup_primitive_scene_no_sim(self, box_pos=None): + + # lists + shape_matrices_list = list() + shape_dims_list = list() + + this_dir = os.path.dirname(os.path.realpath(__file__)) + for i in range(11): + shape_mat = np.load(os.path.join(this_dir, 'no_sim/obj_inv_ext_mat_{}.npy'.format(i))) + if i == 10 and box_pos is not None: + shape_mat[..., -1:] = box_pos.reshape((1, 3, 1)) + shape_matrices_list.append(self._f.array(shape_mat, 'float32')) + shape_dims_list.append( + self._f.array(np.load(os.path.join(this_dir, 'no_sim/obj_bbx_{}.npy'.format(i))), 'float32') + ) + + # matices + shape_matrices = self._f.concatenate(shape_matrices_list, 0) + shape_dims = self._f.concatenate(shape_dims_list, 0) + + # sdf + primitive_scene = PrimitiveScene(cuboid_ext_mats=ivy.inv(ivy_mech.make_transformation_homogeneous( + shape_matrices))[..., 0:3, :], cuboid_dims=shape_dims, f=self._f) + self.sdf = primitive_scene.sdf + + def setup_primitive_scene(self): + + # shape matrices + shape_matrices = self._f.concatenate([self._f.reshape(self._f.array(obj.get_matrix(), 'float32'), (1, 3, 4)) + for obj in self._objects], 0) + + # shape dims + x_dims = self._f.concatenate([self._f.reshape(self._f.array( + obj.get_bounding_box()[1] - obj.get_bounding_box()[0], 'float32'), (1, 1)) for obj in self._objects], 0) + y_dims = self._f.concatenate([self._f.reshape(self._f.array( + obj.get_bounding_box()[3] - obj.get_bounding_box()[2], 'float32'), (1, 1)) for obj in self._objects], 0) + z_dims = self._f.concatenate([self._f.reshape(self._f.array( + obj.get_bounding_box()[5] - obj.get_bounding_box()[4], 'float32'), (1, 1)) for obj in self._objects], 0) + shape_dims = self._f.concatenate((x_dims, y_dims, z_dims), -1) + + # primitve scene visualization + if self._with_primitive_scene_vis: + scene_vis 
= [Shape.create(PrimitiveShape.CUBOID, ivy.to_numpy(shape_dim).tolist()) + for shape_dim in shape_dims] + [obj.set_matrix(ivy.to_numpy(shape_mat).reshape(-1).tolist()) + for shape_mat, obj in zip(shape_matrices, scene_vis)] + [obj.set_transparency(0.5) for obj in scene_vis] + + # sdf + primitive_scene = PrimitiveScene(cuboid_ext_mats=ivy.inv(ivy_mech.make_transformation_homogeneous( + shape_matrices))[..., 0:3, :], cuboid_dims=shape_dims, f=self._f) + self.sdf = primitive_scene.sdf + + def update_path_visualization(self, multi_spline_points, multi_spline_sdf_vals, img_path): + if not self.with_pyrep: + if not self._interactive: + return + import matplotlib.pyplot as plt + import matplotlib.image as mpimg + plt.ion() + plt.imshow(mpimg.imread(img_path)) + plt.show() + plt.pause(0.1) + plt.ioff() + return + with self._pyrep._step_lock: + self._update_path_visualization_pyrep(multi_spline_points, multi_spline_sdf_vals) + + def close(self): + if self._interactive: + input('\nPress enter to end demo.\n') + print('\nClosing simulator...\n') + + # noinspection PyUnresolvedReferences + def __del__(self): + if self.with_pyrep: + self._pyrep.stop() + self._pyrep.shutdown() + print('\nDemo finished.\n') diff --git a/ivy_demo_utils/open3d_utils.py b/ivy_demo_utils/open3d_utils.py new file mode 100644 index 0000000..b0a6d8f --- /dev/null +++ b/ivy_demo_utils/open3d_utils.py @@ -0,0 +1,159 @@ +# global +import ivy +import threading +import numpy as np +import open3d as o3d + + +# noinspection PyCallByClass +class Visualizer: + + def __init__(self, cam_ext_mat=None): + + # visualizer + self._vis = o3d.visualization.Visualizer() + self._vis.create_window() + + # visualizer control + self._ctr = self._vis.get_view_control() + self._cam_ext_mat = cam_ext_mat + self._first_pass = True + self._cam_pose_initialized = True if cam_ext_mat is None else False + + # Private # + + def _wait_for_enter(self): + input('press enter to continue...') + self._pressend_enter = True + + def _listen_for_enter_in_thread(self): + self._pressend_enter = False + self._thread = threading.Thread(target=self._wait_for_enter) + self._thread.start() + + def _join_enter_listener_thread(self): + self._thread.join() + + # Public # + + def show_point_cloud(self, xyz_data, rgb_data, interactive, sphere_inv_ext_mats=None, sphere_radii=None): + if not interactive: + return + + vectors = o3d.utility.Vector3dVector(np.reshape(ivy.to_numpy(xyz_data), (-1, 3))) + color_vectors = o3d.utility.Vector3dVector(np.reshape(ivy.to_numpy(rgb_data), (-1, 3))) + + sphere_inv_ext_mats = list() if sphere_inv_ext_mats is None else sphere_inv_ext_mats + sphere_radii = list() if sphere_radii is None else sphere_radii + + if self._first_pass: + # create point cloud + self._point_cloud = o3d.geometry.PointCloud(vectors) + self._point_cloud.colors = color_vectors + self._vis.clear_geometries() + self._vis.add_geometry(o3d.geometry.TriangleMesh.create_coordinate_frame(0.15, [0., 0., 0.]), True) + self._vis.add_geometry(self._point_cloud, True) + # spheres + self._spheres = list() + for sphere_inv_ext_mat, sphere_rad in zip(sphere_inv_ext_mats, sphere_radii): + sphere = o3d.geometry.TriangleMesh.create_sphere(sphere_rad) + sphere.paint_uniform_color(np.array([[0.], [0.], [0.]])) + sphere.transform(sphere_inv_ext_mat) + self._spheres.append(sphere) + self._vis.add_geometry(sphere, True) + else: + # update point cloud + self._point_cloud.points = vectors + self._point_cloud.colors = color_vectors + self._vis.update_geometry(self._point_cloud) + for sphere, 
sphere_inv_ext_mat in zip(self._spheres, sphere_inv_ext_mats): + sphere.transform(sphere_inv_ext_mat) + self._vis.update_geometry(sphere) + + # camera matrix + if not self._cam_pose_initialized: + cam_params = o3d.camera.PinholeCameraParameters() + cam_params.extrinsic = self._cam_ext_mat + cam_params.intrinsic = self._ctr.convert_to_pinhole_camera_parameters().intrinsic + self._ctr.convert_from_pinhole_camera_parameters(cam_params) + self._cam_pose_initialized = True + + # update flag + self._first_pass = False + + # spin visualizer until key-pressed + self._listen_for_enter_in_thread() + while not self._pressend_enter: + self._vis.poll_events() + self._join_enter_listener_thread() + + # reset spheres to origin + for sphere, sphere_inv_ext_mat in zip(self._spheres, sphere_inv_ext_mats): + sphere.transform(np.linalg.inv(sphere_inv_ext_mat)) + + # noinspection PyArgumentList + def show_voxel_grid(self, voxels, interactive, cuboid_inv_ext_mats=None, cuboid_dims=None): + + if not interactive: + return + + cuboid_inv_ext_mats = list() if cuboid_inv_ext_mats is None else cuboid_inv_ext_mats + cuboid_dims = list() if cuboid_dims is None else cuboid_dims + + voxel_grid_data = ivy.to_numpy(voxels[0]) + res = ivy.to_numpy(voxels[2]) + bb_mins = ivy.to_numpy(voxels[3]) + rgb_grid = voxel_grid_data[..., 3:6] + occupancy_grid = voxel_grid_data[..., -1:] + + boxes = list() + for x, (x_slice, x_col_slice) in enumerate(zip(occupancy_grid, rgb_grid)): + for y, (y_slice, y_col_slice) in enumerate(zip(x_slice, x_col_slice)): + for z, (z_slice, z_col_slice) in enumerate(zip(y_slice, y_col_slice)): + if z_slice[0] > 0: + box = o3d.geometry.TriangleMesh.create_box(res[0], res[1], res[2]) + box.vertex_colors = o3d.utility.Vector3dVector(np.ones((8, 3)) * z_col_slice) + xtrue = bb_mins[0] + res[0]*x + ytrue = bb_mins[1] + res[1]*y + ztrue = bb_mins[2] + res[2]*z + box.translate(np.array([xtrue, ytrue, ztrue]) - res/2) + boxes.append(box) + + all_vertices = np.concatenate([np.asarray(box.vertices) for box in boxes], 0) + all_vertex_colors = np.concatenate([np.asarray(box.vertex_colors) for box in boxes], 0) + all_triangles = np.concatenate([np.asarray(box.triangles) + i*8 for i, box in enumerate(boxes)], 0) + final_mesh = o3d.geometry.TriangleMesh(o3d.utility.Vector3dVector(all_vertices), + o3d.utility.Vector3iVector(all_triangles)) + final_mesh.vertex_colors = o3d.utility.Vector3dVector(all_vertex_colors) + + # add to visualizer + self._vis.clear_geometries() + self._vis.add_geometry(o3d.geometry.TriangleMesh.create_coordinate_frame(0.15, [0., 0., 0.]), self._first_pass) + self._vis.add_geometry(final_mesh, self._first_pass) + + # cuboids + self._cuboids = list() + for cuboid_inv_ext_mat, cuboid_dim in zip(cuboid_inv_ext_mats, cuboid_dims): + cuboid = o3d.geometry.TriangleMesh.create_box(cuboid_dim[0], cuboid_dim[1], cuboid_dim[2]) + cuboid.translate(-cuboid_dim/2) + cuboid.paint_uniform_color(np.array([[0.], [0.], [0.]])) + cuboid.transform(cuboid_inv_ext_mat) + self._cuboids.append(cuboid) + self._vis.add_geometry(cuboid, self._first_pass) + + # camera matrix + if not self._cam_pose_initialized: + cam_params = o3d.camera.PinholeCameraParameters() + cam_params.extrinsic = self._cam_ext_mat + cam_params.intrinsic = self._ctr.convert_to_pinhole_camera_parameters().intrinsic + self._ctr.convert_from_pinhole_camera_parameters(cam_params) + self._cam_pose_initialized = True + + # update flag + self._first_pass = False + + # spin visualizer until key-pressed + self._listen_for_enter_in_thread() + while not 
self._pressend_enter: + self._vis.poll_events() + self._join_enter_listener_thread() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..6c0ba1c --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +open3d diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..2bc7984 --- /dev/null +++ b/setup.py @@ -0,0 +1,14 @@ +from distutils.core import setup +import setuptools + +setup(name='ivy-demo-utils', + version='0.0.0', + description='Ivy Demo Utils provides a set of utilities for creating visual demos for Ivy libraries', + author='Ivy Team', + author_email='ivydl.team@gmail.com', + packages=setuptools.find_packages(), + package_data={'': ['*.npy', '*.ttt']}, + install_requires=['open3d'], + classifiers=['License :: OSI Approved :: Apache Software License'], + license='Apache 2.0' + )
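Usage note on framework_utils.py above: the getters return plain ivy backend modules, so a demo script can either request a specific backend or fall back to a random installed one. A minimal sketch, assuming ivy and its numpy backend are installed; the array call mirrors the f.array(..., 'float32') pattern used throughout scene_utils.py:

    from ivy_demo_utils.framework_utils import get_framework_from_str, choose_random_framework

    # request a specific backend; returns ivy.numpy, or None (with a warning) if it cannot be imported
    f = get_framework_from_str('numpy')
    if f is None:
        # fall back to any other installed backend, excluding mxnet
        f = choose_random_framework(excluded=['mxnd'])

    # the returned handle is then used for framework-agnostic array ops in the demos
    x = f.array([0., 1., 2.], 'float32')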
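scene_utils.BaseSimulator is also usable without PyRep: setup_primitive_scene_no_sim rebuilds the cuboid scene from the bundled obj_bbx_*.npy and obj_inv_ext_mat_*.npy files and stores a signed-distance function on self.sdf. A rough sketch, assuming ivy, ivy-mech and ivy-vision are installed; the (n, 3) query shape for sim.sdf is an assumption based on how PrimitiveScene is constructed here, not something verified against ivy-vision:

    import ivy.numpy
    from ivy_demo_utils.ivy_scene.scene_utils import BaseSimulator

    # try_use_sim=False skips the PyRep launch path entirely
    sim = BaseSimulator(interactive=False, try_use_sim=False, f=ivy.numpy)
    sim.setup_primitive_scene_no_sim()

    # query the scene's signed-distance function (assumed to accept an array of 3D points)
    query_points = ivy.numpy.array([[0., 0., 1.]], 'float32')
    distances = sim.sdf(query_points)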
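The Open3D Visualizer in open3d_utils.py can likewise be driven directly with point-cloud data, independently of any simulator. A small sketch, assuming open3d and an ivy backend are installed; the random point cloud is purely illustrative:

    import numpy as np
    from ivy_demo_utils.framework_utils import get_framework_from_str
    from ivy_demo_utils.open3d_utils import Visualizer

    f = get_framework_from_str('numpy')
    xyz = f.array(np.random.uniform(-1., 1., (100, 3)), 'float32')  # (N, 3) points
    rgb = f.array(np.random.uniform(0., 1., (100, 3)), 'float32')   # (N, 3) colors in [0, 1]

    vis = Visualizer()  # optionally pass a 4x4 cam_ext_mat to fix the initial camera pose
    vis.show_point_cloud(xyz, rgb, interactive=True)  # blocks until enter is pressed in the terminal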