From b562322eda07c5c7c11e14f075fa9f960384398d Mon Sep 17 00:00:00 2001
From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com>
Date: Thu, 26 Oct 2023 09:19:18 +0200
Subject: [PATCH 01/25] Update defaults.json

Add topic entry
https://github.com/dfki-ric/phobos/issues/330
---
 phobos/data/defaults.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/phobos/data/defaults.json b/phobos/data/defaults.json
index c5ba47db..c6d42244 100644
--- a/phobos/data/defaults.json
+++ b/phobos/data/defaults.json
@@ -40,7 +40,8 @@
         "opening_width": 90,
         "depth_image": true,
         "show_cam": false,
-        "frame_offset": 1
+        "frame_offset": 1,
+        "topic": ""
       }
     },
     "IMU": {

From ea4b6445f014e8edd13261fba3ba5b26ef34642c Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Thu, 26 Oct 2023 10:00:37 +0200
Subject: [PATCH 02/25] Fix #339

https://github.com/dfki-ric/phobos/issues/339
---
 phobos/data/xml_formats.json |  2 +-
 phobos/io/representation.py  | 33 +++++++++++++++++++++++----------
 2 files changed, 24 insertions(+), 11 deletions(-)

diff --git a/phobos/data/xml_formats.json b/phobos/data/xml_formats.json
index 7e5fcff6..98f3f0b3 100644
--- a/phobos/data/xml_formats.json
+++ b/phobos/data/xml_formats.json
@@ -653,7 +653,7 @@
       "children": {
         "pose": {
           "classname": "Pose",
-          "varname": "origin"
+          "varname": "secure_origin"
         },
         "inertial": {
           "classname": "Inertial",
diff --git a/phobos/io/representation.py b/phobos/io/representation.py
index 27349ad2..489743b1 100644
--- a/phobos/io/representation.py
+++ b/phobos/io/representation.py
@@ -1577,6 +1577,16 @@ def __init__(self, name=None, visuals=None, inertial=None, collisions=None, orig
                 geo.name += str(i)
         self.excludes += ["inertial"]
 
+    @property
+    def secure_origin(self):
+        if self.origin is None:
+            return self.joint_relative_origin
+        return self.origin
+
+    @secure_origin.setter
+    def secure_origin(self, value):
+        self.origin = value
+
     def remove_aggregate(self, elem):
         if isinstance(elem, Visual):
             self.visuals.remove(elem)
@@ -1606,17 +1616,20 @@ def materials(self, value):
     @property
     def joint_relative_origin(self):
         assert self._related_robot_instance is not None
-        assert self.origin is not None
-        out = self.origin
-        if self.origin.relative_to == self._related_robot_instance.get_parent(self):
-            return out
+        if self.origin is not None:
+            out = self.origin
+            if self.origin.relative_to == self._related_robot_instance.get_parent(self):
+                return out
+            else:
+                assert self.origin.relative_to is not None
+                r2x = self._related_robot_instance.get_transformation
+                return Pose.from_matrix(
+                    inv(r2x(self)).dot(r2x(self.origin.relative_to).dot(self.origin.to_matrix())),
+                    relative_to=self._related_robot_instance.get_parent(self)
+                )
         else:
-            assert self.origin.relative_to is not None
-            r2x = self._related_robot_instance.get_transformation
-            return Pose.from_matrix(
-                inv(r2x(self)).dot(r2x(self.origin.relative_to).dot(self.origin.to_matrix())),
-                relative_to=self._related_robot_instance.get_parent(self)
-            )
+            return Pose(relative_to=self._related_robot_instance.get_parent(self))
+
 
     @joint_relative_origin.setter
     def joint_relative_origin(self, value):
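
Patch 02 re-expresses a pose that was given relative to an arbitrary frame as a pose relative to the link's parent, via inv(T_root->new) . T_root->old . pose. A minimal numpy sketch of that algebra, assuming 4x4 homogeneous matrices (the helper below is illustrative, not part of phobos):

```python
import numpy as np

def reexpress_pose(T_root_new, T_root_old, pose_old):
    """Re-express a pose given relative to frame `old` relative to frame `new`.

    This mirrors the line
    inv(r2x(self)).dot(r2x(self.origin.relative_to).dot(self.origin.to_matrix()))
    in the hunk above, with r2x returning root-to-frame transforms.
    """
    return np.linalg.inv(T_root_new) @ T_root_old @ pose_old
```
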
From c9f5beca7d69e5fc3033e44d7ae901dc8cf5dc9e Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Thu, 26 Oct 2023 10:09:05 +0200
Subject: [PATCH 03/25] Fix #331

https://github.com/dfki-ric/phobos/issues/331
---
 phobos/data/xml_formats.json | 45 ++++++++++++++++++++++++++++++++++--
 phobos/io/xmlrobot.py        |  2 ++
 2 files changed, 45 insertions(+), 2 deletions(-)

diff --git a/phobos/data/xml_formats.json b/phobos/data/xml_formats.json
index 98f3f0b3..c9c17807 100644
--- a/phobos/data/xml_formats.json
+++ b/phobos/data/xml_formats.json
@@ -385,7 +385,8 @@
           "collision": "targets"
         }
       },
-      "gps": {}
+      "gps": {},
+      "imu": {}
     }
   },
   "Joint6DOF": {
@@ -529,6 +530,25 @@
           "varname": "origin"
         }
       }
+    },
+    "IMU": {
+      "tag": "sensor",
+      "attributes": {
+        "name": "name",
+        "rate": "rate",
+        "type": "sdf_type"
+      },
+      "attribute_children": {
+        "parent": {
+          "link": "link"
+        }
+      },
+      "children": {
+        "origin": {
+          "classname": "Pose",
+          "varname": "origin"
+        }
+      }
     }
   },
   "sdf": {
@@ -963,7 +983,8 @@
           "collision": "targets"
         }
       },
-      "gps": {}
+      "gps": {},
+      "imu": {}
     }
   },
   "Joint6DOF": {
@@ -1124,6 +1145,26 @@
       }
     }
   },
+  "IMU": {
+    "tag": "sensor",
+    "attributes": {
+      "name": "name",
+      "type": "sdf_type"
+    },
+    "value_children": {
+      "always_on": "always_on",
+      "rate": "rate",
+      "visualize": "visualize",
+      "topic": "topic",
+      "enable_metrics": "enable_metrics"
+    },
+    "children": {
+      "pose": {
+        "classname": "Pose",
+        "varname": "origin"
+      }
+    }
+  },
   "PluginFactory": {
     "tag": "plugin",
    "attributes": {
diff --git a/phobos/io/xmlrobot.py b/phobos/io/xmlrobot.py
index fb70c9dc..79484d79 100644
--- a/phobos/io/xmlrobot.py
+++ b/phobos/io/xmlrobot.py
@@ -92,6 +92,8 @@ def unlink_from_world(self):
     def assert_validity(self):
         assert self.get_root().origin is None or self.get_root().origin.is_zero()
+        for link in self.links:
+            assert link.origin is None or link.origin.relative_to != link.name
 
     @property
     def collisions(self):

From f93c1add2c0d954ca5708c3660c8f3ef5734ce44 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 14:34:37 +0100
Subject: [PATCH 04/25] Fix install_requirements.py

---
 install_requirements.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/install_requirements.py b/install_requirements.py
index 46c42fac..ad4ead69 100644
--- a/install_requirements.py
+++ b/install_requirements.py
@@ -31,7 +31,7 @@
 extra_requirements = {
     "pybullet": "pybullet",  # optional for blender
-    "open3d": "open3d",  # optional for blender
+    "open3d": "open3d",  # optional for blender, used for mesh adaption by trimesh
     "python-fcl": "python-fcl",  # optional for blender
 }
@@ -41,7 +41,11 @@ def install_requirement(package_name, upgrade_pip=False, lib=None, ensure_pip=Tr
         lib = bpy.utils.user_resource("SCRIPTS", path="modules")
     if ensure_pip:
         # Ensure pip is installed
-        subprocess.check_call([sys.executable, "-m", "ensurepip", "--user"])
+        try:
+            subprocess.check_call([sys.executable, "-m", "ensurepip", "--user"])
+        except subprocess.CalledProcessError:
+            print("WARNING: ensurepip failed, trying to continue anyway")
+            pass
     # Update pip (not mandatory)
     if upgrade_pip:
         print("  Upgrading pip...")
@@ -60,7 +64,11 @@ def check_requirements(optional=False, extra=False, force=False, upgrade_pip=Fal
         return
     print("Checking requirements:")
     # Ensure pip is installed
-    subprocess.check_call([sys.executable, "-m", "ensurepip", "--user"])
+    try:
+        subprocess.check_call([sys.executable, "-m", "ensurepip", "--user"])
+    except subprocess.CalledProcessError:
+        print("WARNING: ensurepip failed, trying to continue anyway")
+        pass
     reqs = [requirements]
     if optional:
         reqs += [optional_requirements]
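
The same try/except guard now appears in both install_requirement() and check_requirements(); a small helper would keep the two call sites in sync. A sketch under that assumption (the helper name is ours, not part of the patch):

```python
import subprocess
import sys

def ensure_pip_quietly():
    """Run `python -m ensurepip --user`, tolerating failure, e.g. on distros
    that ship pip separately and disable ensurepip."""
    try:
        subprocess.check_call([sys.executable, "-m", "ensurepip", "--user"])
        return True
    except subprocess.CalledProcessError:
        print("WARNING: ensurepip failed, trying to continue anyway")
        return False
```
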
From 1e7a797de2270a5371d1522058af63198c0c382b Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 14:37:10 +0100
Subject: [PATCH 05/25] Add frame handling

---
 phobos/data/xml_formats.json |  6 +++++-
 phobos/io/representation.py  | 33 ++++++++++++++++++++++++++++++++-
 phobos/io/scenes.py          | 11 -----------
 phobos/io/xmlrobot.py        | 24 ++++++++++++++++++++++++
 4 files changed, 61 insertions(+), 13 deletions(-)

diff --git a/phobos/data/xml_formats.json b/phobos/data/xml_formats.json
index c9c17807..1dc30d48 100644
--- a/phobos/data/xml_formats.json
+++ b/phobos/data/xml_formats.json
@@ -645,7 +645,11 @@
       "children": {
         "link": {
           "classname": "Link",
-          "varname": "links"
+          "varname": "physical_links"
+        },
+        "frame": {
+          "classname": "Frame",
+          "varname": "frames"
         },
         "joint": {
           "classname": "Joint",
diff --git a/phobos/io/representation.py b/phobos/io/representation.py
index 489743b1..bdd11dd9 100644
--- a/phobos/io/representation.py
+++ b/phobos/io/representation.py
@@ -1552,7 +1552,7 @@ class Link(Representation, SmurfBase):
     _class_variables = ["name", "visuals", "collisions", "inertial", "kccd_hull", "origin"]
 
     def __init__(self, name=None, visuals=None, inertial=None, collisions=None, origin=None,
-                 noDataPackage=None, reducedDataPackage=None, is_human=None, kccd_hull=None, joint=None, **kwargs):
+                 noDataPackage=None, reducedDataPackage=None, is_human=None, kccd_hull=None, **kwargs):
         SmurfBase.__init__(self, **kwargs)
         self.name = name
         self.origin = _singular(origin)
@@ -1653,6 +1653,37 @@ def sensors(self, value):
             if not any([existing.name == s.name and existing.equivalent(s) for existing in self._related_robot_instance.sensors]):
                 self._related_robot_instance.add_aggregate("sensor", s)
 
+
+class Frame(Link):
+    _class_variables = ["name", "attached_to", "origin"]
+    _type_dict = {"attached_to": "frames"}
+
+    def __init__(self, name=None, origin=None, attached_to=None,
+                 noDataPackage=None, reducedDataPackage=None, is_human=None, joint=None, **kwargs):
+        super().__init__(name=name, origin=origin, noDataPackage=noDataPackage, reducedDataPackage=reducedDataPackage, is_human=is_human, **kwargs)
+        self._attached_to = attached_to
+
+    @property
+    def attached_to(self):
+        if self._attached_to:
+            return self._attached_to
+        elif self.origin:
+            return self.origin.relative_to
+        elif self._related_robot_instance:
+            return self.joint_relative_origin.relative_to
+        else:
+            return None
+
+    @attached_to.setter
+    def attached_to(self, attached_to):
+        if self.origin and self.origin.relative_to == attached_to:
+            self._attached_to = None
+        elif self._related_robot_instance and self.joint_relative_origin.relative_to == attached_to:
+            self._attached_to = None
+        else:
+            self._attached_to = attached_to
+
+
 class JointDynamics(Representation):
     def __init__(self, damping=None, friction=None, spring_stiffness=None, spring_reference=None, **kwargs):
         super().__init__()
diff --git a/phobos/io/scenes.py b/phobos/io/scenes.py
index 695334ea..82eb3fda 100644
--- a/phobos/io/scenes.py
+++ b/phobos/io/scenes.py
@@ -44,17 +44,6 @@ def __init__(self, ode=None, gravity=None):
         super(Physics, self).__init__(ode=ode, gravity=gravity)
 
 
-class Frame(Representation):
-    _class_variables = ["name", "attached_to", "origin"]
-    _type_dict = {"attached_to": "frames"}
-
-    def __init__(self, name=None, attached_to=None, origin=None):
-        super(Frame, self).__init__()
-        self.name = name
-        self.attached_to = attached_to
-        self.origin = origin
-
-
 # [TODO v2.1.0] Add SDF-/MARS-Scene support
 class Environment(Representation, SmurfBase):
     def __init__(self):
diff --git a/phobos/io/xmlrobot.py b/phobos/io/xmlrobot.py
index 79484d79..fd68c2be 100644
--- a/phobos/io/xmlrobot.py
+++ b/phobos/io/xmlrobot.py
@@ -23,6 +23,8 @@ class XMLRobot(Representation):
     _related_entity_instance = None
 
     def __init__(self, name=None, version=None, links: List[representation.Link] = None,
+                 frames: List[representation.Link] = None,
+                 physical_links: List[representation.Link] = None,
                  joints: List[representation.Joint] = None,
                  materials: List[representation.Material] = None,
                  transmissions: List[representation.Transmission] = None,
@@ -61,6 +63,12 @@
         if links is not None:
             for link in links:
                 self.add_aggregate("link", link)
+        if frames is not None:
+            for frame in frames:
+                self.add_aggregate("link", frame)
+        if physical_links is not None:
+            for link in physical_links:
+                self.add_aggregate("link", link)
 
         self.materials = materials if materials is not None else []
         self.transmissions = transmissions if transmissions is not None else []
@@ -107,6 +115,22 @@
     def sensors_that_dont_belong_to_links_or_joints(self):
         return [s for s in self.sensors if getattr(s, "link", None) is None and getattr(s, "joint", None) is None]
 
+    @property
+    def frames(self):
+        return [lnk for lnk in self.links if not (lnk.inertial or lnk.collisions or lnk.visuals)]
+
+    @frames.setter
+    def frames(self, frames):
+        self.add_aggregate("link", frames)
+
+    @property
+    def physical_links(self):
+        return [lnk for lnk in self.links if (lnk.inertial or lnk.collisions or lnk.visuals)]
+
+    @physical_links.setter
+    def physical_links(self, links):
+        self.add_aggregate("link", links)
+
     @sensors_that_dont_belong_to_links_or_joints.setter
     def sensors_that_dont_belong_to_links_or_joints(self, value):
         value = _plural(value)
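
With this split, any Link that carries no inertial, collision, or visual data is treated as a pure frame (e.g. an SDF <frame>), while everything else stays a physical link. A toy illustration of the classification rule, using simple stand-in objects rather than phobos classes:

```python
class L:  # stand-in exposing the three attributes the property checks
    def __init__(self, inertial=None, collisions=(), visuals=()):
        self.inertial, self.collisions, self.visuals = inertial, list(collisions), list(visuals)

links = {"base": L(inertial=object()), "tcp_frame": L(), "camera_mount": L(visuals=[object()])}
frames = [n for n, l in links.items() if not (l.inertial or l.collisions or l.visuals)]
print(frames)  # ['tcp_frame'] - only data-less links are classified as frames
```
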
From 1a7a6bbae57ab03ec4c823824f270b01e8317567 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 14:44:16 +0100
Subject: [PATCH 06/25] Fix to_hex_color()

---
 phobos/utils/misc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/phobos/utils/misc.py b/phobos/utils/misc.py
index bcdb3933..feb92d40 100644
--- a/phobos/utils/misc.py
+++ b/phobos/utils/misc.py
@@ -338,4 +338,4 @@ def color_parser(*args, rgba=None):
 
 
 def to_hex_color(color_as_list):
-    return "#" + "".join([hex(int(x * 255))[2:] for x in color_as_list])
+    return "#" + "".join([f"{int(x * 255):02x}" for x in color_as_list])
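
The old implementation dropped the leading zero for channel values below 16, yielding malformed hex strings; the f-string pads every channel to two digits. A quick check:

```python
channels = [0.02, 0.5, 1.0]  # dark blue-ish RGB triple
old = "#" + "".join(hex(int(x * 255))[2:] for x in channels)
new = "#" + "".join(f"{int(x * 255):02x}" for x in channels)
print(old)  # '#57fff'  - five digits: 0.02*255 -> 5 lost its leading zero
print(new)  # '#057fff' - valid six-digit color
```
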
_maintainer}", + "\$URL": f"{url if url is not None else _url}", + "\$VERSION": f"def: {version if version is not None else self.version}" + }) + except: + log.info("Couldn't obtain author, maintainer, url info from git") with open(packagexml_path, "w") as packagexml: packagexml.write(content) From 078fe490f8630191162ecbe8866e2a96a13923c5 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann Date: Tue, 13 Feb 2024 14:45:05 +0100 Subject: [PATCH 08/25] Get log level from env --- phobos/commandline_logging.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/phobos/commandline_logging.py b/phobos/commandline_logging.py index e76fcb23..ec01513e 100644 --- a/phobos/commandline_logging.py +++ b/phobos/commandline_logging.py @@ -1,9 +1,10 @@ import logging import sys +import os SUPPORTED_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR"] LOGGER_NAME = "phobos_log" -BASE_LOG_LEVEL = "WARNING" +BASE_LOG_LEVEL = os.getenv("PHOBOS_LOG_LEVEL", "WARNING") LOG_FILE_CONVENTION = None #The background is set with 40 plus the number of the color, and the foreground with 30 From 53209f974408837d69d7f65b12f8304c4aa39c24 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann Date: Tue, 13 Feb 2024 14:45:35 +0100 Subject: [PATCH 09/25] Add -m option to run pipeline for specific definition file --- phobos/ci/pipeline.py | 5 ++++- phobos/scripts/run_pipeline.py | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/phobos/ci/pipeline.py b/phobos/ci/pipeline.py index 6df6e327..671fa4a1 100644 --- a/phobos/ci/pipeline.py +++ b/phobos/ci/pipeline.py @@ -26,7 +26,7 @@ class Pipeline(yaml.YAMLObject): - def __init__(self, configfile, processed_model_exists, subclass=False): + def __init__(self, configfile, model_file=None, processed_model_exists=False, subclass=False): self.processing_failed = {} self.test_results = {} self.configdir = os.path.abspath(os.path.dirname(configfile)) @@ -40,6 +40,9 @@ def __init__(self, configfile, processed_model_exists, subclass=False): for (k, v) in kwargs.items(): setattr(self, k, v) + if model_file: + self.model_definitions = [model_file] + self.temp_dir = os.path.join(self.root, "temp") self.faillog = os.path.join(self.temp_dir, "failures.txt") self.test_protocol = os.path.join(self.temp_dir, "test_protocol.txt") diff --git a/phobos/scripts/run_pipeline.py b/phobos/scripts/run_pipeline.py index c9d27b74..8dafadf4 100644 --- a/phobos/scripts/run_pipeline.py +++ b/phobos/scripts/run_pipeline.py @@ -20,6 +20,7 @@ def main(args): parser = argparse.ArgumentParser(description=INFO, prog="phobos "+os.path.basename(__file__)[:-3]) parser.add_argument('config_file', type=str, help='Path to the pipeline configfile', default="pipeline.yml") + parser.add_argument('-m', '--model_file', type=str, help='Path to the model definition to process', default=None) parser.add_argument('-p', '--process', help='Process Models', action='store_true', default=False) parser.add_argument('-t', '--test', help='Test Models', action='store_true', default=False) parser.add_argument('-d', '--deploy', help='Deploy Models (This is made for usage in the phobos-CI-docker rather than local/manual usage)', action='store_true', default=False) @@ -38,7 +39,7 @@ def main(args): if any([args.process, args.test, args.deploy, args.verify]) is True: if os.path.isfile(args.config_file): from phobos.ci.pipeline import Pipeline - pipeline = Pipeline(args.config_file, processed_model_exists=not args.process) + pipeline = Pipeline(args.config_file, model_file=args.model_file, processed_model_exists=not 
From 53209f974408837d69d7f65b12f8304c4aa39c24 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 14:45:35 +0100
Subject: [PATCH 09/25] Add -m option to run pipeline for specific definition
 file

---
 phobos/ci/pipeline.py          | 5 ++++-
 phobos/scripts/run_pipeline.py | 3 ++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/phobos/ci/pipeline.py b/phobos/ci/pipeline.py
index 6df6e327..671fa4a1 100644
--- a/phobos/ci/pipeline.py
+++ b/phobos/ci/pipeline.py
@@ -26,7 +26,7 @@
 
 class Pipeline(yaml.YAMLObject):
-    def __init__(self, configfile, processed_model_exists, subclass=False):
+    def __init__(self, configfile, model_file=None, processed_model_exists=False, subclass=False):
         self.processing_failed = {}
         self.test_results = {}
         self.configdir = os.path.abspath(os.path.dirname(configfile))
@@ -40,6 +40,9 @@
         for (k, v) in kwargs.items():
             setattr(self, k, v)
 
+        if model_file:
+            self.model_definitions = [model_file]
+
         self.temp_dir = os.path.join(self.root, "temp")
         self.faillog = os.path.join(self.temp_dir, "failures.txt")
         self.test_protocol = os.path.join(self.temp_dir, "test_protocol.txt")
diff --git a/phobos/scripts/run_pipeline.py b/phobos/scripts/run_pipeline.py
index c9d27b74..8dafadf4 100644
--- a/phobos/scripts/run_pipeline.py
+++ b/phobos/scripts/run_pipeline.py
@@ -20,6 +20,7 @@ def main(args):
     parser = argparse.ArgumentParser(description=INFO, prog="phobos "+os.path.basename(__file__)[:-3])
     parser.add_argument('config_file', type=str, help='Path to the pipeline configfile', default="pipeline.yml")
+    parser.add_argument('-m', '--model_file', type=str, help='Path to the model definition to process', default=None)
     parser.add_argument('-p', '--process', help='Process Models', action='store_true', default=False)
     parser.add_argument('-t', '--test', help='Test Models', action='store_true', default=False)
     parser.add_argument('-d', '--deploy', help='Deploy Models (This is made for usage in the phobos-CI-docker rather than local/manual usage)', action='store_true', default=False)
@@ -38,7 +39,7 @@
     if any([args.process, args.test, args.deploy, args.verify]) is True:
         if os.path.isfile(args.config_file):
             from phobos.ci.pipeline import Pipeline
-            pipeline = Pipeline(args.config_file, processed_model_exists=not args.process)
+            pipeline = Pipeline(args.config_file, model_file=args.model_file, processed_model_exists=not args.process)
         else:
             raise FileNotFoundError("Config file not found!")
From 872eb0e42a2981774bf36314f92527d6bc0e4879 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 14:52:36 +0100
Subject: [PATCH 10/25] Add possibility to define default_export_config in
 pipeline.yml

---
 phobos/ci/base_model.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py
index 0d78114b..f05e7c7d 100644
--- a/phobos/ci/base_model.py
+++ b/phobos/ci/base_model.py
@@ -34,7 +34,6 @@ def __init__subclass__(self, configfile, pipeline, processed_model_exists=True):
         self.modelname = ""
         self.robotname = ""
         self.test = {}
-        self.export_config = []
         kwargs = {}
         if 'model' in self.cfg.keys():
             kwargs = self.cfg['model']
@@ -43,7 +42,6 @@
         # check whether all necessary configurations are there
         assert hasattr(self, "modelname") and len(self.modelname) > 0
         assert hasattr(self, "robotname") and len(self.robotname) > 0
-        assert hasattr(self, "export_config") and len(self.export_config) >= 1
         assert hasattr(self, "test") and len(self.test) >= 1
         self.test = misc.merge_default(
             self.test,
@@ -62,6 +60,8 @@
         # parse export_config
         self.export_meshes = {}
         self.export_xmlfile = None
+        if not hasattr(self, "export_config"):
+            self.export_config = self.pipeline.default_export_config
         for ec in self.export_config:
             if ec["type"] in KINEMATIC_TYPES:
                 assert "mesh_format" in ec
@@ -475,7 +475,7 @@ def process(self):
                     link.add_annotation(k, v, overwrite=True)
         if "$name_editing" in self.frames:
             self.robot.rename("links", self.robot.links, **self.frames.get("$name_editing", {}))
-        
+
         if hasattr(self, "joints"):
             if '$replace_joint_types' in self.joints:
                 for joint in self.robot.joints:
@@ -585,7 +585,7 @@ def process(self):
                     and self.joints["$default"]["backup"]
                 ) else None
             )
-        
+
         if hasattr(self, 'collisions'):
             if "$name_editing" in self.collisions.keys():
                 self.robot.rename("collision", self.robot.collisions, **self.collisions["$name_editing"])
@@ -620,7 +620,7 @@ def process(self):
                     # leads to problems in reusing identic meshes
                     # if conf["shape"] == "convex":
                     #     reduceMeshCollision(self.robot, link.name, reduction=0.3)
-        
+
             if "auto_bitmask" in self.collisions.keys() and \
                     self.collisions["auto_bitmask"] is True:
                 log.debug("  Setting auto bitmask")
@@ -646,11 +646,11 @@ def process(self):
                         if key in conf:
                             conf.pop(key)
                     coll.add_annotations(**conf)
-        
+
         if hasattr(self, 'visuals'):
            if "$name_editing" in self.visuals.keys():
                 self.robot.rename("visuals", self.robot.visuals, **self.visuals["$name_editing"])
-        
+
         if hasattr(self, "exoskeletons") or hasattr(self, "submechanisms"):
             if hasattr(self, "exoskeletons"):
                 self.robot.load_submechanisms({"exoskeletons": deepcopy(self.exoskeletons)},
@@ -671,7 +671,7 @@ def process(self):
             #     self.robot.define_submodel(name=self.export_total_submechanisms, start=root,
             #                                stop=tree.find_leaves(self.robot, spanningtree),
             #                                only_urdf=True)
-        
+
         if hasattr(self, "sensors"):
             multi_sensors = [x for x in dir(sensor_representations) if
                              not x.startswith("__") and x not in sensor_representations.__IMPORTS__ and
@@ -704,12 +704,12 @@ def process(self):
                 if sensor_ is not None:
                     self.robot.add_sensor(sensor_)
                     log.debug('      Attached {} {}'.format(s["type"], s['name']))
-        
+
         if hasattr(self, "poses"):
             for (cn, config) in self.poses.items():
                 pose = poses.JointPoseSet(robot=self.robot, name=cn, configuration=config)
                 self.robot.add_pose(pose)
-        
+
         if hasattr(self, "annotations"):
             log.debug('  Adding further annotations.')
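
base_model now falls back to pipeline.default_export_config, so a pipeline.yml can define the export set once for all models. The entries mirror the keys the export code reads elsewhere in this series (ec["type"], ec["mesh_format"], link_in_smurf); a sketch of an equivalent structure expressed as Python data, with the exact key combinations being assumptions beyond what the diffs show:

```python
default_export_config = [
    # exactly one kinematics export may be referenced from the SMURF file ...
    {"type": "urdf", "mesh_format": "stl", "link_in_smurf": True},
    # ... plus any number of secondary exports
    {"type": "sdf", "mesh_format": "obj"},
    {"type": "pdf"},
]
```
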
kwargs["print"]) or "loglevel" in kwargs: + loglevel = kwargs.get("loglevel", "info") + if "loglevel" in kwargs: + kwargs.pop("loglevel") + if "print" in kwargs: + kwargs.pop("print") + getattr(log, loglevel.lower())(" ".join([str(a) for a in args])) if "end" in kwargs.keys(): new += kwargs["end"] else: new += "\n" - if "print" in kwargs.keys() and kwargs["print"]: - kwargs.pop("print") - log.info(" ".join(args)) return s + new From 701fb4ae6282121a83fd1bfef0a94f1f8917925a Mon Sep 17 00:00:00 2001 From: Henning Wiedemann Date: Tue, 13 Feb 2024 15:04:30 +0100 Subject: [PATCH 14/25] Fix setup_git script --- phobos/scripts/setup_git.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/phobos/scripts/setup_git.py b/phobos/scripts/setup_git.py index c06954b7..418323f3 100644 --- a/phobos/scripts/setup_git.py +++ b/phobos/scripts/setup_git.py @@ -22,7 +22,6 @@ def main(args): parser.add_argument('-r', '--remote', type=str, help='The git-repository remote url (required for -c)', action="store", default=None) parser.add_argument('-i', '--init', help='Call git init (instead of -c)', action="store_true", default=False) parser.add_argument('-c', '--clone', help='Clone the repo (requires remote; instead of -i)', action="store_true", default=False) - parser.add_argument('-e', '--apply-on-existing', help='Use this when you want to apply this on an existing directory', action="store_true", default=False) parser.add_argument('-l', '--no-lfs', help='Do not use git-lfs', action="store_true", default=False) parser.add_argument("--loglevel", help="The log level", choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], default=BASE_LOG_LEVEL) @@ -33,30 +32,27 @@ def main(args): log.info("Creating directory") if args.clone: assert args.remote is not None - os.makedirs(os.path.basename(args.directory), exist_ok=True) + log.info("Cloning "+args.remote) git.clone( repo=args.remote, target=args.directory, - cwd=args.directory + cwd=os.getcwd() ) else: os.makedirs(args.directory, exist_ok=True) - elif not args.apply_on_existing: - raise FileExistsError(args.directory + " already exists!") - if args.init: - assert not args.clone + if args.init and not args.clone: log.info("Initalizing repository") misc.execute_shell_command("git init", args.directory) if args.remote: git.add_remote(args.directory, args.remote, "origin") if not args.no_lfs: + log.info("Installing git lfs") git.install_lfs( args.directory, ["*.stl", "*.obj", ".mtl", "*.bobj", "*.iv", "*.dae", "*.blend", "*.pdf", "*.jpg", "*.jpeg", "*.png"] ) - misc.execute_shell_command("git add .gitattributes", cwd=args.directory) git.ignore(args.directory, ["*.blend1", "*.log", "*.gv"]) misc.execute_shell_command("git add .gitignore", cwd=args.directory) From 6492934febb1115c1314ea740861fbf7d12f4bdf Mon Sep 17 00:00:00 2001 From: Henning Wiedemann Date: Tue, 13 Feb 2024 15:14:32 +0100 Subject: [PATCH 15/25] Fix Git-LFS Readme integration --- phobos/ci/base_model.py | 9 ++++----- phobos/data/GitLFS_README.md.in | 10 ++-------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py index f05e7c7d..5915e639 100644 --- a/phobos/ci/base_model.py +++ b/phobos/ci/base_model.py @@ -811,11 +811,10 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False): with open(readme_path, "w") as readme: readme.write(content) if uses_lfs: - readme_content = open(readme_path, "r").read() - if "# Git LFS for mesh repositories" not in readme_content: - with open(readme_path, "a") as readme: 
From 3a9e85f13bebf6475158fe4d197ec9539106de43 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 15:02:41 +0100
Subject: [PATCH 13/25] Adapt append_string to logger usage

---
 phobos/utils/misc.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/phobos/utils/misc.py b/phobos/utils/misc.py
index 65330dc7..751ea538 100644
--- a/phobos/utils/misc.py
+++ b/phobos/utils/misc.py
@@ -137,13 +137,17 @@ def regex_replace(string, replacements, verbose=False):
 def append_string(s, *args, **kwargs):
     """Replacement for print so that the printed string is put to s"""
     new = " ".join([str(a) for a in args])
+    if ("print" in kwargs.keys() and kwargs["print"]) or "loglevel" in kwargs:
+        loglevel = kwargs.get("loglevel", "info")
+        if "loglevel" in kwargs:
+            kwargs.pop("loglevel")
+        if "print" in kwargs:
+            kwargs.pop("print")
+        getattr(log, loglevel.lower())(" ".join([str(a) for a in args]))
     if "end" in kwargs.keys():
         new += kwargs["end"]
     else:
         new += "\n"
-    if "print" in kwargs.keys() and kwargs["print"]:
-        kwargs.pop("print")
-        log.info(" ".join(args))
     return s + new
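
Callers can now route the appended line to the logger at any level instead of the previously hard-wired log.info. A usage sketch, assuming the patched misc.append_string from the hunk above is imported:

```python
protocol = ""
# appended to `protocol` with a trailing newline AND emitted via log.warning(...)
protocol = append_string(protocol, "3 joints failed the limit check", loglevel="warning")
```
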
From 701fb4ae6282121a83fd1bfef0a94f1f8917925a Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 15:04:30 +0100
Subject: [PATCH 14/25] Fix setup_git script

---
 phobos/scripts/setup_git.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/phobos/scripts/setup_git.py b/phobos/scripts/setup_git.py
index c06954b7..418323f3 100644
--- a/phobos/scripts/setup_git.py
+++ b/phobos/scripts/setup_git.py
@@ -22,7 +22,6 @@ def main(args):
     parser.add_argument('-r', '--remote', type=str, help='The git-repository remote url (required for -c)', action="store", default=None)
     parser.add_argument('-i', '--init', help='Call git init (instead of -c)', action="store_true", default=False)
     parser.add_argument('-c', '--clone', help='Clone the repo (requires remote; instead of -i)', action="store_true", default=False)
-    parser.add_argument('-e', '--apply-on-existing', help='Use this when you want to apply this on an existing directory', action="store_true", default=False)
     parser.add_argument('-l', '--no-lfs', help='Do not use git-lfs', action="store_true", default=False)
     parser.add_argument("--loglevel", help="The log level", choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
                         default=BASE_LOG_LEVEL)
@@ -33,30 +32,27 @@
         log.info("Creating directory")
         if args.clone:
             assert args.remote is not None
-            os.makedirs(os.path.basename(args.directory), exist_ok=True)
+            log.info("Cloning "+args.remote)
             git.clone(
                 repo=args.remote,
                 target=args.directory,
-                cwd=args.directory
+                cwd=os.getcwd()
             )
         else:
             os.makedirs(args.directory, exist_ok=True)
-    elif not args.apply_on_existing:
-        raise FileExistsError(args.directory + " already exists!")
-    if args.init:
-        assert not args.clone
+    if args.init and not args.clone:
         log.info("Initializing repository")
         misc.execute_shell_command("git init", args.directory)
         if args.remote:
             git.add_remote(args.directory, args.remote, "origin")
     if not args.no_lfs:
+        log.info("Installing git lfs")
         git.install_lfs(
             args.directory,
             ["*.stl", "*.obj", ".mtl", "*.bobj", "*.iv", "*.dae", "*.blend", "*.pdf", "*.jpg", "*.jpeg", "*.png"]
         )
-        misc.execute_shell_command("git add .gitattributes", cwd=args.directory)
     git.ignore(args.directory, ["*.blend1", "*.log", "*.gv"])
     misc.execute_shell_command("git add .gitignore", cwd=args.directory)

From 6492934febb1115c1314ea740861fbf7d12f4bdf Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 15:14:32 +0100
Subject: [PATCH 15/25] Fix Git-LFS Readme integration

---
 phobos/ci/base_model.py         |  9 ++++-----
 phobos/data/GitLFS_README.md.in | 10 ++--------
 2 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py
index f05e7c7d..5915e639 100644
--- a/phobos/ci/base_model.py
+++ b/phobos/ci/base_model.py
@@ -811,11 +811,10 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False):
             with open(readme_path, "w") as readme:
                 readme.write(content)
         if uses_lfs:
-            readme_content = open(readme_path, "r").read()
-            if "# Git LFS for mesh repositories" not in readme_content:
-                with open(readme_path, "a") as readme:
-                    readme.write(open(resources.get_resources_path("GitLFS_README.md.in")).read())
-
+            readme_content = open(readme_path, "r").read()
+            if "Git LFS" not in readme_content:
+                with open(readme_path, "a") as readme:
+                    readme.write(open(resources.get_resources_path("GitLFS_README.md.in")).read())
         # update additional submodules
         if "submodules" in self.deployment:
             for subm in self.deployment["submodules"]:
diff --git a/phobos/data/GitLFS_README.md.in b/phobos/data/GitLFS_README.md.in
index 3f8eb2bc..df579150 100644
--- a/phobos/data/GitLFS_README.md.in
+++ b/phobos/data/GitLFS_README.md.in
@@ -1,18 +1,12 @@
-# Git LFS for mesh repositories
+# Git LFS
 
-This repo uses git-lfs for the mesh repositories to save you some disk space and download time.
+This repo uses git-lfs for the mesh files to save you some disk space and download time.
 
 If you check out this repo via autoproj and you have included this model-group's package_set,
 autoproj will take care of it.
 
 If you are checking out this repo via git you probably have to install git-lfs first:
 
-- If you haven't used git-lfs before you probably have to do:
-```
-curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash
-```
-
-- If you once have added git-lfs to your repos simply do:
 ```
 sudo apt-get install git-lfs
 ```
From 866d7960f9be8e3f633362ae3ff440b68eb1ee2e Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 15:27:52 +0100
Subject: [PATCH 16/25] Fix several path/file handling issues

---
 phobos/ci/base_model.py     | 16 ++++++++++++----
 phobos/ci/pipeline.py       | 11 +++++++----
 phobos/io/representation.py |  9 +++++++--
 phobos/utils/xml.py         |  2 +-
 4 files changed, 27 insertions(+), 11 deletions(-)

diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py
index 5915e639..6fba8a4a 100644
--- a/phobos/ci/base_model.py
+++ b/phobos/ci/base_model.py
@@ -23,10 +23,12 @@ def __init__subclass__(self, configfile, pipeline, processed_model_exists=True):
         self.processed_model_exists = processed_model_exists
         self.pipeline = pipeline
 
+        self.configfile = None
         if type(configfile) is str:
             if not os.path.isfile(configfile):
                 raise Exception('{} not found!'.format(configfile))
             self.cfg = load_json(open(configfile, 'r'))
+            self.configfile = configfile
         else:
             self.cfg = configfile
@@ -151,11 +153,12 @@ def _load_robot(self):
             # create a robot with the basic properties given
             self.robot = Robot(name=self.robotname if self.robotname else None)
         else:
-            if os.path.exists(os.path.join(self.exportdir, "smurf", self.robotname + ".smurf")):
+            load_file = os.path.join(self.exportdir, "smurf", getattr(self, "filename", self.robotname) + ".smurf")
+            if os.path.exists(load_file):
                 self.robot = Robot(name=self.robotname if self.robotname else None,
-                                   inputfile=os.path.join(self.exportdir, "smurf", self.robotname + ".smurf"))
+                                   inputfile=load_file)
             else:
-                raise Exception('Preprocessed file {} not found!'.format(self.basefile))
+                raise Exception('Preprocessed file {} not found!'.format(load_file))
@@ -735,7 +738,12 @@ def export(self):
         for vc in self.robot.collisions + self.robot.visuals:
             if isinstance(vc.geometry, representation.Mesh):
                 self.processed_meshes = self.processed_meshes.union([os.path.realpath(f["filepath"]) for f in vc.geometry._exported.values()])
-                self.processed_meshes.add(os.path.realpath(vc.geometry.abs_filepath))
+                try:
+                    self.processed_meshes.add(os.path.realpath(vc.geometry.abs_filepath))
+                except IOError:
+                    # This mesh has been edited and has no source mesh any more that could provide a file path
+                    pass
+
         if "keep_files" in self.deployment:
             git.reset(self.targetdir, "autobuild", "master")
             misc.store_persisting_files(self.pipeline, self.targetdir, self.deployment["keep_files"], self.exportdir)
diff --git a/phobos/ci/pipeline.py b/phobos/ci/pipeline.py
index 671fa4a1..81823f72 100644
--- a/phobos/ci/pipeline.py
+++ b/phobos/ci/pipeline.py
@@ -29,7 +29,7 @@ class Pipeline(yaml.YAMLObject):
     def __init__(self, configfile, model_file=None, processed_model_exists=False, subclass=False):
         self.processing_failed = {}
         self.test_results = {}
-        self.configdir = os.path.abspath(os.path.dirname(configfile))
+        self.configdir = os.path.dirname(os.path.abspath(configfile))
         self.processed_model_exists = processed_model_exists
         if not os.path.isfile(configfile):
             raise Exception('{} not found!'.format(configfile))
@@ -40,6 +40,8 @@
         for (k, v) in kwargs.items():
             setattr(self, k, v)
 
+        self.root = os.path.expandvars(self.root)
+
         if model_file:
             self.model_definitions = [model_file]
 
@@ -50,10 +52,11 @@
         if not subclass:
             assert hasattr(self, "model_definitions") and len(self.model_definitions) > 0
             assert hasattr(self, "root") and self.root is not None and len(self.root) > 0
-        
-        self.root = os.path.abspath(os.path.join(self.configdir, self.root))
+
+        if not os.path.isabs(self.root):
+            self.root = os.path.abspath(os.path.join(self.configdir, self.root))
         log.info(f"Working from {self.root}")
-        self.git_rev = git.revision(os.path.abspath(self.configdir))
+        self.git_rev = git.revision(self.configdir)
         self.temp_dir = os.path.join(self.root, "temp")
         self.faillog = os.path.join(self.temp_dir, "failures.txt")
         self.test_protocol = os.path.join(self.temp_dir, "test_protocol.txt")
diff --git a/phobos/io/representation.py b/phobos/io/representation.py
index bdd11dd9..2c0f7d1e 100644
--- a/phobos/io/representation.py
+++ b/phobos/io/representation.py
@@ -646,7 +646,12 @@ def read_history(cls, targetpath):
     @property
     def abs_filepath(self):
         if len(self._exported) == 0:
-            out = self.input_file if self.input_file is not None else self._mesh_object.get("input_file", None)
+            out = self.input_file
+            if out is None and self._mesh_object:
+                try:
+                    out = self._mesh_object["input_file"]
+                except Exception:
+                    out = None
             if out is None:
                 raise IOError(f"This mesh ({self.unique_name}) hasn't been exported nor is an input file known, which could be used.")
             return out
@@ -711,7 +716,7 @@ def file_exists(self):
         return os.path.isfile(self.abs_filepath)
 
     def available(self):
-        return self.file_exists() or self.mesh_object is not None
+        return self.mesh_object is not None or self.file_exists()
 
     def is_lfs_checked_out(self):
         if self.input_file is not None and os.path.isfile(self.input_file):
diff --git a/phobos/utils/xml.py b/phobos/utils/xml.py
index 5b61693d..d3c297f9 100644
--- a/phobos/utils/xml.py
+++ b/phobos/utils/xml.py
@@ -86,7 +86,7 @@ def read_relative_filename(filename, start_file_path):
     start_file_path = os.path.abspath(start_file_path)
     if start_file_path.split(".")[-1] in IMPORT_TYPES:
         start_file_path = os.path.dirname(start_file_path)  # /bla/blub/xyz/blib.xyz -> /bla/blub/xyz
-    if filename.startswith("package://"):  # ROS Package
+    if filename.startswith("package:"):  # ROS Package
         if os.path.basename(start_file_path) in IMPORT_TYPES+["xacro"]:
             package_dir = os.path.dirname(start_file_path)  # /bla/blub/xyz -> /bla/blub
         else:
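
Why the absolute-path check in pipeline.py uses isabs() rather than a truthiness test on abspath(): os.path.abspath() never returns an empty string (relative inputs are resolved against the cwd), so its result is truthy for every input and useless as a guard. A quick demonstration:

```python
import os

print(os.path.abspath(""))           # e.g. '/home/user/models' - truthy even for empty input
print(bool(os.path.abspath("sub")))  # True for every input
print(os.path.isabs("sub"))          # False - relative paths are detected
print(os.path.isabs("/opt/models"))  # True
```
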
From b8ea40f7230ad6c4db79f82d7fb4dd3f29b0e327 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 16:10:53 +0100
Subject: [PATCH 17/25] Fix model_testing

---
 phobos/ci/model_testing.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/phobos/ci/model_testing.py b/phobos/ci/model_testing.py
index 9b399f12..e2443f5a 100644
--- a/phobos/ci/model_testing.py
+++ b/phobos/ci/model_testing.py
@@ -25,15 +25,15 @@ def __init__(self, new_model, compare_model=None):
         self.old_hyrodyn = None
         self.new_hml_test = get_load_report(self.new.robot.xmlfile, self.new.robot.submechanisms_file)
         self.new_sm_hml_test = []
-        if len([x for x in self.new.robot.submodel_defs if not x["name"].startswith("#submech#")]) > 0:
+        if len([x for name, x in self.new.robot.submodel_defs.items() if not name.startswith("#submech#")]) > 0:
             sm_path = os.path.join(self.new.exportdir, "submodels")
-            for au in self.new.robot.submodel_defs:
-                if au["name"].startswith("#submech#"):
+            for name, au in self.new.robot.submodel_defs.items():
+                if name.startswith("#submech#"):
                     continue
                 self.new_sm_hml_test += [{
-                    "name": au["name"],
-                    "urdf": os.path.join(sm_path, au["name"], "urdf", au["name"] + ".urdf"),
-                    "submech": os.path.normpath(os.path.join(sm_path, au["name"], "smurf", au["name"] + "_submechanisms.yml"))
+                    "name": name,
+                    "urdf": os.path.join(sm_path, name, "urdf", name + ".urdf"),
+                    "submech": os.path.normpath(os.path.join(sm_path, name, "smurf", name + "_submechanisms.yml"))
                 }]
                 self.new_sm_hml_test[-1]["report"] = get_load_report(
                     self.new_sm_hml_test[-1]["urdf"],
@@ -267,10 +267,12 @@ def test_compare_amount_joints(self):
         return len(self.new.robot.joints) == len(self.old.joints)
 
     def test_load_in_pybullet(self):
-        if not PYBULLET_AVAILABLE:
+        from ..defs import check_pybullet_available
+        if not check_pybullet_available():
             log.warning('Pybullet not present')
             return True
         try:
+            import pybullet as pb
             client = pb.connect(pb.DIRECT)
             pb.loadURDF(os.path.join(self.new.robot.xmlfile), physicsClientId=client)
             pb.disconnect(client)
"submodel"] - self.submodel_defs[export["name"]]["export_dir"] = os.path.join(outputdir, "submodels", export["name"]) + main_export_robot_instance.submodel_defs[export["name"]]["export_dir"] = os.path.join(outputdir, "submodels", export["name"]) export_robot_instance.export( - outputdir=self.submodel_defs[export["name"]]["export_dir"], + outputdir=main_export_robot_instance.submodel_defs[export["name"]]["export_dir"], export_config=_export_config, rel_mesh_pathes={k: os.path.join("..", "..", v) for k, v in rel_mesh_pathes.items()}, with_meshes=False, @@ -754,9 +756,9 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na no_smurf=no_smurf ) elif export["type"] == "pdf": - self.export_pdf(outputfile=os.path.join(outputdir, self.name.replace('/','_') + ".pdf")) + main_export_robot_instance.export_pdf(outputfile=os.path.join(outputdir, self.name.replace('/','_') + ".pdf")) elif export["type"] == "kccd": - export_robot_instance = self.duplicate() + export_robot_instance = main_export_robot_instance.duplicate() export_robot_instance.export_kccd( outputdir=outputdir, rel_iv_meshes_path=rel_mesh_pathes["iv"], @@ -769,7 +771,7 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na kwargs["file_name"] = export["file_name"] if "joints" in export: kwargs["joint_desc"] = export["joints"] - self.export_joint_limits( + main_export_robot_instance.export_joint_limits( outputdir=outputdir, **kwargs ) @@ -780,7 +782,7 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na log.error(f"Can't export according to following export configuration:\n{export}") # export smurf if not no_smurf: - self.export_smurf( + main_export_robot_instance.export_smurf( outputdir=outputdir, robotfile=xml_file_in_smurf, check_submechs=check_submechs, @@ -789,7 +791,7 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na ) # export ros package files if ros_pkg and not ros_pkg_later: - self.export_ros_package_files(outputdir, ros_pkg_name) + main_export_robot_instance.export_ros_package_files(outputdir, ros_pkg_name) elif ros_pkg and ros_pkg_later: return ros_pkg_name From 0d94abddd24f03e38f90982d3cbebb45728edb63 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann Date: Tue, 13 Feb 2024 16:22:44 +0100 Subject: [PATCH 19/25] Refactored version of Mathias commit: https://github.com/dfki-ric/phobos/commit/3c833ce46a1b8ff7c78babc9d02e693c8f54a28f Co-authored-by: mathias.trampler@dfki.de --- phobos/io/xmlrobot.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/phobos/io/xmlrobot.py b/phobos/io/xmlrobot.py index fd68c2be..1179b300 100644 --- a/phobos/io/xmlrobot.py +++ b/phobos/io/xmlrobot.py @@ -28,8 +28,9 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N joints: List[representation.Joint] = None, materials: List[representation.Material] = None, transmissions: List[representation.Transmission] = None, - sensors=None, motors=None, plugins=None, root=None, - is_human=False, urdf_version=None, xmlfile=None, _xmlfile=None): + sensors=None, sensors_that_dont_belong_to_links_or_joints=None, motors=None, + plugins=None, root=None, is_human=False, + urdf_version=None, xmlfile=None, _xmlfile=None): self._related_robot_instance = self super().__init__() self.joints = [] @@ -39,6 +40,8 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N self.materials = [] self.transmissions = [] # [TODO v2.1.0] currently not fully supported self.sensors = [] + if 
From 0d94abddd24f03e38f90982d3cbebb45728edb63 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Tue, 13 Feb 2024 16:22:44 +0100
Subject: [PATCH 19/25] Refactored version of Mathias commit:
 https://github.com/dfki-ric/phobos/commit/3c833ce46a1b8ff7c78babc9d02e693c8f54a28f

Co-authored-by: mathias.trampler@dfki.de
---
 phobos/io/xmlrobot.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/phobos/io/xmlrobot.py b/phobos/io/xmlrobot.py
index fd68c2be..1179b300 100644
--- a/phobos/io/xmlrobot.py
+++ b/phobos/io/xmlrobot.py
@@ -28,8 +28,9 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N
                  joints: List[representation.Joint] = None,
                  materials: List[representation.Material] = None,
                  transmissions: List[representation.Transmission] = None,
-                 sensors=None, motors=None, plugins=None, root=None,
-                 is_human=False, urdf_version=None, xmlfile=None, _xmlfile=None):
+                 sensors=None, sensors_that_dont_belong_to_links_or_joints=None, motors=None,
+                 plugins=None, root=None, is_human=False,
+                 urdf_version=None, xmlfile=None, _xmlfile=None):
         self._related_robot_instance = self
         super().__init__()
         self.joints = []
@@ -39,6 +40,8 @@
         self.materials = []
         self.transmissions = []  # [TODO v2.1.0] currently not fully supported
         self.sensors = []
+        if sensors_that_dont_belong_to_links_or_joints is not None:
+            self.sensors_that_dont_belong_to_links_or_joints = sensors_that_dont_belong_to_links_or_joints
         self.plugins = []  # Currently just a place holder
         self.motors = []
         self.xmlfile = xmlfile if xmlfile is not None else _xmlfile

From dd48b5dc8ece9776b570179ec40912735a0e4c34 Mon Sep 17 00:00:00 2001
From: Henning Wiedemann
Date: Thu, 13 Jun 2024 10:27:57 +0200
Subject: [PATCH 20/25] [BREAKING CHANGES] Squashed refactoring of python
 module

Python Module:
--------------
ci:
- Support for Azure Pipelines
- Split ci pushes to Failure, Success and Production Branch, so that all
  pipeline pushes can be reviewed before production
- Extended test protocols in PRs
- Improve configurability of model_def by hierarchical defaults
- Autogenerated files can be marked as such
- Results from feature/* branches will be pushed to the corresponding
  feature/* branch in the target repo, for testing purposes

scripts:
- phobos convert: Fix for Scenes/Worlds, args for mesh type and export config

core/io:
- Refactor utils
- Adaptions in defaults.json
- Several bugfixes in mesh handling
- Bugfixes for poses
- Configurability for exports has been extended
- Enhanced submodel export
- Bugfixes for import/export

smurf:
- Include list of submodels
- A mesh collision can now be annotated with primitive collisions; during
  export the user can decide what to export

general:
- Several error messages have been improved
- Minor codestyle improvements
- Updated ROS templates for ROS2

Blender:
--------
- Only minor bugfixes
- Most bugfixes in Python Module apply here as well

ToDo:
-----
- Documentation has to be updated accordingly
---
 manifest.xml                        |   2 +-
 phobos/blender/io/phobos2blender.py |  12 +-
 phobos/ci/base_model.py             | 467 +++++++++++++++--------
 phobos/ci/model_testing.py          | 341 +++++++++--------
 phobos/ci/pipeline.py               | 316 +++++++++------
 phobos/core/multiple.py             |  21 +-
 phobos/core/robot.py                | 461 ++++++++++++++---------
 phobos/data/ROSCMakeLists.txt.in    |  17 +-
 phobos/data/ROSpackage.xml.in       |  26 +-
 phobos/data/defaults.json           |  30 +-
 phobos/data/xml_formats.json        |  57 +--
 phobos/defs.py                      |  15 +-
 phobos/geometry/geometry.py         |  45 ++-
 phobos/geometry/robot.py            |  69 ++--
 phobos/io/base.py                   |   2 +-
 phobos/io/poses.py                  |  12 +-
 phobos/io/representation.py         | 554 ++++++++++++++++++++--------
 phobos/io/smurf_reflection.py       |   2 +-
 phobos/io/smurfrobot.py             |  60 ++-
 phobos/io/xml_factory.py            | 360 +++++++++---------
 phobos/io/xmlrobot.py               | 115 +++++-
 phobos/scripts/convert.py           |  47 +--
 phobos/utils/git.py                 | 192 +++++++---
 phobos/utils/misc.py                | 190 +++++++++-
 phobos/utils/xml.py                 |   2 +-
 25 files changed, 2207 insertions(+), 1208 deletions(-)

diff --git a/manifest.xml b/manifest.xml
index bbc61b45..30eb4110 100644
--- a/manifest.xml
+++ b/manifest.xml
@@ -34,7 +34,7 @@
-
+
diff --git a/phobos/blender/io/phobos2blender.py b/phobos/blender/io/phobos2blender.py
index f7f91e9c..fd87f184 100644
--- a/phobos/blender/io/phobos2blender.py
+++ b/phobos/blender/io/phobos2blender.py
@@ -96,9 +96,14 @@ def createGeometry(viscol, geomsrc, linkobj=None):
         newgeom = bUtils.createPrimitive(viscol.name, geometry_type, dimensions, phobostype=geomsrc,
                                          pmaterial="phobos_collision")
         newgeom.select_set(True)
     else:
+        # [TODO] we need to handle here the primitive alternatives that are available when loading from smurf
+        # They can be returned as second return value to make clear that they are different
+        # Also they should get another colored material
+        # When we do this we should add support to edit and create such primitive alternatives
+        # Also they will currently not be exported.
         log(
             "Unknown geometry type of "
-            + type(geometry)
+            + str(type(geometry))
             + viscol.name
             + '. Placing empty coordinate system.',
             "ERROR",
@@ -216,6 +221,11 @@ def createLink(link):
             geometries.append((geom, viscol))
     for viscol in link.collisions:
         geom = createGeometry(viscol, "collision")
+        if geom is None:
+            # [TODO] this happens when we load from SMURF if this collision has primitives defined but no mesh
+            # we need to display those primitive alternatives in Blender, too.
+            # Yet, we are completely ignoring them
+            continue
         bound_box = (
             max(bound_box[0], max([c[0] for c in geom.bound_box])),
             max(bound_box[1], max([c[1] for c in geom.bound_box])),
hasattr(self, "robotname") and len(self.robotname) > 0 assert hasattr(self, "test") and len(self.test) >= 1 @@ -49,21 +77,32 @@ def __init__subclass__(self, configfile, pipeline, processed_model_exists=True): self.test, self.pipeline.default_test if hasattr(pipeline, "default_test") else resources.get_default_ci_test_definition() ) + if "test_data_file" in self.test: + self.test_data_file = self.test["test_data_file"] self.deployment = misc.merge_default( self.deployment if hasattr(self, "deployment") else {}, self.pipeline.default_deployment if hasattr(pipeline, "default_deployment") else resources.get_default_ci_deploy_definition() ) + self.extended_test_protocol = "" + # get directories for this model self.exportdir = os.path.join(self.pipeline.temp_dir, self.modelname) self.targetdir = os.path.join(self.pipeline.root, self.modelname) self.tempdir = os.path.join(self.pipeline.temp_dir, "temp_" + self.modelname) + self.extended_test_protocol_path = os.path.join(self.tempdir, "extended_test_protocol.log") + + if not os.path.isdir(self.targetdir): + log.warning(self.targetdir + "does not exist. Trying to check it our from: " + self.pipeline.remote_base + self.modelname) + git.clone(self.pipeline.remote_base + self.modelname, self.targetdir, branch=git.GIT_PRODUCTION_BRANCH, pipeline=self.pipeline, all_branches=True, shallow=0) # parse export_config self.export_meshes = {} self.export_xmlfile = None if not hasattr(self, "export_config"): self.export_config = self.pipeline.default_export_config + if hasattr(self, "append_export_config"): + self.export_config += self.append_export_config for ec in self.export_config: if ec["type"] in KINEMATIC_TYPES: assert "mesh_format" in ec @@ -89,12 +128,12 @@ def __init__subclass__(self, configfile, pipeline, processed_model_exists=True): elif ec["type"] == "kccd": self.export_meshes["iv"] = self.pipeline.meshes["iv"] - def __init__(self, configfile, pipeline, processed_model_exists=True): + def __init__(self, configfile, pipeline, processed_model_exists=True, configkey=None): # These variables have to be defined in the config file self.input_models = {} self.assemble = {} # init - self.__init__subclass__(configfile, pipeline, processed_model_exists) + self.__init__subclass__(configfile, pipeline, processed_model_exists, configkey=configkey) # check whether all necessary configurations are there assert hasattr(self, "input_models") and len(self.input_models) >= 1 assert hasattr(self, "assemble") and len(self.assemble) > 0 @@ -117,7 +156,8 @@ def __init__(self, configfile, pipeline, processed_model_exists=True): target=repo_path, commit_id=self.input_models["repo"]["commit"], recursive=True, - ignore_failure=True + ignore_failure=True, + recreate=True ) self.basefile = os.path.join(repo_path, self.input_models["repo"]["model_in_repo"]) r = Robot(inputfile=self.basefile) @@ -170,11 +210,17 @@ def _join_to_basefile(self): os.path.join(self.pipeline.configdir, config["derived_base"]), self.pipeline, processed_model_exists=True) }) - # copy the mesh files to the temporary combined model directory - for mp in self.dep_models[name].export_meshes.values(): - misc.create_symlink( - self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.basedir, str(mp)) - ) + if self.pipeline.central_meshes: + # copy the mesh files to the temporary combined model diretory + for mp in self.dep_models[name].export_meshes.values(): + misc.create_symlink( + self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.basedir, str(mp)) + ) + 
@@ -170,11 +210,17 @@ def _join_to_basefile(self):
                         os.path.join(self.pipeline.configdir, config["derived_base"]), self.pipeline,
                         processed_model_exists=True)
                 })
-                # copy the mesh files to the temporary combined model directory
-                for mp in self.dep_models[name].export_meshes.values():
-                    misc.create_symlink(
-                        self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.basedir, str(mp))
-                    )
+                if self.pipeline.central_meshes:
+                    # copy the mesh files to the temporary combined model directory
+                    for mp in self.dep_models[name].export_meshes.values():
+                        misc.create_symlink(
+                            self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.basedir, str(mp))
+                        )
+                else:
+                    for mp in self.dep_models[name].export_meshes.values():
+                        misc.create_dir(
+                            self.pipeline, os.path.join(self.basedir, str(mp))
+                        )
         for name, config in self.input_models.items():
             if "basefile" in config.keys():
                 kwargs = {}
@@ -183,7 +229,6 @@ def _join_to_basefile(self):
                     kwargs["is_human"] = config["is_human"]
                 kwargs["inputfile"] = config["basefile"]
                 self.dep_models.update({name: Robot(name=name, **kwargs)})
-                self.dep_models[name].clean_meshes()
             elif "repo" in config.keys():
                 repo_path = os.path.join(self.tempdir, "repo", os.path.basename(config["repo"]["git"]))
                 git.clone(
@@ -192,7 +237,8 @@ def _join_to_basefile(self):
                     target=repo_path,
                     branch=config["repo"]["commit"],
                     recursive=True,
-                    ignore_failure=True
+                    ignore_failure=True,
+                    recreate=True
                 )
                 self.dep_models.update({
                     name: Robot(name=name, inputfile=os.path.join(repo_path, config["repo"]["model_in_repo"]),
@@ -200,7 +246,7 @@ def _join_to_basefile(self):
                                 if "submechanisms_in_repo" in config["repo"] else None,
                                 is_human=config["is_human"] if "is_human" in config else False)
                 })
-                self.dep_models[name].clean_meshes()
+                self.dep_models[name].clean_meshes(check_enough_vertices=False)
         # now we can join these models
         # 1. get root model
         if isinstance(self.dep_models[self.assemble["model"]], Robot):
@@ -235,9 +281,8 @@ def _join_to_basefile(self):
             assert self.assemble["take_leaf"] in combined_model, f"{combined_model}"
             combined_model = combined_model[self.assemble["take_leaf"]]
 
-        combined_model.clean_meshes()
-
         if "mirror" in self.assemble.keys():
+            combined_model.clean_meshes()
             combined_model.mirror_model(
                 mirror_plane=self.assemble["mirror"]["plane"] if "plane" in self.assemble["mirror"].keys() else [0, 1, 0],
                 flip_axis=self.assemble["mirror"]["flip_axis"] if "flip_axis" in self.assemble["mirror"].keys() else 1,
@@ -374,22 +419,36 @@ def recursive_attach(parent, children, parentname):
                           check_submechs=False)
 
     def recreate_sym_links(self):
-        for mt, mp in self.export_meshes.items():
-            log.info('Re-creating mesh symlinks')
-            misc.create_symlink(
-                self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.exportdir, str(mp))
-            )
+        if self.pipeline.central_meshes:
+            for mt, mp in self.export_meshes.items():
+                log.info('  Re-creating central meshes symlinks')
+                misc.create_symlink(
+                    self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.exportdir, str(mp))
+                )
+        else:
+            for mt, mp in self.export_meshes.items():
+                log.info('  Ensuring mesh directories')
+                misc.create_dir(
+                    self.pipeline, os.path.join(self.exportdir, str(mp))
+                )
 
     def process(self):
         misc.recreate_dir(self.pipeline, self.tempdir)
         misc.recreate_dir(self.pipeline, self.exportdir)
 
         # Make sure the mesh symlinks are set correctly
-        for mt, mp in self.export_meshes.items():
-            log.info('  Creating mesh symlinks')
-            misc.create_symlink(
-                self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.exportdir, str(mp))
-            )
+        if self.pipeline.central_meshes:
+            for mt, mp in self.export_meshes.items():
+                log.info('  Creating mesh symlinks')
+                misc.create_symlink(
+                    self.pipeline, os.path.join(self.pipeline.temp_dir, str(mp)), os.path.join(self.exportdir, str(mp))
+                )
+        else:
+            for mt, mp in self.export_meshes.items():
+                log.info('  Creating mesh directories')
+                misc.create_dir(
+                    self.pipeline, os.path.join(self.exportdir, str(mp))
+                )
 
         self._join_to_basefile()
         self._load_robot()
@@ -399,7 +458,6 @@ def process(self):
         log.info('Start processing robot')
 
         self.robot.correct_inertials()
-        self.robot.clean_meshes()
 
        if hasattr(self,
"materials"): for m_name, m_def in self.materials.items(): @@ -417,9 +475,12 @@ def process(self): if hasattr(self, "frames"): _default = {} if "$default" not in self.frames else self.frames["$default"] _ignore_new_links = [] + make_root = [] for linkname, config in self.frames.items(): if linkname.startswith("$"): continue + if config.get("make_root", False): + make_root.append(linkname) self.frames[linkname] = misc.merge_default(config, _default) config = copy(self.frames[linkname]) if self.robot.get_link(linkname) is None: @@ -442,6 +503,11 @@ def process(self): ) self.robot.add_link_by_properties(linkname, _joint, **config) _ignore_new_links.append(linkname) + if len(make_root) == 1: + self.robot.exchange_root(make_root[0]) + elif len(make_root) > 1: + raise AssertionError("You can make only one link to the new root not "+str(make_root)) + assert self.robot.get_root() for link in self.robot.links: linkname = link.name if linkname in _ignore_new_links: @@ -466,16 +532,22 @@ def process(self): only_frame=(k == "transform_frame"), # transform_to="transform" in v.keys() and v["transform"] == "TO" ) - elif k == "reparent_to": + elif k == "reparent_to" and v != link.name: self.robot.move_link_in_tree(link_name=linkname, new_parent_name=v) elif k == "estimate_missing_com" and v is True: self.robot.set_estimated_link_com(linkname, dont_overwrite=True) elif k == "material" or k == "materials": link.materials = v - elif k == "name_editing": - link.name = misc.edit_name_string(link.name, **v) else: link.add_annotation(k, v, overwrite=True) + for link in self.robot.links: + linkname = link.name + if linkname in _ignore_new_links: + continue + config = self.frames[linkname] if linkname in self.frames else _default + for k, v in config.items(): + if k == "name_editing": + link.name = misc.edit_name_string(link.name, **v, correspondance=link) if "$name_editing" in self.frames: self.robot.rename("links", self.robot.links, **self.frames.get("$name_editing", {})) @@ -512,7 +584,7 @@ def process(self): for joint in self.robot.joints: jointname = joint.name if jointname in self.joints or self.joints.get("$all", False): - config = misc.merge_default(self.joints[jointname], _default) + config = misc.merge_default(self.joints[jointname] if jointname in self.joints else {}, _default) for k, v in config.items(): if k == "remove" and v is True: remove_joints.append(jointname) @@ -520,10 +592,11 @@ def process(self): if k in ["min", "max", "eff", "vel"]: if joint.limit is None: joint.limit = representation.JointLimit() + if k in ["friction", "damping", "spring_stiffness", "spring_reference"]: + if joint.dynamics is None: + joint.dynamics = representation.JointDynamics() if k == "move_joint_axis_to_intersection": self.robot.move_joint_to_intersection(jointname, v) - elif k == "name_editing": - joint.name = misc.edit_name_string(joint.name, **v) elif k == "type": joint.joint_type = v elif k == "min": @@ -534,6 +607,14 @@ def process(self): joint.limit.effort = misc.read_number_from_config(v) elif k == "vel": joint.limit.velocity = misc.read_number_from_config(v) + elif k == "friction": + joint.dynamics.friction = misc.read_number_from_config(v) + elif k == "damping": + joint.dynamics.damping = misc.read_number_from_config(v) + elif k == "spring_stiffness": + joint.dynamics.spring_stiffness = misc.read_number_from_config(v) + elif k == "spring_reference": + joint.dynamics.spring_reference = misc.read_number_from_config(v) elif k == "movement_depends_on": for jd in v: 
@@ -512,7 +584,7 @@ def process(self):
         for joint in self.robot.joints:
             jointname = joint.name
             if jointname in self.joints or self.joints.get("$all", False):
-                config = misc.merge_default(self.joints[jointname], _default)
+                config = misc.merge_default(self.joints[jointname] if jointname in self.joints else {}, _default)
                 for k, v in config.items():
                     if k == "remove" and v is True:
                         remove_joints.append(jointname)
                         continue
                     if k in ["min", "max", "eff", "vel"]:
                         if joint.limit is None:
                             joint.limit = representation.JointLimit()
+                    if k in ["friction", "damping", "spring_stiffness", "spring_reference"]:
+                        if joint.dynamics is None:
+                            joint.dynamics = representation.JointDynamics()
                     if k == "move_joint_axis_to_intersection":
                         self.robot.move_joint_to_intersection(jointname, v)
-                    elif k == "name_editing":
-                        joint.name = misc.edit_name_string(joint.name, **v)
                     elif k == "type":
                         joint.joint_type = v
                     elif k == "min":
                         joint.limit.lower = misc.read_number_from_config(v)
                     elif k == "max":
                         joint.limit.upper = misc.read_number_from_config(v)
                     elif k == "eff":
                         joint.limit.effort = misc.read_number_from_config(v)
                     elif k == "vel":
                         joint.limit.velocity = misc.read_number_from_config(v)
+                    elif k == "friction":
+                        joint.dynamics.friction = misc.read_number_from_config(v)
+                    elif k == "damping":
+                        joint.dynamics.damping = misc.read_number_from_config(v)
+                    elif k == "spring_stiffness":
+                        joint.dynamics.spring_stiffness = misc.read_number_from_config(v)
+                    elif k == "spring_reference":
+                        joint.dynamics.spring_reference = misc.read_number_from_config(v)
                     elif k == "movement_depends_on":
                         for jd in v:
                             joint.joint_dependencies.append(representation.JointMimic(joint=jd["joint_name"],
@@ -543,36 +624,36 @@ def process(self):
                         if "joint_name" in v:
                             v["joint"] = v.pop("joint_name")
                         joint.mimic = representation.JointMimic(**v)
-                    elif k == "active":
+                    elif k == "active" and v is True:
+                        _motor = representation.Motor(name=jointname+"_motor", joint=jointname)
+                        self.robot.add_motor(_motor)
+                    elif k == "motor":
+                        assert type(v) == dict
                         v = misc.merge_default(v, resources.get_default_motor())
-                        if type(v) == dict:
-                            if "name" not in v:
-                                v["name"] = jointname+"_motor"
-                            if "joint" not in v:
-                                v["joint"] = jointname
-                            else:
-                                assert jointname == v["joint"]
-                            _motor = representation.Motor(**v)
-                            self.robot.add_motor(_motor)
-                        elif v is True:
-                            _motor = representation.Motor(name=jointname+"_motor", joint=jointname)
-                            self.robot.add_motor(_motor)
+                        if "name" not in v:
+                            v["name"] = jointname+"_motor"
+                        if "joint" not in v:
+                            v["joint"] = jointname
+                        else:
+                            assert jointname == v["joint"]
+                        _motor = representation.Motor(**v)
+                        self.robot.add_motor(_motor)
                     else:  # axis
                         joint.add_annotation(k, v, overwrite=True)
-            elif "$default" in self.joints:
-                config = _default
-                for k, v in config.items():
-                    if k not in ["min", "max", "eff", "vel", "movement_depends_on", "active"] + representation.Joint._class_variables:
-                        joint.add_annotation(k, v, overwrite=True)
-            # [TODO v2.0.0] Re-add transmission support
+            # elif "$default" in self.joints:
+            #     config = _default
+            #     for k, v in config.items():
+            #         if k not in ["min", "max", "eff", "vel", "movement_depends_on", "mimic", "active", "name_editing", "move_joint_axis_to_intersection"] + representation.Joint._class_variables:
+            #             joint.add_annotation(k, v, overwrite=True)
+            # [TODO v2.1.0] Re-add transmission support
             joint.link_with_robot(self.robot)
         for joint in remove_joints:
             self.robot.remove_joint(joint)
         for joint in self.robot.joints:
             jointname = joint.name
-            if jointname in self.joints:
-                config = misc.merge_default(self.joints[jointname], _default)
-                joint.name = misc.edit_name_string(joint.name, **config.get("name_editing", {}))
+            if jointname in self.joints or self.joints.get("$all", False):
+                config = misc.merge_default(self.joints[jointname] if jointname in self.joints else {}, _default)
+                joint.name = misc.edit_name_string(joint.name, **config.get("name_editing", {}), correspondance=joint)
         if "$name_editing" in self.joints:
             self.robot.rename("joints", self.robot.joints, **self.joints.get("$name_editing", {}))
         # as we mess manually with some names, we need to make sure the tree maps are up to date
@@ -581,20 +662,20 @@ def process(self):
         # Check for joint definitions
         self.robot.check_joint_definitions(
             raise_error=True,
-            backup=self.joints["$default"] if (
+            backup=self.joints["$backup"] if (
                 hasattr(self, "joints")
-                and "$default" in self.joints.keys()
-                and "backup" in self.joints["$default"].keys()
-                and self.joints["$default"]["backup"]
+                and "$backup" in self.joints.keys()
             ) else None
         )

+        self.robot.clean_meshes()
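The joint loop above maps flat config keys onto the representation: min/max/eff/vel populate a JointLimit, while the newly supported friction/damping/spring_stiffness/spring_reference lazily create a JointDynamics. A hypothetical joints section exercising these paths (key names from the code, values invented):

    joints = {
        "$default": {"damping": 0.1},
        "$all": False,                  # set True to apply $default to every joint
        "shoulder_pitch": {
            "min": -1.57, "max": 1.57, "eff": 120.0, "vel": 3.0,
            "friction": 0.05,           # creates joint.dynamics on demand
            "active": True,             # shorthand: adds a default Motor for this joint
        },
        "gripper_mimic": {"mimic": {"joint_name": "gripper_main"}},
        "obsolete_joint": {"remove": True},
    }

`motor: {...}` is the verbose alternative to `active: True`; it is merged with `resources.get_default_motor()` before the Motor is constructed.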
conf["shape"] == "remove": remove_collision(self.robot, link.name) elif conf["shape"] != "mesh": - if not ("do_not_apply_primitives" in self.collisions.keys() and - self.collisions["do_not_apply_primitives"] is True): + if not (conf["shape"] == "convex" and "join" in conf.keys() and conf["join"] is True): # beacuse join_collisions already make convex hulls replace_collision( self.robot, link.name, shape=conf["shape"], oriented=conf["oriented"] if "oriented" in conf.keys() else True, scale=conf["scale"] if "scale" in conf.keys() else 1.0, + apply_primitives=self.collisions.get("apply_primitives", False) ) # leads to problems in reusing identic meshes # if conf["shape"] == "convex": @@ -653,6 +734,18 @@ def process(self): if hasattr(self, 'visuals'): if "$name_editing" in self.visuals.keys(): self.robot.rename("visuals", self.robot.visuals, **self.visuals["$name_editing"]) + for link in self.robot.links: + conf = deepcopy(self.visuals["$default"]) + exclude = self.visuals["$exclude"] if "$exclude" in self.visuals.keys() else [] + if link.name in exclude: + continue + if link.name in self.visuals.keys(): + conf = misc.merge_default(self.visuals[link.name], conf) + if "remove" in conf.keys(): + if type(conf["remove"]) is list: + remove_visual(self.robot, link.name, visualname=conf["remove"]) + elif type(conf["remove"]) is str: + remove_visual(self.robot, link.name, visualname=[c.name for c in link.visuals if re.fullmatch(r"" + conf["remove"], c.name) is not None]) if hasattr(self, "exoskeletons") or hasattr(self, "submechanisms"): if hasattr(self, "exoskeletons"): @@ -709,7 +802,14 @@ def process(self): log.debug(' Attached {} {}'.format(s["type"], s['name'])) if hasattr(self, "poses"): + if "$default" in self.poses: + assert type(self.poses["$default"]) in [float, int] + default_config = {str(j): float(self.poses["$default"]) for j in self.robot.joints if j.joint_type != "fixed"} for (cn, config) in self.poses.items(): + if cn == "$default": + continue + if "$default" in self.poses: + misc.merge_default(config, default_config) pose = poses.JointPoseSet(robot=self.robot, name=cn, configuration=config) self.robot.add_pose(pose) @@ -734,7 +834,13 @@ def export(self): self.robot.link_entities() self.robot.submodel_defs = {} # we define these here per model ros_pkg_name = self.robot.export(outputdir=self.exportdir, export_config=self.export_config, - rel_mesh_pathes=self.export_meshes, ros_pkg_later=True) + rel_mesh_pathes=self.pipeline.meshes, ros_pkg_later=True, + reduce_meshes=getattr(self, "reduce_meshes", None), + apply_scale=getattr(self, "apply_scale", None), + ros_pkg_name=getattr(self, "ros_pkg_name", None), + filename=getattr(self, "filename", None), + mark_as_autogenerated=getattr(self, "mark_as_autogenerated", False) + ) for vc in self.robot.collisions + self.robot.visuals: if isinstance(vc.geometry, representation.Mesh): self.processed_meshes = self.processed_meshes.union([os.path.realpath(f["filepath"]) for f in vc.geometry._exported.values()]) @@ -745,7 +851,7 @@ def export(self): pass if "keep_files" in self.deployment: - git.reset(self.targetdir, "autobuild", "master") + git.reset(self.targetdir, git.GIT_REMOTE_NAME, git.GIT_PRODUCTION_BRANCH, hard=False) misc.store_persisting_files(self.pipeline, self.targetdir, self.deployment["keep_files"], self.exportdir) log.info('Finished export of the new model') @@ -753,28 +859,30 @@ def export(self): if hasattr(self, "post_processing"): for script in self.post_processing: + os.environ["PHOBOS_CI_EXPORTDIR"] = self.exportdir + 
os.environ["PHOBOS_CI_TARGETDIR"] = self.targetdir + os.environ["PHOBOS_CI_DEFINITIONDIR"] = self.pipeline.configdir if "cwd" not in script.keys(): script["cwd"] = self.exportdir else: - script["cwd"] = os.path.abspath(script["cwd"]) - misc.execute_shell_command(script["cmd"], script["cwd"]) + script["cwd"] = misc.abspath(script["cwd"]) + misc.execute_shell_command(script["cmd"], script["cwd"], verbose=True) log.info('Finished post_processing of the new model') if ros_pkg_name is not None: self.robot.export_ros_package_files( self.exportdir, ros_pkg_name, - author=os.path.join(self.pipeline.configdir, self.modelname + ".yml"), - maintainer="https://git.hb.dfki.de/phobos/ci-run/-/wikis/home", - url=self.pipeline.remote_base + "/" + self.modelname, - version=self.pipeline.git_rev[10] + author=self.pipeline.relpath(self.configfile), + maintainer="Phobos CI Pipeline", + url=self.pipeline.remote_base + self.modelname, + version=getattr(self, "version", self.pipeline.git_rev[10]), + license=getattr(self, "license", None) ) - # REVIEW are the following lines here obsolete? - if "keep_files" in self.deployment: - git.reset(self.targetdir, "autobuild", "master") - misc.store_persisting_files(self.pipeline, self.targetdir, self.deployment["keep_files"], self.exportdir) - def deploy(self, mesh_commit, failed_model=False, uses_lfs=False): + feature_branch = None + if os.getenv("CI_COMMIT_BRANCH", "").startswith("feature/"): + feature_branch = os.getenv("CI_COMMIT_BRANCH", None) if "do_not_deploy" in self.deployment and self.deployment["do_not_deploy"] is True: return "deployment suppressed in cfg" if not os.path.exists(self.targetdir): @@ -782,18 +890,34 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False): " This might happen if you haven't added the result" + " repo to the manifest.xml or spelled it wrong.") repo = self.targetdir - git.reset(self.targetdir, "autobuild", "master") + # Clear local changes + git.reset(self.targetdir, git.GIT_REMOTE_NAME, git.GIT_PRODUCTION_BRANCH) if "keep_files" in self.deployment: misc.store_persisting_files(self.pipeline, repo, self.deployment["keep_files"], os.path.join(self.tempdir, "_sustain")) - git.update(repo, update_target_branch="$CI_UPDATE_TARGET_BRANCH" if failed_model else "master") + # Fetch the latest changes from the target branch for a clean merge + git.update(repo, update_target_branch=feature_branch if feature_branch else (git.GIT_FAILURE_BRANCH if failed_model else git.GIT_SUCCESS_BRANCH)) + # Remove everything so that we can implement the produced version instead git.clear_repo(repo) + git.ignore(repo, "*\_history.log") + if uses_lfs: + track = ["*.obj", "*.stl", "*.dae", "*.bobj", "*.iv", "*.mtl", "*.OBJ", "*.STL", "*.DAE", "*.BOBJ", "*.IV", "*.MTL"] + for file in getattr(self, "add_lfs_track", getattr(self.pipeline, "add_lfs_track", [])): + if not file.startswith("*"): + if not file.startswith("."): + file = "."+file + file = "*"+file + track.append(file) + track.append(file.upper()) + track.append(file.lower()) + track = list(set(track)) + git.install_lfs(repo, track=track) # add manifest.xml if necessary manifest_path = os.path.join(repo, "manifest.xml") - if not os.path.isfile(manifest_path): + if getattr(self.deployment, "add_manifest", getattr(self.pipeline, "add_manifest", False)) and not os.path.isfile(manifest_path): misc.copy(self.pipeline, resources.get_resources_path("manifest.xml.in"), manifest_path) with open(manifest_path, "r") as manifest: content = manifest.read() - url = self.pipeline.remote_base + "/" + 
         # add manifest.xml if necessary
         manifest_path = os.path.join(repo, "manifest.xml")
-        if not os.path.isfile(manifest_path):
+        if self.deployment.get("add_manifest", getattr(self.pipeline, "add_manifest", False)) and not os.path.isfile(manifest_path):
             misc.copy(self.pipeline, resources.get_resources_path("manifest.xml.in"), manifest_path)
             with open(manifest_path, "r") as manifest:
                 content = manifest.read()
-            url = self.pipeline.remote_base + "/" + self.modelname
+            url = self.pipeline.remote_base + self.modelname
             content = misc.regex_replace(content, {
                 "\$INPUTNAME": self.modelname,
                 "\$AUTHOR": "" + os.path.join(self.pipeline.configdir,
@@ -806,15 +930,17 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False):
         readme_path = os.path.join(repo, "README.md")
         if not os.path.isfile(readme_path):
             misc.copy(self.pipeline, resources.get_resources_path("README.md.in"), readme_path)
+        _url = misc.regex_replace(os.getenv("CI_PROJECT_URL", "PipelineRepo://"), {"https:\/\/.*@": "https://"})
+        if git.RUNNING_FOR_AZURE:
+            config_link = os.path.join(_url+"?path=", os.path.relpath(self.configfile, self.pipeline.configdir)) + "&version=GB" + os.getenv("CI_COMMIT_BRANCH", "master").replace("/", "%2F")
+        else:
+            config_link = os.path.join(_url+"-/blob", os.getenv("CI_COMMIT_BRANCH", "master"), os.path.relpath(self.configfile, self.pipeline.configdir))
         with open(readme_path, "r") as readme:
             content = readme.read()
         content = misc.regex_replace(content, {
             "\$MODELNAME": self.modelname,
-            "\$USERS": self.pipeline.mr_mention if not hasattr(self, "mr_mention") else self.mr_mention,
-            "\$DEFINITION_FILE": "https://git.hb.dfki.de/" + os.path.join(
-                os.environ["CI_ROOT_PATH"],
-                os.environ["CI_MODEL_DEFINITIONS_PATH"],
-                self.modelname + ".yml").replace("models/robots", "models-robots"),
+            "\$USERS": self.deployment.get("mr_mention", getattr(self.pipeline, "mr_mention", "none")),
+            "\$DEFINITION_FILE": config_link
         })
         with open(readme_path, "w") as readme:
             readme.write(content)
@@ -831,93 +957,112 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False):
                     subm["repo"],
                     subm["target"],
                     commit=subm["commit"] if "commit" in subm.keys() else None,
-                    branch=subm["branch"] if "branch" in subm.keys() else "master"
+                    branch=subm["branch"] if "branch" in subm.keys() else git.GIT_SUCCESS_BRANCH
                 )
         # update the mesh submodule
-        for mt, mp in self.export_meshes.items():
-            git.add_submodule(
-                repo,
-                os.path.relpath(
-                    os.path.join(self.pipeline.root, str(mp)), repo
-                ),
-                mp,
-                commit=mesh_commit[mt]["commit"],
-                branch=os.environ["CI_MESH_UPDATE_TARGET_BRANCH"]
-                if "CI_MESH_UPDATE_TARGET_BRANCH" in os.environ.keys() else "master"
-            )
+        if self.pipeline.central_meshes:
+            for mt, mp in self.export_meshes.items():
+                git.add_submodule(
+                    repo,
+                    os.path.relpath(
+                        os.path.join(self.pipeline.root, str(mp)), repo
+                    ),
+                    mp,
+                    commit=mesh_commit[mt]["commit"],
+                    branch=os.environ["CI_MESH_UPDATE_TARGET_BRANCH"]
+                    if "CI_MESH_UPDATE_TARGET_BRANCH" in os.environ.keys() else git.GIT_SUCCESS_BRANCH
+                )
         # now we move back to the push repo
-        misc.copy(self.pipeline, self.exportdir + "/*", self.targetdir)
+        misc.copy(self.pipeline, self.exportdir + "/*", self.targetdir, silent=False)
         if "keep_files" in self.deployment:
             misc.restore_persisting_files(self.pipeline, repo, self.deployment["keep_files"], os.path.join(self.tempdir, "_sustain"))
-        git.commit(repo, origin_repo=os.path.abspath(self.pipeline.configdir))
-        git.add_remote(repo, self.pipeline.remote_base + "/" + self.modelname)
-        mr = git.MergeRequest()
-        mr.target = self.deployment["mr_target_branch"]
-        mr.title = self.deployment["mr_title"]
-        mr.description = self.deployment["mr_description"]
-        if os.path.isfile(self.pipeline.test_protocol):
-            log.info("Appending test_protocol to MR description")
-            with open(self.pipeline.test_protocol, "r") as f:
-                protocol = load_json(f.read())
-            mr.description = misc.append_string(mr.description, "\n" + str(protocol["all"]))
-            mr.description = misc.append_string(mr.description, str(protocol[self.modelname]))
-        else:
-            log.warning(f"Did not find test_protocol file at: {self.pipeline.test_protocol}")
-        if failed_model:
+        commit_hash = git.commit(repo, origin_repo=self.pipeline.configdir)
+        log.info("Created commit: "+commit_hash)
+        git.add_remote(repo, self.pipeline.remote_base + self.modelname)
+        if not feature_branch:
+            mr = git.MergeRequest()
+            mr.title = self.deployment["mr_title"]
+            mr.description = self.deployment["mr_description"]
+            if os.path.isfile(self.pipeline.test_protocol):
+                log.info("Appending test_protocol to MR description")
+                with open(self.pipeline.test_protocol, "r") as f:
+                    protocol = load_json(f.read())
+                mr.description = misc.append_string(mr.description, "\n" + str(protocol["all"]))
+                mr.description = misc.append_string(mr.description, str(protocol[self.modelname]))
+                if not bool(int(os.getenv("RUNNING_FOR_AZURE", 0))):
+                    log.info("Appending extended test_protocol to MR description")
+                    with open(self.extended_test_protocol_path, "r") as f:
+                        protocol = f.read()
+                    mr.description = misc.append_string(mr.description, "\n\n\n# Extended Protocol\n" + protocol)
+                elif os.getenv("CI_EXTENDED_TEST_PROTOCOL_URL", None) is not None:
+                    log.info("Appending link to extended test_protocol to MR description")
+                    mr.description = misc.append_string(mr.description, "\n\n The extended test protocol can be found here: " + os.getenv("CI_EXTENDED_TEST_PROTOCOL_URL", "URL not available"))
+            else:
+                log.warning(f"Did not find test_protocol file at: {self.pipeline.test_protocol}")
             if "mr_mention" in self.deployment:
                 mr.mention = self.deployment["mr_mention"]
-            return_msg = "pushed to " + git.push(repo, merge_request=mr)
-        else:
-            return_msg = "pushed to " + git.push(repo, branch="master")
+            elif hasattr(self.pipeline, "mr_mention"):
+                mr.mention = self.pipeline.mr_mention
+            if failed_model:
+                mr.target = self.deployment.get("mr_target_branch", git.GIT_SUCCESS_BRANCH)
+                return_msg = "pushed to " + git.push(repo, branch=git.GIT_FAILURE_BRANCH,
+                                                     merge_request=mr)
+            else:
+                mr.target = self.deployment.get("mr_target_branch", git.GIT_PRODUCTION_BRANCH)
+                return_msg = "pushed to " + git.push(repo, branch=git.GIT_SUCCESS_BRANCH,
+                                                     merge_request=mr if git.GIT_PRODUCTION_BRANCH != git.GIT_SUCCESS_BRANCH else None)
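To summarize the routing that this block implements: pushes from feature/* CI runs mirror the feature branch without a merge request, failed models go to the failure branch with an MR, and successful ones go to the success branch, with an MR only when production and success branches differ. A compact sketch, assuming the GIT_* branch constants from phobos.utils.git:

    def pick_push_target(feature_branch, failed_model, git):
        """Return (branch, open_mr) for the result-repo push."""
        if feature_branch:
            return feature_branch, False
        if failed_model:
            return git.GIT_FAILURE_BRANCH, True
        return git.GIT_SUCCESS_BRANCH, git.GIT_PRODUCTION_BRANCH != git.GIT_SUCCESS_BRANCH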
            # deploy to mirror
-        if "mirror" in self.deployment:
-            log.info(f"Deploying to mirror:\n {dump_yaml(self.deployment['mirror'], default_flow_style=False)}")
-            mirror_dir = os.path.join(self.tempdir, "deploy_mirror")
-            git.clone(
-                pipeline=self.pipeline,
-                repo=self.deployment["mirror"]["repo"],
-                target=mirror_dir,
-                branch="master",
-                shallow=False
-            )
-            git.update(mirror_dir, update_remote="origin", update_target_branch=self.deployment["mirror"]["branch"])
-            git.clear_repo(mirror_dir)
-            submodule_dict = {}
-            if "submodules" in self.deployment["mirror"].keys() and ".gitmodules" in os.listdir(repo):
-                submodule_file = open(os.path.join(repo, ".gitmodules"), "r").read()
-                current_key = None
-                for line in submodule_file.split("\n"):
-                    if line.startswith("["):
-                        current_key = line.split()[1][1:-2]
-                        if current_key not in submodule_dict.keys():
-                            submodule_dict[current_key] = {}
-                    elif " = " in line:
-                        k, v = line.strip().split(" = ")
-                        submodule_dict[current_key][k] = v
-                if self.deployment["mirror"]["submodules"]:
-                    for _, sm in submodule_dict.items():
-                        git.add_submodule(mirror_dir, sm["url"], sm["path"],
-                                          branch=sm["branch"] if "branch" in sm.keys() else "master")
-            misc.copy(self.pipeline, repo + "/*", mirror_dir)
-            if "submodules" in self.deployment["mirror"].keys() and ".gitmodules" in os.listdir(repo):
-                for _, sm in submodule_dict.items():
-                    # git.clone(self.pipeline, os.path.join(self.deployment["mirror"]["repo"], sm["url"]), sm["path"],
-                    #           sm["branch"] if "branch" in sm.keys() else "master", cwd=mirror_dir)
-                    misc.execute_shell_command("rm -rf " + os.path.join(sm["path"], ".git*"), mirror_dir)
-            git.commit(mirror_dir, origin_repo=self.pipeline.configdir)
-            if "merge_request" in self.deployment["mirror"].keys() and self.deployment["mirror"]["merge_request"]:
-                if "mr_mention" in self.deployment["mirror"].keys():
-                    mr.mention = self.deployment["mirror"]["mr_mention"]
-                if "mr_title" in self.deployment["mirror"].keys():
-                    mr.title = self.deployment["mirror"]["mr_title"]
-                if "mr_target" in self.deployment["mirror"].keys():
-                    mr.target = self.deployment["mirror"]["mr_target"]
-                git.push(mirror_dir, remote="origin", merge_request=mr, branch=self.deployment["mirror"]["branch"])
-            else:
-                git.push(mirror_dir, remote="origin", branch=self.deployment["mirror"]["branch"])
-            return_msg += "& pushed to mirror"
-        git.checkout("master", repo)
+            if "mirror" in self.deployment:
+                log.info(f"Deploying to mirror:\n {dump_yaml(self.deployment['mirror'], default_flow_style=False)}")
+                mirror_dir = os.path.join(self.tempdir, "deploy_mirror")
+                git.clone(
+                    pipeline=self.pipeline,
+                    repo=self.deployment["mirror"]["repo"],
+                    target=mirror_dir,
+                    branch=git.GIT_SUCCESS_BRANCH,
+                    shallow=False,
+                    recreate=True
+                )
+                git.update(mirror_dir, update_remote=git.GIT_REMOTE_NAME, update_target_branch=self.deployment["mirror"]["branch"])
+                git.clear_repo(mirror_dir)
+                submodule_dict = {}
+                if "submodules" in self.deployment["mirror"].keys() and ".gitmodules" in os.listdir(repo):
+                    submodule_file = open(os.path.join(repo, ".gitmodules"), "r").read()
+                    current_key = None
+                    for line in submodule_file.split("\n"):
+                        if line.startswith("["):
+                            current_key = line.split()[1][1:-2]
+                            if current_key not in submodule_dict.keys():
+                                submodule_dict[current_key] = {}
+                        elif " = " in line:
+                            k, v = line.strip().split(" = ")
+                            submodule_dict[current_key][k] = v
+                    if self.deployment["mirror"]["submodules"]:
+                        for _, sm in submodule_dict.items():
+                            git.add_submodule(mirror_dir, sm["url"], sm["path"],
+                                              branch=sm["branch"] if "branch" in sm.keys() else git.GIT_SUCCESS_BRANCH)
+                misc.copy(self.pipeline, repo + "/*", mirror_dir)
+                if "submodules" in self.deployment["mirror"].keys() and ".gitmodules" in os.listdir(repo):
+                    for _, sm in submodule_dict.items():
+                        # git.clone(self.pipeline, os.path.join(self.deployment["mirror"]["repo"], sm["url"]), sm["path"],
+                        #           sm["branch"] if "branch" in sm.keys() else git.GIT_SUCCESS_BRANCH, cwd=mirror_dir)
+                        misc.execute_shell_command("rm -rf " + os.path.join(sm["path"], ".git*"), mirror_dir)
+                git.commit(mirror_dir, origin_repo=self.pipeline.configdir)
+                if "merge_request" in self.deployment["mirror"].keys() and self.deployment["mirror"]["merge_request"]:
+                    if "mr_mention" in self.deployment["mirror"].keys():
+                        mr.mention = self.deployment["mirror"]["mr_mention"]
+                    if "mr_title" in self.deployment["mirror"].keys():
+                        mr.title = self.deployment["mirror"]["mr_title"]
+                    if "mr_target" in self.deployment["mirror"].keys():
+                        mr.target = self.deployment["mirror"]["mr_target"]
+                    git.push(mirror_dir, remote=git.GIT_REMOTE_NAME, merge_request=mr, branch=self.deployment["mirror"]["branch"])
+                else:
+                    git.push(mirror_dir, remote=git.GIT_REMOTE_NAME, branch=self.deployment["mirror"]["branch"])
+                return_msg += " & pushed to mirror"
+        else:
+            return_msg = "pushed to " + git.push(repo, branch=feature_branch)
+        #git.checkout(repo, branch=git.GIT_SUCCESS_BRANCH)
         return return_msg
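The hand-rolled .gitmodules parse used for the mirror is easiest to verify in isolation; the same logic applied to a two-line example file:

    gitmodules = """\
    [submodule "meshes"]
        path = meshes
        url = ../meshes.git
    """
    submodule_dict = {}
    current_key = None
    for line in gitmodules.split("\n"):
        if line.startswith("["):
            current_key = line.split()[1][1:-2]  # drops the quotes and closing bracket
            submodule_dict.setdefault(current_key, {})
        elif " = " in line:
            k, v = line.strip().split(" = ")
            submodule_dict[current_key][k] = v
    # {'meshes': {'path': 'meshes', 'url': '../meshes.git'}}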
to " + git.push(repo, branch=feature_branch) + #git.checkout(repo, branch=git.GIT_SUCCESS_BRANCH) return return_msg def get_input_meshes(self): diff --git a/phobos/ci/model_testing.py b/phobos/ci/model_testing.py index e2443f5a..fb05ceff 100644 --- a/phobos/ci/model_testing.py +++ b/phobos/ci/model_testing.py @@ -8,7 +8,7 @@ from ..io.representation import Pose from ..utils import xml from ..utils.hyrodyn import get_load_report, debug_report -from ..utils.misc import execute_shell_command, list_files +from ..utils.misc import execute_shell_command, list_files, append_string log = get_logger(__name__) @@ -77,22 +77,22 @@ def _load_old_hyrodyn_model(self): # info procedures def info_swing_my_robot(self): if not (hasattr(self.new, "swing_my_robot") and self.new.swing_my_robot is True): - return - log.info("Running swing_my_robot") + return None + protocol = None + protocol = append_string(protocol, "Running swing_my_robot", loglevel="info") if not HYRODYN_AVAILABLE: - log.warning('Info procedure swing_my_robot not possible: Hyrodyn not present') - return + protocol = append_string(protocol, 'Info procedure swing_my_robot not possible: Hyrodyn not present', loglevel="warning") + return protocol submech_file = self.new.submechanisms_file cmd = "swing_my_robot.py" if submech_file is not None and os.path.exists(submech_file): - log.info(f'Submechs: {submech_file} {"exists!" if os.path.isfile(submech_file) else "does not exist!"}') + protocol = append_string(protocol, f'Submechs: {submech_file} {"exists!" if os.path.isfile(submech_file) else "does not exist!"}', loglevel="info") cmd += " --submechanism_yml " + submech_file limits_file = os.path.join(self.new.modeldir, "submechanisms/joint_limits.yml") - log.info(f'Limits: {limits_file} {"exists!" if os.path.isfile(limits_file) else "does not exist!"}') + protocol = append_string(protocol, f'Limits: {limits_file} {"exists!" if os.path.isfile(limits_file) else "does not exist!"}', loglevel="info") if os.path.isfile(limits_file): cmd += " --joint_limits_yml " + limits_file - log.info("URDF:", self.new.robot.xmlfile, - "exists!" if os.path.isfile(self.new.robot.xmlfile) else "does not exist!") + protocol = append_string(protocol, f"URDF: {self.new.robot.xmlfile}" + "exists!" 
@@ -77,22 +77,22 @@ def _load_old_hyrodyn_model(self):
     # info procedures
     def info_swing_my_robot(self):
         if not (hasattr(self.new, "swing_my_robot") and self.new.swing_my_robot is True):
-            return
-        log.info("Running swing_my_robot")
+            return None
+        protocol = None
+        protocol = append_string(protocol, "Running swing_my_robot", loglevel="info")
         if not HYRODYN_AVAILABLE:
-            log.warning('Info procedure swing_my_robot not possible: Hyrodyn not present')
-            return
+            protocol = append_string(protocol, 'Info procedure swing_my_robot not possible: Hyrodyn not present', loglevel="warning")
+            return protocol
         submech_file = self.new.submechanisms_file
         cmd = "swing_my_robot.py"
         if submech_file is not None and os.path.exists(submech_file):
-            log.info(f'Submechs: {submech_file} {"exists!" if os.path.isfile(submech_file) else "does not exist!"}')
+            protocol = append_string(protocol, f'Submechs: {submech_file} {"exists!" if os.path.isfile(submech_file) else "does not exist!"}', loglevel="info")
             cmd += " --submechanism_yml " + submech_file
         limits_file = os.path.join(self.new.modeldir, "submechanisms/joint_limits.yml")
-        log.info(f'Limits: {limits_file} {"exists!" if os.path.isfile(limits_file) else "does not exist!"}')
+        protocol = append_string(protocol, f'Limits: {limits_file} {"exists!" if os.path.isfile(limits_file) else "does not exist!"}', loglevel="info")
         if os.path.isfile(limits_file):
             cmd += " --joint_limits_yml " + limits_file
-        log.info("URDF:", self.new.robot.xmlfile,
-                 "exists!" if os.path.isfile(self.new.robot.xmlfile) else "does not exist!")
+        protocol = append_string(protocol, f"URDF: {self.new.robot.xmlfile} " + ("exists!" if os.path.isfile(self.new.robot.xmlfile) else "does not exist!"), loglevel="info")
         # cmd += " --export_animation"
         cmd += " --export_animation_as_mp4"
         cmd += " " + self.new.robot.xmlfile
@@ -101,27 +101,29 @@ def info_swing_my_robot(self):
         try:
             execute_shell_command(cmd, os.getcwd())
         except ImportError as e:
-            log.warning(f"Can't run swing_my_robot procedure because of missing library:\n {e}")
-            return
+            protocol = append_string(protocol, f"Can't run swing_my_robot procedure because of missing library:\n {e}", loglevel="warning")
+            return protocol
         except AssertionError as e:
-            log.error(f"Swing_my_robot.py failed due to an assertion error: {e}")
-            return
+            protocol = append_string(protocol, f"Swing_my_robot.py failed due to an assertion error: {e}", loglevel="error")
+            return protocol
         except:
             try:
                 execute_shell_command("python " + os.path.join(os.environ["AUTOPROJ_CURRENT_ROOT"],
                                                                "control/hyrodyn/python/") + cmd, os.getcwd())
             except ImportError as e:
-                log.warning(f"Can't run swing_my_robot procedure because of missing library:\n {e}")
-                return
+                protocol = append_string(protocol, f"Can't run swing_my_robot procedure because of missing library:\n {e}", loglevel="warning")
+                return protocol
            except AssertionError as e:
-                log.error(f"Swing_my_robot.py failed due to an assertion error: {e}")
-                return
+                protocol = append_string(protocol, f"Swing_my_robot.py failed due to an assertion error: {e}", loglevel="error")
+                return protocol
             except Exception as e:
-                log.error(f"Swing_my_robot.py failed due to an unknown error {e}")
+                protocol = append_string(protocol, f"Swing_my_robot.py failed due to an unknown error {e}", loglevel="error")
+        return protocol

     # test procedures
     def test_process_double_check(self):
         success = True
+        protocol = ""
         # have joints been removed?
         if hasattr(self.new, "remove_joints"):
             joint_names = [joint.name for joint in self.new.robot.joints]
@@ -129,20 +131,47 @@ def test_process_double_check(self):
                 success &= joint_name not in joint_names
         # [TODO later] check the others
         # check if meshes are there
-        log.debug(f"Checking meshes for {self.new.robot.xmlfile}")
+        protocol = append_string(protocol, f"Checking meshes for {self.new.robot.xmlfile}", loglevel="debug")
         for link in self.new.robot.links:
             for mesh in [c.geometry.filepath for c in link.collisions+link.visuals if isinstance(c.geometry, representation.Mesh)]:
                 mesh_path = xml.read_relative_filename(mesh, self.new.robot.xmlfile)
                 if os.path.isfile(mesh_path):
                     success &= True
                 else:
-                    log.error(f"Mesh {mesh_path} does not exist!")
+                    protocol = append_string(protocol, f"Mesh {mesh_path} does not exist!", loglevel="error")
                     success = False
-        return success
+        return success, protocol
+
+    def test_total_mass(self, expected_mass=None):
+        protocol = ""
+        if expected_mass is None:
+            path = os.path.join(os.path.dirname(self.new.configfile), getattr(self.new, "test_data_file", "test_data.yml"))
+            if not os.path.isfile(path):
+                path = os.path.join(os.path.dirname(self.new.exportdir), getattr(self.new, "test_data_file", "test_data.yml"))
+            assert os.path.isfile(path), path
+            log.info("Found test_data_file: "+path)
+            with open(path, "r") as f:
+                expected_mass = load_json(f.read())["test_data"]["expected_mass"]
+        assert expected_mass is not None and type(expected_mass) in [float, int]
+        old_mass = None
+        if self.old is not None:
+            old_mass = self.old.compute_mass()
+        current_mass = self.new.robot.compute_mass()
+        mass_check = np.absolute(current_mass - expected_mass) <= self.new.test["tolerances"]["mass"]
+        out_msg = f"Mass check: Current: {current_mass}; Expected: {expected_mass}; Previous: {old_mass}"
+        if not mass_check:
+            out_msg += " Difference too big"
+        mass_check2 = True
+        if old_mass:
+            mass_check2 = np.absolute(old_mass - expected_mass) <= self.new.test["tolerances"]["mass"]
+            if not mass_check2:
+                out_msg += " Mass changed with respect to previous version"
+        protocol = append_string(protocol, out_msg, loglevel="info" if mass_check and mass_check2 else "error")
+        return mass_check and mass_check2, protocol
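A worked instance of the two tolerance checks in test_total_mass (all numbers invented; tolerances["mass"] is the absolute tolerance in kg):

    import numpy as np

    expected_mass, current_mass, old_mass, tol = 42.0, 42.4, 41.8, 0.5
    mass_check = np.absolute(current_mass - expected_mass) <= tol   # |0.4| <= 0.5 -> True
    mass_check2 = np.absolute(old_mass - expected_mass) <= tol      # |0.2| <= 0.5 -> True
    assert mass_check and mass_check2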
     def test_compare_link_masses(self):
         success = True
-        log.info("Masses of Links:")
+        protocol = ""
+        protocol = append_string(protocol, "Masses of Links:", loglevel="info")
         new_link_masses = {
             link.name: (link.inertial.mass if link.inertial is not None else 0) for link in self.new.robot.links
         }
@@ -151,7 +180,7 @@ def test_compare_link_masses(self):
                 link.name: (link.inertial.mass if link.inertial is not None else 0) for link in self.old.links
             }
         else:
-            log.info("Old model not present! Skipping test!")
-            return "skipped (no model to compare)"
+            protocol = append_string(protocol, "Old model not present! Skipping test!", loglevel="info")
+            return "skipped (no model to compare)", protocol
         max_length = max(*[len(n) for n in new_link_masses.keys()])
         for k in new_link_masses.keys():
@@ -163,15 +192,15 @@ def test_compare_link_masses(self):
                 if abs(new_link_masses[k] - old_link_masses[k]) > self.new.test["tolerances"]["mass"]:
                     outmsg += " too big!"
                     success = False
-            log.info(outmsg)
-        return success
+            protocol = append_string(protocol, outmsg, loglevel="info" if success else "error")
+        return success, protocol

     def test_symmetry_check_masses(self, left_right_end_effectors):
+        protocol = ""
         for ee in left_right_end_effectors:
             if self.new.robot.get_link_id(ee) is None:
-                log.error(f"Existing links: {[link.name for link in self.new.robot.links]}")
+                protocol = append_string(protocol, f"Existing links: {[link.name for link in self.new.robot.links]}", loglevel="error")
                 raise AssertionError(ee + " Link does not exist in the newly exported model!")
-
         masses = {}
         for link in self.new.robot.links:
             masses[link.name] = link.inertial.mass if hasattr(link, "inertial") and link.inertial is not None else 0
@@ -180,40 +209,41 @@ def test_symmetry_check_masses(self, left_right_end_effectors):
         left = self.new.robot.get_chain(self.new.robot.get_root(), left_right_end_effectors[0],
                                         links=True, joints=False, fixed=True)
         right = self.new.robot.get_chain(self.new.robot.get_root(), left_right_end_effectors[1],
                                          links=True, joints=False, fixed=True)
         assert len(left) == len(right)
-        log.info("Symmetry Check Masses:")
+        protocol = append_string(protocol, "Symmetry Check Masses:", loglevel="info")
         success = True
         max_len = np.array([len(left[i] + "/" + right[i]) for i in range(len(left))]).max(initial=0)
         for i in range(len(left)):
             left_mass = masses[left[i]]
             right_mass = masses[right[i]]
             diff = left_mass - right_mass
-            log.info("{}\t{}\t{}\t{}\t{}".format(
+            protocol = append_string(protocol, "{}\t{}\t{}\t{}\t{}".format(
                 left[i] + "/" + right[i] + " " * (max_len - (len(left[i] + "/" + right[i]))),
                 "{:f}".format(left_mass),
                 "{:f}".format(right_mass),
                 "{:f}".format(diff),
                 "!!!" if diff is None or np.abs(diff) > self.new.test["tolerances"]["mass"] or np.isnan(diff) else ""
-            ))
+            ), loglevel="info")
             success &= not (np.abs(diff) > self.new.test["tolerances"]["mass"] or np.isnan(diff))
-        log.info(f"  Success? {success}")
-        return success
+        protocol = append_string(protocol, f"  Success? {success}", loglevel="info")
+        return success, protocol
{success}", loglevel="info") + return success, protocol def test_compare_link_transformations(self): success = True + protocol = "" log.info("Transformation changes of Links:") root2new_links = {link.name: self.new.robot.get_transformation(link.name) for link in self.new.robot.links} if self.old is not None: root2old_links = {link.name: self.old.get_transformation(link.name) for link in self.old.links} else: - log.info("Old model not present! Skipping test!") - return "skipped (no model to compare)" + protocol = append_string(protocol, "Old model not present! Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol max_length = max(*[len(n) for n in root2new_links.keys()]) for k in root2new_links.keys(): link_name = k + (max_length - len(k)) * " " if k not in root2old_links.keys(): - log.info("%s doesn't exist in compare model" % link_name) + protocol = append_string(protocol, "%s doesn't exist in compare model" % link_name, loglevel="info") _temp_pose = Pose.from_matrix(root2new_links[k], relative_to=self.new.robot.get_root()) - log.info("root2link: xyz: %.5f %.5f %.5f\trpy: %.5f %.5f %.5f" % tuple(_temp_pose.xyz + _temp_pose.rpy)) + protocol = append_string(protocol, "root2link: xyz: %.5f %.5f %.5f\trpy: %.5f %.5f %.5f" % tuple(_temp_pose.xyz + _temp_pose.rpy), loglevel="info") continue diff = np.linalg.inv(root2old_links[k]).dot(root2new_links[k]) diff_o = representation.Pose.from_matrix(diff, relative_to=k) @@ -225,74 +255,83 @@ def test_compare_link_transformations(self): if any(np.abs(diff_o.rpy) > [self.new.test["tolerances"]["rad"]]*3): outmsg += str(np.abs(diff_o.rpy)) + " > " + str([self.new.test["tolerances"]["rad"]]*3) outmsg += " !!!" - log.error(outmsg) + protocol = append_string(protocol, outmsg, loglevel="error") # print("Difference as Transformation Matrix from old to new:") # print(diff) success = False else: - log.info(outmsg) - return success + protocol = append_string(protocol, outmsg, loglevel="info") + return success, protocol def test_topological_self_consistency(self): + protocol = "" if self.old is None: log.info("Old model not present! 
Skipping test!") - return "skipped (no model to compare)" + return "skipped (no model to compare)", protocol # The new model is allowed to have more joints/links than the previous - log.info("New model contains:") - log.info(f"Links {sorted(set([link.name for link in self.new.robot.links]) - set([link.name for link in self.old.links]))}") - log.info(f"Coll {sorted(set([collision.name for link in self.new.robot.links for collision in link.collisions]) - set([collision.name for link in self.old.links for collision in link.collisions]))}") - log.info(f"Vis {sorted(set([visual.name for link in self.new.robot.links for visual in link.visuals]) - set([visual.name for link in self.old.links for visual in link.visuals]))}") - log.info(f"Joints {sorted(set([joint.name for joint in self.new.robot.joints]) - set([joint.name for joint in self.old.joints]))}") + protocol = append_string(protocol, "New model contains:", loglevel="info") + protocol = append_string(protocol, f"Links {sorted(set([link.name for link in self.new.robot.links]) - set([link.name for link in self.old.links]))}", loglevel="info") + protocol = append_string(protocol, f"Coll {sorted(set([collision.name for link in self.new.robot.links for collision in link.collisions]) - set([collision.name for link in self.old.links for collision in link.collisions]))}", loglevel="info") + protocol = append_string(protocol, f"Vis {sorted(set([visual.name for link in self.new.robot.links for visual in link.visuals]) - set([visual.name for link in self.old.links for visual in link.visuals]))}", loglevel="info") + protocol = append_string(protocol, f"Joints {sorted(set([joint.name for joint in self.new.robot.joints]) - set([joint.name for joint in self.old.joints]))}", loglevel="info") changed_joint_types1 = set([joint.name + ":" + joint.joint_type for joint in self.new.robot.joints]) - \ set([joint.name + ":" + joint.joint_type for joint in self.old.joints]) - log.info(f"JTypes {sorted(changed_joint_types1)}") - log.info("but not:") + protocol = append_string(protocol, f"JTypes {sorted(changed_joint_types1)}", loglevel="info") + protocol = append_string(protocol, "but not:", loglevel="info") removed_links = set([link.name for link in self.old.links]) - set([link.name for link in self.new.robot.links]) - log.info(f"Links {sorted(removed_links)}") - log.info(f"Coll {sorted(set([collision.name for link in self.old.links for collision in link.collisions]) - set([collision.name for link in self.new.robot.links for collision in link.collisions]))}") - log.info(f"Vis {sorted(set([visual.name for link in self.old.links for visual in link.visuals]) - set([visual.name for link in self.new.robot.links for visual in link.visuals]))}") + protocol = append_string(protocol, f"Links {sorted(removed_links)}", loglevel="info") + protocol = append_string(protocol, f"Coll {sorted(set([collision.name for link in self.old.links for collision in link.collisions]) - set([collision.name for link in self.new.robot.links for collision in link.collisions]))}", loglevel="info") + protocol = append_string(protocol, f"Vis {sorted(set([visual.name for link in self.old.links for visual in link.visuals]) - set([visual.name for link in self.new.robot.links for visual in link.visuals]))}", loglevel="info") removed_joints = set([joint.name for joint in self.old.joints]) - \ set([joint.name for joint in self.new.robot.joints]) - log.info(f"Joints {sorted(removed_joints)}") + protocol = append_string(protocol, f"Joints {sorted(removed_joints)}", loglevel="info") changed_joint_types2 = 
         changed_joint_types2 = set([joint.name + ":" + joint.joint_type for joint in self.old.joints]) - \
                                set([joint.name + ":" + joint.joint_type for joint in self.new.robot.joints])
-        log.info(f"JTypes {sorted(changed_joint_types2)}")
-        return len(removed_links) + len(removed_joints) + len(changed_joint_types1) + len(changed_joint_types2) == 0
+        protocol = append_string(protocol, f"JTypes {sorted(changed_joint_types2)}", loglevel="info")
+        return len(removed_links) + len(removed_joints) + len(changed_joint_types1) + len(changed_joint_types2) == 0, protocol

     def test_compare_amount_joints(self):
+        protocol = ""
         if self.old is None:
-            log.info("Old model not present! Skipping test!")
-            return "skipped (no model to compare)"
-        log.info(f"New Model has {len(self.new.robot.joints)} and old model has {len(self.old.joints)}")
-        return len(self.new.robot.joints) == len(self.old.joints)
+            protocol = append_string(protocol, "Old model not present! Skipping test!", loglevel="info")
+            return "skipped (no model to compare)", protocol
+        protocol = append_string(protocol, f"New Model has {len(self.new.robot.joints)} and old model has {len(self.old.joints)}", loglevel="info")
+        return len(self.new.robot.movable_joints) == len(self.old.movable_joints), protocol

     def test_load_in_pybullet(self):
+        protocol = ""
         from ..defs import check_pybullet_available
         if not check_pybullet_available():
-            log.warning('Pybullet not present')
-            return True
+            protocol = append_string(protocol, 'Pybullet not present', loglevel="warning")
+            return True, protocol
         try:
             import pybullet as pb
             client = pb.connect(pb.DIRECT)
             pb.loadURDF(os.path.join(self.new.robot.xmlfile), physicsClientId=client)
             pb.disconnect(client)
-            return True
+            return True, protocol
         except Exception as e:
-            log.error(f"Failed: pyBullet check failed!\n {e}")
-            return False
+            protocol = append_string(protocol, f"Failed: pyBullet check failed!\n {e}", loglevel="error")
+            return False, protocol

     def test_file_consistency(self):
+        protocol = ""
         if self.old is None:
-            log.info("Old model not present! Skipping test!")
-            return "skipped (no model to compare)"
+            protocol = append_string(protocol, "Old model not present! Skipping test!", loglevel="info")
+            return "skipped (no model to compare)", protocol
Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol + ignore = ["\.gv", "\.pdf", "\.git*", "README\.md", "manifest\.xml", "meshes/*"] + new_files = list_files(self.new.exportdir, ignore=ignore) + new_files_full = list_files(self.new.exportdir) + old_files = list_files(self.old.directory, ignore=ignore) + old_files_full = list_files(self.old.directory) + protocol = append_string(protocol, "New model contains:", loglevel="info") + protocol = append_string(protocol, dump_json(sorted(list(set(new_files) - set(old_files)))), loglevel="info") + protocol = append_string(protocol, "but not:", loglevel="info") removed_files = set(old_files) - set(new_files) - log.info(dump_json(sorted(list(removed_files)))) - return len(removed_files) == 0 + protocol = append_string(protocol, dump_json(sorted(list(removed_files))), loglevel="info") + if len(removed_files) != 0: + protocol = append_string(protocol, "The removal of following files is critical: " + str(removed_files), loglevel="error") + return len(removed_files) == 0, protocol def test_hyrodyn_load_in_hyrodyn(self): if not HYRODYN_AVAILABLE: @@ -319,80 +358,83 @@ def test_hyrodyn_load_in_hyrodyn(self): return out def test_hyrodyn_compare_masses(self): + protocol = "" if not HYRODYN_AVAILABLE: - log.info('Hyrodyn not present') - return "skipped (no HyRoDyn)" + protocol = append_string(protocol, 'Hyrodyn not present', loglevel="info") + return "skipped (no HyRoDyn)", protocol if self.old_hyrodyn is None: self._load_old_hyrodyn_model() if self.new_hyrodyn is None and self.new_hml_test[2] == 0: self.test_hyrodyn_load_in_hyrodyn() if self.new_hyrodyn is None: - log.info("New HyRoDyn model not present! Skipping test!") - return "skipped (HyRoDyn model not loaded)" + protocol = append_string(protocol, "New HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (HyRoDyn model not loaded)", protocol if self.old_hyrodyn is None and self.old is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol elif self.old_hyrodyn is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no hyrodyn model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no hyrodyn model to compare)", protocol self.old_hyrodyn.calculate_com_properties() self.new_hyrodyn.calculate_com_properties() - log.info("Comparing masses:") - log.info(f" Total mass of old model = {self.old_hyrodyn.mass}") - log.info(f" Total mass of new model = {self.new_hyrodyn.mass}") + protocol = append_string(protocol, "Comparing masses:", loglevel="info") + protocol = append_string(protocol, f" Total mass of old model = {self.old_hyrodyn.mass}", loglevel="info") + protocol = append_string(protocol, f" Total mass of new model = {self.new_hyrodyn.mass}", loglevel="info") value = self.old_hyrodyn.mass - self.new_hyrodyn.mass check = value < self.new.test["tolerances"]["mass"] * self.old_hyrodyn.mass - log.info(" Success? {check} Diff: {value}") - return check + protocol = append_string(protocol, " Success? 
{check} Diff: {value}", loglevel="info") + return check, protocol def test_hyrodyn_compare_com(self): + protocol = "" if not HYRODYN_AVAILABLE: - log.info('Hyrodyn not present') - return "skipped (no HyRoDyn)" + protocol = append_string(protocol, 'Hyrodyn not present', loglevel="info") + return "skipped (no HyRoDyn)", protocol if self.old_hyrodyn is None: self._load_old_hyrodyn_model() if self.new_hyrodyn is None and self.new_hml_test[2] == 0: self.test_hyrodyn_load_in_hyrodyn() if self.new_hyrodyn is None: - log.info("New HyRoDyn model not present! Skipping test!") - return "skipped (HyRoDyn model not loaded)" + protocol = append_string(protocol, "New HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (HyRoDyn model not loaded)", protocol if self.old_hyrodyn is None and self.old is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol elif self.old_hyrodyn is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no hyrodyn model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no hyrodyn model to compare)", protocol - log.info("Compare COM position:") - log.info(f" COM of old robot = {self.old_hyrodyn.com}") - log.info(f" COM of new robot = {self.new_hyrodyn.com}") + protocol = append_string(protocol, "Compare COM position:", loglevel="info") + protocol = append_string(protocol, f" COM of old robot = {self.old_hyrodyn.com}", loglevel="info") + protocol = append_string(protocol, f" COM of new robot = {self.new_hyrodyn.com}", loglevel="info") diff = self.old_hyrodyn.com - self.new_hyrodyn.com value = np.linalg.norm(diff) check = value < (self.new.test["tolerances"]["distance"] and not any([np.isnan(x) for x in diff.reshape((diff.size,))])) - log.info(f" Success? {check} Diff: {value}") - return check + protocol = append_string(protocol, f" Success? {check} Diff: {value}", loglevel="info") + return check, protocol def test_hyrodyn_compare_torques(self): + protocol = "" if not HYRODYN_AVAILABLE: - log.info('Hyrodyn not present') - return "skipped (no HyRoDyn)" + protocol = append_string('Hyrodyn not present', loglevel="info") + return "skipped (no HyRoDyn)", protocol if self.old_hyrodyn is None: self._load_old_hyrodyn_model() if self.new_hyrodyn is None and self.new_hml_test[2] == 0: self.test_hyrodyn_load_in_hyrodyn() if self.new_hyrodyn is None: - log.info("New HyRoDyn model not present! Skipping test!") - return "skipped (HyRoDyn model not loaded)" + protocol = append_string("New HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (HyRoDyn model not loaded)", protocol if self.old_hyrodyn is None and self.old is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no model to compare)" + protocol = append_string("Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol elif self.old_hyrodyn is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no hyrodyn model to compare)" + protocol = append_string("Old HyRoDyn model not present! 
Skipping test!", loglevel="info") + return "skipped (no hyrodyn model to compare)", protocol - log.info("Compare joint torques:") + protocol = append_string("Compare joint torques:", loglevel="info") self.old_hyrodyn.calculate_inverse_dynamics() self.new_hyrodyn.calculate_inverse_dynamics() torques = {} @@ -408,17 +450,17 @@ def test_hyrodyn_compare_torques(self): torques.update({ self.new_hyrodyn.jointnames_active[i]: {"N": self.new_hyrodyn.Tau_actuated[0][i], "O": None} }) - log.info("Name \tOld \tNew \tDiff") + protocol = append_string("Name \tOld \tNew \tDiff", loglevel="info") for k, v in torques.items(): diff = v["O"] - v["N"] if v["O"] is not None and v["N"] is not None else None name = "".join([k[j] if j < len(k) else " " for j in range(30)]) - log.info("{}\t{}\t{}\t{}\t{}".format( + protocol = append_string("{}\t{}\t{}\t{}\t{}".format( name, - "{:f}".format(v["O"]) if v["O"] is not None else " --- ", - "{:f}".format(v["N"]) if v["N"] is not None else " --- ", - "{:f}".format(diff) if v["O"] is not None and v["N"] is not None else " --- ", + "{:f}".format(v["O"]) if v["O"] is not None else " ---- ", + "{:f}".format(v["N"]) if v["N"] is not None else " ---- ", + "{:f}".format(diff) if v["O"] is not None and v["N"] is not None else " ---- ", "!!!" if diff is None or np.abs(diff) > self.new.test["tolerances"]["default"] or np.isnan(diff) else "" - )) + ), loglevel="info") if self.old_hyrodyn.Tau_actuated.shape == self.new_hyrodyn.Tau_actuated.shape: diff = np.abs(self.old_hyrodyn.Tau_actuated - self.new_hyrodyn.Tau_actuated) value = np.amax(diff) @@ -427,63 +469,65 @@ def test_hyrodyn_compare_torques(self): else: value = None check = "skipped (not the same joints)" - log.info(f" Success? {check} Diff: {value}") - return check + protocol = append_string(f" Success? {check} Diff: {value}", loglevel="info") + return check, protocol except Exception as e: - log.error(f" Failed due to error: {e}") - return False + protocol = append_string(f" Failed due to error: {e}", loglevel="error") + return False, protocol def test_hyrodyn_compare_link_positions(self, end_effectors): + protocol = "" if not HYRODYN_AVAILABLE: - log.info('Hyrodyn not present') - return "skipped (no HyRoDyn)" + protocol = append_string(protocol, 'Hyrodyn not present', loglevel="info") + return "skipped (no HyRoDyn)", protocol if self.old_hyrodyn is None: self._load_old_hyrodyn_model() if self.new_hyrodyn is None and self.new_hml_test[2] == 0: self.test_hyrodyn_load_in_hyrodyn() if self.new_hyrodyn is None: - log.info("New HyRoDyn model not present! Skipping test!") - return "skipped (HyRoDyn model not loaded)" + protocol = append_string(protocol, "New HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (HyRoDyn model not loaded)", protocol if self.old_hyrodyn is None and self.old is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (no model to compare)", protocol elif self.old_hyrodyn is None: - log.info("Old HyRoDyn model not present! Skipping test!") - return "skipped (no hyrodyn model to compare)" + protocol = append_string(protocol, "Old HyRoDyn model not present! 
Skipping test!", loglevel="info") + return "skipped (no hyrodyn model to compare)", protocol succ = True - log.info("Compare EE positions:") + protocol = append_string(protocol, "Compare EE positions:", loglevel="info") for ee in end_effectors: if ee not in [link.name for link in self.old.links]: - log.error(f"{ee} not found in compare model. Skipping comparison") + protocol = append_string(protocol, f"{ee} not found in compare model. Skipping comparison", loglevel="error") continue self.old_hyrodyn.calculate_forward_kinematics(ee) old_pose = deepcopy(self.old_hyrodyn.pose) self.new_hyrodyn.calculate_forward_kinematics(ee) new_pose = deepcopy(self.new_hyrodyn.pose) - log.info(f"ee pose of old model {old_pose}") - log.info(f"ee pose of new model {new_pose}") + protocol = append_string(protocol, f"ee pose of old model {old_pose}", loglevel="info") + protocol = append_string(protocol, f"ee pose of new model {new_pose}", loglevel="info") value = np.linalg.norm(old_pose[0, 0:3] - new_pose[0, 0:3]) check = value < self.new.test["tolerances"]["distance"] - log.info(f" Success? {check} Diff: {value}") + protocol = append_string(protocol, f" Success? {check} Diff: {value}", loglevel="info") succ &= check - return succ + return succ, protocol def test_hyrodyn_symmetry_check(self, left_right_end_effectors): + protocol = "" if not HYRODYN_AVAILABLE: - log.info('Hyrodyn not present') - return True + protocol = append_string('Hyrodyn not present', loglevel="info") + return True, protocol if self.new_hyrodyn is None and self.new_hml_test[2] == 0: self.test_hyrodyn_load_in_hyrodyn() if self.new_hyrodyn is None: - log.info("New HyRoDyn model not present! Skipping test!") - return "skipped (HyRoDyn model not loaded)" + protocol = append_string("New HyRoDyn model not present! Skipping test!", loglevel="info") + return "skipped (HyRoDyn model not loaded)", protocol for ee in left_right_end_effectors: if self.new.robot.get_link_id(ee) is None: - log.info(f"Existing links: {[link.name for link in self.new.robot.links]}") + protocol = append_string(f"Existing links: {[link.name for link in self.new.robot.links]}", loglevel="info") raise AssertionError(ee + " Link does not exist in the newly exported model!") self.new_hyrodyn.calculate_forward_kinematics(left_right_end_effectors[0]) @@ -491,8 +535,8 @@ def test_hyrodyn_symmetry_check(self, left_right_end_effectors): self.new_hyrodyn.calculate_forward_kinematics(left_right_end_effectors[1]) right_pose = deepcopy(self.new_hyrodyn.pose) - log.info("Symmetry Check:") - log.info(f" Right EE of new model {right_pose[0, 0:3]}") + protocol = append_string("Symmetry Check:", loglevel="info") + protocol = append_string(f" Right EE of new model {right_pose[0, 0:3]}", loglevel="info") mirrored_left_pose = np.array([left_pose[0, 0], -left_pose[0, 1], left_pose[0, 2], @@ -500,26 +544,27 @@ def test_hyrodyn_symmetry_check(self, left_right_end_effectors): -left_pose[0, 4], left_pose[0, 5], left_pose[0, 6]]).reshape(1, 7) - log.info(f" (-y) Left EE of new model {mirrored_left_pose[0, 0:3]}") + protocol = append_string(f" (-y) Left EE of new model {mirrored_left_pose[0, 0:3]}", loglevel="info") value = np.linalg.norm(right_pose[0, 0:3] - mirrored_left_pose[0, 0:3]) check = value < self.new.test["tolerances"]["distance"] - log.info(f" Success? {check} Diff: {value}") - log.info("!!! Check ignores orientation !!!") - return check + protocol = append_string(f" Success? {check} Diff: {value}", loglevel="info") + protocol = append_string("!!! 
     def test_hyrodyn_symmetry_check_torques(self, left_right_end_effectors):
+        protocol = ""
         if not HYRODYN_AVAILABLE:
-            log.info('Hyrodyn not present')
-            return True
+            protocol = append_string(protocol, 'Hyrodyn not present', loglevel="info")
+            return True, protocol
         if self.new_hyrodyn is None and self.new_hml_test[2] == 0:
             self.test_hyrodyn_load_in_hyrodyn()
         if self.new_hyrodyn is None:
-            log.info("New HyRoDyn model not present! Skipping test!")
-            return "skipped (HyRoDyn model not loaded)"
+            protocol = append_string(protocol, "New HyRoDyn model not present! Skipping test!", loglevel="info")
+            return "skipped (HyRoDyn model not loaded)", protocol
         for ee in left_right_end_effectors:
             if self.new.robot.get_link_id(ee) is None:
-                log.error(f"Existing links: {[link.name for link in self.new.robot.links]}")
+                protocol = append_string(protocol, f"Existing links: {[link.name for link in self.new.robot.links]}", loglevel="error")
                 raise AssertionError(ee + " Link does not exist in the newly exported model!")

         self.new_hyrodyn.calculate_inverse_dynamics()
@@ -532,24 +577,24 @@ def test_hyrodyn_symmetry_check_torques(self, left_right_end_effectors):
         right = self.new.robot.get_chain(self.new.robot.get_root(), left_right_end_effectors[1],
                                          links=False, joints=True, fixed=False)
         assert len(left) == len(right)
-        log.info("Symmetry Check Torques:")
-        log.info(f"Calculated inverse dynamics for pose: {self.new_hyrodyn.y}")
+        protocol = append_string(protocol, "Symmetry Check Torques:", loglevel="info")
+        protocol = append_string(protocol, f"Calculated inverse dynamics for pose: {self.new_hyrodyn.y}", loglevel="info")
         success = True
         max_len = np.array([len(left[i] + "/" + right[i]) for i in range(len(left))]).max(initial=0)
         for i in range(len(left)):
             left_torque = torques[left[i]]
             right_torque = torques[right[i]]
             diff = left_torque - right_torque
-            log.info("{}\t{}\t{}\t{}\t{}".format(
+            protocol = append_string(protocol, "{}\t{}\t{}\t{}\t{}".format(
                 left[i] + "/" + right[i] + " " * (max_len - (len(left[i] + "/" + right[i]))),
                 "{:f}".format(left_torque),
                 "{:f}".format(right_torque),
                 "{:f}".format(diff),
                 "!!!" if diff is None or np.abs(diff) > self.new.test["tolerances"]["default"] or np.isnan(diff) else ""
-            ))
+            ), loglevel="info")
             success &= not (np.abs(diff) > self.new.test["tolerances"]["default"] or np.isnan(diff))
{success}", loglevel="info") + return success, protocol def move_hyrodyn_model(self, new_joint_angles): if not HYRODYN_AVAILABLE: diff --git a/phobos/ci/pipeline.py b/phobos/ci/pipeline.py index 81823f72..bf0674d1 100644 --- a/phobos/ci/pipeline.py +++ b/phobos/ci/pipeline.py @@ -25,7 +25,7 @@ NA_DEPL = int('10000000', 2) -class Pipeline(yaml.YAMLObject): +class Pipeline(yaml.YAMLObject): def __init__(self, configfile, model_file=None, processed_model_exists=False, subclass=False): self.processing_failed = {} self.test_results = {} @@ -48,6 +48,8 @@ def __init__(self, configfile, model_file=None, processed_model_exists=False, su self.temp_dir = os.path.join(self.root, "temp") self.faillog = os.path.join(self.temp_dir, "failures.txt") self.test_protocol = os.path.join(self.temp_dir, "test_protocol.txt") + if not hasattr(self, "central_meshes"): + self.central_meshes = True if not subclass: assert hasattr(self, "model_definitions") and len(self.model_definitions) > 0 @@ -63,10 +65,10 @@ def __init__(self, configfile, model_file=None, processed_model_exists=False, su if os.path.isfile(self.faillog) and processed_model_exists: with open(self.faillog, "r") as f: self.processing_failed = load_json(f.read()) - + log.debug(f"Models to process: {self.model_definitions}") log.debug(f"Finished reading config {configfile}") - + self.models = [] input_meshes = [] order = 0 @@ -82,40 +84,38 @@ def __init__(self, configfile, model_file=None, processed_model_exists=False, su raise AssertionError(f"File {_file} does not exist!") cfg = load_json(open(_file, "r").read()) cfg_ = cfg[list(cfg.keys())[0]] - if "input_models" not in cfg_.keys(): - raise AssertionError(f"{_file} lacks input_models key!") except Exception as e: self.processing_failed[md[:-4]]["load"] = ''.join(traceback.format_exception(None, e, e.__traceback__)) log.error("Could not load model config " + md) traceback.print_exc() continue - fstate = self._get_model_failure_state(md[:-4]) - self.processing_failed[md[:-4]]["load"] = "" - if bool(fstate & (F_PROC | NA_PROC)) and processed_model_exists: - log.warning("Skipping model " + md + " as it was not successfully processed") - self.processing_failed[md[:-4]]["load"] += \ - "Skipping model " + md + " as it was not successfully processed!\n" - continue - elif bool(fstate & (F_PROC | NA_PROC)): - # reset processing error - self.processing_failed[md[:-4]]["process"] = "N/A" - if any(["derived_base" in v.keys() and self._get_model_failure_state( - v["derived_base"][:-4]) & (F_LOAD | F_PROC) for _, v in cfg_["input_models"].items()]): - log.warning("Skipping model " + md + " as at least one parent model was not successfully processed!") - self.processing_failed[md[:-4]]["load"] += \ - "Skipping model " + md + " as at least one parent model was not successfully processed!\n" - continue try: if list(cfg.keys())[0] == "model": - self.models += [BaseModel(os.path.join(self.configdir, md), self, self.processed_model_exists)] + self.models += [BaseModel(os.path.join(self.configdir, md), self, self.processed_model_exists, configkey=md[:-4])] else: self.processing_failed[md[:-4]]["load"] = f"Skipping {md} as it is no valid model definition!\n" log.error(self.processing_failed[md[:-4]]["load"]) continue + fstate = self._get_model_failure_state(md[:-4]) + self.processing_failed[md[:-4]]["load"] = "" + if bool(fstate & (F_PROC | NA_PROC)) and processed_model_exists: + log.warning("Skipping model " + md + " as it was not successfully processed") + self.processing_failed[md[:-4]]["load"] += \ + "Skipping model " 
+ md + " as it was not successfully processed!\n" + continue + elif bool(fstate & (F_PROC | NA_PROC)): + # reset processing error + self.processing_failed[md[:-4]]["process"] = "N/A" + if any(["derived_base" in v.keys() and self._get_model_failure_state( + v["derived_base"][:-4]) & (F_LOAD | F_PROC) for _, v in self.models[-1].input_models.items()]): + log.warning("Skipping model " + md + " as at least one parent model was not successfully processed!") + self.processing_failed[md[:-4]]["load"] += \ + "Skipping model " + md + " as at least one parent model was not successfully processed!\n" + continue for m in self.models[-1].get_input_meshes(): for m2 in input_meshes: if os.path.basename(m) == os.path.basename(m2) and os.path.dirname(m) != os.path.dirname(m): - raise Exception( + raise RuntimeError( "You can not import two different meshes with the same name!\n" "In model {} you tried to import:\n {} but\n {} already exists!".format(md, m, m2) ) @@ -123,7 +123,7 @@ def __init__(self, configfile, model_file=None, processed_model_exists=False, su self.processing_failed[md[:-4]]["load"] = "Good" except Exception as e: self.processing_failed[md[:-4]]["load"] += "Loading " + md + " failed with the following error:" +\ - ''.join(traceback.format_exception(None, e, e.__traceback__)) + ''.join(traceback.format_exception(None, e, e.__traceback__)) log.error(self.processing_failed[md[:-4]]["load"]) continue if os.path.exists(os.path.join(self.temp_dir)): @@ -193,7 +193,7 @@ def get_coverage(self, phases=None, allow_na=False): def print_fail_log(self, file=sys.stdout): log = load_json(open(self.faillog, "r").read()) - print("\nPipeline Report:\n---------------------", file=file) + print("\nPipeline Report:\n--------------------", file=file) report = [(k, v) for k, v in log.items()] order_val = ["order", "load", "process", "test", "deploy"] if len(report) == 0: @@ -212,6 +212,11 @@ def print_fail_log(self, file=sys.stdout): for link in v[1].split("\n"): print(" " + link, file=file) + def get_model(self, modelname_or_configkey): + for model in self.models: + if model.modelname == modelname_or_configkey or model.configkey == modelname_or_configkey: + return model + def process_models(self): # delete the temp_dir if there is already one misc.recreate_dir(self, self.temp_dir) @@ -219,44 +224,48 @@ def process_models(self): f.write(dump_json(self.processing_failed, default_flow_style=False)) # create the central mesh folders - export_meshes = {} - for model in self.models: - export_meshes = misc.merge_default(model.export_meshes, export_meshes) - for mt, mp in export_meshes.items(): - misc.create_dir(self, os.path.join(self.temp_dir, str(mp))) - ext = mt - misc.copy(self, os.path.join(self.root, str(mp), "*."+ext), os.path.join(self.temp_dir, str(mp))) + if self.central_meshes: + export_meshes = {} + for model in self.models: + export_meshes = misc.merge_default(model.export_meshes, export_meshes) + for mt, mp in export_meshes.items(): + misc.create_dir(self, os.path.join(self.temp_dir, str(mp))) + ext = mt + misc.copy(self, os.path.join(self.root, str(mp), "*."+ext), os.path.join(self.temp_dir, str(mp))) + processed_meshes = set() for model in self.models: log.info(f"\nProcessing {model.modelname} model...") - if self._get_model_failure_state(model.modelname) & (F_LOAD | NA_LOAD): - self.processing_failed[model.modelname]["process"] = \ + if self._get_model_failure_state(model.configkey) & (F_LOAD | NA_LOAD): + self.processing_failed[model.configkey]["process"] = \ "Skipping ", model.modelname, " as it 
model definition file wasn't loaded successfully!" - log.info(self.processing_failed[model.modelname]["process"]) + log.info(self.processing_failed[model.configkey]["process"]) continue try: model.process() model.export() processed_meshes = processed_meshes.union(model.processed_meshes) - self.processing_failed[model.modelname]["process"] = "Good" + self.processing_failed[model.configkey]["process"] = "Good" except Exception as e: log.error(f"\nFailed processing {model.modelname} model with the following error and skipped to next:\n {e}") - self.processing_failed[model.modelname]["process"] = ''.join( + self.processing_failed[model.configkey]["process"] = ''.join( traceback.format_exception(None, e, e.__traceback__)) traceback.print_exc() - # Remove all mesh files we have initially copied but that haven't been processed - existing_meshes = [] - for mt, mp in export_meshes.items(): - # misc.create_dir(self, os.path.join(self.temp_dir, str(mp))) - existing_meshes += misc.list_files(os.path.join(self.temp_dir, str(mp)), - ignore=["\.gv", "\.pdf", "\.git*", "\_history.log", "README\.md", "manifest\.xml"], - resolve_symlinks=True, abs_path=True) - existing_meshes = set(existing_meshes) - processed_meshes = set(processed_meshes) - for unused_mesh in existing_meshes - processed_meshes: - log.debug("Removing unused mesh: "+unused_mesh) - assert os.path.isfile(unused_mesh) - os.remove(unused_mesh) + if self.central_meshes: + # Remove all mesh files we have initially copied but that haven't been processed + existing_meshes = [] + for mt, mp in export_meshes.items(): + # misc.create_dir(self, os.path.join(self.temp_dir, str(mp))) + existing_meshes += misc.list_files(os.path.join(self.temp_dir, str(mp)), + ignore=["\.gv", "\.pdf", "\.git*", "*\_history.log", "README\.md", "manifest\.xml"], + resolve_symlinks=True, abs_path=True) + existing_meshes = set(existing_meshes) + processed_meshes = set(processed_meshes) + for unused_mesh in existing_meshes - processed_meshes: + log.debug("Removing unused mesh: "+unused_mesh) + assert os.path.isfile(unused_mesh) + os.remove(unused_mesh) + with open(self.faillog, "w") as f: f.write(dump_json(self.processing_failed, default_flow_style=False)) @@ -266,7 +275,8 @@ def test_models(self): self.processing_failed = load_json(f.read()) failures_ignored_for = [] for model in self.models: - fstate = self._get_model_failure_state(model.modelname) + # [ToDo] make this more beautiful by moving the model test to base_model + fstate = self._get_model_failure_state(model.configkey) if bool(fstate & (F_LOAD | F_PROC | NA_LOAD | NA_PROC)): log.warning(f"\nSkipping {model.modelname} model as it wasn't successfully created." 
f"(Code: " + bin(fstate) + ")") @@ -286,25 +296,28 @@ def test_models(self): pipeline=self, repo=model.test["compare_model"]["git"], target=compare_model_path, - branch=model.test["compare_model"]["branch"], + branch=model.test["compare_model"].get("branch", git.GIT_PRODUCTION_BRANCH), recursive=True, - ignore_failure=False + ignore_failure=False, + recreate=True ) if "ignore_failing_tests_for" not in model.test["compare_model"].keys(): model.test["compare_model"]["ignore_failing_tests_for"] = "None" - if os.path.exists(os.path.join(compare_model_path, model.test["compare_model"]["model_in_repo"])): + + model_in_repo = os.path.join(compare_model_path, model.test['compare_model']['model_in_repo']) if "model_in_repo" in model.test["compare_model"] else compare_model_path + if os.path.exists(model_in_repo): commit_hash, _ = misc.execute_shell_command("git rev-parse HEAD", compare_model_path) if commit_hash.startswith(str(model.test["compare_model"]["ignore_failing_tests_for"])): failures_ignored_for += [model.modelname] log.debug( f"Loading compare model: " - f"{self.relpath(os.path.join(compare_model_path, model.test['compare_model']['model_in_repo']))}" + f"{self.relpath(model_in_repo)}" ) try: compare_model = CompareModel( name=model.robotname, directory=compare_model_path, - robotfile=os.path.join(compare_model_path, model.test["compare_model"]["model_in_repo"]), + robotfile=model_in_repo, submechanisms_file=os.path.join( compare_model_path, model.test["compare_model"]["submechanisms_in_repo"] @@ -319,26 +332,39 @@ def test_models(self): log.warning("Compare model not found!") model_test = ModelTest(model, compare_model) - log.info("\nRunning info procedures:") + model.extended_test_protocol = "" + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, "\nRunning info procedures:", loglevel="info") for p in dir(model_test): if p.startswith("info_"): - getattr(model_test, p)() - log.info("\nRunning test procedures:") + _protocol = getattr(model_test, p)() + if _protocol is not None: + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"-> {p}", loglevel="info") + model.extended_test_protocol += _protocol + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, "\nRunning test procedures:", loglevel="info") + compare_model_info = model.test["compare_model"] + if compare_model: + compare_model_info.update({"loaded_smurf_file": compare_model.smurffile, "loaded_xml_file": compare_model.xmlfile}) self.test_results[model.modelname] = { "ignore_failure": commit_hash.startswith( str(model.test["compare_model"]["ignore_failing_tests_for"])) if compare_model is not None else False, - "compare_model_present": model_test.old is not None, - "compare_model": model.test["compare_model"] + "compare_model_present": compare_model is not None, + "compare_model": compare_model_info } - def add_test_result(model_name, test_name, value): + def add_test_result(model_name, test_name, test_result): + if type(test_result) == tuple: + value = test_result[0] + protocol = test_result[1] + else: + value = test_result + protocol = "" i = 2 if test_name in self.test_results[model.modelname].keys(): while test_name + " " + str(i) in self.test_results[model.modelname].keys(): i += 1 test_name += " " + str(i) self.test_results[model_name][test_name] = value - return value + return value, protocol # these tests will be run always obligatory_tests = ["topological_self_consistency", "file_consistency"] @@ -347,45 +373,56 @@ def 
add_test_result(model_name, test_name, value): model.test["tests"] += [otest] # let's go testing for test in model.test["tests"]: + model.extended_test_protocol += "\n\n" if type(test) is str: - print(" -> ", test) - if not add_test_result(model.modelname, test, getattr(model_test, "test_" + test)()): - print("Test", test, "failed for", model.modelname) + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"-> {test}", loglevel="info") + _result, _protocol = add_test_result(model.modelname, test, getattr(model_test, "test_" + test)()) + model.extended_test_protocol += _protocol + if not _result: + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"Test {test} failed for {model.modelname}", loglevel="error") elif type(test) is dict and list(test.keys())[0] == "hyrodynChecks": if not HYRODYN_AVAILABLE: - print("Hyrodyn checks not possible, as Hyrodyn couldn't be loaded") + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, "Hyrodyn checks not possible, as Hyrodyn couldn't be loaded", loglevel="warning") for htest in test["hyrodynChecks"]: if type(htest) is str: - print(" -> ", htest) - if not add_test_result(model.modelname, htest, - getattr(model_test, "test_hyrodyn_" + htest)()): - print("Hyrodyn-Test", htest, "failed for", model.modelname) + log.info(f"-> {htest}") + _result, _protocol = add_test_result(model.modelname, htest, getattr(model_test, "test_hyrodyn_" + htest)()) + model.extended_test_protocol += _protocol + if not _result: + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"Hyrodyn-Test {htest} failed for {model.modelname}", loglevel="error") elif type(htest) is dict and "move_hyrodyn_model" in htest.keys(): k, v = list(htest.items())[0] getattr(model_test, k)(v) - print(" -> ", k) + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"-> {k}", loglevel="info") elif type(htest) is dict: k, v = list(htest.items())[0] - print(" -> ", k) - if not add_test_result(model.modelname, k, getattr(model_test, "test_hyrodyn_" + k)(v)): - print("Hyrodyn-Test", k, "failed for", model.modelname) + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"-> {k}", loglevel="info") + _result, _protocol = add_test_result(model.modelname, k, getattr(model_test, "test_hyrodyn_" + k)(v)) + model.extended_test_protocol += _protocol + if not _result: + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"Hyrodyn-Test {k} failed for {model.modelname}", loglevel="error") else: - log.error(f"Couldn't process test definition {htest}") + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"Couldn't process test definition {htest}", loglevel="error") elif type(test) is dict: k, v = list(test.items())[0] - log.info(f" -> {test}") - if not add_test_result(model.modelname, k, getattr(model_test, "test_" + k)(v)): - log.error(f"Hyrodyn-Test {test} failed for {model.modelname}") - self.processing_failed[model.modelname]["test"] = "Good" + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"-> {test}", loglevel="info") + _result, _protocol = add_test_result(model.modelname, k, getattr(model_test, "test_" + k)(v)) + model.extended_test_protocol += _protocol + if not _result: + model.extended_test_protocol = misc.append_string(model.extended_test_protocol, f"Hyrodyn-Test {test} failed for {model.modelname}", loglevel="error") + 
self.processing_failed[model.configkey]["test"] = "Good" except Exception as e: log.error(f"\nFailed testing {model.modelname} model with the following error and skipped to next:\n {e}") - self.processing_failed[model.modelname]["test"] = ''.join( + self.processing_failed[model.configkey]["test"] = ''.join( traceback.format_exception(None, e, e.__traceback__)) + "\n" traceback.print_exc() + with open(model.extended_test_protocol_path, "w") as f: + f.write(model.extended_test_protocol) test_protocol = {"all": ""} test_protocol["all"] = misc.append_string(test_protocol["all"], "----------\nTest protocol:", print=True) success = True for modelname, test in self.test_results.items(): + model = self.get_model(modelname) test_protocol[modelname] = "" test_protocol[modelname] = misc.append_string(test_protocol[modelname], " " + modelname, end="", print=True) @@ -403,21 +440,20 @@ def add_test_result(model_name, test_name, value): continue else: sign = "+" - if type(result) is str: + if type(result) is str and (not test["compare_model_present"] or "no model to compare" not in result): sign = "o" elif not result: sign = "-" test_protocol[modelname] = misc.append_string(test_protocol[modelname], " ", sign, testname + ":", - str(result) if type(result) is bool else result, + str(result), print=True) - if not result and not test["ignore_failure"]: + if sign == "-" and not test["ignore_failure"]: success = False - if self.processing_failed[modelname]["test"].upper().startswith("GOOD") or \ - self.processing_failed[modelname]["test"].upper().startswith("N/A"): - self.processing_failed[modelname]["test"] = "" - self.processing_failed[modelname][ - "test"] += "Test " + testname + " failed (and was not ignored)!\n" + if self.processing_failed[model.configkey]["test"].upper().startswith("GOOD") or \ + self.processing_failed[model.configkey]["test"].upper().startswith("N/A"): + self.processing_failed[model.configkey]["test"] = "" + self.processing_failed[model.configkey]["test"] += "Test " + testname + " failed (and was not ignored)!\n" # print(" !!! Causes Failure !!!") # else: # print(flush=True) @@ -434,57 +470,70 @@ def deploy_models(self): self.processing_failed = load_json(f.read()) mesh_repos = {} # copying the files to the repos - uses_lfs = False + uses_lfs = getattr(self, "uses_lfs", False) if "BADGE_DIRECTORY" in os.environ.keys(): # reset badges for model in self.models: git.create_pipeline_badge(self, model.modelname, "not processed", "inactive", target="${BADGE_DIRECTORY}") + if self.central_meshes: + for name, path in self.meshes.items(): + git.create_pipeline_badge(self, os.path.basename(path), "not processed", "inactive", + target="${BADGE_DIRECTORY}", filename=name + ".svg") + if self.central_meshes: for name, path in self.meshes.items(): - git.create_pipeline_badge(self, os.path.basename(path), "not processed", "inactive", - target="${BADGE_DIRECTORY}", filename=name + ".svg") - for name, path in self.meshes.items(): - log.info(f"\nDeploying meshes... 
{path}") - repo = os.path.join(self.root, path) - git.update(repo, update_target_branch="$CI_MESH_UPDATE_TARGET_BRANCH") - if os.path.basename(path).lower().startswith("lfs"): - uses_lfs = True - git.install_lfs(repo, - track=["*.obj", "*.stl", "*.dae", "*.bobj", "*.iv", "*.mtl", "*.OBJ", "*.STL", "*.DAE", - "*.BOBJ", "*.IV", "*.MTL"]) - git.clear_repo(repo) - misc.copy(self, os.path.join(self.temp_dir, path) + "/*", os.path.join(self.root, path)) - commit_hash = git.commit(repo, origin_repo=os.path.abspath(self.configdir)) - if "BADGE_DIRECTORY" in os.environ.keys(): - git.create_pipeline_badge(self, os.path.basename(path).lower(), commit_hash[:6], 'informational', - target="${BADGE_DIRECTORY}", - filename=name + ".svg") - mesh_repos.update({ - name: { - "repo": repo, - "commit": commit_hash - } - }) - git.add_remote(repo, self.remote_base + "/" + path) - git.push(repo, branch="$CI_MESH_UPDATE_TARGET_BRANCH") + log.info(f"\nDeploying meshes... {path}") + repo = os.path.join(self.root, path) + git.update(repo, update_target_branch="$CI_MESH_UPDATE_TARGET_BRANCH") + if os.path.basename(path).lower().startswith("lfs") or uses_lfs: + track = ["*.obj", "*.stl", "*.dae", "*.bobj", "*.iv", "*.mtl", "*.OBJ", "*.STL", "*.DAE", "*.BOBJ", "*.IV", "*.MTL"] + for file in getattr(self, "add_lfs_track", []): + if not file.startswith("*"): + if not file.startswith("."): + file = "."+file + file = "*"+file + track.append(file) + track.append(file.upper()) + track.append(file.lower()) + track = list(set(track)) + git.install_lfs(repo, track=track) + git.clear_repo(repo) + misc.copy(self, os.path.join(self.temp_dir, path) + "/*", os.path.join(self.root, path)) + commit_hash = git.commit(repo, origin_repo=self.configdir) + if "BADGE_DIRECTORY" in os.environ.keys(): + git.create_pipeline_badge(self, os.path.basename(path).lower(), commit_hash[:6], 'informational', + target="${BADGE_DIRECTORY}", + filename=name + ".svg") + mesh_repos.update({ + name: { + "repo": repo, + "commit": commit_hash + } + }) + git.add_remote(repo, self.remote_base + "/" + path) + git.push(repo, branch="$CI_MESH_UPDATE_TARGET_BRANCH") + for model in self.models: - fstate = self._get_model_failure_state(model.modelname) + fstate = self._get_model_failure_state(model.configkey) if bool(fstate & (F_LOAD | F_PROC | NA_LOAD | NA_PROC)): log.warning(f"\nSkipping {model.modelname} model as it wasn't successfully created." 
f"(Code: " + bin(fstate) + ")") continue log.info(f"\nDeploying {model.modelname} model...") try: - dpl_msg = model.deploy(mesh_repos, uses_lfs=uses_lfs, - failed_model=bool(fstate & F_TEST) or bool(fstate & NA_TEST)) - self.processing_failed[model.modelname]["deploy"] = "Good" + " (" + dpl_msg + ")" + if self.central_meshes: + dpl_msg = model.deploy(mesh_repos, uses_lfs=uses_lfs, + failed_model=bool(fstate & F_TEST) or bool(fstate & NA_TEST)) + else: + dpl_msg = model.deploy(None, failed_model=bool(fstate & F_TEST) or bool(fstate & NA_TEST), uses_lfs=uses_lfs) + self.processing_failed[model.configkey]["deploy"] = "Good" + " (" + dpl_msg + ")" except Exception as e: - log.error("\nFailed deploying {model.modelname} model with the following error and skipped to next:\n {e}") - self.processing_failed[model.modelname]["deploy"] = ''.join( + log.error(f"\nFailed deploying {model.modelname} model with the following error and skipped to next:\n {e}") + self.processing_failed[model.configkey]["deploy"] = ''.join( traceback.format_exception(None, e, e.__traceback__)) traceback.print_exc() if "BADGE_DIRECTORY" in os.environ.keys(): - state = self._get_model_failure_state(model.modelname) + state = self._get_model_failure_state(model.configkey) if state & F_LOAD: color = "inactive" text = "not processed" @@ -493,10 +542,10 @@ def deploy_models(self): text = "failed" elif state & F_TEST and not state & (F_DEPL | NA_DEPL): color = "yellow" - text = "${CI_UPDATE_TARGET_BRANCH}" + text = git.GIT_FAILURE_BRANCH else: color = "green" - text = "master" + text = git.GIT_SUCCESS_BRANCH git.create_pipeline_badge(self, model.modelname, text, color, target="${BADGE_DIRECTORY}") with open(self.faillog, "w") as f: f.write(dump_json(self.processing_failed, default_flow_style=False)) @@ -533,7 +582,7 @@ def __init__(self, root, configfile, place_temp_in=None): print("Finished reading config", configfile) - self.processing_failed = {self.models[0].modelname: { + self.processing_failed = {self.models[0].configkey: { "load": "GOOD" if not load_failed else "ERROR:\n"+str(load_failed), "test": "N/A", "order": 0} } @@ -578,7 +627,7 @@ def get_coverage(self, phases=None): # shallow=2 # ) # if self.git_rev == git.revision(compare_model_path): - # git.checkout(git.get_previous_commit_hash(compare_model_path), compare_model_path) + # git.checkout(compare_model_path, commit_id=git.get_previous_commit_hash(compare_model_path)) # log.info(f"Comparing with compare model at commit {git.revision(compare_model_path)}") # if "ignore_failing_tests_for" not in self.compare_model.keys(): # self.compare_model["ignore_failing_tests_for"] = "None" @@ -717,18 +766,19 @@ def get_coverage(self, phases=None): # return success def _get_model_failure_state(self, modelname): + model = self.get_model(modelname) state = 0 if modelname not in self.processing_failed: return state - if not self.processing_failed[modelname]["load"].upper().startswith("GOOD") and not \ - self.processing_failed[modelname]["load"].upper().startswith("N/A"): + if not self.processing_failed[model.configkey]["load"].upper().startswith("GOOD") and not \ + self.processing_failed[model.configkey]["load"].upper().startswith("N/A"): state += F_LOAD - if not self.processing_failed[modelname]["test"].upper().startswith("GOOD") and not \ - self.processing_failed[modelname]["test"].upper().startswith("N/A"): + if not self.processing_failed[model.configkey]["test"].upper().startswith("GOOD") and not \ + self.processing_failed[model.configkey]["test"].upper().startswith("N/A"): state 
+= F_TEST - if self.processing_failed[modelname]["load"].upper().startswith("N/A"): + if self.processing_failed[model.configkey]["load"].upper().startswith("N/A"): state += NA_LOAD - if self.processing_failed[modelname]["test"].upper().startswith("N/A"): + if self.processing_failed[model.configkey]["test"].upper().startswith("N/A"): state += NA_TEST return state diff --git a/phobos/core/multiple.py b/phobos/core/multiple.py index c19a69aa..5233bc09 100644 --- a/phobos/core/multiple.py +++ b/phobos/core/multiple.py @@ -7,6 +7,9 @@ from ..io.smurf_reflection import SmurfBase from ..io.xml_factory import plural as _plural, singular as _singular +from ..commandline_logging import get_logger +log = get_logger(__name__) + __IMPORTS__ = [x for x in dir() if not x.startswith("__")] @@ -37,7 +40,7 @@ def __init__(self, name=None, world=None, model=None, file=None, origin=None, fr for frame in _plural(frames): self._frames.append(frame) self.anchor = anchor - self.parent = parent.upper() + self.parent = parent self.child = child SmurfBase.__init__(self, name=name, returns=["name", "type", "parent", "position", "rotation", "anchor", "root", "file", "child"], **kwargs) self.excludes += ["origin", "model"] @@ -105,7 +108,7 @@ def position(self): def position(self, val): if self.origin is None: self.origin = representation.Pose() - self.origin.xyz = [val["x"], val["y"], val["z"]] + self.origin.position = val @property def rotation(self): @@ -199,6 +202,7 @@ def add_robot(self, name, robot, origin=None, anchor=None): def add_entity(self, entity): assert isinstance(entity, Entity) assert self.get_aggregate("entities", str(entity)) is None, f"There is already an entity with the name {str(entity)}" + log.debug("Adding entity "+ entity.name) self.entities.append(entity) def get_aggregate(self, typeName, elem): @@ -296,13 +300,13 @@ def assemble(self, root_entity=None): continue if entity._anchor == "PARENT": assert "::" in entity.parent, "Please specify the parent in the way entity::link. Received: "+entity.parent - parent_entity, parent_link = entity.parent.split("::", 1) + parent_entity, parent_link_name = entity.parent.split("::", 1) else: assert "::" in entity._anchor, "Please specify the anchor in the way entity::link or use the parent keyword. 
Received: "+entity._anchor - parent_entity, parent_link = entity._anchor.split("::", 1) + parent_entity, parent_link_name = entity._anchor.split("::", 1) if parent_entity in [str(e) for e in entities_in_tree]: - parent_link = assembly.get_link(parent_entity+"_"+parent_link) - assert parent_link is not None + parent_link = assembly.get_link(parent_entity+"_"+parent_link_name, verbose=True) + assert parent_link is not None, f"parent link {parent_entity}_{parent_link_name} not found "+str(entity) if isinstance(root_entity.model, Robot): attach_model = entity.model.duplicate() assembly.unlink_from_world() @@ -317,13 +321,13 @@ def assemble(self, root_entity=None): # make sure that we have a consistent downward tree attach_model.exchange_root(child_link) attach_model.unlink_from_world() - attach_model.rename_all(prefix=entity.name + "_") + attach_model.rename_all(prefix=entity.name + "_", do_not_double=False) origin = entity.origin.duplicate() origin.relative_to = str(entity.origin.relative_to).replace("::", "_", 1) assembly.attach( other=attach_model, joint=representation.Joint( - name=str(parent_entity)+"2"+str(entity), + name=str(parent_entity)+"_2_"+str(entity), parent=parent_link, child=child_link, type="fixed", @@ -334,6 +338,7 @@ def assemble(self, root_entity=None): attached += 1 assembly.link_entities() + assert assembly.verify_meshes() return assembly def export_sdf(self, use_includes=True): diff --git a/phobos/core/robot.py b/phobos/core/robot.py index 6852c236..4a23d0a8 100755 --- a/phobos/core/robot.py +++ b/phobos/core/robot.py @@ -5,6 +5,13 @@ import pkg_resources import pydot import traceback +import re + +try: + from lxml import etree as ET, QName +except ImportError: + from xml.etree import ElementTree as ET + QName = None from .. import geometry as pgu, utils from ..commandline_logging import get_logger @@ -15,7 +22,7 @@ from ..io.poses import JointPoseSet from ..io.smurfrobot import SMURFRobot from ..utils import transform, misc, git, resources -from ..utils.misc import read_number_from_config, regex_replace, create_dir, edit_name_string, execute_shell_command +from ..utils.misc import read_number_from_config, regex_replace, create_dir, edit_name_string, execute_shell_command, get_var, plural from ..utils.transform import create_transformation, inv, get_adjoint, round_array from ..utils.tree import find_close_ancestor_links, get_joints from ..utils.xml import transform_object, get_joint_info_dict @@ -27,13 +34,13 @@ class Robot(SMURFRobot): def __init__(self, name=None, xmlfile=None, submechanisms_file=None, smurffile=None, verify_meshes_on_import=True, inputfile=None, description=None, is_human=False, autogenerate_submechanisms=None, - assert_validity=True, shallow=False): + assert_validity=True, shallow=False, **kwargs): """ The basic robot class to represent a urdf. 
""" try: super().__init__(xmlfile=xmlfile, submechanisms_file=submechanisms_file, smurffile=smurffile, verify_meshes_on_import=verify_meshes_on_import, inputfile=inputfile, description=description, - autogenerate_submechanisms=autogenerate_submechanisms, is_human=is_human, shallow=shallow) + autogenerate_submechanisms=autogenerate_submechanisms, is_human=is_human, shallow=shallow, **kwargs) except Exception as e: log.error(f"Failed loading:\n input: {inputfile}\n xml: {xmlfile}\n submechanims: {submechanisms_file}\n smurf: {smurffile}\n" f"because of:\n"+''.join(traceback.format_exception(None, e, e.__traceback__))) @@ -46,7 +53,7 @@ def __init__(self, name=None, xmlfile=None, submechanisms_file=None, smurffile=N self.assert_validity() # export methods - def export_meshes(self, mesh_output_dir, format=None, use_existing=False, apply_scale=False): + def export_meshes(self, mesh_output_dir, rel_mesh_pathes=None, format=None, use_existing=False, apply_scale=False): """ Will go through all visuals and collisions and export the meshes of all mesh geometries to in the given format to the outputdir Args: @@ -58,7 +65,7 @@ def export_meshes(self, mesh_output_dir, format=None, use_existing=False, apply_ """ for vc in self.visuals + self.collisions: if isinstance(vc.geometry, representation.Mesh): - vc.geometry.provide_mesh_file(targetpath=os.path.abspath(mesh_output_dir), format=format, + vc.geometry.provide_mesh_file(targetpath=os.path.abspath(mesh_output_dir), rel_mesh_pathes=rel_mesh_pathes, format=format, use_existing=use_existing, apply_scale=apply_scale) def to_x3d_string(self, float_fmt_dict=None, reduce_meshes=0): @@ -76,7 +83,7 @@ def to_x3d_string(self, float_fmt_dict=None, reduce_meshes=0): return super(Robot, export_instance).to_x3d_string(float_fmt_dict=float_fmt_dict) - def export_x3d(self, outputfile, float_fmt_dict=None, reduce_meshes=1000): #, ros_pkg=False, copy_with_other_pathes=None, ros_pkg_name=None, mesh_format=None): + def export_x3d(self, outputfile, float_fmt_dict=None, reduce_meshes=1000, mark_as_autogenerated=False): #, ros_pkg=False, copy_with_other_pathes=None, ros_pkg_name=None, mesh_format=None): """Export the mechanism to the given output file. If export_visuals is set to True, all visuals will be exported. Otherwise no visuals get exported. If export_collisions is set to to True, all collisions will be exported. Otherwise no collision get exported. @@ -95,13 +102,15 @@ def export_x3d(self, outputfile, float_fmt_dict=None, reduce_meshes=1000): #, r if not os.path.exists(os.path.dirname(os.path.abspath(outputfile))): os.makedirs(os.path.dirname(os.path.abspath(outputfile))) with open(outputfile, "w") as f: + if mark_as_autogenerated: + f.write("\n") f.write(xml_string) f.close() log.info("X3D written to {}".format(outputfile)) return - def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_other_pathes=False, ros_pkg_name=None, mesh_format=None): + def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_other_pathes=False, ros_pkg_name=None, mesh_format=None, add_world_joint=None, add_default_transmissions=False, mark_as_autogenerated=False, write_material_references=True): """Export the mechanism to the given output file. If export_visuals is set to True, all visuals will be exported. Otherwise no visuals get exported. If export_collisions is set to to True, all collisions will be exported. Otherwise no collision get exported. 
@@ -111,7 +120,6 @@ def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_
 
         if float_fmt_dict is None:
             float_fmt_dict = {}
-        self.joints = self.get_joints_ordered_df()
 
         outputfile = os.path.abspath(outputfile)
 
@@ -123,14 +131,33 @@ def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_
         export_robot.mesh_format = mesh_format
         export_robot.xmlfile = outputfile
 
+        if write_material_references is not None:
+            export_robot.write_material_references = write_material_references
+
+        if add_default_transmissions:
+            for j in export_robot.joints:
+                if j.joint_type != "fixed":
+                    export_robot.add_aggregate("transmission", representation.Transmission(
+                        name=j.name,
+                        transmission_type="SimpleTransmission",
+                        joints=[representation.TransmissionJoint(name=j.name)],
+                        actuators=[representation.Actuator(name=j.name)]
+                    ))
+
         xml_string = export_robot.to_urdf_string(float_fmt_dict=float_fmt_dict)
 
         if ros_pkg is True:
             xml_string = regex_replace(xml_string, {'filename="../': 'filename="package://' if ros_pkg_name is None else f'filename="package://{ros_pkg_name}/'})
+        if add_world_joint:
+            xml_lines = xml_string.split("\n")
+            joint_lines = representation.Joint(name="world_to_"+export_robot.root, parent="world", child=export_robot.root, joint_type="fixed").to_urdf_string().split("\n")
+            xml_string = "\n".join(xml_lines[:1] + joint_lines + xml_lines[1:])
 
         if not os.path.exists(os.path.dirname(os.path.abspath(outputfile))):
             os.makedirs(os.path.dirname(os.path.abspath(outputfile)))
 
         with open(outputfile, "w") as f:
+            if mark_as_autogenerated:
+                f.write("<!-- This file was autogenerated by Phobos -->\n")
             f.write(xml_string)
             f.close()
 
@@ -148,7 +175,7 @@ def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_
         log.info("URDF written to {}".format(outputfile))
         return
 
-    def export_sdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_other_pathes=None, ros_pkg_name=None, mesh_format=None):
+    def export_sdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_other_pathes=None, ros_pkg_name=None, mesh_format=None, add_world_joint=None, mark_as_autogenerated=False):
         """Export the mechanism to the given output file.
         If export_visuals is set to True, all visuals will be exported. Otherwise no visuals get exported.
         If export_collisions is set to True, all collisions will be exported. Otherwise no collisions get exported.
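add_world_joint splices a fixed joint against a world frame into the already serialized XML by splitting on newlines and re-joining after the opening element, mirroring the xml_lines[:1] + joint_lines + xml_lines[1:] composition above. A standalone sketch with illustrative names:

    urdf = '<robot name="demo">\n  <link name="base"/>\n</robot>\n'

    # Lines of a fixed joint anchoring the root link to "world".
    joint_lines = [
        '  <joint name="world_to_base" type="fixed">',
        '    <parent link="world"/>',
        '    <child link="base"/>',
        '  </joint>',
    ]

    lines = urdf.split("\n")
    # Insert directly after the opening <robot> tag.
    spliced = "\n".join(lines[:1] + joint_lines + lines[1:])
    print(spliced)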
@@ -156,7 +183,6 @@ def export_sdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_o
         # [Todo v2.1.0] Create the model.config file for gazebo
         if float_fmt_dict is None:
             float_fmt_dict = {}
-        self.joints = self.get_joints_ordered_df()
 
         outputfile = os.path.abspath(outputfile)
 
@@ -169,10 +195,16 @@ def export_sdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_o
         if ros_pkg is True:
             xml_string = regex_replace(xml_string, {'../': 'package://' if ros_pkg_name is None else f'package://{ros_pkg_name}/'})
+        if add_world_joint:
+            xml_lines = xml_string.split("\n")
+            joint_lines = representation.Joint(name="world", parent="world", child=export_robot.root, origin=representation.Pose(relative_to="world"), joint_type="fixed").to_sdf_string().split("\n")
+            xml_string = "\n".join(xml_lines[:1] + joint_lines + xml_lines[1:])
 
         if not os.path.exists(os.path.dirname(os.path.abspath(outputfile))):
             os.makedirs(os.path.dirname(os.path.abspath(outputfile)))
 
         with open(outputfile, "w") as f:
+            if mark_as_autogenerated:
+                f.write("<!-- This file was autogenerated by Phobos -->\n")
             f.write(xml_string)
             f.close()
 
@@ -193,13 +225,13 @@ def export_sdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_o
 
     def export_xml(self, outputdir=None, format="urdf", filename=None, float_fmt_dict=None, no_format_dir=False,
                    ros_pkg=False, copy_with_other_pathes=None, ros_pkg_name=None,
                    with_meshes=True, use_existing_meshes=False, mesh_format=None, additional_meshes=None, rel_mesh_pathes=None,
-                   enforce_zero=False, correct_inertials=False):
+                   enforce_zero=False, correct_inertials=False, add_world_joint=None, add_default_transmissions=False, mark_as_autogenerated=False, write_material_references=True):
         """ Exports all model information stored inside this instance.
         """
         outputdir = os.path.abspath(outputdir)
         if rel_mesh_pathes is None:
             rel_mesh_pathes = resources.get_default_rel_mesh_pathes()
-        assert self.get_root()
+        assert len(self.get_roots()) == 1, [str(r) for r in self.get_roots()]
 
         format = format.lower()
         assert format in KINEMATIC_TYPES, format
 
@@ -243,22 +275,24 @@ def export_xml(self, outputdir=None, format="urdf", filename=None, float_fmt_dic
             _export_robot.export_urdf(
                 outputfile=model_file, ros_pkg=ros_pkg, copy_with_other_pathes=copy_with_other_pathes, ros_pkg_name=ros_pkg_name,
-                float_fmt_dict=float_fmt_dict, mesh_format=mesh_format
+                float_fmt_dict=float_fmt_dict, mesh_format=mesh_format, add_world_joint=add_world_joint, add_default_transmissions=add_default_transmissions,
+                mark_as_autogenerated=mark_as_autogenerated, write_material_references=write_material_references
             )
         elif format == "sdf":
             _export_robot.export_sdf(
                 outputfile=model_file, ros_pkg=ros_pkg, copy_with_other_pathes=copy_with_other_pathes, ros_pkg_name=ros_pkg_name,
-                float_fmt_dict=float_fmt_dict, mesh_format=mesh_format
+                float_fmt_dict=float_fmt_dict, mesh_format=mesh_format, add_world_joint=add_world_joint,
+                mark_as_autogenerated=mark_as_autogenerated
             )
         else:
             raise IOError("Unknown export format: " + format)
 
         return model_file
 
-    def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
+    def export_smurf(self, outputdir=None, outputfile=None, robotfile=None, filename=None,
                      check_submechs=True, with_submodel_defs=False,
-                     with_meshes=True, mesh_format=None, additional_meshes=None, rel_mesh_pathes=None):
+                     with_meshes=True, mesh_format=None, additional_meshes=None, rel_mesh_pathes=None, mark_as_autogenerated=False):
         """ Export self and all annotations inside a given folder with structure
         """
         assert self.check_linkage()
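export_smurf now derives every written file from a single sanitized filename (defaulting to the robot name with '/' replaced by '_') instead of re-sanitizing self.name at each call site. A sketch of the assumed naming scheme, with illustrative category names:

    import os

    def smurf_filenames(smurf_dir, name, filename=None, categories=("joints", "motors")):
        # One .smurf file plus one <filename>_<category>.yml per annotation
        # category, as the hunks below suggest.
        if filename is None:
            filename = name
        filename = filename.replace('/', '_')
        files = [os.path.join(smurf_dir, "{}.smurf".format(filename))]
        files += [os.path.join(smurf_dir, "{}_{}.yml".format(filename, c)) for c in categories]
        return files

    print(smurf_filenames("smurf", "my/robot"))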
@@ -279,6 +313,11 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
         if not os.path.exists(os.path.abspath(submech_dir)):
             os.makedirs(os.path.abspath(submech_dir))
         self.link_entities()
+
+        if filename is None:
+            filename = self.name
+        filename = filename.replace('/', '_')
+
         # meshes
         if with_meshes:
             _mesh_format = mesh_format
@@ -292,7 +331,7 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
             self.export_meshes(mesh_output_dir=os.path.join(outputdir, rel_mesh_pathes[mf]), format=mf)
         # Export the smurf files
         smurf_dir = os.path.join(outputdir, "smurf")
-        self.smurffile = os.path.join(smurf_dir, "{}.smurf".format(self.name.replace('/','_')))
+        self.smurffile = os.path.join(smurf_dir, "{}.smurf".format(filename))
         if not os.path.exists(smurf_dir):
             os.mkdir(smurf_dir)
         # Export attr
@@ -336,20 +375,27 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
             if annotation == "submechanisms" or annotation == "exoskeletons":
                 submechanisms[annotation] = annotation_dict[annotation]
             else:
-                with open(os.path.join(smurf_dir, "{}_{}.yml".format(self.name.replace('/','_'), annotation_name)), "w+") as stream:
+                with open(os.path.join(smurf_dir, "{}_{}.yml".format(filename, annotation_name)), "w+") as stream:
+                    if mark_as_autogenerated:
+                        annotation_dict["_autogenerated"] = True
                     stream.write(dump_json(annotation_dict, default_style=False))
                     export_files.append(os.path.split(stream.name)[-1])
 
         if submechanisms != {}:
-            self.submechanisms_file = os.path.join(smurf_dir, "{}_submechanisms.yml".format(self.name.replace('/','_')))
+            self.submechanisms_file = os.path.join(smurf_dir, "{}_submechanisms.yml".format(filename))
             with open(self.submechanisms_file, "w+") as stream:
+                if mark_as_autogenerated:
+                    submechanisms["_autogenerated"] = True
                 stream.write(dump_json(submechanisms, default_style=False))
                 export_files.append(os.path.split(stream.name)[-1])
 
         # further annotations
         for category, annos in self.annotations.items():
             if category not in self.smurf_annotation_keys:
-                with open(os.path.join(smurf_dir, "{}_{}.yml".format(self.name.replace('/','_'), category)), "w+") as stream:
-                    stream.write(dump_json({category: annos}, default_style=False))
+                with open(os.path.join(smurf_dir, "{}_{}.yml".format(filename, category)), "w+") as stream:
+                    dump = {category: annos}
+                    if mark_as_autogenerated:
+                        dump["_autogenerated"] = True
+                    stream.write(dump_json(dump, default_style=False))
                     export_files.append(os.path.split(stream.name)[-1])
 
         # submodel list
@@ -358,8 +404,11 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
             for name, definition in self.submodel_defs.items():
                 out_submodel_defs[name] = deepcopy(definition)
                 out_submodel_defs[name]["export_dir"] = os.path.relpath(out_submodel_defs[name]["export_dir"], smurf_dir)
-            with open(os.path.join(smurf_dir, "{}_submodels.yml".format(self.name.replace('/','_'))), "w+") as stream:
-                stream.write(dump_json({"submodels": out_submodel_defs}, default_style=False))
+            with open(os.path.join(smurf_dir, "{}_submodels.yml".format(filename)), "w+") as stream:
+                dump = {"submodels": out_submodel_defs}
+                if mark_as_autogenerated:
+                    dump["_autogenerated"] = True
+                stream.write(dump_json(dump, default_style=False))
                 export_files.append(os.path.split(stream.name)[-1])
 
         # generic annotations
@@ -389,8 +438,11 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None,
         for category, annos in temp_generic_annotations.items():
             # write
             if len(annos) > 0 and category not in self.smurf_annotation_keys:
-                with open(os.path.join(smurf_dir,
"{}_{}.yml".format(self.name.replace('/','_'), category)), "w") as stream: - stream.write(dump_json({category: annos}, default_style=False)) + with open(os.path.join(smurf_dir, "{}_{}.yml".format(filename, category)), "w") as stream: + dump = {category: annos} + if mark_as_autogenerated: + dump["_autogenerated"] = True + stream.write(dump_json(dump, default_style=False)) export_files.append(os.path.split(stream.name)[-1]) # Create the smurf file itself @@ -405,16 +457,20 @@ def export_smurf(self, outputdir=None, outputfile=None, robotfile=None, annotation_dict['version'] = self.version with open(self.smurffile, "w+") as stream: + if mark_as_autogenerated: + annotation_dict["_autogenerated"] = True stream.write(dump_json(annotation_dict, default_style=False, sort_keys=True)) log.info(f"SMURF written to {smurf_dir}") - def export_joint_limits(self, outputdir, file_name="joint_limits.yml", joint_desc=None): + def export_joint_limits(self, outputdir, file_name="joint_limits.yml", joint_desc=None, mark_as_autogenerated=False): output_dict = get_joint_info_dict(self, get_joints(self, joint_desc)) log.info(f"Exporting joint_limits file {os.path.join(outputdir, file_name)}") if not os.path.isdir(os.path.dirname(os.path.abspath(os.path.join(outputdir, file_name)))): os.makedirs(os.path.dirname(os.path.abspath(os.path.join(outputdir, file_name)))) output_dict = {"limits": output_dict} with open(os.path.join(outputdir, file_name), "w") as jl_file: + if mark_as_autogenerated: + output_dict["_autogenerated"] = True jl_file.write(dump_json(output_dict)) def export_kccd(self, outputdir, rel_iv_meshes_path, output_mesh_format, join_before_convexhull=True, @@ -659,9 +715,14 @@ def add_joint(joint): graph = pydot.graph_from_dot_data(out) graph[0].write_pdf(outputfile) - def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_name=None, no_smurf=False, - ros_pkg_later=False, check_submechs=True, with_meshes=True, use_existing_meshes=False, apply_scale=False): + def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_name=None, no_smurf=False, filename=None, + ros_pkg_later=False, check_submechs=True, with_meshes=True, reduce_meshes=None, use_existing_meshes=False, + apply_scale=False, export_null_pose=True, sort_links_and_joints=True, mark_as_autogenerated=False): assert self.check_linkage() + if sort_links_and_joints: + # this sorting is required to minimize the produced diff when tracking the model in a VCS + self.joints = self.get_joints_ordered_df() + self.links = self.get_links_ordered_df() main_export_robot_instance = self.duplicate() outputdir = os.path.abspath(outputdir) @@ -731,7 +792,9 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na include_human_in_abstract=export.get("include_human_in_abstract", False), only_urdf=export.get("only_urdf", False), remove_joints=export.get("remove_joints", None), - move_joint_axis_to_intersection=export.get("move_joint_axis_to_intersection", None) + remove_links=export.get("remove_links", None), + move_joint_axis_to_intersection=export.get("move_joint_axis_to_intersection", None), + name_replacements=export.get("name_replacements", None) ) else: assert export.get("start", None) == main_export_robot_instance.submodel_defs[export["name"]]["start"] @@ -745,7 +808,7 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na if "export_config" in export: _export_config = export["export_config"] else: - _export_config = [ec for ec in export_config if ec["type"] != 
"submodel"] + _export_config = [ec for ec in export_config if ec["type"] not in ["for-loop", "submodel"]] main_export_robot_instance.submodel_defs[export["name"]]["export_dir"] = os.path.join(outputdir, "submodels", export["name"]) export_robot_instance.export( outputdir=main_export_robot_instance.submodel_defs[export["name"]]["export_dir"], @@ -753,7 +816,9 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na rel_mesh_pathes={k: os.path.join("..", "..", v) for k, v in rel_mesh_pathes.items()}, with_meshes=False, use_existing_meshes=use_existing_meshes, - no_smurf=no_smurf + no_smurf=no_smurf, + filename=export.get("filename", None), + mark_as_autogenerated=mark_as_autogenerated ) elif export["type"] == "pdf": main_export_robot_instance.export_pdf(outputfile=os.path.join(outputdir, self.name.replace('/','_') + ".pdf")) @@ -773,6 +838,7 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na kwargs["joint_desc"] = export["joints"] main_export_robot_instance.export_joint_limits( outputdir=outputdir, + mark_as_autogenerated=mark_as_autogenerated, **kwargs ) elif export["type"] == "smurf": @@ -787,19 +853,22 @@ def export(self, outputdir, export_config=None, rel_mesh_pathes=None, ros_pkg_na robotfile=xml_file_in_smurf, check_submechs=check_submechs, with_submodel_defs=True, + filename=filename, + mark_as_autogenerated=mark_as_autogenerated, with_meshes=False # has been done before ) # export ros package files if ros_pkg and not ros_pkg_later: - main_export_robot_instance.export_ros_package_files(outputdir, ros_pkg_name) + main_export_robot_instance.export_ros_package_files(outputdir, ros_pkg_name, mark_as_autogenerated=mark_as_autogenerated) elif ros_pkg and ros_pkg_later: return ros_pkg_name - def export_ros_package_files(self, outputdir, ros_pkg_name, author=None, maintainer=None, url=None, version=None): + def export_ros_package_files(self, outputdir, ros_pkg_name, author=None, maintainer=None, url=None, version=None, license=None, cmake=None, package_xml=None, mark_as_autogenerated=False): # ROS CMakeLists.txt ros_cmake = os.path.join(outputdir, "CMakeLists.txt") directories = [os.path.relpath(dir, outputdir) for dir, _, _ in os.walk(outputdir)] - if not os.path.isfile(ros_cmake): + if (cmake is None and not os.path.isfile(ros_cmake)) or cmake: # if we want to keep that we can put it to the keep_files + log.info("Creating default CMakeLists.txt for ROS") misc.copy(None, pkg_resources.resource_filename("phobos", "data/ROSCMakeLists.txt.in"), ros_cmake) with open(ros_cmake, "r") as cmake: @@ -809,10 +878,13 @@ def export_ros_package_files(self, outputdir, ros_pkg_name, author=None, maintai "@DIRECTORIES@": " ".join(directories), }) with open(ros_cmake, "w") as cmake: + if mark_as_autogenerated: + cmake.write("# generated by Phobos\n") cmake.write(content) # ROS package.xml packagexml_path = os.path.join(outputdir, "package.xml") - if not os.path.isfile(packagexml_path): + if (package_xml is None and not os.path.isfile(packagexml_path)) or package_xml: + log.info("Creating default package.xml for ROS") misc.copy(None, pkg_resources.resource_filename("phobos", "data/ROSpackage.xml.in"), packagexml_path) with open(packagexml_path, "r") as packagexml: @@ -820,19 +892,34 @@ def export_ros_package_files(self, outputdir, ros_pkg_name, author=None, maintai content = misc.regex_replace(content, { "\$INPUTNAME": ros_pkg_name }) + if license: + content = misc.regex_replace(content, { + "\$LICENSE": f""+license+"" + }) try: if any([x is None for 
             try:
                 if any([x is None for x in [author, maintainer, url]]):
                     _author, _maintainer, _url = git.get_repo_data(outputdir)
                 content = misc.regex_replace(content, {
                     "\$AUTHOR": f"{author if author is not None else _author}",
-                    "\$MAINTAINER": f"{maintainer if maintainer is not None else _maintainer}",
+                    "\$MAINTAINER": f"{maintainer if maintainer is not None else _maintainer}",
                     "\$URL": f"{url if url is not None else _url}",
-                    "\$VERSION": f"def: {version if version is not None else self.version}"
+                    "\$VERSION": version if version is not None else self.version
                 })
             except:
                 log.info("Couldn't obtain author, maintainer, url info from git")
             with open(packagexml_path, "w") as packagexml:
+                if mark_as_autogenerated:
+                    packagexml.write("<!-- This file was autogenerated by Phobos -->\n")
                 packagexml.write(content)
+
+        # config/ with initial poses
+        config_dir = os.path.join(outputdir, "config")
+        os.makedirs(config_dir, exist_ok=True)
+        for pose in self.poses:
+            with open(os.path.join(config_dir, f"init_positions_{pose.name}.yml"), "w") as f:
+                dump = {"init_positions": pose.joints}
+                if mark_as_autogenerated:
+                    dump["_autogenerated"] = True
+                f.write(dump_json(dump, default_flow_style=False))
 
     # getters
     def get_submodel(self, name):
@@ -848,6 +935,7 @@ def get_submodel(self, name):
 
     def exchange_root(self, new_root):
         """
         This function adapts all joints in this robot in such a way that the given link becomes the new root.
+        The robot should be linked before calling this (see link_entities()).
 
         Args:
             link_name: The name of the link that shall become the new root
@@ -864,19 +952,19 @@ def exchange_root(self, new_root):
         nr2x = lambda x: old_robot.get_transformation(x, start=str(new_root))
         # 0. make sure all link parts follow the convention that they have to be relative_to that link
         for link in self.links:
-            for vc in self.visuals + self.collisions:
+            for vc in link.visuals + link.collisions + link.primitives:
                 if str(vc.origin.relative_to) != str(link):
                     vc.origin = representation.Pose.from_matrix(
                         inv(or2x(link)).dot(or2x(vc.origin.relative_to).dot(vc.origin.to_matrix())),
                         relative_to=link
                     )
                     vc.origin.link_with_robot(self)
-            if link.inertial is not None:
-                link.inertial.origin = representation.Pose.from_matrix(
-                    inv(or2x(link)).dot(or2x(vc.origin.relative_to).dot(link.inertial.origin.to_matrix())),
-                    relative_to=link
-                )
-                link.inertial.origin.link_with_robot(self)
+            if link.inertial is not None:
+                link.inertial.origin = representation.Pose.from_matrix(
+                    inv(or2x(link)).dot(or2x(link.inertial.origin.relative_to).dot(link.inertial.origin.to_matrix())),
+                    relative_to=link
+                )
+                link.inertial.origin.link_with_robot(self)
         # 1. get the tree structure correct
         for jointname in flip_joints:
             joint = self.get_joint(jointname)
@@ -906,6 +994,38 @@ def exchange_root(self, new_root):
             jl.origin = jl.joint_relative_origin
             jl.origin.link_with_robot(self)
 
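exchange_root rebases every origin so that it is expressed in its own link's frame: with r2x(a) denoting the root-to-a transform, the rebased pose is inv(r2x(link)) . r2x(old_frame) . T_pose, which is exactly the composition used in the hunk above. A numeric sketch with homogeneous transforms:

    import numpy as np

    def translation(xyz):
        T = np.eye(4)
        T[:3, 3] = xyz
        return T

    # Illustrative frames: root -> link and root -> old reference frame.
    T_root_link = translation([1.0, 0.0, 0.0])
    T_root_old = translation([0.0, 2.0, 0.0])
    T_pose_in_old = translation([0.0, 0.0, 3.0])

    # Same composition as inv(or2x(link)).dot(or2x(relative_to).dot(origin.to_matrix()))
    T_pose_in_link = np.linalg.inv(T_root_link) @ T_root_old @ T_pose_in_old
    print(T_pose_in_link[:3, 3])  # -> [-1.  2.  3.]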
+    def fix_tree(self, root, link_entities=True):
+        """This method may fix your robot if it has multiple roots.
+        This can fix it under the following conditions:
+        - Your robot is serial
+        - Your robot only has multiple roots because one or more joints are inverted
+        - You know the one and only true root of your robot
+
+        Args:
+            root (str): The name of the one and only true root
+        """
+        self.unlink_entities()
+
+        def _ensure_link_rootest(robot, start_link, joints_done=[]):
+            # first ensure that the start_link is the rootest
+            for joint in robot.joints:
+                if str(joint.child) == start_link and str(joint) not in joints_done:
+                    joint.invert()
+                    joints_done.append(str(joint))
+            return joints_done
+
+        def _reorient_tree_below(robot, start_link, joints_done=[]):
+            for joint in robot.joints:
+                if joint.parent == start_link:
+                    link = joint.child
+                    joints_done = _ensure_link_rootest(robot, str(link), joints_done=joints_done)
+                    joints_done = _reorient_tree_below(robot, str(link), joints_done=joints_done)
+            return joints_done
+
+        joints_done = _ensure_link_rootest(self, str(root))
+        _reorient_tree_below(self, str(root), joints_done=joints_done)
+        self.regenerate_tree_maps()
+        if link_entities:
+            self.link_entities()
+
     def remove_visuals(self):
         """
         Removes all visuals from this robot.
@@ -924,84 +1044,11 @@ def remove_collisions(self):
             for coll in link.collisions:
                 link.remove_aggregate(coll)
 
-    # not used
-    # def reparent_link(self, link_name, parent, inertia=True, visual=True, collision=True):
-    #     """
-    #     Reparent all xml-children ( inertia, visual and collision ) of the given link onto the new parent.
-    #     :param link_name: the link we apply this to
-    #     :param parent: the new parent
-    #     :param inertia: whether we do this for the inertias
-    #     :param visual: whether we do this for the visuals
-    #     :param collision: whether we do this for the collisions
-    #     :return: None
-    #     """
-    #     if isinstance(link_name, list):
-    #         if isinstance(parent, list):
-    #             assert len(link_name) == len(parent)
-    #             for link_, parent_ in zip(link_name, parent):
-    #                 self.reparent_link(link_, parent_, inertia=inertia, visual=visual, collision=collision)
-    #             return
-    #         for link_ in link_name:
-    #             self.reparent_link(link_, parent, inertia=inertia, visual=visual, collision=collision)
-    #         return
-    #
-    #     link = self.get_link(link_name)
-    #     parent = self.get_link(parent)
-    #
-    #     if not link or not parent:
-    #         log.warning("Link or new parent not found!")
-    #         return
-    #
-    #     # Get the transformation
-    #     # root to link
-    #     L_T_R = self.get_transformation(link.name)
-    #     R_T_P = inv(self.get_transformation(parent.name))
-    #
-    #     L_T_P = R_T_P.dot(L_T_R)
-    #
-    #     if inertia and link.inertial:
-    #         inertia_L = link.inertial
-    #         if parent.inertial:
-    #             # Merge the inertials
-    #             # Old one
-    #             I_L = inertia_L.to_mass_matrix()
-    #             IP_T_IL = parent.inertial.origin.to_matrix().dot(
-    #                 L_T_P.dot(inertia_L.origin.to_matrix()))
-    #             Ad = get_adjoint(IP_T_IL)
-    #             # Transform into parent
-    #             I_NL = Ad.dot(I_L.dot(Ad.T)) + parent.inertial.to_mass_matrix()
-    #             parent.inertial = representation.Inertial.from_mass_matrix(I_NL, parent.inertial.origin)
-    #
-    #         else:
-    #             # Set inertial to new parent
-    #             new_origin = L_T_P.dot(inertia_L.origin.to_matrix())
-    #             parent.inertial = link.inertial
-    #             parent.inertial.origin = representation.Pose.from_matrix(new_origin)
-    #
-    #         # Set link to near zero
-    #         link.inertial = representation.Inertial.from_mass_matrix(1e-5 * np.ones((6, 6)), link.inertial.origin)
-    #
-    #     if visual and link.visuals:
-    #         for vis in link.visuals:
-    #             VL_T_L = vis.origin.to_matrix()
-    #             new_origin = L_T_P.dot(VL_T_L)
-    #             vis.origin = representation.Pose.from_matrix(new_origin)
parent.add_aggregate('visual', vis.duplicate())
-    #             link.remove_aggregate(vis)
-    #
-    #     if collision and link.collisions:
-    #         for col in link.collisions:
-    #             CL_T_L = col.origin.to_matrix()
-    #             new_origin = L_T_P.dot(CL_T_L)
-    #             col.origin = representation.Pose.from_matrix(new_origin)
-    #             parent.add_aggregate('collision', col.duplicate())
-    #             link.remove_aggregate(col)
-    #
-    #     # Reinit the link
-    #     # link.to_xml()
-    #     # parent.to_xml()
-    #     self.relink_entities()
-    #     return
+    def remove_fixed(self):
+        """Removes all fixed joints, see remove_joint
+        """
+        remove = [str(j) for j in self.joints if j.joint_type == "fixed"]
+        self.remove_joint(remove)

     def move_link_in_tree(self, link_name, new_parent_name):
         """
@@ -1018,23 +1065,32 @@
         if jointname is None:
             raise AssertionError("Can't move the root link.")
         joint = self.get_joint(jointname)
+        if joint.parent == new_parent_name:
+            return
         T0_old = self.get_transformation(link_name)
         T0_newp = self.get_transformation(new_parent_name)
         joint.origin = representation.Pose.from_matrix(inv(T0_newp).dot(T0_old), relative_to=new_parent_name)
         joint.origin.link_with_robot(self)
         joint.parent = new_parent_name
+        self.regenerate_tree_maps()

     def define_submodel(self, name, start=None, stop=None, robotname=None, only_urdf=False, abstract_model=False,
-                        remove_joints=None, remove_fixed=False, no_submechanisms=False, include_unstopped_branches=False,
-                        move_joint_axis_to_intersection=None, include_human_in_abstract=False,
+                        remove_joints=None, remove_links=None, remove_fixed=False, no_submechanisms=False, include_unstopped_branches=False,
+                        move_joint_axis_to_intersection=None, include_human_in_abstract=False, name_replacements=None,
                         overwrite=False):
         """Defines a submodel from a given starting link.
        If stop is provided then the chain from start to stop is used.
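        For illustration, a typical call could look like this (link and
        submodel names are invented):

            robot.define_submodel(
                name="upper_body",
                start="torso",
                stop=["left_hand", "right_hand"],
                remove_fixed=True,
                overwrite=True,
            )

        stop accepts a single name or a list of names; an empty list is
        treated like no stop links at all.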
""" if remove_joints is None: remove_joints = [] - assert stop is None or type(stop) == list + if remove_links is None: + remove_links = [] + if type(stop) == str: + stop = [stop] + if type(stop) == list and len(stop)==0: + stop = None + assert stop is None or (type(stop) == list and len(stop) > 0) definition = { "name": name, "robotname": robotname, @@ -1043,11 +1099,13 @@ def define_submodel(self, name, start=None, stop=None, robotname=None, only_urdf "only_urdf": only_urdf, "abstract_model": abstract_model, "remove_joints": remove_joints, + "remove_links": remove_links, "remove_fixed": remove_fixed, "move_joint_axis_to_intersection": move_joint_axis_to_intersection, "include_unstopped_branches": include_unstopped_branches, "no_submechanisms": no_submechanisms, - "include_human_in_abstract": include_human_in_abstract + "include_human_in_abstract": include_human_in_abstract, + "name_replacements": name_replacements } if name in self.submodel_defs.keys() and not overwrite: raise NameError("A submodel with the given name is already defined") @@ -1056,6 +1114,8 @@ def define_submodel(self, name, start=None, stop=None, robotname=None, only_urdf return self.instantiate_submodel(**definition) def get_links_and_joints_in_subtree(self, start, stop=None, include_unstopped_branches=False): + if type(stop) == str: + stop = [stop] assert self.get_link(start, verbose=True) is not None, f"Link {start} does not exist" linknames = set() _stop = list(set(stop)) if stop is not None else None @@ -1072,8 +1132,8 @@ def get_links_and_joints_in_subtree(self, start, stop=None, include_unstopped_br return linknames, jointnames def instantiate_submodel(self, name=None, start=None, stop=None, robotname=None, include_unstopped_branches=None, - no_submechanisms=False, abstract_model=False, remove_joints=None, remove_fixed=False, - move_joint_axis_to_intersection=None, include_human_in_abstract=False, + no_submechanisms=False, abstract_model=False, remove_joints=None, remove_links=None, remove_fixed=False, + move_joint_axis_to_intersection=None, include_human_in_abstract=False, name_replacements=None, **ignored_kwargs): """ Instantiates a submodel by it's definition. Takes either name or definition. 
If both are given, the submodel
@@ -1083,8 +1143,19 @@
         """
         if remove_joints is None:
             remove_joints = []
+        elif type(remove_joints) == str:
+            remove_joints = [remove_joints]
+        if remove_links is None:
+            remove_links = []
+        elif type(remove_links) == str:
+            remove_links = [remove_links]
         if start is None:
             start = self.get_root()
+        if type(stop) == str:
+            stop = [stop]
+        if type(stop) == list and len(stop) == 0:
+            stop = None
+        assert stop is None or (type(stop) == list and len(stop) > 0)
         if robotname is None and name is not None:
             robotname = name
@@ -1097,10 +1168,21 @@
         if stop is None:
             include_unstopped_branches = True
+        elif type(stop) == list and len(stop) > 0:
+            _stop = []
+            for link_id in stop:
+                for link in self.links:
+                    if re.match(link_id, str(link)):
+                        _stop.append(str(link))
+            stop = list(set(_stop))

-        linknames, jointnames = self.get_links_and_joints_in_subtree(
-            start=start, stop=stop, include_unstopped_branches=include_unstopped_branches
-        )
+        try:
+            linknames, jointnames = self.get_links_and_joints_in_subtree(
+                start=start, stop=stop, include_unstopped_branches=include_unstopped_branches
+            )
+        except Exception as e:
+            log.error(f"During instantiation of submodel {name} the following exception was raised: {e}")
+            raise e
         # [ToDo] In the following we have to go through all origins and check whether their relative_to is in the submodel as well and if not they have to be transformed
         links = self.get_link(linknames)
@@ -1180,7 +1262,7 @@
             submodel.get_root().origin = None
         for link in submodel.links:
-            for vc in link.collisions+link.visuals:
+            for vc in link.collisions+link.visuals+link.primitives:
                 if submodel.get_link(vc.origin.relative_to) is None and submodel.get_joint(vc.origin.relative_to) is None:
                     vc.origin = representation.Pose.from_matrix(self.get_transformation(end=vc.origin.relative_to, start=link), relative_to=link).dot(vc.origin)
         for joint in submodel.joints:
@@ -1204,6 +1286,22 @@
             submodel.submechanisms = []
             submodel.exoskeletons = []

+        if remove_links:
+            _new_remove_links = []
+            for link_id in remove_links:
+                for link in submodel.links:
+                    if re.match(link_id, str(link)):
+                        _new_remove_links.append(str(link))
+            remove_links = _new_remove_links
+            submodel.remove_link(list(set(remove_links)))
+
+        _new_remove_joints = []
+        for joint_id in remove_joints:
+            for joint in submodel.joints:
+                if re.match(joint_id, str(joint)):
+                    _new_remove_joints.append(str(joint))
+        remove_joints = _new_remove_joints
+
         if remove_fixed:
             fixed_joints = [str(j) for j in self.joints if j.joint_type == "fixed"]
@@ -1217,6 +1315,9 @@

         submodel.link_entities()

+        if name_replacements:
+            submodel.edit_names({"replacements": name_replacements})
+
         return submodel

     def remove_submodel(self, name):
@@ -1258,27 +1359,7 @@ def intersection(self, other, name=None, submodel=True, useother=False, keep=Non
         )
         return new_robot

-    # # [TODO later] this method is not fully implemented
-    # def difference(self, other, name=None, submodel=True, useother=False, exclude=None):
-    #     """
-    #     Compute the difference between two models and returns a new robot model with this as the spanning
-    #     """
-    #     raise NotImplementedError
-    #     #
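The stop, remove_links and remove_joints entries above are treated as regular expressions and expanded against the actual names with re.match. A small self-contained sketch of that expansion (link names are invented):

import re

links = ["arm_left_1", "arm_left_2", "arm_right_1", "head"]
patterns = ["arm_left_.*"]
# re.match anchors at the start of the string, so a bare "arm" would
# already match every arm_* link
expanded = sorted(set(l for p in patterns for l in links if re.match(p, l)))
print(expanded)  # ['arm_left_1', 'arm_left_2']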
if exclude is None: - # # exclude = [] - # # # Collect all joints - # # driver_joints = set([j.name for j in self.joints]) - # # other_joints = set([j.name for j in other.joints]) - # # - # # if exclude: - # # driver_joints.update(driver_joints.difference(set(exclude))) - # # - # # # Get the difference - # # difference = list(driver_joints.difference(other_joints)) - # # - # # return difference - - def correct_inertials(self, limit=1e-5): + def correct_inertials(self, limit=0.0): """ Correct all inertials of the robot. """ @@ -1290,11 +1371,10 @@ def correct_inertials(self, limit=1e-5): if origin.relative_to is None: origin.relative_to = link else: - M = np.zeros((6, 6)) - origin = representation.Pose.from_matrix(np.eye(4), relative_to=link) - origin.link_with_robot(self) + log.info(" Link {} has no inertial defined".format(link.name)) + continue m = M[0, 0] - if m <= limit: + if m < limit: M[:3, :3] = np.eye(3) * limit log.info(" Corrected mass for link {}".format(link.name)) @@ -1620,25 +1700,13 @@ def check_joint_definitions(self, raise_error=False, backup=None, default_axis=N velocity=backup["vel"] if "vel" in backup.keys() else None, lower=backup["min"] if "min" in backup.keys() else None, upper=backup["max"] if "max" in backup.keys() else None) - log.warfning(f"Joint limits for {joint.name} not defined taking default values!") + log.warning(f"Joint limits for {joint.name} not defined taking default values!") elif raise_error: log.error(joint.to_urdf_string()) raise ValueError(f"ERROR: Joint limits for {joint.name} not defined!") else: - if any([not hasattr(joint.limit, x) for x in ["lower", "upper", "effort", "velocity"]]) or \ - any([getattr(joint.limit, x) is None for x in ["lower", "upper", "effort", "velocity"]]): - log.error(f"{joint.name} {joint.limit.lower} {joint.limit.upper} {joint.limit.effort} " - f"{joint.limit.velocity}") - result &= 2 - if raise_error: - raise ValueError("ERROR: Not all joint limits for " + joint.name + " defined!") - if (joint.joint_type == "revolute" or joint.joint_type == "prismatic") and \ - not hasattr(joint, "axis") or joint.axis is None: - log.warning(f"Joint axis for joint {joint.name} not defined. Setting to [0 0 1]") - joint.axis = default_axis - result &= 4 if hasattr(joint, "limit") and joint.limit is not None and ( - joint.limit.lower == joint.limit.upper or joint.limit.velocity == 0): + (joint.limit.lower == joint.limit.upper and not joint.joint_type == "continuous") or joint.limit.velocity == 0 or joint.limit.effort == 0): log.warning(f"The joint limits of joint {joint.name} might restrict motion: " f"min: {joint.limit.lower} max: {joint.limit.upper} vel: {joint.limit.velocity} " f"eff: {joint.limit.effort}") @@ -1656,6 +1724,18 @@ def check_joint_definitions(self, raise_error=False, backup=None, default_axis=N log.info(f"Therefore we take the backup/default values for the joint limits:\n" f"min: {joint.limit.lower} max {joint.limit.upper} vel {joint.limit.velocity} " f"eff {joint.limit.effort}") + if any([not hasattr(joint.limit, x) for x in ["lower", "upper"]]) or \ + any([getattr(joint.limit, x) is None for x in ["lower", "upper"]]) and joint.joint_type not in ["continuous"]: + log.error(f"{joint.name} has no limits defined! 
min:{joint.limit.lower} max:{joint.limit.upper} eff:{joint.limit.effort} " + f"vel:{joint.limit.velocity}") + result &= 2 + if raise_error: + raise ValueError("ERROR: Not all joint limits for " + joint.name + " defined!") + if (joint.joint_type == ["revolute", "prismatic", "continuous"]) and \ + not hasattr(joint, "axis") or joint.axis is None: + log.warning(f"Joint axis for joint {joint.name} not defined. Setting to [0 0 1]") + joint.axis = default_axis + result &= 4 def generate_collision_matrix(self, coll_override=None, no_coll_override=None): """ @@ -1896,7 +1976,7 @@ def rename(self, targettype, target, prefix=None, suffix=None, replacements=None joint = self.get_joint(target) new_name = joint.child if not joint.child.upper().endswith("_LINK") else joint.child[:-5] else: - new_name = edit_name_string(target, prefix=prefix, suffix=suffix, replacements=replacements) + new_name = edit_name_string(target, prefix=prefix, suffix=suffix, replacements=replacements, do_not_double=do_not_double, correspondance=self.get_aggregate(targettype, target)) renamed_entities.update(self._rename(targettype, target, new_name)) if targettype in ['link', "links"]: @@ -1997,7 +2077,7 @@ def set_collision_scale(self, linkname, scale): col.geometry.scale = scale return - def clean_meshes(self): + def clean_meshes(self, check_enough_vertices=True): """ Checks all meshes if they have sufficient vertices. If not they will be removed Note: override this in subclasses if they have meshes dont forget the super call @@ -2005,9 +2085,12 @@ def clean_meshes(self): """ for link in self.links: for vc in link.collisions: - if isinstance(vc.geometry, representation.Mesh) and not vc.geometry.is_valid(): - log.warning(f"Mesh-Geometry {vc.name} {vc.geometry.input_file} is empty/to small; removing the corresponding {repr(type(vc)).split('.')[-1][:-2]}!") - link.remove_aggregate(vc) + if isinstance(vc.geometry, representation.Mesh) and not vc.geometry.is_valid(check_enough_vertices=check_enough_vertices): + log.warning(f"Mesh-Geometry {vc.name} {vc.geometry.input_file} is empty/too small; removing the corresponding {repr(type(vc)).split('.')[-1][:-2]}!") + if getattr(vc, "primitive", []): + vc.geometry = None + else: + link.remove_aggregate(vc) def attach(self, other, joint, do_not_rename=False, name_prefix="", name_suffix="_2", link_other=False): """ @@ -2169,11 +2252,11 @@ def attach(self, other, joint, do_not_rename=False, name_prefix="", name_suffix= else: raise NameError("There are duplicates in interface names", repr(pinterfaces & cinterfaces)) - new_poses = [p for p in pposes if p.name not in pposes & cposes] + [p for p in cposes if p.name not in pposes & cposes] + new_poses = [p for p in pposes if str(p) not in pposes & cposes] + [p for p in cposes if str(p) not in pposes & cposes] conflicting_poses = [] for pose_name in pposes & cposes: - ppose = [pp for pp in self.poses if pp.name == pose_name][0] - cpose = [cp for cp in other.poses if cp.name == pose_name][0] + ppose = [pp for pp in self.poses if str(pp) == pose_name][0] + cpose = [cp for cp in other.poses if str(cp) == pose_name][0] if not ppose.conflicts_with(cpose): new_poses.append(JointPoseSet.merge(ppose, cpose)) else: @@ -2248,6 +2331,7 @@ def add_link_by_properties(self, name, joint, mass=0.0, add_default_motor=True, ), origin=representation.Pose(xyz=[0, 0, 0], rpy=[0, 0, 0], relative_to=name) ) + log.debug("Adding inertial") else: inertial = None link = representation.Link(name, inertial=inertial,**kwargs) @@ -2344,6 +2428,9 @@ def 
recursive_link_transform(link_, T_root_to_link): name_replacements=name_replacements) for col in new_link.collisions: + for p in col.primitives: + T = T_R.dot(T_link.dot(p.origin.to_matrix())) + p.origin = representation.Pose.from_matrix(inv(T_root_to_link).dot(T.dot(T_flip)), relative_to=new_link) T = T_R.dot(T_link.dot(col.origin.to_matrix())) col.origin = representation.Pose.from_matrix(inv(T_root_to_link).dot(T.dot(T_flip)), relative_to=new_link) if isinstance(col.geometry, representation.Mesh) and not ( @@ -2467,6 +2554,7 @@ def get_beyond(self, link_to_cut): def remove_joint(self, jointname, only_frame=True): """Remove the joint(s) from the mechanism and transforms all inertia, visuals and collisions to the corresponding parent of the joint. + If only_frame is set to the physical information of the child link is removed as well """ if type(jointname) in [list, tuple, set]: [self.remove_joint(j) for j in jointname] @@ -2516,6 +2604,8 @@ def remove_joint(self, jointname, only_frame=True): parent.add_aggregate('visual', vis) for col in child.collisions: + for p in col.primitives: + p.origin = representation.Pose.from_matrix(C_T_P.dot(p.origin.to_matrix()), relative_to=parent) CC_T_P = C_T_P.dot(col.origin.to_matrix()) col.origin = representation.Pose.from_matrix(CC_T_P, relative_to=parent) col.link = parent.name @@ -2529,6 +2619,16 @@ def remove_joint(self, jointname, only_frame=True): assert self.get_joint(str(joint)) is None assert self.get_link(str(child)) is None + def remove_link(self, linkname): + """ + Removes the link with all its physical properties and the joint by which it was attached. + """ + if type(linkname) != list: + linkname = [linkname] + for ln in linkname: + jointname = self.get_parent(ln) + self.remove_joint(jointname=jointname, only_frame=False) + def add_floating_base(self): """ Returns a copy of this robot with a floatingbase mechanisms prepended @@ -2610,3 +2710,6 @@ def scale_link(self, linkname, scale_x, scale_y, scale_z, new_mass=None, geometr raise TypeError("geometry_for_inertia holds invalid type "+type(geometry_for_inertia)) link.inertial.inertia = representation.Inertia(*inertia_list) link.inertial.inertia.link_with_robot(self) + + def add_ros2control(self): + pass \ No newline at end of file diff --git a/phobos/data/ROSCMakeLists.txt.in b/phobos/data/ROSCMakeLists.txt.in index 20b036a5..cd82df8d 100644 --- a/phobos/data/ROSCMakeLists.txt.in +++ b/phobos/data/ROSCMakeLists.txt.in @@ -1,14 +1,11 @@ -cmake_minimum_required(VERSION 2.8.3) - +cmake_minimum_required(VERSION 3.8) project(@PACKAGE_NAME@) -find_package(catkin REQUIRED) - -catkin_package() +find_package(ament_cmake REQUIRED) -find_package(roslaunch) - -foreach(dir @DIRECTORIES@) - install(DIRECTORY ${dir}/ - DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/${dir}) +foreach(dir . 
) + install(DIRECTORY ${dir} + DESTINATION share/${PROJECT_NAME}) endforeach(dir) + +ament_package() \ No newline at end of file diff --git a/phobos/data/ROSpackage.xml.in b/phobos/data/ROSpackage.xml.in index 868dff77..fd4294ee 100644 --- a/phobos/data/ROSpackage.xml.in +++ b/phobos/data/ROSpackage.xml.in @@ -1,4 +1,6 @@ - + + + $INPUTNAME $VERSION @@ -7,13 +9,21 @@ $AUTHOR $MAINTAINER $URL - catkin - - robot_state_publisher - rviz - joint_state_publisher - + $LICENSE + ament_cmake + + urdf + xacro + launch + launch_ros + robot_state_publisher + joint_state_publisher_gui + rviz2 + + ament_lint_auto + ament_lint_common + - + ament_cmake \ No newline at end of file diff --git a/phobos/data/defaults.json b/phobos/data/defaults.json index c6d42244..5d2e7094 100644 --- a/phobos/data/defaults.json +++ b/phobos/data/defaults.json @@ -289,7 +289,6 @@ }, "ci_deploy": { "do_not_deploy": false, - "mr_target_branch": "develop", "mr_title": "[Phobos-CI] New pipeline model", "mr_description": "New model update generated by the phobos-CI-pipeline", "keep_files": [] @@ -299,9 +298,7 @@ { "type": "urdf", "mesh_format": "input_type", - "additional_meshes": [ - "bobj" - ], + "additional_meshes": [], "link_in_smurf": true, "include_cut_joints": false, "filename_suffix": "", @@ -311,23 +308,12 @@ { "type": "sdf", "mesh_format": "input_type", - "additional_meshes": [ - "bobj" - ], + "additional_meshes": [], "link_in_smurf": false, "include_cut_joints": false, "ros_pathes": false, - "enforce_zero": false - }, - { - "type": "joint_limits", - "joints": "ALL", - "file_name": "joint_limits.yml" - }, - { - "type": "submodel", - "name": "abstract_model", - "abstract_model": true + "enforce_zero": false, + "provide_primitives": true }, { "type": "pdf" @@ -348,9 +334,7 @@ { "type": "urdf", "mesh_format": "stl", - "additional_meshes": [ - "bobj" - ], + "additional_meshes": [], "link_in_smurf": true, "include_cut_joints": false, "filename_suffix": "", @@ -360,9 +344,7 @@ { "type": "sdf", "mesh_format": "stl", - "additional_meshes": [ - "bobj" - ], + "additional_meshes": [], "link_in_smurf": false, "include_cut_joints": false, "ros_pathes": true, diff --git a/phobos/data/xml_formats.json b/phobos/data/xml_formats.json index 1dc30d48..83cebb54 100644 --- a/phobos/data/xml_formats.json +++ b/phobos/data/xml_formats.json @@ -17,7 +17,7 @@ }, "material": { "classname": "Material", - "varname": "materials" + "varname": "referenced_materials" }, "sensor": { "classname": "SensorFactory", @@ -45,7 +45,7 @@ }, "collision": { "classname": "Collision", - "varname": "collisions" + "varname": "export_collisions" }, "sensor": { "classname": "SensorFactory", @@ -62,7 +62,7 @@ "Material": { "tag": "material", "attributes": { - "name": "name" + "name": "referenced_name" }, "children": { "texture": { @@ -117,12 +117,7 @@ }, "material": { "classname": "Material", - "varname": "material_" - } - }, - "attribute_children": { - "material": { - "name": "material" + "varname": "material_reference" } }, "nested_children": { @@ -290,7 +285,7 @@ } }, "TransmissionJoint": { - "tag": "transmission", + "tag": "joint", "attributes": { "name": "name" }, @@ -302,7 +297,7 @@ "tag": "transmission", "attributes": { "name": "name", - "type": "type" + "type": "transmission_type" }, "children": { "joint": { @@ -549,6 +544,17 @@ "varname": "origin" } } + }, + "Plugin": { + "tag": "plugin", + "attributes": { + "filename": "plugin_filename", + "name": "plugin_name" + }, + "value_children": { + "parameters": "parameters", + "controller_manager_node_name": 
"controller_manager_node_name" + } } }, "sdf": { @@ -645,11 +651,7 @@ "children": { "link": { "classname": "Link", - "varname": "physical_links" - }, - "frame": { - "classname": "Frame", - "varname": "frames" + "varname": "links" }, "joint": { "classname": "Joint", @@ -689,7 +691,7 @@ }, "collision": { "classname": "Collision", - "varname": "collisions" + "varname": "export_collisions" }, "sensor": { "classname": "SensorFactory", @@ -1208,6 +1210,17 @@ "cmd_min": "cmd_min", "cmd_offset": "cmd_offset" } + }, + "Plugin": { + "tag": "plugin", + "attributes": { + "filename": "plugin_filename", + "name": "plugin_name" + }, + "value_children": { + "parameters": "parameters", + "controller_manager_node_name": "controller_manager_node_name" + } } }, "x3d": { @@ -1224,8 +1237,8 @@ "tag": "IndexedFaceSet", "attributes": { "coordIndex": "x3d_faces", - "colorPerVertex": "$false", - "normalPerVertex": "$false" + "colorPerVertex": "@false", + "normalPerVertex": "@false" }, "attribute_children": { "Coordinate": { @@ -1266,9 +1279,9 @@ "diffuseFactor": "diffuse_rgb", "specularFactor": "specular_rgb", "shininessFactor": "shininess", - "ambientFactor": "$.588 .588 .588", - "normalScale": "$2 2 2", - "normalBias": "$-1 -1 1" + "ambientFactor": "@.588 .588 .588", + "normalScale": "@2 2 2", + "normalBias": "@-1 -1 1" }, "children": { "ImageTexture": { diff --git a/phobos/defs.py b/phobos/defs.py index 5a22bc18..7f9d7c1c 100644 --- a/phobos/defs.py +++ b/phobos/defs.py @@ -28,17 +28,24 @@ BPY_AVAILABLE = False try: import bpy - BPY_AVAILABLE = True - log.info("Blender-Python (bpy) available.") + import sys + if "blender" in sys.executable.lower(): + BPY_AVAILABLE = True + log.info("Blender-Python (bpy) available.") + else: + BPY_AVAILABLE = False + log.info("Blender-Python (bpy) available, but not executing from within blender.") + del sys except ImportError: log.info("Blender-Python (bpy) not available.") -PYBULLET_AVAILBABLE = False +PYBULLET_AVAILABLE = False def check_pybullet_available(): - if not PYBULLET_AVAILBABLE: + global PYBULLET_AVAILABLE + if not PYBULLET_AVAILABLE: from .commandline_logging import get_logger log = get_logger(__name__) try: diff --git a/phobos/geometry/geometry.py b/phobos/geometry/geometry.py index c7424706..2f866c7e 100644 --- a/phobos/geometry/geometry.py +++ b/phobos/geometry/geometry.py @@ -26,7 +26,7 @@ def get_vertex_id(x, vertices): ).nonzero()[0][0] -def create_box(mesh, oriented=True, scale=1.0): +def create_box(mesh, oriented=False, scale=1.0): """ Create a box element. 
""" @@ -53,7 +53,7 @@ def create_box(mesh, oriented=True, scale=1.0): else: raise ValueError(f"Received {type(mesh)}") - return representation.Box(size=extent), transform + return representation.Box(size=extent, origin=None), transform def create_sphere(mesh, scale=1.0): @@ -64,7 +64,7 @@ def create_sphere(mesh, scale=1.0): if isinstance(mesh, trimesh.Trimesh) or isinstance(mesh, trimesh.Scene): # scale the mesh mesh = deepcopy(mesh) - mesh.apply_transform(np.diag(scale + [1])) + #mesh.apply_transform(np.diag(scale + [1])) half_ext = mesh.bounding_box.extents transform = mesh.bounding_box.primitive.transform elif BPY_AVAILABLE and isinstance(mesh, bpy.types.Object): @@ -78,7 +78,7 @@ def create_sphere(mesh, scale=1.0): r = np.sqrt(half_ext[0]**2 + half_ext[1]**2 + half_ext[2]**2) - return representation.Sphere(radius=r * 0.5), transform + return representation.Sphere(radius=r * 0.5, origin=None), transform def create_cylinder(mesh, scale=1.0): @@ -90,20 +90,14 @@ def create_cylinder(mesh, scale=1.0): if isinstance(mesh, trimesh.Trimesh) or isinstance(mesh, trimesh.Scene): # scale the mesh mesh = deepcopy(mesh) - mesh.apply_transform(np.diag(scale + [1])) + #mesh.apply_transform(np.diag(scale + [1])) c = mesh.bounding_cylinder transform = mesh.bounding_cylinder.primitive.transform # Find the length and the axis - axis = mesh.bounding_cylinder.direction - orthogonal = np.array(axis) - if axis[0] != 0.0: - orthogonal[0] = -axis[0] - elif axis[1] != 0.0: - orthogonal[1] = -axis[1] - elif axis[2] != 0.0: - orthogonal[2] = -axis[2] + axis = c.direction + orthogonal = np.array([axis[1], axis[2], axis[0]]) length = np.abs(c.direction).dot(c.extents) - diameter = np.cross(axis, orthogonal).dot(c.extents) + diameter = np.abs(np.cross(axis, orthogonal).dot(c.extents)) elif BPY_AVAILABLE: bound_box = np.array(mesh.bound_box) * np.array(scale) extent = (np.max(bound_box, axis=0) - np.min(bound_box, axis=0)) @@ -120,7 +114,7 @@ def create_cylinder(mesh, scale=1.0): else: raise ValueError(f"Received {type(mesh)}") - return representation.Cylinder(radius=diameter/2, length=length), transform + return representation.Cylinder(radius=diameter/2, length=length, origin=None), transform def get_reflection_matrix(point=np.array((0, 0, 0)), normal=np.array((0, 1, 0))): @@ -128,23 +122,34 @@ def get_reflection_matrix(point=np.array((0, 0, 0)), normal=np.array((0, 1, 0))) def improve_mesh(mesh): + v = len(mesh.vertices) + f = len(mesh.faces) mesh.fix_normals() mesh.fill_holes() mesh.merge_vertices() mesh.remove_duplicate_faces() mesh.remove_infinite_values() mesh.remove_unreferenced_vertices() + v_ = len(mesh.vertices) + f_ = len(mesh.faces) + log.info(f"Improved mesh: {f} -> {f_} ({np.round(1000 * f_ / f) / 10}%) faces, {v} -> {v_} ({np.round(1000 * v_ / v) / 10}%) vertices") return mesh -def reduce_mesh(mesh, factor): - mesh = improve_mesh(mesh) - n = np.ceil(factor * len(mesh.faces)) - out = mesh.simplify_quadratic_decimation(n) +def reduce_mesh(mesh, factor, max_faces=None, min_faces=None): v = len(mesh.vertices) - v_ = len(out.vertices) f = len(mesh.faces) + out = improve_mesh(deepcopy(mesh)) + n = np.ceil(factor * len(out.faces)) + if max_faces: + n = min(n, max_faces) + if min_faces and min_faces: + n = max(n, min_faces) + if n < len(out.faces): + out = out.simplify_quadratic_decimation(n) + v_ = len(out.vertices) f_ = len(out.faces) + out = improve_mesh(out) log.info(f"Reduced {f} -> {f_} ({np.round(1000 * f_ / f) / 10}%) faces and {v} -> {v_} ({np.round(1000 * v_ / v) / 10}%) vertices") return out diff 
--git a/phobos/geometry/robot.py b/phobos/geometry/robot.py index 47a62dd1..28eb8a0b 100644 --- a/phobos/geometry/robot.py +++ b/phobos/geometry/robot.py @@ -9,6 +9,9 @@ from . import geometry from . import io from ..io import representation +from ..commandline_logging import get_logger + +log = get_logger(__name__) def generate_kccd_optimizer_ready_collision(robot, linkname, outputdir, join_first=True, merge_additionally=None, @@ -99,9 +102,9 @@ def find_zero_pose_collisions(robot): return zero_pose_colls if colls_exist else None -def replace_geometry(element, shape='box', oriented=False, scale=1.0): +def replace_geometry(element, shape='box', oriented=False, scale=1.0, apply_primitives=False): """ - Replace the geometry of the element with an oriented shape. urdf_path is needed for mesh loading. + Creates a geometry of the element with an oriented shape. urdf_path is needed for mesh loading. Args: element: An geometry element representation.Visual or representation.Collision shape: ['box', 'sphere', 'cylinder', 'convex'] @@ -112,7 +115,7 @@ def replace_geometry(element, shape='box', oriented=False, scale=1.0): None """ if type(element) is list: - return [replace_geometry(e, shape, oriented, scale) for e in element] + return [replace_geometry(e, shape, oriented, scale, apply_primitives) for e in element] if element is None: return @@ -120,22 +123,25 @@ def replace_geometry(element, shape='box', oriented=False, scale=1.0): if not isinstance(element.geometry, representation.Mesh): return - mesh = io.as_trimesh(element.geometry.load_mesh(), silent=True) - mesh.apply_transform(element.origin.to_matrix()) - - if shape == 'sphere': - element.geometry, transform = geometry.create_sphere(mesh, scale=scale) - element.origin = representation.Pose.from_matrix(transform, relative_to=element.link) - elif shape == 'cylinder': - element.geometry, transform = geometry.create_cylinder(mesh, scale=scale) - element.origin = representation.Pose.from_matrix(transform, relative_to=element.link) - elif shape == 'box': - element.geometry, transform = geometry.create_box(mesh, oriented=oriented, scale=scale) - element.origin = representation.Pose.from_matrix(transform, relative_to=element.link) - elif shape == 'convex': + if shape == 'convex': element.geometry.to_convex_hull() element.geometry.scale = scale element.origin = representation.Pose.from_matrix(np.identity(4), relative_to=element.link) + elif shape in ["sphere", "cylinder", "box"]: + mesh = io.as_trimesh(element.geometry.load_mesh(), silent=True) + if shape == 'sphere': + geo, transform = geometry.create_sphere(mesh, scale=scale) + elif shape == 'cylinder': + geo, transform = geometry.create_cylinder(mesh, scale=scale) + elif shape == 'box': + geo, transform = geometry.create_box(mesh, oriented=oriented, scale=scale) + new_origin = representation.Pose.from_matrix(np.array(element.origin.to_matrix()).dot(np.array(transform)), relative_to=element.link) + if not apply_primitives: + geo.origin = new_origin + element.primitives.append(geo) + else: + element.geometry = geo + element.origin = new_origin else: raise Exception('Shape {} not implemented. 
Please choose sphere, cylinder, box or convex.'.format(shape)) return @@ -159,10 +165,9 @@ def join_collisions(robot, linkname, collisionnames=None, name_id=None, only_ret primitives = [] file_types = set() for e in elements: - if not isinstance(e.geometry, representation.Mesh): - primitives += [e] - continue - else: + if e.primitives: + primitives += e.primitives + if e.geometry is not None: mesh = io.as_trimesh(e.geometry.load_mesh(), silent=True) name = e.name if name.lower().startswith("collision_"): @@ -172,12 +177,19 @@ def join_collisions(robot, linkname, collisionnames=None, name_id=None, only_ret name = str.replace(name, "/", "") names.append(name) mesh.apply_transform(e.origin.to_matrix()) + #mesh = geometry.improve_mesh(mesh) + mesh = mesh.convex_hull + #assert mesh.is_volume, f"Convex-Hull of{e.geometry.abs_filepath} is no volume" meshes.append(mesh) file_types.add(e.geometry.input_type) if not only_return: link.remove_aggregate(e) + try: + mesh = trimesh.boolean.union(meshes, engine="blender") + except ValueError as error: + log.warning(f"In link {linkname} this mesh issue occurs {error}\n Trying to skip the volume check. (This issues occurs since trimesh update 4.1.4->4.2.0 Message from trimesh.)") + mesh = trimesh.boolean.union(meshes, engine="blender", check_volume=False) - mesh = trimesh.util.concatenate(meshes) if only_return: return mesh @@ -191,11 +203,12 @@ def join_collisions(robot, linkname, collisionnames=None, name_id=None, only_ret link.add_aggregate("collision", representation.Collision( origin=representation.Pose(rpy=[0, 0, 0], xyz=[0, 0, 0], relative_to=link), geometry=mesh_representation, - name="collision_"+link.name + name="collision_"+link.name, + primitives=primitives )) -def replace_collisions(robot, shape='box', oriented=False, exclude=None): +def replace_collisions(robot, shape='box', oriented=False, exclude=None, apply_primitives=False): """Replace all collisions of the robot ( except exclude's ) with a given shape. This can be a 'sphere', 'cylinder', 'box' or 'convex'. """ @@ -203,22 +216,22 @@ def replace_collisions(robot, shape='box', oriented=False, exclude=None): exclude = [] for link in robot.links: if link.name not in exclude: - replace_collision(robot, link.name, shape=shape, oriented=oriented) + replace_collision(robot, link.name, shape=shape, oriented=oriented, apply_primitives=apply_primitives) return -def replace_collision(robot, linkname, shape='box', oriented=False, scale=1.0): +def replace_collision(robot, linkname, shape='box', oriented=False, scale=1.0, apply_primitives=False): """Replace the collision(s) stated in linkname of the robot with an oriented shape. 
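    With apply_primitives left False, the fitted primitive is appended to the
    collision's primitives list next to the original mesh geometry; with
    apply_primitives=True the primitive replaces the mesh geometry itself.
    An illustrative call (robot and link name are invented):

        replace_collision(robot, "wrist_link", shape="cylinder", apply_primitives=True)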
""" if isinstance(linkname, list): - [replace_collision(robot, lname, shape=shape, oriented=oriented, scale=scale) for lname in linkname] + [replace_collision(robot, lname, shape=shape, oriented=oriented, scale=scale, apply_primitives=False) for lname in linkname] return assert type(linkname) is str link = robot.get_link(linkname) - if link and hasattr(link, 'collision'): + if link: # print("Processing {}...".format(link.name)) - geometry.replace_geometry(link.collisions, shape=shape, oriented=oriented, scale=scale) + replace_geometry(link.collisions, shape=shape, oriented=oriented, scale=scale, apply_primitives=apply_primitives) return diff --git a/phobos/io/base.py b/phobos/io/base.py index f5ef882a..64e5f5bd 100644 --- a/phobos/io/base.py +++ b/phobos/io/base.py @@ -194,7 +194,7 @@ def check_linkage(self, attribute=None): True if all references are python-references """ linked = self._related_robot_instance is not None - assert linked, self.__class__ + assert linked, str(self.__class__) + repr(self.__dict__) _class_attributes = self._class_linkables if attribute is not None: _class_attributes = [var for var in self._class_linkables if var == attribute] diff --git a/phobos/io/poses.py b/phobos/io/poses.py index a09f2f01..7b0a1fec 100644 --- a/phobos/io/poses.py +++ b/phobos/io/poses.py @@ -15,7 +15,7 @@ def __init__(self, joint=None, position=None): @property def position(self): - if self._joint and self._joint.limit: + if self._joint and self._joint.limit and self._joint.limit.lower and self._joint.limit.upper: return min(self._joint.limit.upper, max(self._joint.limit.lower, self._position)) else: return self._position @@ -76,13 +76,13 @@ def set_joints(self, configuration, robot): self.configuration = [] for joint, position in configuration.items(): # Check for joint - if robot.get_joint(joint): - c_joint = robot.get_joint(joint) + c_joint = robot.get_joint(joint) + if c_joint and c_joint.joint_type != "fixed": self.configuration.append( JointPose(joint=c_joint, position=position) ) - def conflicts_with(self, other): + def conflicts_with(self, other, new_name=None): for tc in self.configuration: for oc in other.configuration: if tc.joint == oc.joint and tc.position != oc.position: @@ -91,6 +91,6 @@ def conflicts_with(self, other): @classmethod def merge(cls, first, second): - assert not first.conflicts_with(second) + assert not first.conflicts_with(second) and first.name == second.name first_joints = [str(p.joint) for p in first.configuration] - return first.configuration + [jp for jp in second.configuration if str(jp.joint) not in first_joints] \ No newline at end of file + return JointPoseSet(name=first.name, configuration=first.configuration + [jp for jp in second.configuration if str(jp.joint) not in first_joints]) \ No newline at end of file diff --git a/phobos/io/representation.py b/phobos/io/representation.py index 39880f7e..131a704a 100644 --- a/phobos/io/representation.py +++ b/phobos/io/representation.py @@ -15,7 +15,7 @@ from .yaml_reflection import to_yaml from ..defs import BPY_AVAILABLE from ..geometry import io as mesh_io -from ..geometry.geometry import identical, reduce_mesh, get_reflection_matrix, improve_mesh +from ..geometry.geometry import identical, reduce_mesh, get_reflection_matrix, improve_mesh, create_box from ..utils import misc, git, transform from ..utils.transform import inv from ..utils.xml import read_relative_filename @@ -353,8 +353,8 @@ def equivalent(self, other): class Material(Representation, SmurfBase): _class_variables = ["name", "diffuse", 
"ambient", "emissive", "specular", "diffuseTexture", "normalTexture"] - def __init__(self, name=None, diffuse=None, ambient=None, specular=None, emissive=None, - diffuseTexture=None, normalTexture=None, transparency=None, shininess=None, **kwargs): + def __init__(self, name=None, referenced_name=None, diffuse=None, ambient=None, specular=None, emissive=None, + diffuseTexture=None, normalTexture=None, transparency=None, shininess=None, write_name=None, **kwargs): self.diffuse = misc.color_parser(rgba=diffuse) self.ambient = misc.color_parser(rgba=ambient) self.specular = misc.color_parser(rgba=specular) @@ -362,18 +362,30 @@ def __init__(self, name=None, diffuse=None, ambient=None, specular=None, emissiv self.diffuseTexture = _singular(diffuseTexture) self.normalTexture = _singular(normalTexture) self.transparency = transparency + self.write_name = write_name if self.transparency is None and self.diffuse is not None: self.transparency = 1-self.diffuse[3] self.shininess = shininess - self.original_name = name + self.original_name = name if name is not None else referenced_name Representation.__init__(self) SmurfBase.__init__(self, returns=["name", "diffuseColor", "ambientColor", "specularColor", "emissionColor", "diffuseTexture", "normalTexture"], **kwargs) - if name is None or len(name) == 0: - name = misc.to_hex_color(self.diffuse) + (os.path.basename(self.diffuseTexture) if diffuseTexture is not None else "") - self.name = name + self.name = self.original_name + if not self.name: + self.name = misc.to_hex_color(self.diffuse) + (os.path.basename(self.diffuseTexture) if diffuseTexture is not None else "") self.excludes += ["diffuse", "ambient", "specular", "emissive", "original_name", "users"] + @property + def referenced_name(self): + if self.write_name == False: + return None + else: + return self.name + + @referenced_name.setter + def referenced_name(self, value): + self.name = value + def check_valid(self): # [TODO v2.0.0] REVIEW add other colors here if self.diffuse is None and self.diffuseTexture is None: @@ -439,12 +451,11 @@ def emissionColor(self, *args, rgba=None): self.emissive = misc.color_parser(*args, rgba=rgba) -class Box(Representation): - _class_variables = ["size"] +class Box(Representation, SmurfBase): + _class_variables = ["size", "origin"] - def __init__(self, size=None, **kwargs): - super().__init__() - self.size = size + def __init__(self, size=None, origin=None, **kwargs): + SmurfBase.__init__(self, size=size, origin=origin, geometry_type="box", returns=["geometry_type"], **kwargs) def scale_geometry(self, x=1, y=1, z=1): self.size = (v * s for v, s in zip(self.size, [x, y, z])) @@ -462,60 +473,59 @@ def get_corners(self): np.array([ self.size[0] / 2, self.size[1] / 2, self.size[2] / 2]), ] - def __str__(self): - return "box" + def stringable(self): + return False @property def extent(self): return self.size -class Cylinder(Representation): - _class_variables = ["radius", "length"] +class Cylinder(Representation, SmurfBase): + _class_variables = ["radius", "length", "origin"] - def __init__(self, radius=0.0, length=0.0, **kwargs): - super().__init__() - self.radius = radius - self.length = length + def __init__(self, radius=0.0, length=0.0, origin=None, **kwargs): + SmurfBase.__init__(self, radius=radius, length=length, origin=origin, geometry_type="cylinder", returns=["geometry_type"], **kwargs) def scale_geometry(self, x=1, y=1, z=1): assert x == y self.radius *= x self.length *= z - def __str__(self): - return "cylinder" + def stringable(self): + return False 
@property def extent(self): return (self.radius, self.radius, self.length) -class Sphere(Representation): - _class_variables = ["radius"] +class Sphere(Representation, SmurfBase): + _class_variables = ["radius", "origin"] - def __init__(self, radius=0.0, **kwargs): - super().__init__() - self.radius = radius + def __init__(self, radius=0.0, origin=None, **kwargs): + SmurfBase.__init__(self, radius=radius, origin=origin, geometry_type="sphere", returns=["geometry_type"], **kwargs) def scale_geometry(self, x=1, y=1, z=1): assert x == y == z self.radius *= x - def __str__(self): - return "sphere" + def stringable(self): + return False @property def extent(self): return (self.radius, self.radius, self.radius) +# TODO This class needs to be refactored to be better (de-)serializable class Mesh(Representation, SmurfBase): _class_variables = ["material"] def __init__(self, filepath=None, posix_path=None, scale=None, mesh=None, meshname=None, material=None, mesh_orientation=None, fast_init=False, **kwargs): - SmurfBase.__init__(self, returns=["scale", "exported", "unique_name", "imported"]) + filepath = misc.sys_path(filepath if posix_path is None else posix_path) + SmurfBase.__init__(self, geometry_type="mesh", returns=["scale", "exported", "unique_name", "imported", "geometry_type"]) self._operations = [] self._scale = [1.0, 1.0, 1.0] if scale is not None and scale != self.scale: @@ -523,7 +533,6 @@ def __init__(self, filepath=None, posix_path=None, scale=None, mesh=None, meshna self._changed = False self._info_in_sync = True self.material = material - filepath = misc.sys_path(filepath if posix_path is None else posix_path) if mesh is not None: if meshname is None or type(meshname) != str: raise AssertionError(f"Invalid mesh name {meshname}.") @@ -567,7 +576,9 @@ def __init__(self, filepath=None, posix_path=None, scale=None, mesh=None, meshna # using this convention exporting stl/obj from blender will have the vertices on the same axes as defined by the link frames in urdf/sdf. 
# OSG has for obj a special handling which changes this https://github.com/openscenegraph/OpenSceneGraph/blob/2e4ae2ea94595995c1fc56860051410b0c0be605/src/osgPlugins/obj/ReaderWriterOBJ.cpp#L208 # This can only be switched off via an ReaderWriter option https://github.com/openscenegraph/OpenSceneGraph/blob/2e4ae2ea94595995c1fc56860051410b0c0be605/src/osgPlugins/obj/ReaderWriterOBJ.cpp#L60 - mars_mesh = self.input_file is not None and "mars_obj" in self.input_file and ".mars.obj" in self.input_file + mars_mesh = self.input_file is not None and ("mars_obj" in self.input_type or ".mars.obj" in self.input_file) + if mars_mesh: + self.input_type = "file_mars_obj" if BPY_AVAILABLE and self.input_type == "file_obj": self.mesh_orientation = { "up": bpy.context.preferences.addons["phobos"].preferences.obj_axis_up if not mars_mesh else "Y", @@ -597,7 +608,7 @@ def __init__(self, filepath=None, posix_path=None, scale=None, mesh=None, meshna else: self.imported = "input file not known" self.history = [f"Instantiated with filepath={filepath}->{self.input_file}, scale={scale}, mesh={mesh}, meshname={meshname}, " - f"material={material}, mesh_orientation={mesh_orientation}, {kwargs}"] + f"material={material}, mesh_orientation={mesh_orientation}"] self.excludes += ["history", "input_type", "input_file", "original_mesh_name", "mesh_object"] @property @@ -730,13 +741,15 @@ def is_lfs_checked_out(self): return False return True - def is_valid(self): - if self.input_file is None or not os.path.isfile(self.input_file): - log.error(f"Mesh {self.input_file} does not exist") + def is_valid(self, check_enough_vertices=True): + if (self.input_file is None or not os.path.isfile(self.input_file)) and not self.mesh_object: + log.warning(f"Mesh {self.unique_name} is empty and file: {self.input_file} does not exist") return False if not self.is_lfs_checked_out(): return False - return self.has_enough_vertices() + if check_enough_vertices: + return self.has_enough_vertices() + return True def has_enough_vertices(self): self.load_mesh() @@ -798,7 +811,7 @@ def load_mesh(self, reload=False): bpy.ops.import_mesh.stl(filepath=self.input_file) self._mesh_object = bpy.data.meshes[bpy.context.object.data.name] bpy.ops.object.delete() - elif self.input_type == "file_obj": + elif self.input_type in ["file_obj", "file_mars_obj"]: bpy.ops.import_scene.obj(filepath=self.input_file, axis_forward=self.mesh_orientation["forward"], axis_up=self.mesh_orientation["up"], @@ -810,7 +823,14 @@ def load_mesh(self, reload=False): object = bpy.context.selected_objects[0] if object.data.name != self.unique_name: object.data.name = self.unique_name - self._mesh_object = bpy.data.meshes[self.unique_name] + try: + self._mesh_object = bpy.data.meshes[self.unique_name] + except KeyError as e: + log.warning("The mesh couldn't be found by the unique_name, trying to find it by the shortened name") + try: + self._mesh_object = bpy.data.meshes[self.unique_name[:63]] + except KeyError as e: + raise KeyError(f"Couldn't find loaded mesh {self.unique_name}. 
The following were found: {bpy.data.meshes.keys()}") # with obj file import, blender only turns the object, not the vertices, # leaving a rotation in the matrix_basis, which we here get rid of bpy.ops.object.transform_apply(rotation=True) @@ -879,7 +899,7 @@ def load_mesh(self, reload=False): elif self.mesh_object is None: if self.input_type == "file_stl": self._mesh_object = mesh_io.import_mesh(self.input_file) - elif self.input_type == "file_obj": + elif self.input_type in ["file_obj", "file_mars_obj"]: self._mesh_object = mesh_io.import_mesh(self.input_file) if isinstance(self.mesh_object, trimesh.Trimesh): try: @@ -895,10 +915,10 @@ def load_mesh(self, reload=False): elif self.input_type == "file_bobj": self._mesh_information = mesh_io.parse_bobj(self.input_file) self._mesh_object = mesh_io.mesh_info_dict_2_trimesh(**self._mesh_information) - self.history.append(f"loaded {'bpy-Mesh' if BPY_AVAILABLE else 'trimesh'} from {self.input_type} {self.input_file}") + self.history.append(f"->loaded {'bpy-Mesh' if BPY_AVAILABLE else 'trimesh'} from {self.input_type} {self.input_file}") return self.mesh_object - def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False, use_existing=False, apply_scale=False): + def provide_mesh_file(self, targetpath, rel_mesh_pathes=None, format=None, throw_on_invalid_bobj=False, use_existing=False, apply_scale=False): if format is None and self._related_robot_instance is not None: format = self._related_robot_instance.mesh_format if format in [None, "input_type"] and self.input_type.startswith("file"): @@ -906,11 +926,13 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False if format is None: raise AssertionError("To export meshes you have to specify the format. (format=None)") assert os.path.isabs(targetpath) - ext = format.lower() + ext = format.lower().replace("_", ".") + if rel_mesh_pathes is not None: + targetpath = os.path.join(targetpath, rel_mesh_pathes[ext]) os.makedirs(targetpath, exist_ok=True) targetpath = os.path.join(targetpath, self.unique_name+"."+ext) if use_existing: - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": "None" @@ -923,7 +945,7 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False self.history.append(f"->trying export of {str(self.mesh_object)} to {targetpath}") # log.debug(f"Providing mesh {targetpath}...") # if there are no changes we can simply copy - if self.input_file is not None and "file_"+ext == self.input_type and not self._changed: + if self.input_file is not None and "file_"+format.lower() == self.input_type and not self._changed: if self.input_file == targetpath: log.debug(f"Using existing mesh {targetpath}...") self._exported[ext] = { @@ -937,7 +959,7 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False else: log.debug(f"Copying mesh {os.path.relpath(self.input_file, os.path.dirname(targetpath))} to {targetpath}...") shutil.copyfile(self.input_file, targetpath) - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": "copy" @@ -951,10 +973,10 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False o_history = self.read_history(targetpath) equiv_histories = False if o_history is not None: - equiv_histories = [x for x in o_history[1:] if not x.startswith("->")] == [x for x in self.history[1:] if not 
x.startswith("->")] + equiv_histories = [x.strip() for x in o_history[1:] if not x.startswith("->") and x.strip()] == [x.strip() for x in self.history[1:] if not x.startswith("->") and x.strip()] if existing_mesh is not None and (equiv_histories or mesh_io.identical(mesh_io.as_trimesh(self.load_mesh(), silent=True), existing_mesh)): log.debug(f"Skipping export of {targetpath} as the mesh file already exists and is identical") - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": "None" @@ -977,14 +999,14 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False if self.input_type.startswith("file") and self.mesh_object is None: self.load_mesh() # only export bobj, if it makes sense for the mesh - if not BPY_AVAILABLE and ext == "bobj" and self._mesh_information is None: + if not BPY_AVAILABLE and format.lower() == "bobj" and self._mesh_information is None: if throw_on_invalid_bobj: raise IOError( f"Couldn't provide mesh {self.unique_name} to {targetpath}, because this mesh can't be exported as bobj") else: log.warn(f"Couldn't provide mesh {self.unique_name} to {targetpath}, because this mesh can't be exported as bobj") return - elif ext == "bobj" and (not self._info_in_sync and "texture_coords" in self._mesh_information): + elif format.lower() == "bobj" and (not self._info_in_sync and "texture_coords" in self._mesh_information): if throw_on_invalid_bobj: raise IOError( f"Couldn't provide mesh {self.unique_name} to {targetpath}, because this mesh has been edited and thus the textures might have get mixed up.") @@ -994,9 +1016,9 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False # export log.debug(f"Writing {type(self.mesh_object)} to {targetpath}...") assert self.mesh_object is not None - if ext == "bobj" and self.input_type == "file_obj": + if format.lower() == "bobj" and self.input_type == "file_obj": mesh_io.write_bobj(targetpath, **self._mesh_information) - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": "write_bobj" @@ -1013,7 +1035,7 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False tmpobject.data = self.mesh_object bpy.ops.object.select_all(action='DESELECT') tmpobject.select_set(True) - if ext == 'obj': + if format.lower() == 'obj': axis_forward = bpy.context.preferences.addons["phobos"].preferences.obj_axis_forward axis_up = bpy.context.preferences.addons["phobos"].preferences.obj_axis_up bpy.ops.export_scene.obj( @@ -1026,27 +1048,27 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False axis_forward=axis_forward, axis_up=axis_up, ) - elif ext == 'stl': + elif format.lower() == 'stl': bpy.ops.export_mesh.stl(filepath=targetpath, use_selection=True, use_mesh_modifiers=True) - elif ext == 'dae': + elif format.lower() == 'dae': bpy.ops.wm.collada_export(filepath=targetpath, selected=True) - elif ext == "bobj": + elif format.lower() == "bobj": log.debug(f"Exporting {targetpath} with {len(self.mesh_object.vertices)} vertices...") mesh_io.write_bobj(targetpath, **mesh_io.blender_2_mesh_info_dict(self.mesh_object)) bpy.ops.object.select_all(action='DESELECT') tmpobject.select_set(True) bpy.ops.object.delete() - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": "blender_export" } - self.history.append(f"->wrote 
{ext} to {targetpath} from {self.input_type}") + self.history.append(f"->wrote {format.lower()} to {targetpath} from {self.input_type}") self.write_history(targetpath) return # export from trimesh assert isinstance(self.mesh_object, trimesh.Trimesh) or isinstance(self.mesh_object, trimesh.Scene) - if ext == "dae": + if format.lower() == "dae": dae_xml = trimesh.exchange.dae.export_collada(self.mesh_object) if self.material is not None: dae_xml = dae_xml.decode(json.detect_encoding(dae_xml)) @@ -1059,20 +1081,27 @@ def provide_mesh_file(self, targetpath, format=None, throw_on_invalid_bobj=False with open(targetpath, "wb") as f: f.write(dae_xml) exp_op = "trimesh_export" - elif ext == "bobj": + elif format.lower() == "bobj": assert isinstance(self.mesh_object, trimesh.Trimesh),\ f"Export to bobj only possible from trimesh.Trimesh not for {type(self.mesh_object)}" log.debug(f"Exporting {targetpath} with {len(self.mesh_object.vertices)} vertices...") mesh_io.write_bobj(targetpath, **mesh_io.trimesh_2_mesh_info_dict(self.mesh_object)) exp_op = "write_bobj" + elif format.lower() == "mars_obj": + tmp_mesh = deepcopy(self.mesh_object) + #tmp_mesh.apply_transform() # [Todo] turn it here to the mars mesh_orientation if not already set + self.mesh_object.export( + file_obj=targetpath + ) + exp_op = "trimesh_mars_obj_export" else: self.mesh_object.export( file_obj=targetpath ) exp_op = "trimesh_export" - self.history.append(f"->wrote {ext} to {targetpath} from {self.input_type}") + self.history.append(f"->wrote {format.lower()} to {targetpath} from {self.input_type}") self.write_history(targetpath) - self._exported[ext] = { + self._exported[format.lower()] = { "operations": self._operations, "filepath": targetpath, "export_operation": exp_op @@ -1121,7 +1150,7 @@ def apply_scale(self): tmpobject.select_set(True) bpy.ops.object.delete() else: - raise NotImplentedError() + raise NotImplementedError() self._operations.append("apply_scale") self.history.append(f"apply_scale {self.scale}") @@ -1131,7 +1160,6 @@ def apply_scale(self): self._mesh_information["vertices"] = self._mesh_information["vertices"] * np.array(self.scale) self._mesh_information["vertex_normals"] = self._mesh_information["vertex_normals"] / np.array(self.scale) - def improve_mesh(self): self.load_mesh() if isinstance(self.mesh_object, trimesh.Trimesh): @@ -1140,21 +1168,21 @@ def improve_mesh(self): self._mesh_object = improve_mesh(self.mesh_object) self._operations.append("improve_mesh") self.history.append(f"improved mesh") - elif isinstance(self.mesh_object, bpy.types.Mesh): + elif BPY_AVAILABLE and isinstance(self.mesh_object, bpy.types.Mesh): raise NotImplementedError("Please optimize the mesh directly in blender") else: log.warning(f"Can only optimize trimesh.Trimesh not {type(self.mesh_object)}") - def reduce_mesh(self, factor): + def reduce_mesh(self, factor, max_faces=None, min_faces=None): self.load_mesh() if isinstance(self.mesh_object, trimesh.Trimesh): self._changed = True self._info_in_sync = False - self._mesh_object = reduce_mesh(self.mesh_object, factor) + self._mesh_object = reduce_mesh(self.mesh_object, factor=factor, max_faces=max_faces, min_faces=min_faces) self._operations.append({"reduce_mesh": [factor]}) self.set_unique_name(misc.edit_name_string(self.unique_name, suffix=f"_red{str(factor).replace('.',',')}")) self.history.append(f"reduced mesh factor={factor}") - elif isinstance(self.mesh_object, bpy.types.Mesh): + elif BPY_AVAILABLE and isinstance(self.mesh_object, bpy.types.Mesh): raise 
NotImplementedError("Please reduce the mesh directly in blender") else: log.warning(f"Can only reduce trimesh.Trimesh not {type(self.mesh_object)}") @@ -1231,6 +1259,40 @@ def to_trimesh_mesh(self): self._mesh_object = mesh_io.as_trimesh(self.mesh_object) self._operations.append("to_trimesh_mesh") + def transform(self, transform, name_replacements={}, suffix=""): + self.load_mesh() + if transform is None or np.all(np.array(transform) == np.identity(4)): + return + if isinstance(self.mesh_object, trimesh.Trimesh) or isinstance(self.mesh_object, trimesh.Scene): + self._mesh_object = self.mesh_object.apply_transform(transform) + self._operations.append({"transform": [transform]}) + try: + self.mesh_object.fix_normals() + except AttributeError: + # [TODO v2.1.0] It seems there is an issue in trimesh, which we have to escape here + pass + self._mesh_information = mesh_io.trimesh_2_mesh_info_dict(self.mesh_object) + self._changed = True + self._info_in_sync = False + self.set_unique_name(misc.edit_name_string(self.unique_name, suffix=suffix, replacements=name_replacements)) + self.history.append(f"trimesh transformed {['{:0.2f}'.format(x) for x in transform.flatten()]}") + elif BPY_AVAILABLE and isinstance(self.mesh_object, bpy.types.Mesh): + import bmesh + import mathutils + bm = bmesh.new() + bm.from_mesh(self.mesh_object) + bmesh.ops.mirror(bm, bm.faces, mathutils.Matrix(transform)) + bm.to_mesh(self.mesh_object) + bm.free() + self._operations.append({"mirror": [transform]}) + self._changed = True + self._info_in_sync = False + self._mesh_information = mesh_io.blender_2_mesh_info_dict(self.mesh_object) + self.set_unique_name(misc.edit_name_string(self.unique_name, suffix=suffix, replacements=name_replacements)) + self.history.append(f"bpy transformed {['{:0.2f}'.format(x) for x in transform.flatten()]}") + else: + log.error(f"Couldn't create convex hull for mesh {self.unique_name}") + @property def x3d_vertices(self): self.load_mesh() @@ -1256,31 +1318,50 @@ def x3d_faces(self): faces = np.array(tm.faces) return np.c_[faces, [-1]*faces.shape[0]].flatten() + @property + def extent(self): + self.load_mesh() + mesh = mesh_io.as_trimesh(self.mesh_object) + return create_box(mesh)[0].extent class GeometryFactory(Representation): @classmethod def create(cls, *args, **kwargs): - if kwargs["type"] == "mesh": - return Mesh(**kwargs) - elif kwargs["type"] == "box": - return Box(**kwargs) - elif kwargs["type"] == "cylinder": - return Cylinder(**kwargs) - elif kwargs["type"] == "sphere": - return Sphere(**kwargs) + origin = None + if "origin" in kwargs: + origin = Pose(**kwargs.pop("origin")) + geometry_type = kwargs.pop("geometry_type") + if geometry_type == "mesh": + if "exported" in kwargs: + filetype = kwargs.get("mesh_format", None) + if filetype is None: + for filetype in ["obj", "stl", "dae", "bobj"]: + if filetype in kwargs["exported"]: + break + info = kwargs["exported"][filetype] + posix_path = read_relative_filename(info["filepath"], kwargs["_smurffile"]) + return Mesh(origin=origin, posix_path=posix_path, **kwargs) + elif geometry_type == "box": + return Box(origin=origin, **kwargs) + elif geometry_type == "cylinder": + return Cylinder(origin=origin, **kwargs) + elif geometry_type == "sphere": + return Sphere(origin=origin, **kwargs) raise Exception("Can't create geometry from: "+repr(kwargs)) class Collision(Representation, SmurfBase): - _class_variables = ["name", "link", "geometry", "origin", "bitmask", "primitive"] + _class_variables = ["name", "link", "geometry", "origin", "bitmask", 
"primitives"] def __init__(self, name=None, link=None, geometry=None, origin=None, bitmask=None, noDataPackage=None, - reducedDataPackage=None, ccfm=None, primitive=None, **kwargs): + reducedDataPackage=None, ccfm=None, primitives=None, **kwargs): _parent_xml = kwargs.get("_parent_xml", None) if _parent_xml is not None and link is None: link = _parent_xml.attrib.get("name") + name = str(name) self.original_name = name - self.primitive = _plural(primitive) + self._primitives = [] + self.primitives = _plural(primitives) if name is None or len(name) == 0: if link is not None: name = str(link) + "_collision" @@ -1296,8 +1377,15 @@ def __init__(self, name=None, link=None, geometry=None, origin=None, bitmask=Non self.origin = _singular(origin) if self.origin.relative_to is None: self.origin.relative_to = self.link - SmurfBase.__init__(self, returns=['name', 'link', 'geometry', 'primitive'], **kwargs) + SmurfBase.__init__(self, returns=['name', 'link', 'geometry', 'primitives'], **kwargs) self.geometry = _singular(geometry) + if type(self.geometry) == dict: + if "_smurffile" in kwargs: + self.geometry["_smurffile"] = kwargs["_smurffile"] + if "_xmlfile" in kwargs: + self.geometry["_xmlfile"] = kwargs["_xmlfile"] + self.geometry = GeometryFactory.create(**self.geometry) + assert isinstance(self.geometry, Mesh) or isinstance(self.geometry, Box) or isinstance(self.geometry, Sphere) or isinstance(self.geometry, Cylinder) or self.geometry is None assert isinstance(self.origin, Pose) self.bitmask = bitmask self.noDataPackage = noDataPackage @@ -1306,15 +1394,22 @@ def __init__(self, name=None, link=None, geometry=None, origin=None, bitmask=Non self.ccfm = ccfm if bitmask is not None: self.returns += ['bitmask'] - self.excludes += ["origin", "original_name"] + self.excludes += ["original_name", "joint_relative_origin"] + + @property + def primitives(self): + return self._primitives - def add_primitive(self, primitive): - if type(primitive) in (list, tuple): - for p in primitive: - self.add_primitive(p) - assert isinstance(primitive, Collision) - assert type(primitive.geometry) in (Box, Sphere, Cylinder) - self.primitive.append(primitive) + @primitives.setter + def primitives(self, value): + self._primitives = [] + for v in _plural(value): + if type(v) == dict: + self._primitives.append(GeometryFactory.create(**v)) + elif type(v) in (Box, Sphere, Cylinder): + self._primitives.append(v) + else: + raise TypeError("Can't add a primitive of type"+str(type(v))) @property def joint_relative_origin(self): @@ -1329,10 +1424,10 @@ def joint_relative_origin(self, value): class Visual(Representation, SmurfBase): _class_variables = ["name", "link", "geometry", "material", "origin"] - def __init__(self, geometry=None, material=None, material_: Material = None, origin=None, name=None, link=None, **kwargs): - self.original_name = name + def __init__(self, geometry=None, material=None, material_reference: Material = None, origin=None, name=None, link=None, **kwargs): if link is not None: link = str(link) + name = str(name) self.original_name = name if name is None or len(name) == 0: if link is not None: @@ -1348,18 +1443,20 @@ def __init__(self, geometry=None, material=None, material_: Material = None, ori self.link = link self.name = name self.geometry = _singular(geometry) - material_ = _singular(material_) + material_reference = _singular(material_reference) material = _singular(material) - if type(material) == str and material_ is not None and not material_.is_delegate(): - assert isinstance(material_, Material) and 
@@ -1329,10 +1424,10 @@ def joint_relative_origin(self, value):
 class Visual(Representation, SmurfBase):
     _class_variables = ["name", "link", "geometry", "material", "origin"]
 
-    def __init__(self, geometry=None, material=None, material_: Material = None, origin=None, name=None, link=None, **kwargs):
-        self.original_name = name
+    def __init__(self, geometry=None, material=None, material_reference: Material = None, origin=None, name=None, link=None, **kwargs):
         if link is not None:
             link = str(link)
+        name = str(name) if name is not None else None
         self.original_name = name
         if name is None or len(name) == 0:
             if link is not None:
@@ -1348,18 +1443,20 @@ def __init__(self, geometry=None, material=None, material_: Material = None, ori
         self.link = link
         self.name = name
         self.geometry = _singular(geometry)
-        material_ = _singular(material_)
+        material_reference = _singular(material_reference)
         material = _singular(material)
-        if type(material) == str and material_ is not None and not material_.is_delegate():
-            assert isinstance(material_, Material) and material_.original_name == material
-            self.material = material_
+        if type(material) == str and material_reference is not None and not material_reference.is_delegate():
+            assert isinstance(material_reference, Material) and material_reference.original_name == material
+            self.material = material_reference
+        elif isinstance(material_reference, Material) and material is None:
+            self.material = material_reference
         elif isinstance(material, Material):
-            assert material_ is None or material_.equivalent(material)
+            assert material_reference is None or material_reference.equivalent(material)
             self.material = material
         elif type(material) == str:
             self.material = material
         else:
-            assert material is None and material_ is None
+            assert material is None and material_reference is None
         if origin is None:
             origin = Pose()
         self.origin = _singular(origin)
@@ -1369,9 +1466,19 @@ def __init__(self, geometry=None, material=None, material_: Material = None, ori
         assert isinstance(self.origin, Pose)
 
     @property
-    # [TODO v2.0.0] Make the name more self-explanatory, that this is the dummy for exporting only a reference
-    def material_(self):
-        return None
+    def material_reference(self):
+        if self._related_robot_instance and self._related_robot_instance.write_material_references:
+            return Material(name=str(self.material), write_name=True)
+        elif self._related_robot_instance and not self._related_robot_instance.write_material_references:
+            out_material = self._material.duplicate()
+            out_material.write_name = True
+            return out_material
+        else:
+            return self._material
+
+    @material_reference.setter
+    def material_reference(self, value):
+        self.material = value
 
     def equivalent(self, other):
         return self.geometry.equivalent(other.geometry) and self._material.equivalent(other._material) and \
@@ -1429,8 +1536,7 @@ def to_list(self):
         return [self.ixx, self.ixy, self.ixz, self.iyy, self.iyz, self.izz]
 
     @staticmethod
-    def from_mass_matrix(M):
-        I = M[3::, 3::]
+    def from_matrix(I):
         inertias = {
             'ixx': I[0, 0],
             'ixy': I[0, 1],
@@ -1439,9 +1545,14 @@ def from_mass_matrix(M):
             'iyz': I[1, 2],
             'izz': I[2, 2]
         }
         return Inertia(**inertias)
 
+    @staticmethod
+    def from_mass_matrix(M):
+        I = M[3::, 3::]
+        return Inertia.from_matrix(I)
+
 
 class Inertial(Representation, SmurfBase):
     _class_variables = ["mass", "inertia", "origin", "link"]
@@ -1494,6 +1604,35 @@ def joint_relative_origin(self, value):
         """This setter should only be used from the urdf import"""
         _joint_relative_origin_setter(self, value)
 
+    @staticmethod
+    def merge(*inertials):
+        if len(inertials) == 0:
+            return None
+        elif len(inertials) == 1:
+            return inertials[0]
+        assert all([i.origin.rpy == [0, 0, 0] for i in inertials]), "Dealing with differently rotated inertias has to be implemented"
+        relative_to = inertials[0].origin.relative_to
+        assert all([i.origin.relative_to == relative_to for i in inertials])
+        com = np.zeros(3)
+        mass = 0.0
+        for i in inertials:
+            com += np.array(i.origin.xyz) * i.mass
+            mass += i.mass
+        assert mass > 0.0
+        com = com / mass
+        # https://pybullet.org/Bullet/phpBB3/viewtopic.php?t=246
+        I = np.zeros((3,3))
+        for i in inertials:
+            r = i.origin.xyz - com
+            p = np.matrix(r)
+            I += i.inertia.to_matrix() + (np.dot(r, r)*np.identity(3) - p.T*p) * i.mass
+        return Inertial(
+            mass=mass,
+            inertia=Inertia.from_matrix(I),
+            origin=Pose(xyz=com, relative_to=relative_to),
+            merged_from=inertials
+        )
+
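The merge applies the parallel-axis theorem from the linked Bullet thread: each contribution is shifted to the combined COM via I_i + m_i*((r.r)*E3 - r r^T). A quick numeric sanity check of that shift term alone, in plain numpy with made-up values (two 1 kg point-like bodies at x = +/-0.1 m, so Iyy = Izz = 2*m*d^2 = 0.02):

    import numpy as np

    coms = [np.array([0.1, 0.0, 0.0]), np.array([-0.1, 0.0, 0.0])]
    masses = [1.0, 1.0]
    com = sum(m * c for m, c in zip(masses, coms)) / sum(masses)
    I = np.zeros((3, 3))
    for m, c in zip(masses, coms):
        r = c - com
        I += m * (np.dot(r, r) * np.identity(3) - np.outer(r, r))
    assert np.isclose(I[1, 1], 0.02) and np.isclose(I[2, 2], 0.02) and np.isclose(I[0, 0], 0.0)

 
 class KCCDHull(Representation, SmurfBase):
     _class_variables = ["link", "points", "radius"]
 
@@ -1558,7 +1697,7 @@ def from_collision(self, coll, npoints=None, 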
iv_mesh=None, kccd_path=None): class Link(Representation, SmurfBase): _class_variables = ["name", "visuals", "collisions", "inertial", "kccd_hull", "origin"] - def __init__(self, name=None, visuals=None, inertial=None, collisions=None, origin=None, + def __init__(self, name=None, visuals=None, inertial=None, collisions=None, export_collisions=None, origin=None, noDataPackage=None, reducedDataPackage=None, is_human=None, kccd_hull=None, **kwargs): SmurfBase.__init__(self, **kwargs) self.name = name @@ -1567,21 +1706,28 @@ def __init__(self, name=None, visuals=None, inertial=None, collisions=None, orig self.returns += ['name', "is_human"] self.visuals = _plural(visuals) self.inertial = _singular(inertial) - self.collisions = _plural(collisions) + self._collisions = _plural(collisions) + if collisions is None and export_collisions: + self._collisions = export_collisions self.kccd_hull = kccd_hull - for geo in self.visuals + self.collisions: + for geo in self.visuals + self._collisions: if geo.origin.relative_to is None: geo.origin.relative_to = self.name self.noDataPackage = noDataPackage self.returns += ['noDataPackage'] self.reducedDataPackage = reducedDataPackage self.returns += ['reducedDataPackage'] - for geo in self.collisions + self.visuals: - i = 0 - if geo.name is None: - geo.name = self.name + ("_collision" if isinstance(geo, Collision) else "_visual") - if i > 0: - geo.name += str(i) + i = 0 + for geo_list in [self._collisions, self.visuals]: + geo_names={} + for geo in geo_list: + if geo.name is None: + geo.name = self.name + ("_collision" if isinstance(geo, Collision) else "_visual") + if geo.name not in geo_names: + geo_names[geo.name] = 0 + else: + geo_names[geo.name] += 1 + geo.name += str(geo_names[geo.name]) self.excludes += ["inertial"] @property @@ -1598,13 +1744,13 @@ def remove_aggregate(self, elem): if isinstance(elem, Visual): self.visuals.remove(elem) elif isinstance(elem, Collision): - self.collisions.remove(elem) + self._collisions.remove(elem) def add_aggregate(self, elem_type, elem): if isinstance(elem, Visual) or elem_type.lower() == "visual": self.visuals.append(elem) elif isinstance(elem, Collision) or elem_type.lower() == "collision": - self.collisions.append(elem) + self._collisions.append(elem) @property def materials(self): @@ -1620,6 +1766,13 @@ def materials(self, value): for visual in self.visuals: visual.material = value + def sort_string(self, dialect=None) -> str: + prefix = type(self).__name__ if dialect is None else self.to_xml(dialect).tag + if self._related_robot_instance: + return prefix + str(self._related_robot_instance.links.index(self)) + else: + return super().sort_string(dialect) + @property def joint_relative_origin(self): assert self._related_robot_instance is not None @@ -1637,7 +1790,6 @@ def joint_relative_origin(self): else: return Pose(relative_to=self._related_robot_instance.get_parent(self)) - @joint_relative_origin.setter def joint_relative_origin(self, value): """This setter should only be used from the xml import""" @@ -1660,6 +1812,35 @@ def sensors(self, value): if not any([existing.name == s.name and existing.equivalent(s) for existing in self._related_robot_instance.sensors]): self._related_robot_instance.add_aggregate("sensor", s) + @property + def primitives(self): + primitives = [] + for c in self._collisions: + if c.primitives: + primitives += c.primitives + return primitives + + @property + def export_collisions(self): + if self._related_robot_instance and self._related_robot_instance.provide_primitives and 
len(self._related_robot_instance.get_all_primitives()) > 0: + colls = [Collision(name=self.name+"_primitive_collision"+str(i), link=self, geometry=prim, origin=prim.origin) for i, prim in enumerate(self.primitives)] + for c in colls: + c.link_with_robot(self._related_robot_instance) + return colls + return [c for c in self._collisions if c.geometry is not None] + + @export_collisions.setter + def export_collisions(self, value): + self._collisions = value + + @property + def collisions(self): + return self._collisions + + @collisions.setter + def collisions(self, value): + self._collisions = value + class Frame(Link): _class_variables = ["name", "attached_to", "origin"] @@ -1691,13 +1872,9 @@ def attached_to(self, attached_to): self._attached_to = attached_to -class JointDynamics(Representation): +class JointDynamics(Representation, SmurfBase): def __init__(self, damping=None, friction=None, spring_stiffness=None, spring_reference=None, **kwargs): - super().__init__() - self.damping = damping - self.friction = friction - self.spring_stiffness = spring_stiffness - self.spring_reference = spring_reference + SmurfBase.__init__(self, damping=damping, friction=friction, spring_stiffness=spring_stiffness, spring_reference=spring_reference, **kwargs) def stringable(self): return False @@ -1706,15 +1883,11 @@ def is_empty(self): return self.damping is None and self.friction is None and self.spring_stiffness is None and self.spring_reference is None -class JointLimit(Representation): +class JointLimit(Representation, SmurfBase): _class_variables = ["effort", "velocity", "lower", "upper"] def __init__(self, effort=None, velocity=None, lower=None, upper=None, **kwargs): - super().__init__() - self.effort = effort - self.velocity = velocity - self.lower = lower - self.upper = upper + SmurfBase.__init__(self, effort=effort, velocity=velocity, lower=lower, upper=upper, **kwargs) def stringable(self): return False @@ -1726,7 +1899,7 @@ def is_empty(self): class JointMimic(Representation, SmurfBase): _class_variables = ["joint", "multiplier", "offset"] - def __init__(self, joint=None, multiplier=None, offset=None, **kwargs): + def __init__(self, joint=None, multiplier=None, offset=0, **kwargs): super().__init__() self.joint = _singular(joint) assert self.joint is not None @@ -1798,11 +1971,11 @@ def __init__(self, name=None, parent=None, child=None, joint_type=None, self.axis = [0, 0, 1] else: self.axis = None - if origin is None and cut_joint is False: - log.debug(f"Created joint {name} without specified origin assuming zero-transformation") - origin = Pose(xyz=[0, 0, 0], rpy=[0, 0, 0], relative_to=self.parent) + # if origin is None and cut_joint is False: + # log.debug(f"Created joint {name} without specified origin assuming zero-transformation") + # origin = Pose(xyz=[0, 0, 0], rpy=[0, 0, 0], relative_to=self.parent) self.origin = _singular(origin) - if self.origin.relative_to is None: + if self.origin and self.origin.relative_to is None: self.origin.relative_to = self.parent self.limit = _singular(limit) if self.joint_type != "fixed" else None if joint_dependencies is not None: @@ -1819,9 +1992,7 @@ def __init__(self, name=None, parent=None, child=None, joint_type=None, # dynamics self.dynamics = _singular(dynamics) SmurfBase.__init__(self, **kwargs) - # [Todo v2.1.0] To safe all information in SMURF we have to add here the transformation from parent_relative_origin, but with the correct key - self.returns += ["joint_dependencies", "parent", "child"] - self.excludes += ["limit", "mimic", "axis", 
"dynamics"] + self.returns += ["joint_dependencies", "parent", "child", "limit", "mimic", "axis", "dynamics"] def link_with_robot(self, robot, check_linkage_later=False): super(Joint, self).link_with_robot(robot, check_linkage_later=True) @@ -1860,11 +2031,33 @@ def check_valid(self): (self._related_robot_instance.get_link(self.parent) is not None and self._related_robot_instance.get_link(self.child) is not None))) + def invert(self): + relink = False + if self._related_robot_instance is not None: + self.unlink_from_robot() + relink = self._related_robot_instance + + _temp = self.child + self.child = self.parent + self.parent = _temp + + self.origin.inv(self.parent) + + if relink: + self.link_with_robot(relink) + + def sort_string(self, dialect=None) -> str: + prefix = type(self).__name__ if dialect is None else self.to_xml(dialect).tag + if self._related_robot_instance: + return prefix + str(self._related_robot_instance.joints.index(self)) + else: + return super().sort_string(dialect) + @property def joint_relative_origin(self): assert self.origin is None or self.origin.relative_to is not None if self.origin is None: - return None + return Pose() assert self._related_robot_instance is not None, "Trying to get joint_relative_origin while robot is not linked" parent_joint_name = self._related_robot_instance.get_parent(self.parent) if parent_joint_name is None: @@ -1987,8 +2180,10 @@ def joint_dependencies(self, new_val): @property def sensors(self): - assert self._related_robot_instance is not None - return [s for s in self._related_robot_instance.sensors if getattr(s, "joint", None) == self.name] + if self._related_robot_instance is not None: + return [s for s in self._related_robot_instance.sensors if getattr(s, "joint", None) == self.name] + else: + return self._sensors @sensors.setter def sensors(self, value): @@ -2057,6 +2252,15 @@ def __init__(self, name, mechanicalReduction=1, **kwargs): self.name = name self.mechanicalReduction = mechanicalReduction + def set_unique_name(self, value): + self.name=value + + def __str__(self): + return self.name + + def stringable(self): + return True + class TransmissionJoint(Representation): _class_variables = ["name", "hardwareInterface"] @@ -2071,28 +2275,43 @@ def __init__(self, name, hardwareInterfaces=None, **kwargs): def check_valid(self): assert len(self.hardwareInterfaces) > 0, "no hardwareInterface defined" + def set_unique_name(self, value): + self.name=value + + def __str__(self): + return self.name + + def stringable(self): + return True class Transmission(Representation): """ New format: http://wiki.ros.org/urdf/XML/Transmission """ - _class_variables = ["name", "joints", "actuators"] + _class_variables = ["name", "joints", "actuators", "transmission_type"] - def __init__(self, name, joints: TransmissionJoint = None, actuators=None, **kwargs): + def __init__(self, name, transmission_type=None, joints: TransmissionJoint = None, actuators=None, **kwargs): super().__init__() self.name = name - self.joints = [] if joints is None else joints - self.actuators = [] if actuators is None else actuators + self.transmission_type = transmission_type + self.joints = _plural(joints) + self.actuators = _plural(actuators) def check_valid(self): assert len(self.joints) > 0, "no joint defined" assert len(self.actuators) > 0, "no actuator defined" + def set_unique_name(self, value): + self.name=value + + def __str__(self): + return self.name + class Motor(Representation, SmurfBase): BUILD_TYPES = ["BLDC", "DC", "AC", "STEP"] TYPES = ["position", 
"velocity", "force"] _class_variables = ["name", "joint"] - def __init__(self, name=None, joint=None, type="position", **kwargs): + def __init__(self, name=None, joint=None, type="position", soft_limits={}, pid_config={}, **kwargs): self.name = name self.joint = joint SmurfBase.__init__(self, returns=["name", "joint"], **kwargs) @@ -2102,6 +2321,12 @@ def __init__(self, name=None, joint=None, type="position", **kwargs): self._maxSpeed = None self._maxValue = None self._minValue = None + self.soft_limits = {} + for ln, sl in self.soft_limits.items(): + self.add_soft_limit(ln, **sl) + self.pid_config = {} + for cn, pid in self.pid_config.items(): + self.add_pid_config(cn, **pid) self.build_type = kwargs.get("type", None) # TODO smurf/json output of build type is missing if self.build_type: @@ -2249,6 +2474,39 @@ def force_control(self): return None return self.type == "force" + def add_soft_limit(self, name, lower, upper, effort, velocity): + self.soft_limits[name] = JointLimit( + effort=effort, + velocity=velocity, + lower=lower, + upper=upper + ) + + def get_soft_limit(self, name): + return self.soft_limits.get(name, self.joint.limit) + + def has_soft_limit(self, name): + return name in self.soft_limits + + def add_pid_config(self, name, p, i, d): + self.pid_config[name] = { + "p": p, "i": i, "d": d + } + + def get_pid_config(self, name): + return self.pid_config.get(name, None) + + def has_pid_config(self, name): + return name in self.pid_config + + +class Plugin(Representation, SmurfBase): + _class_variables = ["plugin_filename", "plugin_name"] + + def __init__(self, plugin_filename=None, plugin_name=None, **kwargs): + SmurfBase.__init__(self, plugin_filename=plugin_filename, plugin_name=plugin_name, **kwargs) + + class PluginFactory(Representation): @classmethod @@ -2257,17 +2515,17 @@ def create(cls, plugin_name, plugin_filename, _xml: ET.Element = None, **kwargs) assert "joint_name" in kwargs if "JointPositionController" in plugin_name: type = "position" - elif not kwargs.get("use_force_command", False): + elif not kwargs.get("use_force_commands", False): type = "velocity" else: type = "force" - kwargs.pop("use_force_command") + kwargs.pop("use_force_commands") return Motor( name=kwargs["joint_name"]+"_motor", type=( "position" if "JointPositionController" in plugin_name else ( - "velocity" if not kwargs.get("use_force_command", False) + "velocity" if not kwargs.get("use_force_commands", False) else "force" ) ), @@ -2276,8 +2534,10 @@ def create(cls, plugin_name, plugin_filename, _xml: ET.Element = None, **kwargs) d=kwargs.pop("d_gain") if "d_gain" in kwargs else None, **kwargs ) - log.error(f"Couldn't instantiate plugin from {repr(kwargs)}") - return None + else: + return Plugin(name=plugin_name, filename=plugin_filename, **kwargs) + # log.error(f"Couldn't instantiate plugin from {repr(kwargs)}") + # return None # [TODO v2.1.0] Check how we can store which properties are defined by which literals to reload them properly from file diff --git a/phobos/io/smurf_reflection.py b/phobos/io/smurf_reflection.py index 6906d2c8..f75cb66f 100644 --- a/phobos/io/smurf_reflection.py +++ b/phobos/io/smurf_reflection.py @@ -51,7 +51,7 @@ def add_annotations(self, overwrite=False, **kwargs): continue # The object has to know which properties to export, this is done via self.returns.append(category) - if overwrite or getattr(self, category, None) is None: + if overwrite or not getattr(self, category, None): if category in self.type_dict.keys(): if type(information) == list: information = [x if 
type(x) == str else str(x) for x in information] diff --git a/phobos/io/smurfrobot.py b/phobos/io/smurfrobot.py index 8b1e43d2..0ba75731 100644 --- a/phobos/io/smurfrobot.py +++ b/phobos/io/smurfrobot.py @@ -16,10 +16,10 @@ class SMURFRobot(XMLRobot): def __init__(self, name=None, xmlfile=None, submechanisms_file=None, smurffile=None, verify_meshes_on_import=True, - inputfile=None, description=None, autogenerate_submechanisms=None, is_human=False, shallow=False): + inputfile=None, description=None, autogenerate_submechanisms=None, is_human=False, shallow=False, **kwargs): self.smurf_annotation_keys = [ 'motors', 'sensors', 'materials', "joints", "links", 'collisions', 'visuals', 'poses', - "submechanisms", "exoskeletons", "interfaces" + "submechanisms", "exoskeletons", "interfaces", "submodels" ] self.name = name @@ -63,28 +63,34 @@ def __init__(self, name=None, xmlfile=None, submechanisms_file=None, smurffile=N if len(smurffiles) == 1: smurffile = os.path.join(inputfile, "smurf", smurffiles[0]) log.info(f"Found smurf directory with {smurffile} in the input directory!") + else: + raise ValueError(f"Found multiple smurf files can't decide which one to take: {smurffiles}") elif "urdf" in content: urdffiles = [f for f in os.listdir(os.path.join(inputfile, "urdf")) if f.endswith("urdf")] if len(urdffiles) == 1: xmlfile = os.path.join(inputfile, "urdf", urdffiles[0]) log.info(f"Found urdf directory with {xmlfile} in the input directory!") + else: + raise ValueError(f"Found multiple urdf files can't decide which one to take: {urdffiles}") elif "sdf" in content: sdffiles = [f for f in os.listdir(os.path.join(inputfile, "sdf")) if f.endswith("sdf")] if len(sdffiles) == 1: xmlfile = os.path.join(inputfile, "sdf", sdffiles[0]) log.info(f"Found sdf directory with {xmlfile} in the input directory!") + else: + raise ValueError(f"Found multiple sdf files can't decide which one to take: {sdffiles}") elif any([f for f in relevant_files if f.endswith(".urdf")]) == 1: xmlfile = os.path.join(inputfile, [f for f in relevant_files if f.endswith("urdf")][0]) log.info(f"Found urdf file {xmlfile} in the input directory!") elif any([f for f in relevant_files if f.endswith(".sdf")]) == 1: xmlfile = os.path.join(inputfile, [f for f in relevant_files if f.endswith("sdf")][0]) log.info(f"Found sdf file {xmlfile} in the input directory!") - if inputfile is None: - raise ValueError("Couldn't find a valid input file in the given directory!") + else: + raise ValueError("Couldn't find a valid input file in the given directory!", content) else: raise ValueError("Can't parse robot from: "+inputfile + ("(can't parse)" if os.path.exists(inputfile) else "(not found)")) - super(SMURFRobot, self).__init__(is_human=is_human) + super(SMURFRobot, self).__init__(is_human=is_human, **kwargs) self.xmlfile = misc.sys_path(xmlfile) self.smurffile = misc.sys_path(smurffile) self.submechanisms_file = misc.sys_path(submechanisms_file) @@ -98,7 +104,8 @@ def __init__(self, name=None, xmlfile=None, submechanisms_file=None, smurffile=N base_robot = parse_xml(self.xmlfile) assert type(base_robot) == XMLRobot, f"{type(base_robot)}" for k, v in base_robot.__dict__.items(): - setattr(self, k, v) + if not getattr(self, k, None): + setattr(self, k, v) self.description = "" if description is None else description @@ -158,6 +165,7 @@ def read_smurffile(self, smurffile): smurf_dict = load_json(stream) self.name = smurf_dict['modelname'] self.inputfiles = smurf_dict['files'] + self.version = smurf_dict.get('version', None) # Process the file to get the 
absolute path
         for i, f in enumerate(self.inputfiles):
@@ -293,6 +301,16 @@ def _init_annotations(self):
                     log.debug(f"But there are: {[str(v) for v in self.visuals]}")
 
         if 'collisions' in self.annotations:
+            # Ignore the primitive collisions as they should already be present in the smurf annotations
+            primitives = []
+            for coll in self.collisions:
+                if "primitive_collision" in coll.name:
+                    primitives.append(coll)
+            for p in primitives:
+                link = self.get_link(p.link)
+                if p in link._collisions:
+                    link._collisions.remove(p)
+            # now parse what's in smurf
             for collision in self.annotations['collisions']:
                 coll_instance = self.get_collision_by_name(collision['name'])
                 if coll_instance is not None:
@@ -303,8 +321,22 @@ def _init_annotations(self):
                         collision.pop("geometry")
                     coll_instance.add_annotations(overwrite=False, **collision)
                 else:
-                    log.error(f"There is no collision with name {collision['name']} in this robot.")
-                    log.debug(f"But there are: {[str(c) for c in self.collisions]}")
+                    assert "link" in collision
+                    link = self.get_link(collision["link"], verbose=True)
+                    assert link is not None
+                    if "joint_relative_origin" in collision:
+                        collision.pop("joint_relative_origin")
+                    origin = None
+                    if "origin" in collision:
+                        origin = collision.pop("origin")
+                        origin = representation.Pose(**origin)
+                    try:
+                        if "geometry" in collision:
+                            collision["geometry"]["mesh_format"] = self.get_used_mesh_formats()[0]
+                        link.collisions += [representation.Collision(origin=origin, _smurffile=self.smurffile, **collision)]
+                        # TODO: It might be a good idea to check whether all primitives are really employed by the smurf before discarding them
+                    except Exception as e:
+                        log.error(f"Failed to rebuild collision from annotation: {collision}")
+                        raise e
 
         if 'submechanisms' in self.annotations:
             for submech in self.annotations['submechanisms']:
@@ -329,10 +361,15 @@ def _init_annotations(self):
                     representation.Interface(**interf)
                 )
 
+        if 'submodels' in self.annotations:
+            self.submodel_defs = self.annotations["submodels"]
+
         # [TODO v2.1.0] Check how we can store which properties are defined by which literals to reload them properly from file
         pop_annotations = []
         for k, v in self.annotations.items():
-            if k not in self.smurf_annotation_keys:
+            if k == "_autogenerated":
+                pop_annotations.append(k)
+            elif k not in self.smurf_annotation_keys:
                 pop_annotations.append(k)
                 log.info(f"Adding generic annotation of category {k}")
                 if type(v) == list:
@@ -448,7 +485,10 @@ def verify_meshes(self):
         no_problems = True
         for link in self.links:
             for vc in link.collisions + link.visuals:
-                no_problems &= not isinstance(vc.geometry, representation.Mesh) or not vc.geometry.available()
+                if isinstance(vc.geometry, representation.Mesh):
+                    no_problems &= vc.geometry.available()
+                # else:
+                #     no_problems &= vc.primitives is not None or vc.geometry is not None
         return no_problems
 
     # [ToDo v2.1.0] Support here all powers of GenericAnnotation
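For reference, a collision entry in the SMURF annotations that has no counterpart in the parsed XML now gets rebuilt on its link through the branch above. A hypothetical annotation entry as it would arrive from the loaded YAML (keys matching the code path, values made up):

    collision = {
        "name": "lower_leg_primitive_collision0",
        "link": "lower_leg",
        "origin": {"xyz": [0.0, 0.0, -0.2], "rpy": [0.0, 0.0, 0.0]},
        "geometry": {"geometry_type": "box", "size": [0.05, 0.05, 0.4]},
        "bitmask": 1,
    }
    # _init_annotations pops "origin", wraps it in a representation.Pose and
    # appends a representation.Collision built from the remaining keys.

 
 diff --git a/phobos/io/xml_factory.py b/phobos/io/xml_factory.py
index c9261ebb..fb5d8ec5 100644
--- a/phobos/io/xml_factory.py
+++ b/phobos/io/xml_factory.py
@@ -1,42 +1,40 @@
 try:
-    from lxml import etree as ET
+    from lxml import etree as ET, QName
 except ImportError:
     from xml.etree import ElementTree as ET
+    QName = None
 import json
 from typing import List
+from copy import deepcopy
 
 import numpy as np
 import pkg_resources
 
+from ..defs import KINEMATIC_TYPES
 from .base import Representation, Linkable
-from ..utils.misc import to_pretty_xml_string
+from ..utils.misc import to_pretty_xml_string, patch_dict, get_var, deserialize, is_int, is_float, plural, singular
 from ..commandline_logging import get_logger
 
log = 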
get_logger(__name__) +XML_REFLECTIONS = { + "urdf": { + "read": True, + "write": True + }, + "sdf": { + "read": True, + "write": True + }, + "x3d": { + "read": False, + "write": True + } +} FORMATS = json.load(open(pkg_resources.resource_filename("phobos", "data/xml_formats.json"), "r")) -REGISTERED_CLASSES = {} - -def is_int(val): - if type(val) == int: - return True - try: - int(val) - return True - except ValueError: - return False - - -def is_float(val): - if type(val) == float: - return True - try: - float(val) - return True - except ValueError: - return False +REGISTERED_CLASSES = {} def get_class(classname): @@ -51,40 +49,53 @@ def get_class(classname): return cls -def get_var(object, varname_string, default="ABORT"): - if varname_string.startswith("$"): - return varname_string[1:] - if "." in varname_string: - var = object - for var_part in varname_string.split("."): - if default == "ABORT": - var = getattr(var, var_part) - else: - log.debug(f"{varname_string} of {repr(object)} does not exist returning {default}") - var = getattr(var, var_part, default) - else: - if default == "ABORT": - var = getattr(object, varname_string) +# instead of misc.serialize +def serialize(entry, float_fmt=None, **kwargs) -> str: + """ + + Args: + entry: The entry to serialize + precision_dict: the float format in the style "%.6f" + + Returns: + + """ + assert entry is not None + if hasattr(entry, "tolist"): + entry = entry.tolist() + if type(entry) in [list, tuple, np.array]: + entry = [serialize(v, float_fmt=float_fmt) for v in entry] + return " ".join(entry) + elif type(entry) in [int, np.intc, np.int64, bool]: + return str(int(entry)) + elif type(entry) in [float, np.float64]: + return float_fmt % entry if float_fmt is not None else str(entry) + elif isinstance(entry, Linkable): + if entry._related_robot_instance is not None and kwargs.get("robot_instance", None) is not None and \ + str(entry._related_robot_instance._related_entity_instance) != str(kwargs["robot_instance"]._related_entity_instance): + return str(entry._related_robot_instance._related_entity_instance) + "::" + str(entry) else: - log.debug(f"{varname_string} of {repr(object)} does not exist returning {default}") - var = getattr(object, varname_string, default) - return var + return str(entry) + else: + return str(entry) class XMLDefinition(object): def __init__(self, dialect, tag, value=None, attributes=None, children=None, value_children=None, - attribute_children=None, nested_children=None): + attribute_children=None, nested_children=None, text=None): self.dialect = dialect self.xml_tag = tag self.xml_value = value self.xml_attributes = attributes if attributes else {} self.xml_children = children if children else {} - for _, child_def in self.xml_children.items(): - try: - child_def["class"] = get_class(child_def["classname"]) - assert child_def["class"].__name__ == child_def["classname"] - except TypeError: - raise TypeError(f"Faulty XMLDefinition for dialect '{dialect}' and tag '{tag}': {child_def}") + self.xml_text = text + if type(self.xml_children) == dict: + for _, child_def in self.xml_children.items(): + try: + child_def["class"] = get_class(child_def["classname"]) + assert child_def["class"].__name__ == child_def["classname"] + except TypeError: + raise TypeError(f"Faulty XMLDefinition for dialect '{dialect}' and tag '{tag}': {child_def}") self.xml_value_children = value_children if value_children else {} self.xml_attribute_children = attribute_children if attribute_children else {} self.xml_nested_children = {} @@ -105,38 
+116,50 @@ def to_xml(self, object, float_fmt_dict=None, **kwargs): Returns: An XML object """ + _xml_tag = self.xml_tag + if self.xml_tag == "__DYNAMIC__": + _xml_tag = object.xml_tag + if QName: + _xml_tag == QName(**_xml_tag.split(":")) # attributes if float_fmt_dict is None: float_fmt_dict = {} attrib = {} - for attname, varname in self.xml_attributes.items(): - val = get_var(object, varname, None) - if val is not None: - attrib[attname] = self._serialize(val, float_fmt=float_fmt_dict.get(attname, float_fmt_dict.get("default", None)), **kwargs) - out = ET.Element(self.xml_tag, attrib=attrib) - # value - if self.xml_value is not None: - assert all([x == {} for x in [self.xml_children, self.xml_value_children, self.xml_attribute_children, - self.xml_nested_children]]) - val = get_var(object, self.xml_value, None) - out.text = self._serialize(val, float_fmt=float_fmt_dict.get(self.xml_tag, float_fmt_dict.get("default", None)), **kwargs) - if val is not None: - return out + _xml_attributes = self.xml_attributes + if type(self.xml_attributes) == str: + _xml_attributes = get_var(object, self.xml_attributes, default={}) + if _xml_attributes is None: + _xml_attributes = {} + assert type(_xml_attributes) == dict, self.xml_attributes+": "+repr(_xml_attributes) + for attname, varname in _xml_attributes.items(): + if type(self.xml_attributes) != str: + val = get_var(object, varname, None) else: - return + val = varname + if val is not None: + attrib[attname] = serialize(val, float_fmt=float_fmt_dict.get(attname, float_fmt_dict.get("default", None)), **kwargs) + out = ET.Element(_xml_tag, attrib=attrib) # normal children children = [] - for _, var in self.xml_children.items(): - obj = get_var(object, var["varname"], None) - if type(obj) == list: + if type(self.xml_children) == str: + # this way we can export children directly from list and keep the order + obj = get_var(object, self.xml_children) + if obj: + assert type(obj) == list children += [o for o in obj if o is not None and (not isinstance(o, Representation) or not o.is_empty())] - elif isinstance(obj, var["class"]): - children += [obj] - elif hasattr(object, "_"+var["varname"]): - _obj = getattr(object, "_"+var["varname"]) - if _obj is not None and (not isinstance(_obj, Representation) or not _obj.is_empty()): - children += [_obj] - for child in sorted(children, key=lambda x: x.sort_string()): + else: + for _, var in self.xml_children.items(): + obj = get_var(object, var["varname"], None) + if type(obj) == list: + children += [o for o in obj if o is not None and (not isinstance(o, Representation) or not o.is_empty())] + elif isinstance(obj, var["class"]): + children += [obj] + elif hasattr(object, "_"+var["varname"]): + _obj = getattr(object, "_"+var["varname"]) + if _obj is not None and (not isinstance(_obj, Representation) or not _obj.is_empty()): + children += [_obj] + children = sorted(children, key=lambda x: x.sort_string()) + for child in children: try: e = child.to_xml(self.dialect, float_fmt_dict=float_fmt_dict, **kwargs) if e is not None: @@ -147,40 +170,85 @@ def to_xml(self, object, float_fmt_dict=None, **kwargs): else: raise error # children that are created from a simple property and have only attributes - for tag, attribute_map in self.xml_attribute_children.items(): - _attrib = {attname: self._serialize(get_var(object, varname, None), float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) - for attname, varname in attribute_map.items() if get_var(object, varname, None) is not None} - if len(_attrib) 
== 0: - continue - e = ET.Element(tag, attrib=_attrib) - out.append(e) + if type(self.xml_attribute_children) == str: + _xml_attribute_children = get_var(object, self.xml_attribute_children, {}) + if _xml_attribute_children: + for tag, attribute_map in _xml_attribute_children: + _attrib = {attname: serialize(var, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + for attname, var in attribute_map.items() if var is not None} + if len(_attrib) == 0: + continue + e = ET.Element(tag, attrib=_attrib) + out.append(e) + else: + for tag, attribute_map in self.xml_attribute_children.items(): + _attrib = {attname: serialize(get_var(object, varname, None), float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + for attname, varname in attribute_map.items() if get_var(object, varname, None) is not None} + if len(_attrib) == 0: + continue + e = ET.Element(tag, attrib=_attrib) + out.append(e) # children that have the a value as text - for tag, varname in self.xml_value_children.items(): - val = get_var(object, varname, None) - if val is not None: - if type(val) == list and all([not is_int(v) and not is_float(v) and type(v) == str for v in val]): - for v in val: + if type(self.xml_value_children) == str: + _xml_value_children = get_var(object, self.xml_value_children, {}) + if _xml_value_children: + for tag, val in get_var(object, _xml_value_children, {}): + if val is not None: + if type(val) == list and all([not is_int(v) and not is_float(v) and type(v) == str for v in val]): + for v in val: + e = ET.Element(tag) + _t = serialize(v, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + try: + e.text = _t + except TypeError as error: + print(_t, type(_t)) + raise error + out.append(e) + else: + e = ET.Element(tag) + _t = serialize(val, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + try: + e.text = _t + except TypeError as error: + print(_t, type(_t)) + raise error + out.append(e) + else: + for tag, varname in self.xml_value_children.items(): + val = get_var(object, varname, None) + if val is not None: + if type(val) == list and all([not is_int(v) and not is_float(v) and type(v) == str for v in val]): + for v in val: + e = ET.Element(tag) + _t = serialize(v, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + try: + e.text = _t + except TypeError as error: + print(_t, type(_t)) + raise error + out.append(e) + else: e = ET.Element(tag) - _t = self._serialize(v, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) + _t = serialize(val, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) try: e.text = _t except TypeError as error: print(_t, type(_t)) raise error out.append(e) - else: - e = ET.Element(tag) - _t = self._serialize(val, float_fmt=float_fmt_dict.get(tag, float_fmt_dict.get("default", None)), **kwargs) - try: - e.text = _t - except TypeError as error: - print(_t, type(_t)) - raise error - out.append(e) # children that are nested in another element if self.xml_nested_children != {}: for _, nest in self.xml_nested_children.items(): out.append(nest.to_xml(object, float_fmt_dict=float_fmt_dict, **kwargs)) + # value + if self.xml_value is not None: + assert len(out) == 0 + val = get_var(object, self.xml_value, None) + out.text = serialize(val, float_fmt=float_fmt_dict.get(_xml_tag, float_fmt_dict.get("default", None)), **kwargs) + if val is not None: + return out + else: + return return 
out def kwargs_from_xml(self, xml: ET.Element, **kwargs): @@ -188,15 +256,15 @@ def kwargs_from_xml(self, xml: ET.Element, **kwargs): _smurffile = kwargs.get("_smurffile", None) # value if self.xml_value is not None and xml.text is not None: - kwargs[self.xml_value] = self._deserialize(xml.text, key=xml.tag) + kwargs[self.xml_value] = deserialize(xml.text, key=xml.tag) # attributes for attname, varname in self.xml_attributes.items(): if attname in xml.attrib: - kwargs[varname] = self._deserialize(xml.attrib[attname], key=attname) + kwargs[varname] = deserialize(xml.attrib[attname], key=attname) for child in xml: if self.xml_value is not None: # value - kwargs[self.xml_value] = self._deserialize( + kwargs[self.xml_value] = deserialize( child.text, key=child.tag ) if child.tag in self.xml_children.keys(): @@ -210,10 +278,10 @@ def kwargs_from_xml(self, xml: ET.Element, **kwargs): # children that are created from a simple property and have only attributes for attname, varname in self.xml_attribute_children[child.tag].items(): if attname in child.attrib.keys(): - kwargs[varname] = self._deserialize(child.attrib[attname], key=attname) + kwargs[varname] = deserialize(child.attrib[attname], key=attname) if child.tag in self.xml_value_children.keys(): # children that have the a value as text - kwargs[self.xml_value_children[child.tag]] = self._deserialize(child.text, key=child.tag) + kwargs[self.xml_value_children[child.tag]] = deserialize(child.text, key=child.tag) if child.tag in self.xml_nested_children.keys(): # children that are nested in another element _kwargs = self.xml_nested_children[child.tag].kwargs_from_xml(child, @@ -227,52 +295,6 @@ def kwargs_from_xml(self, xml: ET.Element, **kwargs): kwargs[k] = v return kwargs - def _serialize(self, entry, float_fmt=None, **kwargs) -> str: - """ - - Args: - entry: The entry to serialize - precision_dict: the float format in the style "%.6f" - - Returns: - - """ - assert entry is not None - if hasattr(entry, "tolist"): - entry = entry.tolist() - if type(entry) in [list, tuple, np.array]: - entry = [self._serialize(v, float_fmt=float_fmt) for v in entry] - return " ".join(entry) - elif type(entry) in [int, np.intc, np.int64, bool]: - return str(int(entry)) - elif type(entry) in [float, np.float64]: - return float_fmt % entry if float_fmt is not None else str(entry) - elif isinstance(entry, Linkable): - if entry._related_robot_instance is not None and kwargs.get("robot_instance", None) is not None and \ - str(entry._related_robot_instance._related_entity_instance) != str(kwargs["robot_instance"]._related_entity_instance): - return str(entry._related_robot_instance._related_entity_instance) + "::" + str(entry) - else: - return str(entry) - else: - return str(entry) - - def _deserialize(self, string: str, key=None): - string = string.strip() - if " " in string and not key in ["uri", "url", "file", "filepath", "filename"]: - _list = string.split() - if all([is_int(v) for v in _list]): - return [int(v) for v in _list] - elif all([is_float(v) for v in _list]): - return [float(v) for v in _list] - else: - return _list - elif is_int(string): - return int(string) - elif is_float(string): - return float(string) - else: - return string - class XMLFactory(XMLDefinition): def __init__(self, dialect, classname): @@ -318,22 +340,6 @@ def from_xml_string(self, classtype, xml_string: str): return None -XML_REFLECTIONS = { - "urdf": { - "read": True, - "write": True - }, - "sdf": { - "read": True, - "write": True - }, - "x3d": { - "read": False, - "write": True - } 
-} - - def class_factory(cls, only=None): reflections = [refl for refl in XML_REFLECTIONS.keys() if cls.__name__ in FORMATS[refl].keys()] setattr(cls, "factory", {refl: XMLFactory(refl, cls.__name__) for refl in reflections}) @@ -399,40 +405,4 @@ def REGISTER_MODULE_TO_XML_FACTORY(module): class_factory(getattr(module, classname)) -def singular(prop): - """ - Makes sure the prop is single and not a list - Args: - prop: The property to check - Returns: - prop as a single element - Raises: - Assertion error if prop is a list with more than one element - """ - if type(prop) == list: - if len(prop) == 1: - return prop[0] - elif len(prop) == 0: - return None - else: - raise AssertionError(f"Got list with content {prop} where only a single element is allowed.") - else: - return prop - - -def plural(prop): - """ - Makes sure prop is a list - Args: - prop: The property to check - - Returns: - prop as list - """ - if type(prop) == list: - return prop - elif prop is None: - return [] - else: - return [prop] diff --git a/phobos/io/xmlrobot.py b/phobos/io/xmlrobot.py index 1179b300..b432e512 100644 --- a/phobos/io/xmlrobot.py +++ b/phobos/io/xmlrobot.py @@ -10,6 +10,7 @@ from ..commandline_logging import get_logger from ..utils.transform import create_transformation, get_adjoint, inv from ..utils.tree import get_joints_depth_first +from ..utils import misc log = get_logger(__name__) @@ -27,10 +28,10 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N physical_links: List[representation.Link] = None, joints: List[representation.Joint] = None, materials: List[representation.Material] = None, + referenced_materials: List[representation.Material] = None, transmissions: List[representation.Transmission] = None, - sensors=None, sensors_that_dont_belong_to_links_or_joints=None, motors=None, - plugins=None, root=None, is_human=False, - urdf_version=None, xmlfile=None, _xmlfile=None): + sensors=None, motors=None, plugins=None, root=None, + is_human=False, urdf_version=None, xmlfile=None, _xmlfile=None): self._related_robot_instance = self super().__init__() self.joints = [] @@ -40,14 +41,17 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N self.materials = [] self.transmissions = [] # [TODO v2.1.0] currently not fully supported self.sensors = [] - if sensors_that_dont_belong_to_links_or_joints is not None: - self.sensors_that_dont_belong_to_links_or_joints = sensors_that_dont_belong_to_links_or_joints self.plugins = [] # Currently just a place holder self.motors = [] self.xmlfile = xmlfile if xmlfile is not None else _xmlfile # Default export mesh format from phobos.defs.MESH_TYPES self.mesh_format = "input_type" + # Default export setting + self.write_material_references = True + # Whether to use the primitive objects if the collision provides them on export + # If this is set true only the link nows about the collision instances anyone calling it's collision getter will receive their primitives + self.provide_primitives = False if name is None or len(name) == 0: if self.xmlfile is not None: @@ -57,7 +61,7 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N else: self.name = name - self.version = version + self.version = version if version else "0.0.0" self.urdf_version = "1.0" if urdf_version is None else urdf_version if joints is not None: @@ -73,7 +77,7 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N for link in physical_links: self.add_aggregate("link", link) - 
self.materials = materials if materials is not None else [] + self.materials = materials if materials is not None else referenced_materials if referenced_materials is not None else [] self.transmissions = transmissions if transmissions is not None else [] self.sensors = sensors if sensors is not None else [] self.motors = motors if motors is not None else [] @@ -85,11 +89,19 @@ def __init__(self, name=None, version=None, links: List[representation.Link] = N self.regenerate_tree_maps() if self.links: + # link everything self.link_entities() if root is not None: assert root in [str(l) for l in self.links], "root specified in xml is no link in the robot" assert root == str(self.get_root()), "root specified in xml is not root of the robot" + def get_used_mesh_formats(self): + formats = set() + for vc in self.collisions + self.visuals: + if isinstance(vc.geometry, representation.Mesh): + formats.add(vc.geometry.abs_filepath.rsplit(".",1)[-1]) + return list(formats) + def __str__(self): return self.name @@ -103,8 +115,28 @@ def unlink_from_world(self): def assert_validity(self): assert self.get_root().origin is None or self.get_root().origin.is_zero() + # assert there are no links relative_to it self for link in self.links: assert link.origin is None or link.origin.relative_to != link.name + # assert names are unique + for entity_list in ["collisions", "visuals", "joints", "links", "sensors"]: + entity_names = set() + for e in getattr(self, entity_list): + entity_names.add(e.name) + if len(entity_names) < len(getattr(self, entity_list)): + log.error(entity_list+" name: "+ ", ".join([str(e) for e in getattr(self, entity_list)])) + raise AssertionError(entity_list[:-1]+" names are not unique!") + + @property + def referenced_materials(self): + if self.write_material_references: + return self.materials + else: + return None + + @referenced_materials.setter + def referenced_materials(self, materials): + self.materials = materials if materials is not None else [] @property def collisions(self): @@ -115,25 +147,29 @@ def visuals(self): return self.get_all_visuals() @property - def sensors_that_dont_belong_to_links_or_joints(self): - return [s for s in self.sensors if getattr(s, "link", None) is None and getattr(s, "joint", None) is None] + def movable_joints(self): + return [joint for joint in self.joints if joint.joint_type != "fixed"] @property def frames(self): - return [lnk for lnk in self.links if not(lnk.inertial or lnk.collision or lnk.visual)] + return [lnk for lnk in self.links if not(lnk.inertials or lnk.collisions or lnk.visuals)] @frames.setter def frames(self, frames): - self.add_aggregate("link". frames) + self.add_aggregate("link", frames) @property def physical_links(self): - return [lnk for lnk in self.links if (lnk.inertial or lnk.collision or lnk.visual)] + return [lnk for lnk in self.links if (lnk.inertial or lnk.collisions or lnk.visuals)] @physical_links.setter def physical_links(self, links): self.add_aggregate("link". 
links) + @property + def sensors_that_dont_belong_to_links_or_joints(self): + return [s for s in self.sensors if getattr(s, "link", None) is None and getattr(s, "joint", None) is None] + @sensors_that_dont_belong_to_links_or_joints.setter def sensors_that_dont_belong_to_links_or_joints(self, value): value = _plural(value) @@ -147,19 +183,30 @@ def annotate_as_human(self): def link_entities(self, check_linkage_later=False): root = self.get_root() - self.assert_validity() - for link in self.links: for child_entity in ([link.inertial] if link.inertial is not None else []) + link.visuals + link.collisions: child_entity.link = link if child_entity.origin is not None and child_entity.origin.relative_to is None: child_entity.origin.relative_to = link + # make sure we have unique visual and collision names + for geo_list in ["collisions", "visuals"]: + geo_names = set() + for g in getattr(self, geo_list): + geo_names.add(g.name) + if len(geo_names) < len(getattr(self, geo_list)): + log.warning(geo_list[:-1]+" names are not unique, setting link name as prefix") + for g in getattr(self, geo_list): + g.name = misc.edit_name_string(g.name, prefix=str(g.link)+"_", do_not_double=True) + for joint in self.joints: if joint.origin.relative_to is None: parent = self.get_parent(joint.name) joint.origin.relative_to = parent if parent is not None else self.get_root().name for entity in self.links + self.joints + self.motors + self.sensors + self.materials: entity.link_with_robot(self, check_linkage_later=True) + + self.assert_validity() + if not check_linkage_later: assert self.check_linkage() @@ -282,6 +329,8 @@ def regenerate_tree_maps(self): self.child_map[j.parent].append((j.name, j.child)) else: self.child_map[j.parent] = [(j.name, j.child)] + if self.joints: + self.joints = self.get_joints_ordered_df() def add_aggregate(self, typeName, elem, silent=False): assert elem is not None @@ -305,6 +354,13 @@ def add_aggregate(self, typeName, elem, silent=False): self.joints += [elem] elif typeName in 'links': if elem not in self.links: + if elem in [str(l) for l in self.links]: + counter = 1 + while str(elem) + f"_{counter}" in [str(l) for l in self.links]: + counter += 1 + if not silent: + log.debug(f"Renamed {typeName} {str(elem)} to {str(elem)}_{counter}") + elem.set_unique_name(str(elem) + f"_{counter}") self.links += [elem] else: if not typeName.endswith("s"): @@ -479,6 +535,19 @@ def get_root(self): assert root is not None, "No roots detected, invalid URDF." return root + def get_roots(self): + """Returns all links that have currently root characteristics. 
+ If there is more than one root this robot is not properly defined + + Returns: + list: the rooty links + """ + roots = [] + for link in self.links: + if link.name not in self.parent_map: + roots.append(link) + return roots + def get_aggregate(self, targettype, target, verbose=False): """ Returns the id of the given instance @@ -628,6 +697,12 @@ def get_collision_by_name(self, collision_name): return None + def get_all_primitives(self): + primitives = [] + for link in self.links: + primitives += link.primitives + return primitives + def get_id(self, targettype, target): """ Returns the id of the given instance @@ -843,12 +918,12 @@ def global_origin(self, stop): def post_read_xml(self): """Version and validity check""" - if self.version is None: - self.version = "1.0" + if self.urdf_version is None: + self.urdf_version = "1.0" - split = self.version.split(".") + split = self.urdf_version.split(".") if len(split) != 2: - raise ValueError("The version attribute should be in the form 'x.y'") + raise ValueError("The urdf_version attribute should be in the form 'x.y'") if split[0] == '' or split[1] == '': raise ValueError("Empty major or minor number is not allowed") @@ -856,8 +931,8 @@ def post_read_xml(self): if int(split[0]) < 0 or int(split[1]) < 0: raise ValueError("Version number must be positive") - if self.version not in self.SUPPORTED_VERSIONS: - raise ValueError("Invalid version; only %s is supported" % (','.join(self.SUPPORTED_VERSIONS))) + if self.urdf_version not in self.SUPPORTED_VERSIONS: + raise ValueError("Invalid urdf_version; only %s is supported" % (','.join(self.SUPPORTED_VERSIONS))) xml_factory.class_factory(XMLRobot) diff --git a/phobos/scripts/convert.py b/phobos/scripts/convert.py index bcdb5ff2..26175eb5 100644 --- a/phobos/scripts/convert.py +++ b/phobos/scripts/convert.py @@ -15,6 +15,7 @@ def cant_be_used_msg(): def main(args): import argparse import os.path as path + from ..defs import load_json from ..core import Robot, World from ..utils import misc, resources from ..commandline_logging import setup_logger_level, BASE_LOG_LEVEL @@ -27,7 +28,9 @@ def main(args): 'is a (non-existing) directory SMURF will be exported.', action="store", default=None) parser.add_argument('-c', '--copy-meshes', help='Copies the meshes', action='store_true', default=False) + parser.add_argument('-m', '--mesh-type', choices=["stl", "obj", "mars_obj", "dae", "input_type"], help='If -c set the mesh type', default="input_type") parser.add_argument('-a', '--sdf-assemble', help='When converting a scene to sdf set this flag to get one assembled model', action='store_true', default=False) + parser.add_argument('-e', '--export_config', type=str, help='Path to the a json/yaml file that stores the export config', default=None) parser.add_argument("--loglevel", help="The log level", choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], default=BASE_LOG_LEVEL) args = parser.parse_args(args) @@ -42,25 +45,31 @@ def main(args): robot = None if args.input.rsplit(".", 1)[-1] in ["smurfa", "smurfs"]: world = World(inputfile=args.input) + if world.is_empty(): + log.error("Given file is empty!") + return 1 + if args.output.lower() not in ["smurfa", "smurfs"] and world.has_one_root(): + robot = world.assemble() else: robot = Robot(inputfile=args.input) - if args.output.lower().endswith("sdf"): - if robot: - log.info("Converting to SDF model") - robot.export_sdf(outputfile=args.output, with_meshes=args.copy_meshes, mesh_format="input_type") - elif args.sdf_assemble: # world + if args.copy_meshes and 
robot and not args.export_config:
+        robot.export_meshes(path.join(path.dirname(args.output), "meshes"), format=args.mesh_type)
+    if args.export_config:
+        assert path.isfile(args.export_config)
+        with open(args.export_config, "r") as f:
+            export_config = load_json(f.read())
+        robot.export(args.output, export_config=export_config["export_config"], with_meshes=args.copy_meshes, no_smurf=export_config.get("no_smurf", False))
+    elif args.output.lower().endswith("sdf"):
+        if args.sdf_assemble:  # world
             log.info("Converting to SDF model")
-            robot = world.assemble()
-            robot.export_sdf(outputfile=args.output, with_meshes=args.copy_meshes, mesh_format="input_type")
+            robot.export_sdf(outputfile=args.output, mesh_format=args.mesh_type)
         else:  # world
             log.info("Converting to SDF world")
-            world.export_sdf(outputfile=args.output, with_meshes=args.copy_meshes, mesh_format="input_type")
+            world.export_sdf(outputfile=args.output, mesh_format=args.mesh_type)
     elif args.output.lower().endswith("urdf"):
         log.info("Converting to URDF")
-        if world:
-            robot = world.assemble()
-        robot.export_urdf(outputfile=args.output, with_meshes=args.copy_meshes, mesh_format="input_type")
+        robot.export_urdf(outputfile=args.output, mesh_format=args.mesh_type)
     elif args.output.lower().endswith("pdf"):
         log.info("Converting to PDF")
         if robot:
@@ -70,19 +79,13 @@ def main(args):
     elif args.output.lower().endswith("smurf") or path.isdir(args.output):
         log.info("Converting to SMURF")
         if robot:
-            robot.export(outputdir=args.output, export_config=resources.get_default_export_config(), with_meshes=args.copy_meshes, mesh_format="input_type")
-        elif world.is_empty():
-            log.error("Given file is empty!")
-            return 1
-        elif world.has_one_root():
-            robot = world.assemble()
-            robot.export(outputdir=args.output, export_config=resources.get_default_export_config(), with_meshes=args.copy_meshes)
-        else:
+            robot.export(outputdir=args.output, export_config=resources.get_default_export_config(), mesh_format=args.mesh_type)
+        else:  # todo this is also valid for sdf and urdf
             if not path.isdir(args.output):
                 raise IOError(f"Please specify a directory as output, when exporting a world with multiple entities. {args.output} is not a directory.")
             for root in world.get_root_entities():
                 robot = world.assemble(root)
-                robot.export(outputdir=path.join(args.output, f"root-{root.name}"), export_config=getattr(world, "export_config", resources.get_default_export_config()), with_meshes=args.copy_meshes)
+                robot.export(outputdir=path.join(args.output, f"root-{root.name}"), export_config=getattr(world, "export_config", resources.get_default_export_config()), mesh_format=args.mesh_type)
     elif args.output.lower().endswith("jpg") or args.output.lower().endswith("png"):
         if robot:
             log.info("Creating thumbnail")
@@ -95,12 +98,12 @@ def main(args):
         if robot:
             world = World()
             world.add_robot(name="robot", robot=robot, anchor="world")
-            world.export_smurfs(outputfile=args.output)
+            world.export_smurfs(outputfile=args.output, mesh_format=args.mesh_type)
     elif args.output.lower().endswith("smurfa"):
         if robot:
             world = World()
             world.add_robot(name="robot", robot=robot, anchor="world")
-            world.export_smurfa(outputfile=args.output)
+            world.export_smurfa(outputfile=args.output, mesh_format=args.mesh_type)
     else:
         log.error(f"Don't know which conversion to apply for {args.output}")
         return 1
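A sketch of the export-config flow that the new -e/--export_config flag enables (file name and config content are hypothetical; the script reads export_config["export_config"] and, with the fix above, the optional "no_smurf" key):

    from phobos.core import Robot
    from phobos.defs import load_json

    robot = Robot(inputfile="model/urdf/model.urdf")
    with open("export_config.json", "r") as f:
        cfg = load_json(f.read())
    robot.export("output/", export_config=cfg["export_config"],
                 no_smurf=cfg.get("no_smurf", False))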
diff --git a/phobos/utils/git.py b/phobos/utils/git.py
index e334e741..ab694328 100644
--- a/phobos/utils/git.py
+++ b/phobos/utils/git.py
@@ -2,13 +2,26 @@
 from . import misc
 from .misc import execute_shell_command
+from ..defs import load_json
 from ..commandline_logging import get_logger
 
 log = get_logger(__name__)
 
+GIT_C_OPTION = ""
+RUNNING_FOR_AZURE = os.getenv("RUNNING_FOR_AZURE", False)
+if bool(os.getenv("SET_GIT_HTTP_EXTRAHEADER_FOR_AZURE", 0)):
+    RUNNING_FOR_AZURE = True
+    # then we are running on an Azure agent, for which we need this authorization header
+    GIT_C_OPTION = "-c http.extraheader=\"AUTHORIZATION: bearer ${SYSTEM_ACCESSTOKEN}\""
+
+GIT_SUCCESS_BRANCH = os.getenv("GIT_SUCCESS_BRANCH", "master")
+GIT_PRODUCTION_BRANCH = os.getenv("GIT_PRODUCTION_BRANCH", GIT_SUCCESS_BRANCH)
+GIT_FAILURE_BRANCH = os.getenv("GIT_FAILURE_BRANCH", "$CI_UPDATE_TARGET_BRANCH")
+GIT_REMOTE_NAME = os.getenv("GIT_REMOTE_NAME", "autobuild")
+
 
 class MergeRequest(object):
-    target = "master"
+    target = GIT_SUCCESS_BRANCH
     title = ""
     description = ""
     mention = ""
@@ -24,7 +37,7 @@ def get_options(self):
         options = " -o merge_request.create"
         options += " -o merge_request.target='" + self.target + "'"
         options += " -o merge_request.title='" + self.title + "'"
" + self._tag + "'" + options += " -o merge_request.description='" + self.mention + " " + self.description.replace("'", "\"") + "
" + self._tag + "'" if len(self.mention.strip().strip("@").split()) != 0: options += " -o merge_request.assign='" + self.mention.strip().strip("@").split()[0] + "'" return options @@ -39,12 +52,12 @@ def get_root(cwd): def clone(repo, target, branch=None, cwd=None, recursive=False, ignore_failure=False, commit_id=None, - shallow=1, pipeline=None): + shallow=1, pipeline=None, recreate=False, remote=GIT_REMOTE_NAME, all_branches=False): execute_shell_command("git lfs install || true", cwd) - if os.path.exists(target): + if os.path.exists(target) and recreate: log.info(f"Deleting {pipeline.relpath(target) if pipeline is not None else target}") os.system("rm -rf {}".format(target)) - cmd = "git clone" + cmd = "git "+GIT_C_OPTION+" clone -o "+remote if branch is not None: cmd += " -b " + branch if recursive: @@ -53,6 +66,8 @@ def clone(repo, target, branch=None, cwd=None, recursive=False, ignore_failure=F cmd += " --depth " + str(shallow) + " " if recursive: cmd += " --shallow-submodules " + if all_branches: + cmd += " --no-single-branch " cmd += " " + repo + " " + target # not loading with --recursive as we don't need the meshes submodule if cwd is None and pipeline is not None: cwd = pipeline.root @@ -63,25 +78,58 @@ def clone(repo, target, branch=None, cwd=None, recursive=False, ignore_failure=F checkout(commit_id, target, force=True) -def checkout(commit_id, repo, force=False): +def checkout(repo, commit_id=None, branch=None, remote=GIT_REMOTE_NAME, force=False): + assert commit_id or branch execute_shell_command("git stash", repo) - execute_shell_command("git checkout " + ("-f " if force else "") + commit_id, repo) + if commit_id: + execute_shell_command("git checkout " + ("-f " if force else "") + commit_id, repo) + else: # branch + execute_shell_command("git checkout " + ("-f " if force else "") + f"-t {remote}/{branch}", repo) execute_shell_command("git submodule update --init --recursive ", repo) -def update(repo, update_remote="autobuild", update_target_branch="$CI_UPDATE_TARGET_BRANCH"): +def update(repo, update_remote=GIT_REMOTE_NAME, update_target_branch=GIT_FAILURE_BRANCH): execute_shell_command("git stash", repo) - execute_shell_command("git branch " + update_target_branch + " || true", repo) - execute_shell_command("git checkout " + update_target_branch, repo) - execute_shell_command("git remote -v update || true", repo) - execute_shell_command("git fetch --all || true", repo) - execute_shell_command("git reset --hard " + update_remote + "/" + update_target_branch + " || true", repo) + execute_shell_command("git "+GIT_C_OPTION+" remote -v update || true", repo) + execute_shell_command("git "+GIT_C_OPTION+" fetch --all || true", repo) + try: + execute_shell_command("git branch -D " + update_target_branch + " || true", repo) + execute_shell_command(f"git checkout -f -t {update_remote}/{update_target_branch} -b {update_target_branch}", repo) + except: + execute_shell_command("git branch " + update_target_branch + " || true", repo) + execute_shell_command("git checkout -f --theirs " + update_target_branch, repo) + execute_shell_command("git "+GIT_C_OPTION+" reset --hard " + update_remote + "/" + update_target_branch + " || true", repo) + execute_shell_command("git "+GIT_C_OPTION+" config pull.rebase false", repo) if update_target_branch.startswith("$"): update_target_branch = os.environ[update_target_branch[1:]] - if update_target_branch == "master": - execute_shell_command("git pull " + update_remote + " $CI_UPDATE_TARGET_BRANCH -s ours || true", repo) - else: # 
 
 
-def update(repo, update_remote="autobuild", update_target_branch="$CI_UPDATE_TARGET_BRANCH"):
+def update(repo, update_remote=GIT_REMOTE_NAME, update_target_branch=GIT_FAILURE_BRANCH):
     execute_shell_command("git stash", repo)
-    execute_shell_command("git branch " + update_target_branch + " || true", repo)
-    execute_shell_command("git checkout " + update_target_branch, repo)
-    execute_shell_command("git remote -v update || true", repo)
-    execute_shell_command("git fetch --all || true", repo)
-    execute_shell_command("git reset --hard " + update_remote + "/" + update_target_branch + " || true", repo)
+    execute_shell_command("git "+GIT_C_OPTION+" remote -v update || true", repo)
+    execute_shell_command("git "+GIT_C_OPTION+" fetch --all || true", repo)
+    try:
+        execute_shell_command("git branch -D " + update_target_branch + " || true", repo)
+        execute_shell_command(f"git checkout -f -t {update_remote}/{update_target_branch} -b {update_target_branch}", repo)
+    except:
+        execute_shell_command("git branch " + update_target_branch + " || true", repo)
+        execute_shell_command("git checkout -f --theirs " + update_target_branch, repo)
+    execute_shell_command("git "+GIT_C_OPTION+" reset --hard " + update_remote + "/" + update_target_branch + " || true", repo)
+    execute_shell_command("git "+GIT_C_OPTION+" config pull.rebase false", repo)
     if update_target_branch.startswith("$"):
         update_target_branch = os.environ[update_target_branch[1:]]
-    if update_target_branch == "master":
-        execute_shell_command("git pull " + update_remote + " $CI_UPDATE_TARGET_BRANCH -s ours || true", repo)
-    else:  # update_target_branch == "$CI_UPDATE_TARGET_BRANCH"
-        execute_shell_command("git pull " + update_remote + " master -s ours || true", repo)
+    # we use a reset to GIT_PRODUCTION_BRANCH plus the merge strategy 'ours' to merge without conflicts from the production branch and build upon this (#TODO maybe we should rename this method)
+    if update_target_branch == GIT_SUCCESS_BRANCH:
+        # if we are pushing to GIT_SUCCESS_BRANCH, everything that happened on GIT_FAILURE_BRANCH is now obsolete.
+        # To keep the tree clean we merge GIT_FAILURE_BRANCH to close open merge requests and show that development on the
+        # formerly failed version has been finished
+        # if there was no failure before, this is already in the tree anyway
+        try:
+            execute_shell_command("git "+GIT_C_OPTION+" pull " + update_remote + " "+GIT_FAILURE_BRANCH+" -s ours --no-ff --commit --no-edit", repo)
+        except Exception as e:
+            log.warning(f"Pull failed: {e}")
+            execute_shell_command("git branch "+GIT_FAILURE_BRANCH, repo)
+            execute_shell_command("git checkout "+GIT_FAILURE_BRANCH, repo)
+    elif GIT_SUCCESS_BRANCH != GIT_PRODUCTION_BRANCH:  # update_target_branch == GIT_FAILURE_BRANCH
+        # if we are pushing to GIT_FAILURE_BRANCH we merge changes from the last good version to get the tree clean and
+        # continue upon that version
+        try:
+            execute_shell_command("git "+GIT_C_OPTION+" pull " + update_remote + " "+GIT_SUCCESS_BRANCH+" -s ours --no-ff --commit --no-edit", repo)
+        except Exception as e:
+            log.warning(f"Pull failed: {e}")
+            execute_shell_command("git branch "+GIT_SUCCESS_BRANCH, repo)
+            execute_shell_command("git checkout "+GIT_SUCCESS_BRANCH, repo)
+    # pull the latest changes from the production branch if there is one
+    try:
+        execute_shell_command("git "+GIT_C_OPTION+" pull " + update_remote + " "+GIT_PRODUCTION_BRANCH+" -s ours --no-ff --commit --no-edit", repo)
+    except Exception as e:
+        log.warning(f"Pull failed: {e}")
+        execute_shell_command("git branch "+GIT_PRODUCTION_BRANCH, repo)
+        execute_shell_command("git checkout "+GIT_PRODUCTION_BRANCH, repo)
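Because the branch layout and remote name are read from the environment at import time, a CI job has to configure them before phobos.utils.git is imported — a sketch with hypothetical branch names:

    import os

    os.environ["GIT_SUCCESS_BRANCH"] = "main"
    os.environ["GIT_PRODUCTION_BRANCH"] = "production"
    os.environ["GIT_FAILURE_BRANCH"] = "ci/failed-update"
    os.environ["GIT_REMOTE_NAME"] = "autobuild"

    from phobos.utils import git  # picks up the values above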
"" if merge_request is None else merge_request.get_options() - execute_shell_command("git push " + remote + " " + branch + " " + options, repo, dry_run=False) + execute_shell_command("git "+GIT_C_OPTION+" remote add " + target_remote_name + " " + target_remote_url + " || true", repo) + execute_shell_command("git "+GIT_C_OPTION+" remote -v update || true", repo) + + +def push(repo, remote="target_remote", branch=GIT_FAILURE_BRANCH, merge_request=None): + if RUNNING_FOR_AZURE: + _, _, url = get_repo_data(repo, remote_name=remote) + log.info(f"Pushing to remote: {remote}/{branch} at {url}") + execute_shell_command("git "+GIT_C_OPTION+" push " + remote + " " + branch , repo, dry_run=False, verbose=True) + if merge_request is not None: + first_part, repository_name = url.split("/_git/") + _, org_and_project = first_part.split(".com/") + org, project = org_and_project.split("/") + _tb = " --target-branch \""+merge_request.target+"\"" + _sb = " --source-branch \""+branch+"\"" + _org = " --organization \"https://dev.azure.com/"+org+"/\"" + _p = " --project \""+project+"\"" + _r = " --repository \""+repository_name+"\"" + _t = " --title \""+merge_request.title+"\"" + _d = " -d \""+merge_request.description.strip().replace('"', "'").replace("\n", "\" \"").replace("--", "#") +"\"" + if merge_request.mention: + users = merge_request.mention.split(";") + # user_ids = [] + # for user in users: + # try: + # std, _ = execute_shell_command("az ad user list --display-name "+user.strip()) + # entry = load_json(std) + # user_ids.append(entry[0]["id"]) + # except: + # log.error("AZ: Couldn't obtain user id for name: "+user) + _rv = " --reviewers" + for user in users: + _rv += f" \"{user}\"" + try: + cmd = "az repos pr create" + execute_shell_command(cmd + _tb + _sb + _org + _p + _r + _t + _d + _rv, repo, dry_run=False, verbose=True) + except: + # the command probably failed because the pr is already there + cmd = "az repos pr list" + pr, _ = execute_shell_command(cmd +_tb + _p + _r, repo, dry_run=False, verbose=True) + pr_dict = load_json(pr) + _pr = " --id " + str(pr_dict[0]["pullRequestId"]) + cmd = "az repos pr update" + execute_shell_command(cmd + _pr + _d + _org + _t + " || true", repo, dry_run=False, verbose=True) + try: + cmd = "az repos pr reviewer add" + execute_shell_command(cmd + _pr + _org + _rv + " || true", repo, dry_run=False, verbose=True) + except: + # probably some user is already assigned, and because azure is stupid it fails instead of being happy about less work + # we don't handle this yet + pass + else: + options = "" if merge_request is None else merge_request.get_options() + execute_shell_command("git "+GIT_C_OPTION+" push " + remote + " " + branch + " " + options, repo, dry_run=False, verbose=True) return os.environ[branch[1:]] if branch.startswith("$") else branch @@ -157,17 +256,20 @@ def clear_repo(repo): def add_submodule(repo, remote, path, commit=None, branch="master"): - execute_shell_command("git remote rename autobuild origin || true", repo) - execute_shell_command("git submodule add -b " + branch + " -f " + remote + " " + path, repo) - execute_shell_command("git submodule update --init --recursive " + path, repo) + if GIT_REMOTE_NAME != "origin": + execute_shell_command(f"git remote rename {GIT_REMOTE_NAME} origin || true", repo) + execute_shell_command("git "+GIT_C_OPTION+" submodule add -b " + branch + " -f " + remote + " " + path, repo) + execute_shell_command("git "+GIT_C_OPTION+" submodule update --init --recursive " + path, repo) if commit is not None: - 
execute_shell_command("git remote update", os.path.join(repo, path)) + execute_shell_command("git "+GIT_C_OPTION+" remote update", os.path.join(repo, path)) execute_shell_command("git checkout --detach " + commit, os.path.join(repo, path)) - execute_shell_command("git remote rename origin autobuild || true", repo) + if GIT_REMOTE_NAME != "origin": + execute_shell_command(f"git remote rename origin {GIT_REMOTE_NAME} || true", repo) def install_lfs(repo, track): - execute_shell_command("git remote rename autobuild origin || true", repo) + if GIT_REMOTE_NAME != "origin": + execute_shell_command(f"git remote rename {GIT_REMOTE_NAME} origin || true", repo) execute_shell_command("git lfs install || true", repo, silent=True) if ".gitattributes" not in os.listdir(repo) or False: if type(track) is str: @@ -177,7 +279,8 @@ def install_lfs(repo, track): for t in track: execute_shell_command("git lfs track '" + t.lower() + "' || true", repo, silent=True) execute_shell_command("git lfs track '" + t.upper() + "' || true", repo, silent=True) - execute_shell_command("git remote rename origin autobuild || true", repo) + if GIT_REMOTE_NAME != "origin": + execute_shell_command(f"git remote rename origin {GIT_REMOTE_NAME} || true", repo) execute_shell_command("git add .gitattributes || true", repo) @@ -189,24 +292,21 @@ def ignore(repo, ignore): if os.path.isfile(ign_file): with open(ign_file, "r") as f: content = [l if not l.endswith("\n") else l[:-1] for l in f.readlines()] - with open(".gitignore", "a") as f: + with open(ign_file, "a") as f: for i in ignore: if i not in content: f.write(i+"\n") -def get_repo_data(directory): +def get_repo_data(directory, remote_name=GIT_REMOTE_NAME): author, _ = execute_shell_command("git show -s | grep Author || true", directory) - author = " ".join(author.split()[1:]) - maintainer, _ = execute_shell_command("git config user.name; git config user.email", directory, silent=True) - maintainer = " ".join(maintainer.split()) - if maintainer == " ": - maintainer = "" + author = " ".join(author.split()[1:]).strip() + maintainer, _ = execute_shell_command("git config user.name || true; git config user.email || true", directory, silent=True) + maintainer = " ".join(maintainer.split()).strip() if author == "": author = maintainer - url, _ = execute_shell_command("git remote get-url --push autobuild || true", directory, silent=True) - if url == "": - url, _ = execute_shell_command("git remote get-url --push origin || true", directory, silent=True) + url, _ = execute_shell_command(f"git remote get-url --push {remote_name} || true", directory, silent=True) + assert url != "" return author.strip().replace("<", "").replace(">", ""), maintainer.strip().replace("<", "").replace(">", ""),\ url.strip() diff --git a/phobos/utils/misc.py b/phobos/utils/misc.py index 751ea538..fefc339e 100644 --- a/phobos/utils/misc.py +++ b/phobos/utils/misc.py @@ -2,12 +2,14 @@ import platform import re import subprocess +import glob from copy import deepcopy from xml.dom.minidom import parseString from xml.etree import ElementTree as ET import numpy as np +from ..defs import load_json from ..commandline_logging import get_logger log = get_logger(__name__) @@ -19,6 +21,13 @@ def duplicate(obj, link_obj=False): return deepcopy(obj) +def abspath(in_path, base=os.getcwd()): + out = os.path.expanduser(os.path.expandvars(in_path)) + if not os.path.isabs(out): + out = os.path.join(base, in_path) + return os.path.normpath(os.path.expanduser(os.path.expandvars(out))) + + def to_pretty_xml_string(xml): if type(xml) 
 def sys_path(path):
     if path is None:
         return path
@@ -104,7 +127,98 @@ def trunc(values, decimals=0):
     return np.trunc(values*10**decimals)/(10**decimals)
 
 
+def is_int(val):
+    if type(val) == int:
+        return True
+    try:
+        int(val)
+        return True
+    except ValueError:
+        return False
+
+
+def is_float(val):
+    if type(val) == float:
+        return True
+    try:
+        float(val)
+        return True
+    except ValueError:
+        return False
+
+
+def serialize(entry, float_fmt=None, **kwargs) -> str:
+    """
+    Serializes the given entry to a string
+
+    Args:
+        entry: The entry to serialize
+        float_fmt: the float format in the style "%.6f"
+
+    Returns:
+        str: the serialized entry
+    """
+    assert entry is not None
+    if hasattr(entry, "tolist"):
+        entry = entry.tolist()
+    if type(entry) in [list, tuple, np.ndarray]:
+        entry = [serialize(v, float_fmt=float_fmt) for v in entry]
+        return " ".join(entry)
+    elif type(entry) in [int, np.intc, np.int64, bool]:
+        return str(int(entry))
+    elif type(entry) in [float, np.float64]:
+        return float_fmt % entry if float_fmt is not None else str(entry)
+    else:
+        return str(entry)
+
+
+def deserialize(string: str, key=None):
+    string = string.strip()
+    if " " in string and key not in ["uri", "url", "file", "filepath", "filename"]:
+        _list = string.split()
+        if all([is_int(v) for v in _list]):
+            return [int(v) for v in _list]
+        elif all([is_float(v) for v in _list]):
+            return [float(v) for v in _list]
+        else:
+            return _list
+    elif is_int(string):
+        return int(string)
+    elif is_float(string):
+        return float(string)
+    else:
+        return string
+
+
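Taken together these helpers round-trip scalar and vector values through their string form; for instance:

    serialize([0.1, 0.25, 3.0], float_fmt="%.3f")  # -> "0.100 0.250 3.000"
    deserialize("0.1 0.25 3")                      # -> [0.1, 0.25, 3.0]
    deserialize("1 2 3")                           # -> [1, 2, 3]
    deserialize("my mesh.obj", key="file")         # -> "my mesh.obj" (file-ish keys are never split)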
+def get_var(object, varname_string, default="ABORT", float_fmt=None):
+    # if the var name starts with @ it shall be taken as is (a literal)
+    if varname_string.startswith("@"):
+        return varname_string[1:]
+    # if varname contains a | the parts will be evaluated separately and concatenated (only possible for strings)
+    if "|" in varname_string:
+        var = ""
+        for part in varname_string.split("|"):
+            var += serialize(get_var(object, part, default=default), float_fmt=float_fmt)
+        return var
+    # if varname contains . it will access sub values of a var
+    if "." in varname_string:
+        var = object
+        for var_part in varname_string.split("."):
+            if default == "ABORT":
+                var = getattr(var, var_part)
+            else:
+                log.debug(f"{varname_string} of {repr(object)} does not exist returning {default}")
+                var = getattr(var, var_part, default)
+    else:
+        if default == "ABORT":
+            var = getattr(object, varname_string)
+        else:
+            if not hasattr(object, varname_string):
+                log.debug(f"{varname_string} of {repr(object)} does not exist returning {default}")
+            var = getattr(object, varname_string, default)
+    return var
+
+
-def regex_replace(string, replacements, verbose=False):
+def regex_replace(string, replacements, verbose=False, correspondance=None, float_fmt=None):
     """
     In string applies all replacements defined in replacements dict.
     It can be a list of dicts or a dict of strings that shall be replaced.
@@ -114,6 +228,8 @@ def regex_replace(string, replacements, verbose=False):
     after = before
     if type(replacements) is dict:
         for old, new in replacements.items():
+            if new.startswith("~") and correspondance is not None:
+                new = str(get_var(correspondance, new[1:], float_fmt=float_fmt))
             after = re.sub(
                 r"" + old,
                 new,
@@ -122,6 +238,8 @@ def regex_replace(string, replacements, verbose=False):
     elif type(replacements) is list:
         for entry in replacements:
             for old, new in entry.items():
+                if new.startswith("~") and correspondance is not None:
+                    new = str(get_var(correspondance, new[1:], float_fmt=float_fmt))
                 after = re.sub(
                     r"" + old,
                     new,
@@ -165,16 +283,19 @@ def execute_shell_command(cmd, cwd=None, dry_run=False, silent=False, verbose=Fa
     log.debug(("Skipping" if dry_run else "Executing") + f": {cmd} in {cwd}")
     if dry_run:
         return "", ""
+
+    if verbose:
+        log.info("Executing: "+cmd+" in "+cwd)
+    elif not silent:
+        log.debug("Executing: "+cmd+" in "+cwd)
     proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=cwd, executable="/bin/bash")
     # proc.wait()
     (out, err) = proc.communicate()
     if verbose:
-        log.info("Executing: "+cmd+" in "+cwd)
         log.info(out.decode('UTF-8'))
         log.info(err.decode('UTF-8'))
         log.info(f"Subprocess returned {proc.returncode}")
     elif not silent:
-        log.debug("Executing: "+cmd+" in "+cwd)
         log.debug(out.decode('UTF-8'))
         log.debug(err.decode('UTF-8'))
         log.debug(f"Subprocess returned {proc.returncode}")
@@ -209,7 +330,7 @@ def get_thumbnail(robotfile, icon_size=512):
     if robotfile.lower().endswith(".smurf"):
         base_dir = os.path.dirname(robotfile)
         with open(robotfile, "r") as f:
-            robotfile = [x for x in yaml.safe_load(f.read())["files"] if x.endswith("df")][0]
+            robotfile = [x for x in load_json(f.read())["files"] if x.endswith("df")][0]
         robotfile = os.path.normpath(os.path.join(base_dir, robotfile))
     # use pybullet to get an image
     import pybullet as pb
@@ -269,40 +390,46 @@ def recreate_dir(pipeline, directory):
     create_dir(pipeline, directory)
 
 
-def copy(pipeline, src, dst, silent=False):
+def copy(pipeline, src, dst, silent=True):
     if not silent:
-        log.debug(f"Copying {pipeline.relpath(src) if pipeline is not None else src} to {pipeline.relpath(dst) if pipeline is not None else dst}")
+        log.info(f"Copying {pipeline.relpath(src) if pipeline is not None else src} to {pipeline.relpath(dst) if pipeline is not None else dst}")
     execute_shell_command("mkdir -p {} || true".format(os.path.dirname(dst)), silent=True)
     os.system("cp -rP {} {}".format(src, dst))
 
 
 def store_persisting_files(pipeline, repo, list_of_files, temp_dir):
     log.debug("Storing the following files:")
+    expanded_files = []
     for f in list_of_files:
+        expanded_files += glob.glob(f)
+    for f in expanded_files:
         src = os.path.join(repo, f)
         dst = os.path.join(temp_dir, f)
         create_dir(pipeline, os.path.dirname(dst))
         log.debug(pipeline.relpath(src))
-        copy(pipeline, src, dst, silent=False)
+        copy(pipeline, src, os.path.dirname(dst)+"/", silent=False)
 
 
 def restore_persisting_files(pipeline, repo, list_of_files, temp_dir):
     log.debug("Restoring the following files:")
+    expanded_files = []
    for f in list_of_files:
+        expanded_files += glob.glob(f)
+    for f in expanded_files:
         src = os.path.join(temp_dir, f)
         dst = os.path.join(repo, f)
         create_dir(pipeline, os.path.dirname(dst))
         log.debug(pipeline.relpath(dst))
-        copy(pipeline, src, dst, silent=False)
+        copy(pipeline, src, os.path.dirname(dst)+"/", silent=False)
 
 
-def edit_name_string(name, prefix=None, suffix=None, replacements=None):
+def edit_name_string(name, prefix=None, suffix=None, replacements=None, do_not_double=True, correspondance=None, float_fmt="%.3f"):
     if replacements is None:
         replacements = {}
-    name = regex_replace(name, replacements)
-    if prefix:
+    name = regex_replace(name, replacements, correspondance=correspondance, float_fmt=float_fmt)
+    if prefix and (not do_not_double or not name.startswith(prefix)):
         name = str(prefix) + name
-    if suffix:
+    if suffix and (not do_not_double or not name.endswith(suffix)):
         name = name + str(suffix)
     return name
@@ -361,3 +488,42 @@ def color_parser(*args, rgba=None):
 
 def to_hex_color(color_as_list):
     return "#" + "".join([f"{int(x * 255):02x}" for x in color_as_list])
+
+
+def singular(prop):
+    """
+    Makes sure the prop is single and not a list
+    Args:
+        prop: The property to check
+
+    Returns:
+        prop as a single element
+    Raises:
+        AssertionError if prop is a list with more than one element
+    """
+    if type(prop) == list:
+        if len(prop) == 1:
+            return prop[0]
+        elif len(prop) == 0:
+            return None
+        else:
+            raise AssertionError(f"Got list with content {prop} where only a single element is allowed.")
+    else:
+        return prop
+
+
+def plural(prop):
+    """
+    Makes sure prop is a list
+    Args:
+        prop: The property to check
+
+    Returns:
+        prop as list
+    """
+    if type(prop) == list:
+        return prop
+    elif prop is None:
+        return []
+    else:
+        return [prop]
\ No newline at end of file
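The @/|/~ mini-syntax introduced above resolves replacement values from a correspondance object: "@..." is taken as a literal, "|" concatenates serialized parts, and a leading "~" in a regex_replace replacement value looks the remainder up via get_var. A sketch with a hypothetical object:

    class Joint:          # hypothetical stand-in
        name = "shoulder"
        limit = 1.5

    j = Joint()
    get_var(j, "name")                                # -> "shoulder"
    get_var(j, "name|@_at_|limit", float_fmt="%.3f")  # -> "shoulder_at_1.500"
    edit_name_string("raw", replacements={"raw": "~name"}, correspondance=j)  # -> "shoulder"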
diff --git a/phobos/utils/xml.py b/phobos/utils/xml.py
index d3c297f9..87b9c239 100644
--- a/phobos/utils/xml.py
+++ b/phobos/utils/xml.py
@@ -87,7 +87,7 @@ def read_relative_filename(filename, start_file_path):
     if start_file_path.split(".")[-1] in IMPORT_TYPES:
         start_file_path = os.path.dirname(start_file_path)  # /bla/blub/xyz/blib.xyz -> /bla/blub/xyz
     if filename.startswith("package:"):  # ROS Package
-        if os.path.basename(start_file_path) in IMPORT_TYPES+["xacro"]:
+        if os.path.basename(start_file_path) in IMPORT_TYPES:
             package_dir = os.path.dirname(start_file_path)  # /bla/blub/xyz -> /bla/blub
         else:
             raise IOError("Can't derive package_dir from " + start_file_path + ". 
" From 646ceaffe2f1b5e4c67d46b406852b23d2c5ed1e Mon Sep 17 00:00:00 2001 From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com> Date: Fri, 14 Jun 2024 11:08:30 +0200 Subject: [PATCH 21/25] Fix #363 --- phobos/core/multiple.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/phobos/core/multiple.py b/phobos/core/multiple.py index 5233bc09..4bb72bdd 100644 --- a/phobos/core/multiple.py +++ b/phobos/core/multiple.py @@ -173,9 +173,9 @@ def __init__(self, name=None, inputfile=None, entities=None, frames=None, **kwar SmurfBase.__init__(self, returns=["entities"], **kwargs) self.name = name self.entities = _plural(entities) - self.inputfile = os.path.abspath(inputfile) self._frames = _plural(frames) - if self.inputfile is not None: + if self.inputfile is not None: + self.inputfile = os.path.abspath(inputfile) ext = self.inputfile.lower().rsplit(".", 1)[-1] if ext == "sdf": # [Todo v2.1.0] @@ -381,4 +381,4 @@ def expot_smurfs(self, outputfile): if not os.path.exists(os.path.dirname(outputfile)): os.makedirs(os.path.dirname(outputfile), exist_ok=True) with open(outputfile, "w") as f: - f.write(dump_json(out)) \ No newline at end of file + f.write(dump_json(out)) From 271f34c08d1ba3a2607fe76f7c1fe526461a0092 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com> Date: Fri, 14 Jun 2024 11:19:45 +0200 Subject: [PATCH 22/25] Bugfix for 646ceaf --- phobos/core/multiple.py | 1 + 1 file changed, 1 insertion(+) diff --git a/phobos/core/multiple.py b/phobos/core/multiple.py index 4bb72bdd..e53736b3 100644 --- a/phobos/core/multiple.py +++ b/phobos/core/multiple.py @@ -172,6 +172,7 @@ def __init__(self, name=None, inputfile=None, entities=None, frames=None, **kwar Representation.__init__(self) SmurfBase.__init__(self, returns=["entities"], **kwargs) self.name = name + self.inputfile = inputfile self.entities = _plural(entities) self._frames = _plural(frames) if self.inputfile is not None: From 1e4cc8e1456342ea210fa9227f58f6f14cca6f6e Mon Sep 17 00:00:00 2001 From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:21:42 +0200 Subject: [PATCH 23/25] Fix execute_shell_command for Windows --- phobos/ci/base_model.py | 2 +- phobos/utils/misc.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py index d1d6c5b7..94d8a519 100644 --- a/phobos/ci/base_model.py +++ b/phobos/ci/base_model.py @@ -866,7 +866,7 @@ def export(self): script["cwd"] = self.exportdir else: script["cwd"] = misc.abspath(script["cwd"]) - misc.execute_shell_command(script["cmd"], script["cwd"], verbose=True) + misc.execute_shell_command(script["cmd"], script["cwd"], verbose=True, executable=script.get("executable", None)) log.info('Finished post_processing of the new model') if ros_pkg_name is not None: diff --git a/phobos/utils/misc.py b/phobos/utils/misc.py index fefc339e..d6d44d8e 100644 --- a/phobos/utils/misc.py +++ b/phobos/utils/misc.py @@ -276,7 +276,7 @@ def is_binary_file(filepath): return bool(f.read(256).translate(None, textchars)) -def execute_shell_command(cmd, cwd=None, dry_run=False, silent=False, verbose=False): +def execute_shell_command(cmd, cwd=None, dry_run=False, silent=False, verbose=False, executable=None): if cwd is None: cwd = os.getcwd() if not silent: @@ -288,7 +288,10 @@ def execute_shell_command(cmd, cwd=None, dry_run=False, silent=False, verbose=Fa log.info("Executing: "+cmd+" in "+cwd) elif not silent: log.debug("Executing: "+cmd+" 
in "+cwd) - proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=cwd, executable="/bin/bash") + if executable: + proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=cwd, executable=executable) + else: + proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=cwd) # proc.wait() (out, err) = proc.communicate() if verbose: From c95092527bde4a0754d51293a51c0206b3bcb2e7 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:22:26 +0200 Subject: [PATCH 24/25] Fix faulty indentation --- phobos/ci/base_model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py index 94d8a519..98fdbf11 100644 --- a/phobos/ci/base_model.py +++ b/phobos/ci/base_model.py @@ -945,10 +945,10 @@ def deploy(self, mesh_commit, failed_model=False, uses_lfs=False): with open(readme_path, "w") as readme: readme.write(content) if uses_lfs: - readme_content = open(readme_path, "r").read() - if "Git LFS" not in readme_content: - with open(readme_path, "a") as readme: - readme.write(open(resources.get_resources_path("GitLFS_README.md.in")).read()) + readme_content = open(readme_path, "r").read() + if "Git LFS" not in readme_content: + with open(readme_path, "a") as readme: + readme.write(open(resources.get_resources_path("GitLFS_README.md.in")).read()) # update additional submodules if "submodules" in self.deployment: for subm in self.deployment["submodules"]: From 5928c9b22ad16147a5019a9a8c700653e8b48809 Mon Sep 17 00:00:00 2001 From: Henning Wiedemann <35524093+hwiedPro@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:36:38 +0200 Subject: [PATCH 25/25] Bump version to 2.1.0 --- README.md | 4 ++++ codemeta.json | 4 ++-- phobos/__init__.py | 2 +- phobos/blender/operators/editing.py | 16 ++++++++-------- phobos/blender/phobosgui.py | 2 +- phobos/ci/base_model.py | 3 +-- phobos/core/robot.py | 2 +- phobos/geometry/io.py | 2 +- phobos/io/representation.py | 6 +++--- phobos/io/sensor_representations.py | 2 +- phobos/scripts/phobos.py | 2 +- phobos/utils/xml.py | 2 +- 12 files changed, 25 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 06347626..bcdc0fc4 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,10 @@ Please contact [Henning Wiedemann](https://robotik.dfki-bremen.de/de/ueber-uns/m for any inquiries, or any questions and feedback not suited for the issues page. +## Version 2.1.0 +Version 2.1.0 refactors especially the phobos-ci usage and improves the configurability by config inheritance. However these are breaking changes to the pipeline configuration. See PR #364 for more details on the changes. + + ## Version 2.0.0 With version 2.0.0 we did a refactoring of Phobos and its now possible to use phobos as a normal python package and command line tool (see below). diff --git a/codemeta.json b/codemeta.json index 554eb98a..329f7313 100644 --- a/codemeta.json +++ b/codemeta.json @@ -12,12 +12,12 @@ "identifier": "", "codeRepository": "https://github.com/dfki-ric/phobos", "datePublished": "2019-02-01", - "dateModified": "2022-08-30", + "dateModified": "2024-07-03", "dateCreated": "2019-02-01", "description": "Phobos is a python tool for processing simulation models. 
It comes with a Blender addon to create, edit and annotate robot models in a WYSIWYG environment and a python package/CLI tool to do this automated.", "keywords": "robotics, Blender, Python, URDF, SDF, model, MARS, SMURF, CLI", "license": "BSD-3-Clause", "title": "Phobos", - "version": "2.0.0", + "version": "2.1.0", "citation": "https://doi.org/10.21105/joss.01326" } diff --git a/phobos/__init__.py b/phobos/__init__.py index a5fdcd43..795ff210 100644 --- a/phobos/__init__.py +++ b/phobos/__init__.py @@ -23,7 +23,7 @@ pass # Phobos information -version = '2.0.0 "Perilled Pangolin"' +version = '2.1.0 "Perilled Pangolin"' repository = 'https://github.com/dfki-ric/phobos' bl_info = { diff --git a/phobos/blender/operators/editing.py b/phobos/blender/operators/editing.py index e48766d4..a83a911c 100644 --- a/phobos/blender/operators/editing.py +++ b/phobos/blender/operators/editing.py @@ -2337,7 +2337,7 @@ def execute(self, context): return {'FINISHED'} # -# # [TODO v2.0.0] REVIEW this +# # [TODO v2.2.0] REVIEW this # def addControllerFromYaml(controller_dict, annotations, selected_objs, active_obj, *args): # """Execution function for the temporary operator to add controllers from yaml files. # @@ -2382,7 +2382,7 @@ def execute(self, context): # return controller_objs, annotation_objs, [] # # -# # [TODO v2.0.0] REVIEW this +# # [TODO v2.2.0] REVIEW this # class AddControllerOperator(Operator): # """Add a controller at the position of the selected object.""" # @@ -2535,7 +2535,7 @@ def execute(self, context): # return {'FINISHED'} # # -# # [TODO v2.0.0] REVIEW this +# # [TODO v2.2.0] REVIEW this # def getControllerParameters(name): # """Returns the controller parameters for the controller type with the provided # name. @@ -2552,7 +2552,7 @@ def execute(self, context): # return [] # # -# # [TODO v2.0.0] REVIEW this +# # [TODO v2.2.0] REVIEW this # def getDefaultControllerParameters(scene, context): # """Returns the default controller parameters for the controller of the active # object. @@ -2571,7 +2571,7 @@ def execute(self, context): # return None # -# [TODO v2.0.0] REVIEW this +# [TODO v2.2.0] REVIEW this class CreateMimicJointOperator(Operator): """Make a number of joints follow a specified joint""" @@ -2768,7 +2768,7 @@ def invoke(self, context, event): return {'RUNNING_MODAL'} -# [TODO v2.0.0] REVIEW this +# [TODO v2.2.0] REVIEW this class AddSubmodel(Operator): """Add a submodel instance to the scene""" @@ -2897,7 +2897,7 @@ def execute(self, context): return {'FINISHED'} -# [TODO v2.0.0] REVIEW this +# [TODO v2.2.0] REVIEW this class DefineSubmodel(Operator): """Define a new submodel from objects""" @@ -3273,7 +3273,7 @@ def execute(self, context): -# [TODO v2.0.0] REVIEW this +# [TODO v2.2.0] REVIEW this class MergeLinks(Operator): """Merge links""" diff --git a/phobos/blender/phobosgui.py b/phobos/blender/phobosgui.py index bf086e8b..5c091ae7 100644 --- a/phobos/blender/phobosgui.py +++ b/phobos/blender/phobosgui.py @@ -849,7 +849,7 @@ def draw(self, context): ) -# [TODO v2.0.0] Improve e.g. add edit tools +# [TODO v2.2.0] Improve e.g. 
add edit tools class PhobosPropertyInformationPanel(bpy.types.Panel): """Contains all properties sorted in different categories""" diff --git a/phobos/ci/base_model.py b/phobos/ci/base_model.py index 98fdbf11..732a6495 100644 --- a/phobos/ci/base_model.py +++ b/phobos/ci/base_model.py @@ -139,7 +139,6 @@ def __init__(self, configfile, pipeline, processed_model_exists=True, configkey= assert hasattr(self, "assemble") and len(self.assemble) > 0 # list directly imported mesh pathes - # [TODO pre_v2.0.0] REVIEW mesh usage self._meshes = [] for _, v in self.input_models.items(): if "basefile" in v.keys(): @@ -566,7 +565,7 @@ def process(self): if self.robot.get_joint(jointname) is None and ("cut_joint" not in config or config["cut_joint"] is False): faulty_joint_defs += [(jointname, [str(j) for j in self.robot.joints if jointname in str(j) or str(j) in jointname])] elif self.robot.get_joint(jointname) is None and ("cut_joint" in config and config["cut_joint"] is True): # cut_joint - # [TODO v2.0.0] Review and Check whether this works as expected + # [TODO v2.2.0] Review and Check whether this works as expected # Check whether everything is given and calculate origin and axis (?) _joint = representation.Joint(**config) assert "constraint_axes" in config diff --git a/phobos/core/robot.py b/phobos/core/robot.py index 4a23d0a8..22d24041 100755 --- a/phobos/core/robot.py +++ b/phobos/core/robot.py @@ -124,7 +124,7 @@ def export_urdf(self, outputfile, float_fmt_dict=None, ros_pkg=False, copy_with_ outputfile = os.path.abspath(outputfile) export_robot = self.duplicate() - # [ToDo v2.0.0] When converting an SDF to an URDF the relative_to information of the Poses will get lost. + # [ToDo v2.2.0] When converting an SDF to an URDF the relative_to information of the Poses will get lost. 
# Therefore we need to check whether the origin have the by URDF expected frames and transform them when necessary if mesh_format is not None: diff --git a/phobos/geometry/io.py b/phobos/geometry/io.py index d34045f2..18657346 100644 --- a/phobos/geometry/io.py +++ b/phobos/geometry/io.py @@ -46,7 +46,7 @@ def as_trimesh(scene_or_mesh, scale=None, silent=False): vertices = np.asarray([np.asarray(scale * v.co) for v in blender_mesh.vertices]) faces = np.array([[v for v in p.vertices] for p in blender_mesh.polygons], dtype=np.int64) if not silent: - # [TODO v2.0.0] Resolve the following error + # [TODO v2.2.0] Resolve the following error log.error("Received a blender mesh, porting uv maps to trimesh representation are not yet supported and thus will be lost.") mesh = trimesh.Trimesh(vertices=vertices, faces=trimesh.geometry.triangulate_quads(faces)) else: diff --git a/phobos/io/representation.py b/phobos/io/representation.py index 131a704a..a2ff580f 100644 --- a/phobos/io/representation.py +++ b/phobos/io/representation.py @@ -387,19 +387,19 @@ def referenced_name(self, value): self.name = value def check_valid(self): - # [TODO v2.0.0] REVIEW add other colors here + # [TODO v2.2.0] REVIEW add other colors here if self.diffuse is None and self.diffuseTexture is None: raise Exception("Material has neither a color nor texture.") def equivalent(self, other): - # [TODO v2.0.0] REVIEW add other colors here + # [TODO v2.2.0] REVIEW add other colors here return self == other or ( (self.diffuseTexture == other.diffuseTexture or (self.diffuseTexture is not None and self.diffuseTexture.equivalent(self.diffuseTexture))) and other.diffuse == self.diffuse ) def is_delegate(self): - # [TODO v2.0.0] REVIEW add other colors here + # [TODO v2.2.0] REVIEW add other colors here return self.diffuse is None and self.diffuseTexture is None @property diff --git a/phobos/io/sensor_representations.py b/phobos/io/sensor_representations.py index 6f897ed8..fedb0514 100644 --- a/phobos/io/sensor_representations.py +++ b/phobos/io/sensor_representations.py @@ -125,7 +125,7 @@ def __init__(self, name=None, link=None, **kwargs): self.returns += ['link'] -# [TODO v2.0.0] Check kwargs, internal computation and usage in mars +# [TODO v2.2.0] Check kwargs, internal computation and usage in mars class RotatingRaySensor(Sensor): _class_variables = ["name", "link", "bands", "draw_rays", "horizontal_offset", "horizontal_resolution", "opening_width", "lasers", "max_distance", "min_distance", "opening_height", "vertical_offset"] diff --git a/phobos/scripts/phobos.py b/phobos/scripts/phobos.py index f9b24132..ceeb37aa 100755 --- a/phobos/scripts/phobos.py +++ b/phobos/scripts/phobos.py @@ -12,7 +12,7 @@ def version(package_name): try: __version__ = version("phobos") except PackageNotFoundError: - __version__ = '2.0.0' + __version__ = '2.1.0' del version, PackageNotFoundError diff --git a/phobos/utils/xml.py b/phobos/utils/xml.py index 87b9c239..08704f45 100644 --- a/phobos/utils/xml.py +++ b/phobos/utils/xml.py @@ -60,7 +60,7 @@ def transform_object(obj, T, relative_to): def adapt_mesh_pathes(robot, new_urdf_dir, copy_to=None): - # [TODO pre_v2.0.0] Check whether this has become obsolete due to export adaption + # [TODO v2.2.0] Check whether this has become obsolete due to export adaption for link in robot.links: for geo in link.visuals + link.collisions: if isinstance(geo.geometry, representation.Mesh):