diff --git a/omnigibson/envs/env_base.py b/omnigibson/envs/env_base.py index f0ae1adcd..61db0dc0f 100644 --- a/omnigibson/envs/env_base.py +++ b/omnigibson/envs/env_base.py @@ -234,11 +234,12 @@ def _load_robots(self): og.sim.import_object(robot) robot.set_position_orientation(position=position, orientation=orientation) - # Auto-initialize all robots - og.sim.play() - self.scene.reset() - self.scene.update_initial_state() - og.sim.stop() + if len(self.robots_config) > 0: + # Auto-initialize all robots + og.sim.play() + self.scene.reset() + self.scene.update_initial_state() + og.sim.stop() assert og.sim.is_stopped(), "Simulator must be stopped after loading robots!" @@ -264,11 +265,12 @@ def _load_objects(self): og.sim.import_object(obj) obj.set_position_orientation(position=position, orientation=orientation) - # Auto-initialize all objects - og.sim.play() - self.scene.reset() - self.scene.update_initial_state() - og.sim.stop() + if len(self.objects_config) > 0: + # Auto-initialize all objects + og.sim.play() + self.scene.reset() + self.scene.update_initial_state() + og.sim.stop() assert og.sim.is_stopped(), "Simulator must be stopped after loading objects!" 
diff --git a/omnigibson/maps/segmentation_map.py b/omnigibson/maps/segmentation_map.py index 9c714057a..bf981ba66 100644 --- a/omnigibson/maps/segmentation_map.py +++ b/omnigibson/maps/segmentation_map.py @@ -3,6 +3,7 @@ import numpy as np from PIL import Image +Image.MAX_IMAGE_PIXELS = None import omnigibson as og from omnigibson.macros import gm diff --git a/omnigibson/maps/traversable_map.py b/omnigibson/maps/traversable_map.py index c3d7f2a91..69ea4f185 100644 --- a/omnigibson/maps/traversable_map.py +++ b/omnigibson/maps/traversable_map.py @@ -3,6 +3,7 @@ import cv2 import numpy as np from PIL import Image +Image.MAX_IMAGE_PIXELS = None from omnigibson.maps.map_base import BaseMap from omnigibson.utils.ui_utils import create_module_logger diff --git a/omnigibson/object_states/link_based_state_mixin.py b/omnigibson/object_states/link_based_state_mixin.py index 0f674f106..41143f382 100644 --- a/omnigibson/object_states/link_based_state_mixin.py +++ b/omnigibson/object_states/link_based_state_mixin.py @@ -2,6 +2,7 @@ from omnigibson.object_states.object_state_base import BaseObjectState from omnigibson.utils.ui_utils import create_module_logger from omnigibson.utils.python_utils import classproperty +from omnigibson.prims.cloth_prim import ClothPrim # Create module logger log = create_module_logger(module_name=__name__) @@ -104,8 +105,10 @@ def initialize_link_mixin(self): for name, link in self.obj.links.items(): if self.metalink_prefix in name or (self._default_link is not None and link.name == self._default_link.name): self._links[name] = link - assert np.allclose(link.scale, self.obj.scale), \ - f"the meta link {name} has a inconsistent scale with the object {self.obj.name}" + # Make sure the scale is similar if the link is not a cloth prim + if not isinstance(link, ClothPrim): + assert np.allclose(link.scale, self.obj.scale), \ + f"the meta link {name} has an inconsistent scale with the object {self.obj.name}" @classproperty def _do_not_register_classes(cls): 
diff --git a/omnigibson/objects/dataset_object.py b/omnigibson/objects/dataset_object.py index af78623f1..551e15da7 100644 --- a/omnigibson/objects/dataset_object.py +++ b/omnigibson/objects/dataset_object.py @@ -231,17 +231,21 @@ def _post_load(self): for material in self.materials: material.shader_update_asset_paths_with_root_path(root_path) - # Assign realistic density and mass based on average object category spec + # Assign realistic density and mass based on average object category spec, or fall back to a default value if self.avg_obj_dims is not None and self.avg_obj_dims["density"] is not None: - if self._prim_type == PrimType.RIGID: - for link in self._links.values(): - # If not a meta (virtual) link, set the density based on avg_obj_dims and a zero mass (ignored) - if link.has_collision_meshes: - link.mass = 0.0 - link.density = self.avg_obj_dims["density"] - - elif self._prim_type == PrimType.CLOTH: - self.root_link.mass = self.avg_obj_dims["density"] * self.root_link.volume + density = self.avg_obj_dims["density"] + else: + density = 1000.0 + + if self._prim_type == PrimType.RIGID: + for link in self._links.values(): + # If not a meta (virtual) link, set the density based on avg_obj_dims and a zero mass (ignored) + if link.has_collision_meshes: + link.mass = 0.0 + link.density = density + + elif self._prim_type == PrimType.CLOTH: + self.root_link.mass = density * self.root_link.volume def _update_texture_change(self, object_state): """ diff --git a/omnigibson/objects/usd_object.py b/omnigibson/objects/usd_object.py index b416c1b49..f614929cc 100644 --- a/omnigibson/objects/usd_object.py +++ b/omnigibson/objects/usd_object.py @@ -94,13 +94,12 @@ def _load(self): if self._encrypted: # Create a temporary file to store the decrytped asset, load it, and then delete it encrypted_filename = self._usd_path.replace(".usd", ".encrypted.usd") - decrypted_fd, usd_path = tempfile.mkstemp(os.path.basename(self._usd_path), dir=og.tempdir) + usd_path = 
self._usd_path.replace(".usd", f".{self.uuid}.usd") decrypt_file(encrypted_filename, usd_path) prim = add_asset_to_stage(asset_path=usd_path, prim_path=self._prim_path) if self._encrypted: - os.close(decrypted_fd) # On Windows, Isaac Sim won't let go of the file until the prim is removed, so we can't delete it. if os.name == "posix": os.remove(usd_path) diff --git a/omnigibson/prims/cloth_prim.py b/omnigibson/prims/cloth_prim.py index d0dc87d4b..921b6fcca 100644 --- a/omnigibson/prims/cloth_prim.py +++ b/omnigibson/prims/cloth_prim.py @@ -27,7 +27,7 @@ # Subsample cloth particle points to boost performance m.N_CLOTH_KEYPOINTS = 1000 -m.KEYPOINT_COVERAGE_THRESHOLD = 0.80 +m.KEYPOINT_COVERAGE_THRESHOLD = 0.75 m.N_CLOTH_KEYFACES = 500 @@ -111,7 +111,7 @@ def _post_load(self): max(min(true_aabb[1][1], keypoint_aabb[1][1]) - max(true_aabb[0][1], keypoint_aabb[0][1]), 0) * \ max(min(true_aabb[1][2], keypoint_aabb[1][2]) - max(true_aabb[0][2], keypoint_aabb[0][2]), 0) true_vol = np.product(true_aabb[1] - true_aabb[0]) - if overlap_vol / true_vol > m.KEYPOINT_COVERAGE_THRESHOLD: + if true_vol == 0.0 or overlap_vol / true_vol > m.KEYPOINT_COVERAGE_THRESHOLD: success = True break assert success, f"Did not adequately subsample keypoints for cloth {self.name}!" 
@@ -131,6 +131,18 @@ def _initialize(self): # Store the default position of the points in the local frame self._default_positions = np.array(self.get_attribute(attr="points")) + @property + def visual_aabb(self): + return self.aabb + + @property + def visual_aabb_extent(self): + return self.aabb_extent + + @property + def visual_aabb_center(self): + return self.aabb_center + @classproperty def cloth_system(cls): return get_system("cloth") diff --git a/omnigibson/prims/entity_prim.py b/omnigibson/prims/entity_prim.py index a3a8864a7..33713ebce 100644 --- a/omnigibson/prims/entity_prim.py +++ b/omnigibson/prims/entity_prim.py @@ -21,7 +21,7 @@ m = create_module_macros(module_path=__file__) # Default sleep threshold for all objects -- see https://docs.omniverse.nvidia.com/extensions/latest/ext_physics/simulation-control/physics-settings.html?highlight=sleep#sleeping -m.DEFAULT_SLEEP_THRESHOLD = 0.005 +m.DEFAULT_SLEEP_THRESHOLD = 0.001 class EntityPrim(XFormPrim): @@ -1415,6 +1415,7 @@ def create_attachment_point_link(self): link.visible = False # Set a very small mass link.mass = 1e-6 + link.density = 0.0 self._links[link_name] = link diff --git a/omnigibson/prims/rigid_prim.py b/omnigibson/prims/rigid_prim.py index 9094935db..184e4d2e6 100644 --- a/omnigibson/prims/rigid_prim.py +++ b/omnigibson/prims/rigid_prim.py @@ -465,16 +465,15 @@ def density(self): Returns: float: density of the rigid body in kg / m^3. 
""" - raw_usd_mass = self._rigid_prim_view.get_masses()[0] - # We first check if the raw usd mass is specified, since mass overrides density - # If it's specified, we infer density based on that value divided by volume - # Otherwise, we try to directly grab the raw usd density value, and if that value - # does not exist, we return 1000 since that is the canonical density assigned by omniverse - if raw_usd_mass != 0: - density = raw_usd_mass / self.volume + mass = self._rigid_prim_view.get_masses()[0] + # We first check if the mass is specified, since mass overrides density. If so, density = mass / volume. + # Otherwise, we try to directly grab the raw usd density value, and if that value does not exist, + # we return 1000 since that is the canonical density assigned by omniverse + if mass != 0.0: + density = mass / self.volume else: density = self._rigid_prim_view.get_densities()[0] - if density == 0: + if density == 0.0: density = 1000.0 return density diff --git a/omnigibson/scenes/scene_base.py b/omnigibson/scenes/scene_base.py index bec09ebf2..04aa2f0ac 100644 --- a/omnigibson/scenes/scene_base.py +++ b/omnigibson/scenes/scene_base.py @@ -8,12 +8,14 @@ from omnigibson.macros import create_module_macros, gm from omnigibson.prims.xform_prim import XFormPrim from omnigibson.prims.material_prim import MaterialPrim +from omnigibson.utils.constants import STRUCTURE_CATEGORIES from omnigibson.utils.python_utils import classproperty, Serializable, Registerable, Recreatable, \ create_object_from_init_info from omnigibson.utils.registry_utils import SerializableRegistry from omnigibson.utils.ui_utils import create_module_logger from omnigibson.utils.usd_utils import CollisionAPI from omnigibson.objects.object_base import BaseObject +from omnigibson.objects.dataset_object import DatasetObject from omnigibson.systems.system_base import SYSTEM_REGISTRY, clear_all_systems, get_system from omnigibson.objects.light_object import LightObject from omnigibson.robots.robot_base 
import m as robot_macros @@ -441,10 +443,9 @@ def add_object(self, obj, register=True, _is_call_from_simulator=False): # If this object is fixed and is NOT an agent, disable collisions between the fixed links of the fixed objects # This is to account for cases such as Tiago, which has a fixed base which is needed for its global base joints # We do this by adding the object to our tracked collision groups - structure_categories = {"walls", "floors", "ceilings"} if obj.fixed_base and obj.category != robot_macros.ROBOT_CATEGORY and not obj.visual_only: # TODO: Remove structure hotfix once asset collision meshes are fixed!! - if obj.category in structure_categories: + if obj.category in STRUCTURE_CATEGORIES: CollisionAPI.add_to_collision_group(col_group="structures", prim_path=obj.prim_path) else: for link in obj.links.values(): diff --git a/omnigibson/systems/macro_particle_system.py b/omnigibson/systems/macro_particle_system.py index 417a5be14..c707bf661 100644 --- a/omnigibson/systems/macro_particle_system.py +++ b/omnigibson/systems/macro_particle_system.py @@ -202,7 +202,9 @@ def process_particle_object(cls): """ # Update color if the particle object has any material color = np.ones(3) - if cls.particle_object.has_material(): + if cls.particle_object.material.is_glass: + color = cls.particle_object.material.glass_color + else: diffuse_texture = cls.particle_object.material.diffuse_texture color = plt.imread(diffuse_texture).mean(axis=(0, 1)) if diffuse_texture else cls.particle_object.material.diffuse_color_constant cls._color = color @@ -355,6 +357,7 @@ class MacroVisualParticleSystem(MacroParticleSystem, VisualParticleSystem): _SAMPLING_BIMODAL_MEAN_FRACTION = 0.9 _SAMPLING_BIMODAL_STDEV_FRACTION = 0.2 _SAMPLING_MAX_ATTEMPTS = 20 + _SAMPLING_HIT_PROPORTION = 0.4 @classmethod def initialize(cls): @@ -571,6 +574,7 @@ def generate_group_particles_on_object(cls, group, max_samples, min_samples_for_ aabb_offset=cls._SAMPLING_AABB_OFFSET, 
max_sampling_attempts=cls._SAMPLING_MAX_ATTEMPTS, refuse_downwards=True, + hit_proportion=cls._SAMPLING_HIT_PROPORTION, ) # Use sampled points @@ -693,8 +697,10 @@ def _modify_batch_particles_position_orientation(cls, particles, positions=None, Modifies all @particles' positions and orientations with @positions and @orientations Args: - particles (Iterable of str): Names of particles to compute batched position orientation for - local (bool): Whether to set particles' poses in local frame or not + particles (Iterable of str): Names of particles to modify + positions (None or (n, 3)-array): New positions to set for the particles + orientations (None or (n, 4)-array): New orientations to set for the particles + local (bool): Whether to modify particles' poses in local frame or not Returns: 2-tuple: @@ -712,7 +718,7 @@ def _modify_batch_particles_position_orientation(cls, particles, positions=None, lens = np.array([len(particles), len(positions), len(orientations)]) assert lens.min() == lens.max(), "Got mismatched particles, positions, and orientations!" 
- particle_local_poses_batch = np.zeros((cls.n_particles, 4, 4)) + particle_local_poses_batch = np.zeros((n_particles, 4, 4)) particle_local_poses_batch[:, -1, -1] = 1.0 particle_local_poses_batch[:, :3, 3] = positions particle_local_poses_batch[:, :3, :3] = T.quat2mat(orientations) @@ -720,7 +726,7 @@ def _modify_batch_particles_position_orientation(cls, particles, positions=None, if not local: # Iterate over all particles and compute link tfs programmatically, then batch the matrix transform link_tfs = dict() - link_tfs_batch = np.zeros((cls.n_particles, 4, 4)) + link_tfs_batch = np.zeros((n_particles, 4, 4)) for i, name in enumerate(particles): obj = cls._particles_info[name]["obj"] is_cloth = cls._is_cloth_obj(obj=obj) diff --git a/omnigibson/systems/micro_particle_system.py b/omnigibson/systems/micro_particle_system.py index 1753240f8..524409cd2 100644 --- a/omnigibson/systems/micro_particle_system.py +++ b/omnigibson/systems/micro_particle_system.py @@ -87,9 +87,6 @@ def __init__(self, prim_path, name, idn): # Store inputs self._idn = idn - # Values loaded at runtime - self._n_particles = None - # Run super method directly super().__init__(prim_path=prim_path, name=name) @@ -99,13 +96,6 @@ def _load(self): # We raise an error, this should NOT be created from scratch raise NotImplementedError("PhysxPointInstancer should NOT be loaded via this class! 
Should be created before.") - def _post_load(self): - # Run super - super()._post_load() - - # Store how many particles we have - self._n_particles = len(self.particle_positions) - def remove(self): super().remove() self._parent_prim.remove() @@ -140,8 +130,6 @@ def add_particles( scales = np.ones((n_new_particles, 3)) * np.ones((1, 3)) if scales is None else scales prototype_indices = np.zeros(n_new_particles, dtype=int) if prototype_indices is None else prototype_indices - # Update the number of particles and update the values - self._n_particles += n_new_particles self.particle_positions = np.vstack([self.particle_positions, positions]) self.particle_velocities = np.vstack([self.particle_velocities, velocities]) self.particle_orientations = np.vstack([self.particle_orientations, orientations]) @@ -157,8 +145,6 @@ def remove_particles(self, idxs): instancer """ if len(idxs) > 0: - # Update the number of particles - self._n_particles -= len(idxs) # Remove all requested indices and write to all the internal data arrays self.particle_positions = np.delete(self.particle_positions, idxs, axis=0) self.particle_velocities = np.delete(self.particle_velocities, idxs, axis=0) @@ -167,7 +153,7 @@ def remove_particles(self, idxs): self.particle_prototype_ids = np.delete(self.particle_prototype_ids, idxs, axis=0) def remove_all_particles(self): - self.remove_particles(idxs=np.arange(self._n_particles)) + self.remove_particles(idxs=np.arange(self.n_particles)) @property def n_particles(self): @@ -175,7 +161,7 @@ def n_particles(self): Returns: int: Number of particles owned by this instancer """ - return self._n_particles + return len(self.particle_positions) @property def idn(self): @@ -219,8 +205,6 @@ def particle_positions(self, pos): np.array: (N, 3) numpy array, where each of the N particles' desired positions are expressed in (x,y,z) cartesian coordinates relative to this instancer's parent prim """ - assert pos.shape[0] == self._n_particles, \ - f"Got mismatch in 
particle setting size: {pos.shape[0]}, vs. number of particles {self._n_particles}!" self.set_attribute(attr="positions", val=lazy.pxr.Vt.Vec3fArray.FromNumpy(pos.astype(float))) @property @@ -230,9 +214,7 @@ def particle_orientations(self): np.array: (N, 4) numpy array, where each of the N particles' orientations are expressed in (x,y,z,w) quaternion coordinates relative to this instancer's parent prim """ - oris = self.get_attribute(attr="orientations") - assert oris is not None, f"Orientations should be set for particle instancer {self.name}!" - return np.array(oris) + return np.array(self.get_attribute(attr="orientations")) @particle_orientations.setter def particle_orientations(self, quat): @@ -243,11 +225,11 @@ def particle_orientations(self, quat): np.array: (N, 4) numpy array, where each of the N particles' desired orientations are expressed in (x,y,z,w) quaternion coordinates relative to this instancer's parent prim """ - assert quat.shape[0] == self._n_particles, \ - f"Got mismatch in particle setting size: {quat.shape[0]}, vs. number of particles {self._n_particles}!" + assert quat.shape[0] == self.n_particles, \ + f"Got mismatch in particle setting size: {quat.shape[0]}, vs. number of particles {self.n_particles}!" # If the number of particles is nonzero, swap w position, since Quath takes (w,x,y,z) quat = quat.astype(float) - if self._n_particles > 0: + if self.n_particles > 0: quat = quat[:, [3, 0, 1, 2]] self.set_attribute(attr="orientations", val=lazy.pxr.Vt.QuathArray.FromNumpy(quat)) @@ -269,10 +251,9 @@ def particle_velocities(self, vel): np.array: (N, 3) numpy array, where each of the N particles' desired velocities are expressed in (x,y,z) cartesian coordinates relative to this instancer's parent prim """ - assert vel.shape[0] == self._n_particles, \ - f"Got mismatch in particle setting size: {vel.shape[0]}, vs. number of particles {self._n_particles}!" 
- vel = vel.astype(float) - self.set_attribute(attr="velocities", val=lazy.pxr.Vt.Vec3fArray.FromNumpy(vel)) + assert vel.shape[0] == self.n_particles, \ + f"Got mismatch in particle setting size: {vel.shape[0]}, vs. number of particles {self.n_particles}!" + self.set_attribute(attr="velocities", val=lazy.pxr.Vt.Vec3fArray.FromNumpy(vel.astype(float))) @property def particle_scales(self): @@ -281,8 +262,7 @@ def particle_scales(self): np.array: (N, 3) numpy array, where each of the N particles' scales are expressed in (x,y,z) cartesian coordinates relative to this instancer's parent prim """ - scales = self.get_attribute(attr="scales") - return np.ones((self._n_particles, 3)) if scales is None else np.array(scales) + return np.array(self.get_attribute(attr="scales")) @particle_scales.setter def particle_scales(self, scales): @@ -293,10 +273,9 @@ def particle_scales(self, scales): np.array: (N, 3) numpy array, where each of the N particles' desired scales are expressed in (x,y,z) cartesian coordinates relative to this instancer's parent prim """ - assert scales.shape[0] == self._n_particles, \ - f"Got mismatch in particle setting size: {scales.shape[0]}, vs. number of particles {self._n_particles}!" - scales = scales.astype(float) - self.set_attribute(attr="scales", val=lazy.pxr.Vt.Vec3fArray.FromNumpy(scales)) + assert scales.shape[0] == self.n_particles, \ + f"Got mismatch in particle setting size: {scales.shape[0]}, vs. number of particles {self.n_particles}!" 
+ self.set_attribute(attr="scales", val=lazy.pxr.Vt.Vec3fArray.FromNumpy(scales.astype(float))) @property def particle_prototype_ids(self): @@ -305,8 +284,7 @@ def particle_prototype_ids(self): np.array: (N,) numpy array, where each of the N particles' prototype_id (i.e.: which prototype is being used for that particle) """ - ids = self.get_attribute(attr="protoIndices") - return np.zeros(self.n_particles) if ids is None else np.array(ids) + return np.array(self.get_attribute(attr="protoIndices")) @particle_prototype_ids.setter def particle_prototype_ids(self, prototype_ids): @@ -317,21 +295,21 @@ def particle_prototype_ids(self, prototype_ids): np.array: (N,) numpy array, where each of the N particles' desired prototype_id (i.e.: which prototype is being used for that particle) """ - assert prototype_ids.shape[0] == self._n_particles, \ - f"Got mismatch in particle setting size: {prototype_ids.shape[0]}, vs. number of particles {self._n_particles}!" - self.set_attribute(attr="protoIndices", val=prototype_ids) + assert prototype_ids.shape[0] == self.n_particles, \ + f"Got mismatch in particle setting size: {prototype_ids.shape[0]}, vs. number of particles {self.n_particles}!" + self.set_attribute(attr="protoIndices", val=prototype_ids.astype(np.int32)) @property def state_size(self): # idn (1), particle_group (1), n_particles (1), and the corresponding states for each particle # N * (pos (3) + vel (3) + orn (4) + scale (3) + prototype_id (1)) - return 3 + self._n_particles * 14 + return 3 + self.n_particles * 14 def _dump_state(self): return dict( idn=self._idn, particle_group=self.particle_group, - n_particles=self._n_particles, + n_particles=self.n_particles, particle_positions=self.particle_positions, particle_velocities=self.particle_velocities, particle_orientations=self.particle_orientations, @@ -347,7 +325,6 @@ def _load_state(self, state): f"instancer when loading state! Should be: {self.particle_group}, got: {state['particle_group']}." 
# Set values appropriately - self._n_particles = state["n_particles"] keys = ("particle_positions", "particle_velocities", "particle_orientations", "particle_scales", "particle_prototype_ids") for key in keys: # Make sure the loaded state is a numpy array, it could have been accidentally casted into a list during @@ -637,6 +614,17 @@ def self_collision(cls): # Default is True return True + @classmethod + def _sync_particle_prototype_ids(cls): + """ + Synchronizes the particle prototype IDs across all particle instancers when sim is stopped. + Omniverse has a bug where all particle positions, orientations, velocities, and scales are correctly reset + when sim is stopped, but not the prototype IDs. This function is a workaround for that. + """ + if cls.initialized: + for instancer in cls.particle_instancers.values(): + instancer.particle_prototype_ids = np.zeros(instancer.n_particles, dtype=np.int32) + @classmethod def initialize(cls): # Create prototype before running super! @@ -648,6 +636,9 @@ def initialize(cls): # Initialize class variables that are mutable so they don't get overridden by children classes cls.particle_instancers = dict() + # TODO: remove this hack once omniverse fixes the issue (now we assume prototype IDs are all 0 always) + og.sim.add_callback_on_stop(name=f"{cls.name}_sync_particle_prototype_ids", callback=cls._sync_particle_prototype_ids) + @classmethod def _clear(cls): for prototype in cls.particle_prototypes: @@ -1594,7 +1585,7 @@ def clothify_mesh_prim(cls, mesh_prim, remesh=True, particle_distance=None): particle_distance = cls.particle_contact_offset * 2 / 1.5 if particle_distance is None else particle_distance # Repetitively re-mesh at lower resolution until we have a mesh that has less than MAX_CLOTH_PARTICLES vertices - for _ in range(3): + for _ in range(10): ms = pymeshlab.MeshSet() ms.load_new_mesh(tmp_fpath) diff --git a/omnigibson/systems/system_base.py b/omnigibson/systems/system_base.py index 9e6d01abe..80d64fa18 100644 --- 
a/omnigibson/systems/system_base.py +++ b/omnigibson/systems/system_base.py @@ -973,7 +973,7 @@ def generate_particles_from_link( # Also potentially sub-sample if we're past our limit if len(particle_positions) > max_samples: particle_positions = particle_positions[ - np.random.choice(len(particle_positions), size=(max_samples,), replace=False)] + np.random.choice(len(particle_positions), size=(int(max_samples),), replace=False)] return cls.generate_particles( positions=particle_positions, diff --git a/omnigibson/utils/bddl_utils.py b/omnigibson/utils/bddl_utils.py index b2640c878..47572afad 100644 --- a/omnigibson/utils/bddl_utils.py +++ b/omnigibson/utils/bddl_utils.py @@ -1050,6 +1050,11 @@ def _sample_initial_conditions_final(self): state = og.sim.dump_state(serialized=False) break if success: + # After the final round of kinematic sampling, we assign in_rooms to newly imported objects + if group == "kinematic": + parent = self._object_scope[condition.body[1]] + entity.in_rooms = parent.in_rooms.copy() + # Can terminate immediately break @@ -1072,8 +1077,13 @@ def _sample_initial_conditions_final(self): og.sim.load_state(state, serialized=False) og.sim.step_physics() if not success: + # Update object registry because we just assigned in_rooms to newly imported objects + og.sim.scene.object_registry.update(keys=["in_rooms"]) return f"Sampleable object conditions failed: {condition.STATE_NAME} {condition.body}" + # Update object registry because we just assigned in_rooms to newly imported objects + og.sim.scene.object_registry.update(keys=["in_rooms"]) + # One more sim step to make sure the object states are propagated correctly # E.g. 
after sampling Filled.set_value(True), Filled.get_value() will become True only after one step og.sim.step() diff --git a/omnigibson/utils/constants.py b/omnigibson/utils/constants.py index f743121d6..90d394428 100644 --- a/omnigibson/utils/constants.py +++ b/omnigibson/utils/constants.py @@ -51,11 +51,8 @@ class ParticleModifyCondition(str, Enum): GRAVITY = "gravity" -# Valid omni characters for specifying strings, e.g. prim paths -VALID_OMNI_CHARS = frozenset({'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '_', '/'}) - # Structure categories that need to always be loaded for stability purposes -STRUCTURE_CATEGORIES = frozenset({"floors", "walls", "ceilings", "lawn", "driveway", "fence"}) +STRUCTURE_CATEGORIES = frozenset({"floors", "walls", "ceilings", "lawn", "driveway", "fence", "roof", "background"}) # Joint friction magic values to assign to objects based on their category diff --git a/omnigibson/utils/sampling_utils.py b/omnigibson/utils/sampling_utils.py index 21646cf79..679dde7f2 100644 --- a/omnigibson/utils/sampling_utils.py +++ b/omnigibson/utils/sampling_utils.py @@ -29,7 +29,7 @@ m.DEFAULT_CUBOID_BOTTOM_PADDING = 0.005 # We will cast an additional parallel ray for each additional this much distance. 
m.DEFAULT_NEW_RAY_PER_HORIZONTAL_DISTANCE = 0.1 -m.DEFAULT_HIT_PROPORTION = 0.6 +m.DEFAULT_HIT_PROPORTION = 0.8 def fit_plane(points, refusal_log): diff --git a/omnigibson/utils/usd_utils.py b/omnigibson/utils/usd_utils.py index 0ae7690e3..550e9e89b 100644 --- a/omnigibson/utils/usd_utils.py +++ b/omnigibson/utils/usd_utils.py @@ -638,8 +638,9 @@ def mesh_prim_mesh_to_trimesh_mesh(mesh_prim, include_normals=True, include_texc if include_normals: kwargs["vertex_normals"] = np.array(mesh_prim.GetAttribute("normals").Get()) - if include_texcoord: - kwargs["visual"] = trimesh.visual.TextureVisuals(uv=np.array(mesh_prim.GetAttribute("primvars:st").Get())) + raw_texture = mesh_prim.GetAttribute("primvars:st").Get() + if raw_texture is not None: + kwargs["visual"] = trimesh.visual.TextureVisuals(uv=np.array(raw_texture)) return trimesh.Trimesh(**kwargs) diff --git a/tests/test_robot_states.py b/tests/test_robot_states.py index cf5197d19..fe812f92e 100644 --- a/tests/test_robot_states.py +++ b/tests/test_robot_states.py @@ -62,12 +62,12 @@ def camera_pose_test(flatcache): robot_to_sensor_mat = pose2mat(relative_pose_transform(sensor_world_pos, sensor_world_ori, robot_world_pos, robot_world_ori)) - sensor_world_pos_gt = np.array([150.16513062, 150.0, 101.3833847]) + sensor_world_pos_gt = np.array([150.16513062, 150., 101.38952637]) sensor_world_ori_gt = np.array([-0.29444987, 0.29444981, 0.64288363, -0.64288352]) assert np.allclose(sensor_world_pos, sensor_world_pos_gt, atol=1e-3) assert np.allclose(sensor_world_ori, sensor_world_ori_gt, atol=1e-3) - + # Now, we want to move the robot and check if the sensor pose has been updated old_camera_local_pose = vision_sensor.get_local_pose()